Commit 845a6246 authored by Grzegorz Bizon

Merge branch 'zj-multiple-artifacts-ee' into 'master'

Multiple artifacts ee

See merge request gitlab-org/gitlab-ee!3276
parents 32ad854e 31dcfcc5
@@ -42,8 +42,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
   end
 
   def raw
-    path = Gitlab::Ci::Build::Artifacts::Path
-      .new(params[:path])
+    path = Gitlab::Ci::Build::Artifacts::Path.new(params[:path])
 
     send_artifacts_entry(build, path)
   end
@@ -72,7 +71,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
   end
 
   def validate_artifacts!
-    render_404 unless build && build.artifacts?
+    render_404 unless build&.artifacts?
   end
 
   def build
...
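The safe-navigation rewrite in validate_artifacts! is behaviour-preserving; a plain-Ruby sketch of the equivalence (no GitLab internals assumed):

    build = nil
    build && build.artifacts?  # => nil (falsy), so render_404 fires
    build&.artifacts?          # => nil (falsy), same outcome, less noise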
 module Ci
   class Build < CommitStatus
+    prepend ArtifactMigratable
     include TokenAuthenticatable
     include AfterCommitQueue
     include Presentable
@@ -13,9 +14,14 @@ module Ci
     has_many :sourced_pipelines, class_name: Ci::Sources::Pipeline, foreign_key: :source_job_id
 
     has_many :deployments, as: :deployable
     has_one :last_deployment, -> { order('deployments.id DESC') }, as: :deployable, class_name: 'Deployment'
     has_many :trace_sections, class_name: 'Ci::BuildTraceSection'
+    has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+    has_one :job_artifacts_archive, -> { where(file_type: Ci::JobArtifact.file_types[:archive]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
+    has_one :job_artifacts_metadata, -> { where(file_type: Ci::JobArtifact.file_types[:metadata]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
 
     # The "environment" field for builds is a String, and is the unexpanded name
     def persisted_environment
       @persisted_environment ||= Environment.find_by(
@@ -34,16 +40,19 @@ module Ci
     scope :unstarted, ->() { where(runner_id: nil) }
     scope :ignore_failures, ->() { where(allow_failure: false) }
-    scope :with_artifacts, ->() { where.not(artifacts_file: [nil, '']) }
+    scope :with_artifacts, ->() do
+      where('(artifacts_file IS NOT NULL AND artifacts_file <> ?) OR EXISTS (?)',
+        '', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id'))
+    end
     scope :with_artifacts_not_expired, ->() { with_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
     scope :with_expired_artifacts, ->() { with_artifacts.where('artifacts_expire_at < ?', Time.now) }
-    scope :with_artifacts_stored_locally, ->() { with_artifacts.where(artifacts_file_store: [nil, ArtifactUploader::LOCAL_STORE]) }
+    scope :with_artifacts_stored_locally, ->() { with_artifacts.where(artifacts_file_store: [nil, LegacyArtifactUploader::LOCAL_STORE]) }
     scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
     scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + [:manual]) }
     scope :ref_protected, -> { where(protected: true) }
 
-    mount_uploader :artifacts_file, ArtifactUploader
-    mount_uploader :artifacts_metadata, ArtifactUploader
+    mount_uploader :legacy_artifacts_file, LegacyArtifactUploader, mount_on: :artifacts_file
+    mount_uploader :legacy_artifacts_metadata, LegacyArtifactUploader, mount_on: :artifacts_metadata
 
     acts_as_taggable
@@ -330,18 +339,10 @@ module Ci
       project.running_or_pending_build_count(force: true)
     end
 
-    def artifacts?
-      !artifacts_expired? && artifacts_file.exists?
-    end
-
     def browsable_artifacts?
       artifacts_metadata?
     end
 
-    def artifacts_metadata?
-      artifacts? && artifacts_metadata.exists?
-    end
-
     def artifacts_metadata_entry(path, **options)
       artifacts_metadata.use_file do |metadata_path|
         metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
@@ -396,6 +397,7 @@ module Ci
     def keep_artifacts!
       self.update(artifacts_expire_at: nil)
+      self.job_artifacts.update_all(expire_at: nil)
     end
 
     def coverage_regex
@@ -483,11 +485,7 @@ module Ci
     private
 
     def update_artifacts_size
-      self.artifacts_size = if artifacts_file.exists?
-                              artifacts_file.size
-                            else
-                              nil
-                            end
+      self.artifacts_size = legacy_artifacts_file&.size
     end
 
     def erase_trace!
...
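The mount_on: option in the uploader rename is what avoids a column migration: the Ruby accessor changes, but serialization stays on the existing column. A minimal sketch using the names from the diff above (return values illustrative):

    build.legacy_artifacts_file            # => LegacyArtifactUploader instance
    build.read_attribute(:artifacts_file)  # => "ci_build_artifacts.zip" (same backing column)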
module Ci
class JobArtifact < ActiveRecord::Base
extend Gitlab::Ci::Model
belongs_to :project
belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id
before_save :set_size, if: :file_changed?
mount_uploader :file, JobArtifactUploader
enum file_type: {
archive: 1,
metadata: 2
}
def self.artifacts_size_for(project)
self.where(project: project).sum(:size)
end
def set_size
self.size = file.size
end
def expire_in
expire_at - Time.now if expire_at
end
def expire_in=(value)
self.expire_at =
if value
ChronicDuration.parse(value)&.seconds&.from_now
end
end
end
end
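The expire_in pair round-trips a human-readable duration through ChronicDuration into the expire_at timestamp; a usage sketch (values illustrative; the behaviour for '0' matches the specs further down, where ChronicDuration.parse returns nil):

    artifact.expire_in = '7 days'   # expire_at ≈ 7.days.from_now (604800 seconds)
    artifact.expire_in              # => seconds remaining until expire_at
    artifact.expire_in = nil        # clears expire_at
    artifact.expire_in = '0'        # parse returns nil, so expire_at is cleared too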
# Adapter class to unify the interface between mounted uploaders and the
# Ci::Artifact model
# Meant to be prepended so the interface can stay the same
module ArtifactMigratable
def artifacts_file
job_artifacts_archive&.file || legacy_artifacts_file
end
def artifacts_metadata
job_artifacts_metadata&.file || legacy_artifacts_metadata
end
def artifacts?
!artifacts_expired? && artifacts_file.exists?
end
def artifacts_metadata?
artifacts? && artifacts_metadata.exists?
end
def artifacts_file_changed?
job_artifacts_archive&.file_changed? || attribute_changed?(:artifacts_file)
end
def remove_artifacts_file!
if job_artifacts_archive
job_artifacts_archive.destroy
else
remove_legacy_artifacts_file!
end
end
def remove_artifacts_metadata!
if job_artifacts_metadata
job_artifacts_metadata.destroy
else
remove_legacy_artifacts_metadata!
end
end
def artifacts_size
read_attribute(:artifacts_size).to_i +
job_artifacts_archive&.size.to_i + job_artifacts_metadata&.size.to_i
end
end
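Because Ci::Build prepends this module, its methods sit in front of the class in the ancestor chain and shadow the legacy accessors without needing super. A standalone sketch of the pattern (toy names, not GitLab code):

    module Migratable
      def file
        new_file || legacy_file   # prefer new storage, fall back to legacy
      end
    end

    class Record
      prepend Migratable
      def file; legacy_file; end        # shadowed: Migratable#file wins
      def new_file; @new_file; end
      def legacy_file; 'old.zip'; end
    end

    Record.new.file # => 'old.zip' until @new_file is assigned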
@@ -39,7 +39,9 @@ class ProjectStatistics < ActiveRecord::Base
   end
 
   def update_build_artifacts_size
-    self.build_artifacts_size = project.builds.sum(:artifacts_size)
+    self.build_artifacts_size =
+      project.builds.sum(:artifacts_size) +
+      Ci::JobArtifact.artifacts_size_for(self)
   end
 
   def update_storage_size
...
@@ -18,7 +18,7 @@ module Projects
       @status.enqueue!
       @status.run!
 
-      raise 'missing pages artifacts' unless build.artifacts_file?
+      raise 'missing pages artifacts' unless build.artifacts?
       raise 'pages are outdated' unless latest?
 
       # Create temporary directory in which we will extract the artifacts
...
class JobArtifactUploader < ObjectStoreUploader
storage_options Gitlab.config.artifacts
def self.local_store_path
Gitlab.config.artifacts.path
end
def self.artifacts_upload_path
File.join(self.local_store_path, 'tmp/uploads/')
end
def size
return super if model.size.nil?
model.size
end
private
def default_path
creation_date = model.created_at.utc.strftime('%Y_%m_%d')
File.join(disk_hash[0..1], disk_hash[2..3], disk_hash,
creation_date, model.job_id.to_s, model.id.to_s)
end
def disk_hash
@disk_hash ||= Digest::SHA2.hexdigest(model.project_id.to_s)
end
end
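default_path shards artifacts on disk by a SHA-256 of the project id, two two-character prefixes deep. A sketch for project_id 1 (the digest below is the real SHA-256 of the string "1"; date, job and artifact ids are placeholders):

    Digest::SHA2.hexdigest('1')
    # => "6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b"
    # default_path => "6b/86/6b86b273…875b4b/2017_11_30/<job_id>/<artifact_id>"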
-class ArtifactUploader < ObjectStoreUploader
+class LegacyArtifactUploader < ObjectStoreUploader
   storage_options Gitlab.config.artifacts
 
   def self.local_store_path
@@ -12,6 +12,6 @@ class ArtifactUploader < ObjectStoreUploader
   private
 
   def default_path
-    File.join(subject.created_at.utc.strftime('%Y_%m'), subject.project_id.to_s, subject.id.to_s)
+    File.join(model.created_at.utc.strftime('%Y_%m'), model.project_id.to_s, model.id.to_s)
   end
 end
@@ -7,12 +7,12 @@ class LfsObjectUploader < ObjectStoreUploader
   end
 
   def filename
-    subject.oid[4..-1]
+    model.oid[4..-1]
   end
 
   private
 
   def default_path
-    "#{subject.oid[0, 2]}/#{subject.oid[2, 2]}"
+    "#{model.oid[0, 2]}/#{model.oid[2, 2]}"
   end
 end
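For comparison, the LFS layout the renamed model accessor serves is unchanged; for a hypothetical oid the two methods decompose it as:

    # oid          = "91eff75a...c897" (64 hex chars, placeholder)
    # default_path => "91/ef"          (oid[0, 2] / oid[2, 2])
    # filename     => oid[4..-1]       (everything after the two prefix pairs)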
@@ -764,6 +764,7 @@ test:
     aws_secret_access_key: AWS_SECRET_ACCESS_KEY
     region: eu-central-1
   artifacts:
+    path: tmp/tests/artifacts
     enabled: true
     # The location where build artifacts are stored (default: shared/artifacts).
     # path: shared/artifacts
...
@@ -131,11 +131,11 @@ class Gitlab::Seeder::Pipelines
     return unless %w[build test].include?(build.stage)
 
     artifacts_cache_file(artifacts_archive_path) do |file|
-      build.artifacts_file = file
+      build.job_artifacts.build(project: build.project, file_type: :archive, file: file)
     end
 
     artifacts_cache_file(artifacts_metadata_path) do |file|
-      build.artifacts_metadata = file
+      build.job_artifacts.build(project: build.project, file_type: :metadata, file: file)
     end
   end
...
class CreateJobArtifacts < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
create_table :ci_job_artifacts do |t|
t.belongs_to :project, null: false, index: true, foreign_key: { on_delete: :cascade }
t.integer :job_id, null: false
t.integer :file_type, null: false
t.integer :size, limit: 8
t.datetime_with_timezone :created_at, null: false
t.datetime_with_timezone :updated_at, null: false
t.datetime_with_timezone :expire_at
t.string :file
t.foreign_key :ci_builds, column: :job_id, on_delete: :cascade
t.index [:job_id, :file_type], unique: true
end
end
end
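A side effect worth calling out: the unique [job_id, file_type] index caps each job at one artifact per type, which is what makes the has_one :job_artifacts_archive / :job_artifacts_metadata associations on Ci::Build sound. A sketch using the factory added later in this MR:

    build = create(:ci_build)
    create(:ci_job_artifact, :archive, job: build)
    create(:ci_job_artifact, :archive, job: build)
    # => ActiveRecord::RecordNotUnique — there is no model-level validation;
    #    the database index is the enforcement point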
class AddFileStoreJobArtifacts < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
disable_ddl_transaction!
DOWNTIME = false
def up
add_column(:ci_job_artifacts, :file_store, :integer)
end
def down
remove_column(:ci_job_artifacts, :file_store)
end
end
@@ -396,6 +396,21 @@ ActiveRecord::Schema.define(version: 20171124165823) do
   add_index "ci_group_variables", ["group_id", "key"], name: "index_ci_group_variables_on_group_id_and_key", unique: true, using: :btree
 
+  create_table "ci_job_artifacts", force: :cascade do |t|
+    t.integer "project_id", null: false
+    t.integer "job_id", null: false
+    t.integer "file_type", null: false
+    t.integer "file_store"
+    t.integer "size", limit: 8
+    t.datetime_with_timezone "created_at", null: false
+    t.datetime_with_timezone "updated_at", null: false
+    t.datetime_with_timezone "expire_at"
+    t.string "file"
+  end
+
+  add_index "ci_job_artifacts", ["job_id", "file_type"], name: "index_ci_job_artifacts_on_job_id_and_file_type", unique: true, using: :btree
+  add_index "ci_job_artifacts", ["project_id"], name: "index_ci_job_artifacts_on_project_id", using: :btree
+
   create_table "ci_pipeline_schedule_variables", force: :cascade do |t|
     t.string "key", null: false
     t.text "value"
@@ -2404,6 +2419,8 @@ ActiveRecord::Schema.define(version: 20171124165823) do
   add_foreign_key "ci_builds", "ci_stages", column: "stage_id", name: "fk_3a9eaa254d", on_delete: :cascade
   add_foreign_key "ci_builds", "projects", name: "fk_befce0568a", on_delete: :cascade
   add_foreign_key "ci_group_variables", "namespaces", column: "group_id", name: "fk_33ae4d58d8", on_delete: :cascade
+  add_foreign_key "ci_job_artifacts", "ci_builds", column: "job_id", on_delete: :cascade
+  add_foreign_key "ci_job_artifacts", "projects", on_delete: :cascade
   add_foreign_key "ci_pipeline_schedule_variables", "ci_pipeline_schedules", column: "pipeline_schedule_id", name: "fk_41c35fda51", on_delete: :cascade
   add_foreign_key "ci_pipeline_schedules", "projects", name: "fk_8ead60fcc4", on_delete: :cascade
   add_foreign_key "ci_pipeline_schedules", "users", column: "owner_id", name: "fk_9ea99f58d2", on_delete: :nullify
...
@@ -38,13 +38,6 @@ class ObjectStoreUploader < CarrierWave::Uploader::Base
     end
   end
 
-  attr_reader :subject, :field
-
-  def initialize(subject, field)
-    @subject = subject
-    @field = field
-  end
-
   def file_storage?
     storage.is_a?(CarrierWave::Storage::File)
   end
@@ -54,7 +47,7 @@ class ObjectStoreUploader < CarrierWave::Uploader::Base
   end
 
   def real_object_store
-    subject.public_send(:"#{field}_store") # rubocop:disable GitlabSecurity/PublicSend
+    model.public_send(store_serialization_column) # rubocop:disable GitlabSecurity/PublicSend
   end
 
   def object_store
@@ -63,7 +56,7 @@ class ObjectStoreUploader < CarrierWave::Uploader::Base
   def object_store=(value)
     @storage = nil
-    subject.public_send(:"#{field}_store=", value) # rubocop:disable GitlabSecurity/PublicSend
+    model.public_send(:"#{store_serialization_column}=", value) # rubocop:disable GitlabSecurity/PublicSend
   end
 
   def store_dir
@@ -111,7 +104,7 @@ class ObjectStoreUploader < CarrierWave::Uploader::Base
       # since we change storage store the new storage
       # in case of failure delete new file
       begin
-        subject.save!
+        model.save!
       rescue => e
         new_file.delete
         self.object_store = old_store
@@ -125,7 +118,7 @@ class ObjectStoreUploader < CarrierWave::Uploader::Base
   def schedule_migration_to_object_storage(new_file)
     if self.class.object_store_enabled? && licensed? && file_storage?
-      ObjectStorageUploadWorker.perform_async(self.class.name, subject.class.name, field, subject.id)
+      ObjectStorageUploadWorker.perform_async(self.class.name, model.class.name, mounted_as, model.id)
     end
   end
@@ -194,6 +187,14 @@ class ObjectStoreUploader < CarrierWave::Uploader::Base
     raise NotImplementedError
   end
 
+  def serialization_column
+    model.class.uploader_option(mounted_as, :mount_on) || mounted_as
+  end
+
+  def store_serialization_column
+    :"#{serialization_column}_store"
+  end
+
   def storage
     @storage ||=
       if object_store == REMOTE_STORE
...
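With the hand-rolled subject/field pair gone, the store column must be derived from CarrierWave's own mount metadata. Tracing the resolution for the legacy mount (values follow from the Ci::Build diff above):

    # mounted_as                                          => :legacy_artifacts_file
    # model.class.uploader_option(mounted_as, :mount_on)  => :artifacts_file
    # serialization_column                                => :artifacts_file
    # store_serialization_column                          => :artifacts_file_store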
@@ -44,8 +44,8 @@ class Spinach::Features::ProjectPages < Spinach::FeatureSteps
       project: @project,
       pipeline: pipeline,
       ref: 'HEAD',
-      artifacts_file: fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip'),
-      artifacts_metadata: fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip.meta')
+      legacy_artifacts_file: fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip'),
+      legacy_artifacts_metadata: fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip.meta')
     )
 
     result = ::Projects::UpdatePagesService.new(@project, build).execute
...
@@ -37,13 +37,13 @@ module SharedBuilds
   step 'recent build has artifacts available' do
     artifacts = Rails.root + 'spec/fixtures/ci_build_artifacts.zip'
     archive = fixture_file_upload(artifacts, 'application/zip')
-    @build.update_attributes(artifacts_file: archive)
+    @build.update_attributes(legacy_artifacts_file: archive)
   end
 
   step 'recent build has artifacts metadata available' do
     metadata = Rails.root + 'spec/fixtures/ci_build_artifacts_metadata.gz'
     gzip = fixture_file_upload(metadata, 'application/x-gzip')
-    @build.update_attributes(artifacts_metadata: gzip)
+    @build.update_attributes(legacy_artifacts_metadata: gzip)
   end
 
   step 'recent build has a build trace' do
...
@@ -1178,13 +1178,9 @@ module API
         expose :type, :url, :username, :password
       end
 
-      class ArtifactFile < Grape::Entity
-        expose :filename, :size
-      end
-
       class Dependency < Grape::Entity
         expose :id, :name, :token
-        expose :artifacts_file, using: ArtifactFile, if: ->(job, _) { job.artifacts? }
+        expose :artifacts_file, using: JobArtifactFile, if: ->(job, _) { job.artifacts? }
       end
 
       class Response < Grape::Entity
...
@@ -215,18 +215,20 @@ module API
           job = authenticate_job!
           forbidden!('Job is not running!') unless job.running?
 
-          artifacts_upload_path = ArtifactUploader.artifacts_upload_path
+          artifacts_upload_path = JobArtifactUploader.artifacts_upload_path
           artifacts = uploaded_file(:file, artifacts_upload_path)
           metadata = uploaded_file(:metadata, artifacts_upload_path)
 
           bad_request!('Missing artifacts file!') unless artifacts
           file_to_large! unless artifacts.size < max_artifacts_size
 
-          job.artifacts_file = artifacts
-          job.artifacts_metadata = metadata
-          job.artifacts_expire_in = params['expire_in'] ||
+          expire_in = params['expire_in'] ||
             Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
+          job.build_job_artifacts_archive(project: job.project, file_type: :archive, file: artifacts, expire_in: expire_in)
+          job.build_job_artifacts_metadata(project: job.project, file_type: :metadata, file: metadata, expire_in: expire_in) if metadata
+          job.artifacts_expire_in = expire_in
 
           if job.save
             present job, with: Entities::JobRequest::Response
           else
...
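build_job_artifacts_archive and build_job_artifacts_metadata are the standard builders Rails generates for the has_one associations added to Ci::Build, so the artifact rows are persisted by the job's own save. A sketch of the mechanics (plain Rails autosave behaviour, nothing GitLab-specific):

    job.build_job_artifacts_archive(file_type: :archive, file: artifacts)
    job.save  # also INSERTs the built ci_job_artifacts row in the same
              # transaction, so job and artifacts succeed or fail together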
@@ -3,7 +3,7 @@ require 'backup/files'
 module Backup
   class Artifacts < Files
     def initialize
-      super('artifacts', ArtifactUploader.local_store_path)
+      super('artifacts', LegacyArtifactUploader.local_store_path)
     end
 
     def create_files_dir
...
@@ -58,7 +58,7 @@ module Gitlab
     end
 
     def artifact_upload_ok
-      { TempPath: ArtifactUploader.artifacts_upload_path }
+      { TempPath: JobArtifactUploader.artifacts_upload_path }
     end
 
     def send_git_blob(repository, blob)
...
@@ -12,8 +12,8 @@ namespace :gitlab do
         .with_artifacts_stored_locally
         .find_each(batch_size: 10) do |build|
           begin
-            build.artifacts_file.migrate!(ArtifactUploader::REMOTE_STORE)
-            build.artifacts_metadata.migrate!(ArtifactUploader::REMOTE_STORE)
+            build.artifacts_file.migrate!(ObjectStoreUploader::REMOTE_STORE)
+            build.artifacts_metadata.migrate!(ObjectStoreUploader::REMOTE_STORE)
             logger.info("Transferred artifacts of #{build.id} of #{build.artifacts_size} to object storage")
           rescue => e
...
 require 'spec_helper'
 
 describe Projects::ArtifactsController do
-  set(:user) { create(:user) }
+  let(:user) { project.owner }
   set(:project) { create(:project, :repository, :public) }
 
   let(:pipeline) do
@@ -15,8 +15,6 @@ describe Projects::ArtifactsController do
   let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
 
   before do
-    project.add_developer(user)
-
     sign_in(user)
   end
@@ -117,12 +115,12 @@ describe Projects::ArtifactsController do
     context 'when the file exists' do
       let(:path) { 'ci_artifacts.txt' }
-      let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline, artifacts_file_store: store, artifacts_metadata_store: store) }
 
       shared_examples 'a valid file' do
         it 'serves the file using workhorse' do
           subject
 
+          expect(response).to have_gitlab_http_status(200)
           expect(send_data).to start_with('artifacts-entry:')
 
           expect(params.keys).to eq(%w(Archive Entry))
@@ -146,8 +144,9 @@ describe Projects::ArtifactsController do
       context 'when using local file storage' do
         it_behaves_like 'a valid file' do
+          let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
           let(:store) { ObjectStoreUploader::LOCAL_STORE }
-          let(:archive_path) { ArtifactUploader.local_store_path }
+          let(:archive_path) { JobArtifactUploader.local_store_path }
         end
       end
@@ -157,6 +156,8 @@ describe Projects::ArtifactsController do
         end
 
         it_behaves_like 'a valid file' do
+          let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
+          let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
           let(:store) { ObjectStoreUploader::REMOTE_STORE }
           let(:archive_path) { 'https://' }
         end
...
@@ -154,39 +154,27 @@ FactoryGirl.define do
       runner factory: :ci_runner
     end
 
-    trait :artifacts do
+    trait :legacy_artifacts do
       after(:create) do |build, _|
-        build.artifacts_file =
-          fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'),
-                              'application/zip')
-
-        build.artifacts_metadata =
-          fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'),
-                              'application/x-gzip')
-
-        build.save!
+        build.update!(
+          legacy_artifacts_file: fixture_file_upload(
+            Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip'),
+          legacy_artifacts_metadata: fixture_file_upload(
+            Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'), 'application/x-gzip')
+        )
       end
     end
 
-    trait :remote_store do
-      artifacts_file_store ArtifactUploader::REMOTE_STORE
-      artifacts_metadata_store ArtifactUploader::REMOTE_STORE
-    end
-
-    trait :artifacts_expired do
-      after(:create) do |build, _|
-        build.artifacts_file =
-          fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'),
-                              'application/zip')
-
-        build.artifacts_metadata =
-          fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'),
-                              'application/x-gzip')
-
-        build.artifacts_expire_at = 1.minute.ago
-
-        build.save!
-      end
+    trait :artifacts do
+      after(:create) do |build|
+        create(:ci_job_artifact, :archive, job: build)
+        create(:ci_job_artifact, :metadata, job: build)
+        build.reload
+      end
+    end
+
+    trait :expired do
+      artifacts_expire_at 1.minute.ago
     end
 
     trait :with_commit do
...
include ActionDispatch::TestProcess
FactoryGirl.define do
factory :ci_job_artifact, class: Ci::JobArtifact do
job factory: :ci_build
file_type :archive
trait :remote_store do
file_store JobArtifactUploader::REMOTE_STORE
end
after :build do |artifact|
artifact.project ||= artifact.job.project
end
trait :archive do
file_type :archive
after(:build) do |artifact, _|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
end
end
trait :metadata do
file_type :metadata
after(:build) do |artifact, _|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'), 'application/x-gzip')
end
end
end
end
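Typical usage of this factory in the specs that follow (a sketch; note the unique [job_id, file_type] index allows only one artifact per type per build):

    build = create(:ci_build)
    create(:ci_job_artifact, :archive, job: build)    # zip fixture, file_type :archive
    create(:ci_job_artifact, :metadata, job: build)   # gzip fixture, file_type :metadata

    # remote-store variant on a fresh build:
    create(:ci_job_artifact, :archive, :remote_store, job: create(:ci_build))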
@@ -89,7 +89,7 @@ describe 'Commits'
   context 'Download artifacts' do
     before do
-      build.update_attributes(artifacts_file: artifacts_file)
+      build.update_attributes(legacy_artifacts_file: artifacts_file)
     end
 
     it do
@@ -146,7 +146,7 @@ describe 'Commits'
   context "when logged as reporter" do
     before do
       project.team << [user, :reporter]
-      build.update_attributes(artifacts_file: artifacts_file)
+      build.update_attributes(legacy_artifacts_file: artifacts_file)
       visit pipeline_path(pipeline)
     end
@@ -168,7 +168,7 @@ describe 'Commits'
       project.update(
         visibility_level: Gitlab::VisibilityLevel::INTERNAL,
         public_builds: false)
-      build.update_attributes(artifacts_file: artifacts_file)
+      build.update_attributes(legacy_artifacts_file: artifacts_file)
       visit pipeline_path(pipeline)
     end
...
@@ -28,14 +28,14 @@ feature 'Mini Pipeline Graph', :js do
   let(:artifacts_file2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png') }
 
   before do
-    create(:ci_build, pipeline: pipeline, artifacts_file: artifacts_file1)
+    create(:ci_build, pipeline: pipeline, legacy_artifacts_file: artifacts_file1)
     create(:ci_build, pipeline: pipeline, when: 'manual')
   end
 
   it 'avoids repeated database queries' do
     before = ActiveRecord::QueryRecorder.new { visit_merge_request(:json) }
 
-    create(:ci_build, pipeline: pipeline, artifacts_file: artifacts_file2)
+    create(:ci_build, pipeline: pipeline, legacy_artifacts_file: artifacts_file2)
     create(:ci_build, pipeline: pipeline, when: 'manual')
 
     after = ActiveRecord::QueryRecorder.new { visit_merge_request(:json) }
...
@@ -187,7 +187,7 @@ feature 'Jobs' do
   context "Download artifacts" do
     before do
-      job.update_attributes(artifacts_file: artifacts_file)
+      job.update_attributes(legacy_artifacts_file: artifacts_file)
       visit project_job_path(project, job)
     end
@@ -198,7 +198,7 @@ feature 'Jobs' do
   context 'Artifacts expire date' do
     before do
-      job.update_attributes(artifacts_file: artifacts_file,
+      job.update_attributes(legacy_artifacts_file: artifacts_file,
                             artifacts_expire_at: expire_at)
 
       visit project_job_path(project, job)
@@ -433,14 +433,14 @@ feature 'Jobs' do
   describe "GET /:project/jobs/:id/download" do
     before do
-      job.update_attributes(artifacts_file: artifacts_file)
+      job.update_attributes(legacy_artifacts_file: artifacts_file)
       visit project_job_path(project, job)
       click_link 'Download'
     end
 
     context "Build from other project" do
       before do
-        job2.update_attributes(artifacts_file: artifacts_file)
+        job2.update_attributes(legacy_artifacts_file: artifacts_file)
        visit download_project_job_artifacts_path(project, job2)
      end
...
@@ -304,7 +304,7 @@ describe 'Pipelines', :js do
   context 'with artifacts expired' do
     let!(:with_artifacts_expired) do
-      create(:ci_build, :artifacts_expired, :success,
+      create(:ci_build, :expired, :success,
              pipeline: pipeline,
              name: 'rspec',
              stage: 'test')
...
@@ -16,20 +16,22 @@ describe MigrateOldArtifacts do
   end
 
   context 'with migratable data' do
-    let(:project1) { create(:project, ci_id: 2) }
-    let(:project2) { create(:project, ci_id: 3) }
-    let(:project3) { create(:project) }
+    set(:project1) { create(:project, ci_id: 2) }
+    set(:project2) { create(:project, ci_id: 3) }
+    set(:project3) { create(:project) }
 
-    let(:pipeline1) { create(:ci_empty_pipeline, project: project1) }
-    let(:pipeline2) { create(:ci_empty_pipeline, project: project2) }
-    let(:pipeline3) { create(:ci_empty_pipeline, project: project3) }
+    set(:pipeline1) { create(:ci_empty_pipeline, project: project1) }
+    set(:pipeline2) { create(:ci_empty_pipeline, project: project2) }
+    set(:pipeline3) { create(:ci_empty_pipeline, project: project3) }
 
     let!(:build_with_legacy_artifacts) { create(:ci_build, pipeline: pipeline1) }
     let!(:build_without_artifacts) { create(:ci_build, pipeline: pipeline1) }
-    let!(:build2) { create(:ci_build, :artifacts, pipeline: pipeline2) }
-    let!(:build3) { create(:ci_build, :artifacts, pipeline: pipeline3) }
+    let!(:build2) { create(:ci_build, pipeline: pipeline2) }
+    let!(:build3) { create(:ci_build, pipeline: pipeline3) }
 
     before do
+      setup_builds(build2, build3)
+
       store_artifacts_in_legacy_path(build_with_legacy_artifacts)
     end
@@ -38,7 +40,7 @@ describe MigrateOldArtifacts do
     end
 
     it "legacy artifacts are set" do
-      expect(build_with_legacy_artifacts.artifacts_file_identifier).not_to be_nil
+      expect(build_with_legacy_artifacts.legacy_artifacts_file_identifier).not_to be_nil
    end
 
     describe '#min_id' do
@@ -113,5 +115,24 @@ describe MigrateOldArtifacts do
                 build.project.ci_id.to_s,
                 build.id.to_s)
     end
+
+    def new_legacy_path(build)
+      File.join(directory,
+                build.created_at.utc.strftime('%Y_%m'),
+                build.project_id.to_s,
+                build.id.to_s)
+    end
+
+    def setup_builds(*builds)
+      builds.each do |build|
+        FileUtils.mkdir_p(new_legacy_path(build))
+
+        build.update_columns(
+          artifacts_file: 'ci_build_artifacts.zip',
+          artifacts_metadata: 'ci_build_artifacts_metadata.gz')
+
+        build.reload
+      end
+    end
   end
 end
@@ -24,6 +24,8 @@ describe Ci::Build do
   it { is_expected.to respond_to(:has_trace?) }
   it { is_expected.to respond_to(:trace) }
 
+  it { is_expected.to be_a(ArtifactMigratable) }
+
   describe 'callbacks' do
     context 'when running after_create callback' do
       it 'triggers asynchronous build hooks worker' do
@@ -131,44 +133,65 @@ describe Ci::Build do
   end
 
   describe '#artifacts?' do
+    context 'when new artifacts are used' do
+      let(:build) { create(:ci_build, :artifacts) }
+
       subject { build.artifacts? }
 
       context 'artifacts archive does not exist' do
-        before do
-          build.update_attributes(artifacts_file: nil)
-        end
+        let(:build) { create(:ci_build) }
 
         it { is_expected.to be_falsy }
       end
 
       context 'artifacts archive exists' do
+        let(:build) { create(:ci_build, :artifacts) }
+
         it { is_expected.to be_truthy }
 
         context 'is expired' do
-          before do
-            build.update(artifacts_expire_at: Time.now - 7.days)
-          end
+          let!(:build) { create(:ci_build, :artifacts, :expired) }
 
           it { is_expected.to be_falsy }
         end
 
         context 'is not expired' do
-          before do
-            build.update(artifacts_expire_at: Time.now + 7.days)
-          end
-
          it { is_expected.to be_truthy }
        end
      end
    end
+
+    context 'when legacy artifacts are used' do
+      let(:build) { create(:ci_build, :legacy_artifacts) }
+
+      subject { build.artifacts? }
+
+      context 'artifacts archive does not exist' do
+        let(:build) { create(:ci_build) }
+
+        it { is_expected.to be_falsy }
+      end
+
+      context 'artifacts archive exists' do
+        it { is_expected.to be_truthy }
+
+        context 'is expired' do
+          let!(:build) { create(:ci_build, :legacy_artifacts, :expired) }
+
+          it { is_expected.to be_falsy }
+        end
+
+        context 'is not expired' do
+          it { is_expected.to be_truthy }
+        end
+      end
+    end
   end
 
   describe '#browsable_artifacts?' do
     subject { build.browsable_artifacts? }
 
     context 'artifacts metadata does not exist' do
       before do
-        build.update_attributes(artifacts_metadata: nil)
+        build.update_attributes(legacy_artifacts_metadata: nil)
       end
 
       it { is_expected.to be_falsy }
@@ -631,11 +654,13 @@ describe Ci::Build do
     describe '#erasable?' do
       subject { build.erasable? }
       it { is_expected.to eq false }
     end
   end
 
   context 'build is erasable' do
+    context 'new artifacts' do
       let!(:build) { create(:ci_build, :trace, :success, :artifacts) }
 
       describe '#erase' do
@@ -702,6 +727,77 @@
       end
     end
context 'old artifacts' do
context 'build is erasable' do
context 'new artifacts' do
let!(:build) { create(:ci_build, :trace, :success, :legacy_artifacts) }
describe '#erase' do
before do
build.erase(erased_by: user)
end
context 'erased by user' do
let!(:user) { create(:user, username: 'eraser') }
include_examples 'erasable'
it 'records user who erased a build' do
expect(build.erased_by).to eq user
end
end
context 'erased by system' do
let(:user) { nil }
include_examples 'erasable'
it 'does not set user who erased a build' do
expect(build.erased_by).to be_nil
end
end
end
describe '#erasable?' do
subject { build.erasable? }
it { is_expected.to be_truthy }
end
describe '#erased?' do
let!(:build) { create(:ci_build, :trace, :success, :legacy_artifacts) }
subject { build.erased? }
context 'job has not been erased' do
it { is_expected.to be_falsey }
end
context 'job has been erased' do
before do
build.erase
end
it { is_expected.to be_truthy }
end
end
context 'metadata and build trace are not available' do
let!(:build) { create(:ci_build, :success, :legacy_artifacts) }
before do
build.remove_artifacts_metadata!
end
describe '#erase' do
it 'does not raise error' do
expect { build.erase }.not_to raise_error
end
end
end
end
end
end
end
   describe '#first_pending' do
     let!(:first) { create(:ci_build, pipeline: pipeline, status: 'pending', created_at: Date.yesterday) }
     let!(:second) { create(:ci_build, pipeline: pipeline, status: 'pending') }
@@ -931,11 +1027,23 @@ describe Ci::Build do
   describe '#keep_artifacts!' do
     let(:build) { create(:ci_build, artifacts_expire_at: Time.now + 7.days) }
 
+    subject { build.keep_artifacts! }
+
     it 'to reset expire_at' do
-      build.keep_artifacts!
+      subject
 
       expect(build.artifacts_expire_at).to be_nil
     end
+
+    context 'when having artifacts files' do
+      let!(:artifact) { create(:ci_job_artifact, job: build, expire_in: '7 days') }
+
+      it 'to reset dependent objects' do
+        subject
+
+        expect(artifact.reload.expire_at).to be_nil
+      end
+    end
   end
 
   describe '#merge_request' do
...
require 'spec_helper'
describe Ci::JobArtifact do
set(:artifact) { create(:ci_job_artifact, :archive) }
describe "Associations" do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:job) }
end
it { is_expected.to respond_to(:file) }
it { is_expected.to respond_to(:created_at) }
it { is_expected.to respond_to(:updated_at) }
describe '#set_size' do
it 'sets the size' do
expect(artifact.size).to eq(106365)
end
end
describe '#file' do
subject { artifact.file }
context 'the uploader api' do
it { is_expected.to respond_to(:store_dir) }
it { is_expected.to respond_to(:cache_dir) }
it { is_expected.to respond_to(:work_dir) }
end
end
describe '#expire_in' do
subject { artifact.expire_in }
it { is_expected.to be_nil }
context 'when expire_at is specified' do
let(:expire_at) { Time.now + 7.days }
before do
artifact.expire_at = expire_at
end
it { is_expected.to be_within(5).of(expire_at - Time.now) }
end
end
describe '#expire_in=' do
subject { artifact.expire_in }
it 'when assigning valid duration' do
artifact.expire_in = '7 days'
is_expected.to be_within(10).of(7.days.to_i)
end
it 'when assigning invalid duration' do
expect { artifact.expire_in = '7 elephants' }.to raise_error(ChronicDuration::DurationParseError)
is_expected.to be_nil
end
it 'when resetting value' do
artifact.expire_in = nil
is_expected.to be_nil
end
it 'when setting to 0' do
artifact.expire_in = '0'
is_expected.to be_nil
end
end
end
@@ -133,15 +133,29 @@ describe ProjectStatistics do
   describe '#update_build_artifacts_size' do
     let!(:pipeline) { create(:ci_pipeline, project: project) }
-    let!(:build1) { create(:ci_build, pipeline: pipeline, artifacts_size: 45.megabytes) }
-    let!(:build2) { create(:ci_build, pipeline: pipeline, artifacts_size: 56.megabytes) }
 
-    before do
-      statistics.update_build_artifacts_size
-    end
+    context 'when new job artifacts are calculated' do
+      let(:ci_build) { create(:ci_build, pipeline: pipeline) }
+
+      before do
+        create(:ci_job_artifact, :archive, project: pipeline.project, job: ci_build)
+      end
+
+      it "stores the size of related build artifacts" do
+        statistics.update_build_artifacts_size
+
+        expect(statistics.build_artifacts_size).to be(106365)
+      end
+    end
 
-    it "stores the size of related build artifacts" do
-      expect(statistics.build_artifacts_size).to eq 101.megabytes
+    context 'when legacy artifacts are used' do
+      let!(:ci_build) { create(:ci_build, pipeline: pipeline, artifacts_size: 10.megabytes) }
+
+      it "stores the size of related build artifacts" do
+        statistics.update_build_artifacts_size
+
+        expect(statistics.build_artifacts_size).to eq(10.megabytes)
+      end
     end
   end
...
@@ -299,13 +299,16 @@ describe API::Jobs do
   context 'normal authentication' do
     before do
       stub_artifacts_object_storage
-      get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
     end
 
     context 'job with artifacts' do
       context 'when artifacts are stored locally' do
         let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
 
+        before do
+          get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+        end
+
         context 'authorized user' do
           it_behaves_like 'downloads artifact'
         end
@@ -320,7 +323,14 @@ describe API::Jobs do
       end
 
       context 'when artifacts are stored remotely' do
-        let(:job) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
+        let(:job) { create(:ci_build, pipeline: pipeline) }
+        let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
+
+        before do
+          job.reload
+
+          get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+        end
 
         it 'returns location redirect' do
           expect(response).to have_gitlab_http_status(302)
@@ -328,6 +338,8 @@ describe API::Jobs do
       end
 
       it 'does not return job artifacts if not uploaded' do
+        get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+
        expect(response).to have_gitlab_http_status(404)
      end
    end
@@ -440,7 +452,14 @@ describe API::Jobs do
     end
 
     context 'when artifacts are stored remotely' do
-      let(:job) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline, user: api_user) }
+      let(:job) { create(:ci_build, pipeline: pipeline, user: api_user) }
+      let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
+
+      before do
+        job.reload
+
+        get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+      end
 
       it 'returns location redirect' do
         expect(response).to have_gitlab_http_status(302)
...
@@ -948,7 +948,7 @@ describe API::Runner do
       context 'when artifacts are being stored inside of tmp path' do
         before do
           # by configuring this path we allow to pass temp file from any path
-          allow(ArtifactUploader).to receive(:artifacts_upload_path).and_return('/')
+          allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return('/')
         end
 
         context 'when job has been erased' do
@@ -988,15 +988,6 @@ describe API::Runner do
           it_behaves_like 'successful artifacts upload'
         end
 
-        context 'when updates artifact' do
-          before do
-            upload_artifacts(file_upload2, headers_with_token)
-            upload_artifacts(file_upload, headers_with_token)
-          end
-
-          it_behaves_like 'successful artifacts upload'
-        end
-
         context 'when using runners token' do
           it 'responds with forbidden' do
             upload_artifacts(file_upload, headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.project.runners_token))
@@ -1109,7 +1100,7 @@ describe API::Runner do
             expect(response).to have_gitlab_http_status(201)
             expect(stored_artifacts_file.original_filename).to eq(artifacts.original_filename)
             expect(stored_metadata_file.original_filename).to eq(metadata.original_filename)
-            expect(stored_artifacts_size).to eq(71759)
+            expect(stored_artifacts_size).to eq(72821)
           end
         end
@@ -1134,7 +1125,7 @@ describe API::Runner do
           # by configuring this path we allow to pass file from @tmpdir only
           # but all temporary files are stored in system tmp directory
           @tmpdir = Dir.mktmpdir
-          allow(ArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir)
+          allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir)
         end
 
         after do
@@ -1161,13 +1152,16 @@ describe API::Runner do
     describe 'GET /api/v4/jobs/:id/artifacts' do
       let(:token) { job.token }
 
+      context 'when job has artifacts' do
+        let(:job) { create(:ci_build) }
+        let(:store) { JobArtifactUploader::LOCAL_STORE }
+
         before do
+          create(:ci_job_artifact, :archive, file_store: store, job: job)
+
           download_artifact
         end
 
-      context 'when job has artifacts' do
-        let(:job) { create(:ci_build, :artifacts) }
-
         context 'when using job token' do
           context 'when artifacts are stored locally' do
             let(:download_headers) do
@@ -1182,7 +1176,8 @@ describe API::Runner do
           end
 
           context 'when artifacts are stored remotely' do
-            let(:job) { create(:ci_build, :artifacts, :remote_store) }
+            let(:store) { JobArtifactUploader::REMOTE_STORE }
+            let!(:job) { create(:ci_build) }
 
             it 'download artifacts' do
               expect(response).to have_gitlab_http_status(302)
@@ -1201,12 +1196,16 @@ describe API::Runner do
       context 'when job does not has artifacts' do
         it 'responds with not found' do
+          download_artifact
+
           expect(response).to have_gitlab_http_status(404)
         end
       end
 
       def download_artifact(params = {}, request_headers = headers)
         params = params.merge(token: token)
+        job.reload
+
         get api("/jobs/#{job.id}/artifacts"), params, request_headers
       end
     end
...
@@ -3,7 +3,7 @@ require 'spec_helper'
 describe API::V3::Builds do
   set(:user) { create(:user) }
   let(:api_user) { user }
-  let!(:project) { create(:project, :repository, creator: user, public_builds: false) }
+  set(:project) { create(:project, :repository, creator: user, public_builds: false) }
   let!(:developer) { create(:project_member, :developer, user: user, project: project) }
   let(:reporter) { create(:project_member, :reporter, project: project) }
   let(:guest) { create(:project_member, :guest, project: project) }
@@ -215,9 +215,12 @@ describe API::V3::Builds do
     end
 
     context 'when artifacts are stored remotely' do
-      let(:build) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
+      let(:build) { create(:ci_build, pipeline: pipeline) }
+      let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: build) }
 
       it 'returns location redirect' do
+        get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
+
         expect(response).to have_gitlab_http_status(302)
       end
     end
@@ -309,7 +312,14 @@ describe API::V3::Builds do
     end
 
     context 'when artifacts are stored remotely' do
-      let(:build) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
+      let(:build) { create(:ci_build, pipeline: pipeline) }
+      let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: build) }
+
+      before do
+        build.reload
+
+        get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
+      end
 
       it 'returns location redirect' do
         expect(response).to have_gitlab_http_status(302)
...
 require 'spec_helper'
 
 describe PipelineSerializer do
-  let(:user) { create(:user) }
+  set(:user) { create(:user) }
 
   let(:serializer) do
     described_class.new(current_user: user)
@@ -117,7 +117,8 @@ describe PipelineSerializer do
     shared_examples 'no N+1 queries' do
       it 'verifies number of queries', :request_store do
         recorded = ActiveRecord::QueryRecorder.new { subject }
-        expect(recorded.count).to be_within(1).of(61)
+
+        expect(recorded.count).to be_within(1).of(40)
         expect(recorded.cached_count).to eq(0)
       end
     end
...
@@ -17,7 +17,7 @@ describe Ci::RetryBuildService do
     %i[id status user token coverage trace runner artifacts_expire_at
        artifacts_file artifacts_metadata artifacts_size created_at
        updated_at started_at finished_at queued_at erased_by
-       erased_at auto_canceled_by].freeze
+       erased_at auto_canceled_by job_artifacts job_artifacts_archive job_artifacts_metadata].freeze
 
   IGNORE_ACCESSORS =
     %i[type lock_version target_url base_tags trace_sections
@@ -35,7 +35,7 @@ describe Ci::RetryBuildService do
   end
 
   let(:build) do
-    create(:ci_build, :failed, :artifacts_expired, :erased,
+    create(:ci_build, :failed, :artifacts, :expired, :erased,
            :queued, :coverage, :tags, :allowed_to_fail, :on_tag,
            :triggered, :trace, :teardown_environment,
            description: 'my-job', stage: 'test', pipeline: pipeline,
...
require "spec_helper" require "spec_helper"
describe Projects::UpdatePagesService do describe Projects::UpdatePagesService do
let(:project) { create(:project, :repository) } set(:project) { create(:project, :repository) }
let(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) } set(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
let(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') } set(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') }
let(:invalid_file) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png') } let(:invalid_file) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png') }
let(:extension) { 'zip' }
let(:file) { fixture_file_upload(Rails.root + "spec/fixtures/pages.#{extension}") }
let(:empty_file) { fixture_file_upload(Rails.root + "spec/fixtures/pages_empty.#{extension}") }
let(:metadata) do
filename = Rails.root + "spec/fixtures/pages.#{extension}.meta"
fixture_file_upload(filename) if File.exist?(filename)
end
subject { described_class.new(project, build) } subject { described_class.new(project, build) }
...@@ -12,18 +20,85 @@ describe Projects::UpdatePagesService do ...@@ -12,18 +20,85 @@ describe Projects::UpdatePagesService do
project.remove_pages project.remove_pages
end end
context 'legacy artifacts' do
%w(tar.gz zip).each do |format| %w(tar.gz zip).each do |format|
let(:extension) { format }
context "for valid #{format}" do context "for valid #{format}" do
let(:file) { fixture_file_upload(Rails.root + "spec/fixtures/pages.#{format}") } before do
let(:empty_file) { fixture_file_upload(Rails.root + "spec/fixtures/pages_empty.#{format}") } build.update_attributes(legacy_artifacts_file: file)
let(:metadata) do build.update_attributes(legacy_artifacts_metadata: metadata)
filename = Rails.root + "spec/fixtures/pages.#{format}.meta"
fixture_file_upload(filename) if File.exist?(filename)
end end
describe 'pages artifacts' do
context 'with expiry date' do
before do before do
build.update_attributes(artifacts_file: file) build.artifacts_expire_in = "2 days"
build.update_attributes(artifacts_metadata: metadata) end
it "doesn't delete artifacts" do
expect(execute).to eq(:success)
expect(build.reload.artifacts?).to eq(true)
end
end
context 'without expiry date' do
it "does delete artifacts" do
expect(execute).to eq(:success)
expect(build.reload.artifacts?).to eq(false)
end
end
end
it 'succeeds' do
expect(project.pages_deployed?).to be_falsey
expect(execute).to eq(:success)
expect(project.pages_deployed?).to be_truthy
# Check that all expected files are extracted
%w[index.html zero .hidden/file].each do |filename|
expect(File.exist?(File.join(project.public_pages_path, filename))).to be_truthy
end
end
it 'limits pages size' do
stub_application_setting(max_pages_size: 1)
expect(execute).not_to eq(:success)
end
it 'removes pages after destroy' do
expect(PagesWorker).to receive(:perform_in)
expect(project.pages_deployed?).to be_falsey
expect(execute).to eq(:success)
expect(project.pages_deployed?).to be_truthy
project.destroy
expect(project.pages_deployed?).to be_falsey
end
it 'fails if sha on branch is not latest' do
build.update_attributes(ref: 'feature')
expect(execute).not_to eq(:success)
end
it 'fails for empty file fails' do
build.update_attributes(legacy_artifacts_file: empty_file)
expect(execute).not_to eq(:success)
end
end
end
end
context 'for new artifacts' do
context "for a valid job" do
before do
create(:ci_job_artifact, file: file, job: build)
create(:ci_job_artifact, file_type: :metadata, file: metadata, job: build)
build.reload
end end
       describe 'pages artifacts' do
@@ -35,7 +110,7 @@ describe Projects::UpdatePagesService do
         it "doesn't delete artifacts" do
           expect(execute).to eq(:success)

-          expect(build.reload.artifacts_file?).to eq(true)
+          expect(build.artifacts?).to eq(true)
         end
       end

@@ -43,7 +118,7 @@ describe Projects::UpdatePagesService do
         it "does delete artifacts" do
           expect(execute).to eq(:success)

-          expect(build.reload.artifacts_file?).to eq(false)
+          expect(build.reload.artifacts?).to eq(false)
         end
       end
     end

@@ -74,13 +149,14 @@ describe Projects::UpdatePagesService do
       end

       it 'fails if sha on branch is not latest' do
-        pipeline.update_attributes(sha: 'old_sha')
-        build.update_attributes(artifacts_file: file)
+        build.update_attributes(ref: 'feature')

         expect(execute).not_to eq(:success)
       end

       it 'fails for empty file fails' do
-        build.update_attributes(artifacts_file: empty_file)
+        build.job_artifacts_archive.update_attributes(file: empty_file)

         expect(execute).not_to eq(:success)
       end
     end

@@ -97,7 +173,7 @@ describe Projects::UpdatePagesService do
     end

     it 'fails for invalid archive' do
-      build.update_attributes(artifacts_file: invalid_file)
+      build.update_attributes(legacy_artifacts_file: invalid_file)

       expect(execute).not_to eq(:success)
     end

@@ -108,8 +184,8 @@ describe Projects::UpdatePagesService do
         file = fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip')
         metafile = fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip.meta')

-        build.update_attributes(artifacts_file: file)
-        build.update_attributes(artifacts_metadata: metafile)
+        build.update_attributes(legacy_artifacts_file: file)
+        build.update_attributes(legacy_artifacts_metadata: metafile)

         allow(build).to receive(:artifacts_metadata_entry)
           .and_return(metadata)
...
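Both halves of the spec above feed the same Pages deployment service; only the artifact bookkeeping differs. Side by side, the two setups the diff uses (factory calls taken from the hunks):

# Legacy model: archive and metadata are columns on ci_builds.
build.update_attributes(legacy_artifacts_file: file)
build.update_attributes(legacy_artifacts_metadata: metadata)

# New model: one ci_job_artifacts row per file, typed by file_type.
create(:ci_job_artifact, file: file, job: build)                           # archive
create(:ci_job_artifact, file_type: :metadata, file: metadata, job: build)

# ArtifactMigratable, prepended to Ci::Build, presumably lets
# build.artifacts? answer for whichever representation is present.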
@@ -18,7 +18,7 @@ module StubConfiguration
   def stub_artifacts_object_storage(**params)
     stub_object_storage_uploader(config: Gitlab.config.artifacts.object_store,
-                                 uploader: ArtifactUploader,
+                                 uploader: JobArtifactUploader,
                                  remote_directory: 'artifacts',
                                  **params)
   end
...
@@ -121,6 +121,7 @@ module TestEnv
     FileUtils.mkdir_p(repos_path)
     FileUtils.mkdir_p(backup_path)
     FileUtils.mkdir_p(pages_path)
+    FileUtils.mkdir_p(artifacts_path)
   end

   def clean_gitlab_test_path
@@ -234,6 +235,10 @@ module TestEnv
     Gitlab.config.pages.path
   end

+  def artifacts_path
+    Gitlab.config.artifacts.path
+  end
+
   # When no cached assets exist, manually hit the root path to create them
   #
   # Otherwise they'd be created by the first test, often timing out and
...
 require 'rake_helper'

 describe 'gitlab:artifacts namespace rake task' do
-  before :all do
+  before(:context) do
     Rake.application.rake_require 'tasks/gitlab/artifacts'
   end

-  describe 'migrate' do
-    let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store, artifacts_metadata_store: store) }
+  let(:object_storage_enabled) { false }
+
+  before do
+    stub_artifacts_object_storage(enabled: object_storage_enabled)
+  end

   subject { run_rake_task('gitlab:artifacts:migrate') }

+  context 'legacy artifacts' do
+    describe 'migrate' do
+      let!(:build) { create(:ci_build, :legacy_artifacts, artifacts_file_store: store, artifacts_metadata_store: store) }
+
       context 'when local storage is used' do
         let(:store) { ObjectStoreUploader::LOCAL_STORE }

         context 'and job does not have file store defined' do
-          before do
-            stub_artifacts_object_storage
-            job.update(artifacts_file_store: nil)
-          end
+          let(:object_storage_enabled) { true }
+          let(:store) { nil }

           it "migrates file to remote storage" do
             subject

-            expect(job.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
-            expect(job.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+            expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+            expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
           end
         end

         context 'and remote storage is defined' do
-          before do
-            stub_artifacts_object_storage
-            job
-          end
+          let(:object_storage_enabled) { true }

           it "migrates file to remote storage" do
             subject

-            expect(job.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
-            expect(job.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+            expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+            expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
           end
         end

         context 'and remote storage is not defined' do
-          before do
-            job
-          end
-
           it "fails to migrate to remote storage" do
             subject

-            expect(job.reload.artifacts_file_store).to eq(ObjectStoreUploader::LOCAL_STORE)
-            expect(job.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::LOCAL_STORE)
+            expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::LOCAL_STORE)
+            expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::LOCAL_STORE)
           end
         end
       end

       context 'when remote storage is used' do
+        let(:object_storage_enabled) { true }
         let(:store) { ObjectStoreUploader::REMOTE_STORE }

-        before do
-          stub_artifacts_object_storage
-          job
-        end
-
         it "file stays on remote storage" do
           subject

-          expect(job.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
-          expect(job.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+          expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+          expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
         end
       end
     end
+  end
+
+  context 'job artifacts' do
+    let!(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
+
+    context 'when local storage is used' do
+      let(:store) { ObjectStoreUploader::LOCAL_STORE }
+
+      context 'and job does not have file store defined' do
+        let(:object_storage_enabled) { true }
+        let(:store) { nil }
+
+        it "migrates file to remote storage" do
+          subject
+
+          expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+        end
+      end
+
+      context 'and remote storage is defined' do
+        let(:object_storage_enabled) { true }
+
+        it "migrates file to remote storage" do
+          subject
+
+          expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+        end
+      end
+
+      context 'and remote storage is not defined' do
+        it "fails to migrate to remote storage" do
+          subject
+
+          expect(artifact.reload.file_store).to eq(ObjectStoreUploader::LOCAL_STORE)
+        end
+      end
+    end
+
+    context 'when remote storage is used' do
+      let(:object_storage_enabled) { true }
+      let(:store) { ObjectStoreUploader::REMOTE_STORE }
+
+      it "file stays on remote storage" do
+        subject
+
+        expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+      end
+    end
+  end
 end
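The task under test is not part of this diff, but the expectations constrain its shape: pick up artifacts still on local storage and push them to the remote store. A hedged sketch of a migrate task along those lines (the loop body and task wiring are assumptions, not the diff's code):

namespace :gitlab do
  namespace :artifacts do
    desc 'Migrate job artifacts to object storage'
    task migrate: :environment do
      # Sketch: move every artifact still held locally to the remote store.
      # migrate! is the uploader method exercised in the specs below.
      Ci::JobArtifact.where(file_store: ObjectStoreUploader::LOCAL_STORE).find_each do |artifact|
        artifact.file.migrate!(ObjectStoreUploader::REMOTE_STORE)
      end
    end
  end
end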
...
require 'spec_helper'

describe JobArtifactUploader do
  let(:store) { described_class::LOCAL_STORE }
  let(:job_artifact) { create(:ci_job_artifact, file_store: store) }
  let(:uploader) { described_class.new(job_artifact, :file) }
  let(:local_path) { Gitlab.config.artifacts.path }

  describe '#store_dir' do
    subject { uploader.store_dir }

    let(:path) { "#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/#{job_artifact.project_id}/#{job_artifact.id}" }

    context 'when using local storage' do
      it { is_expected.to start_with(local_path) }
      it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
      it { is_expected.to end_with(path) }
    end

    context 'when using remote storage' do
      let(:store) { described_class::REMOTE_STORE }

      before do
        stub_artifacts_object_storage
      end

      it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
      it { is_expected.to end_with(path) }
    end
  end

  describe '#cache_dir' do
    subject { uploader.cache_dir }

    it { is_expected.to start_with(local_path) }
    it { is_expected.to end_with('/tmp/cache') }
  end

  describe '#work_dir' do
    subject { uploader.work_dir }

    it { is_expected.to start_with(local_path) }
    it { is_expected.to end_with('/tmp/work') }
  end

  context 'file is stored in valid local_path' do
    let(:file) do
      fixture_file_upload(
        Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
    end

    before do
      uploader.store!(file)
    end

    subject { uploader.file.path }

    it { is_expected.to start_with(local_path) }
    it { is_expected.to include("/#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/") }
    it { is_expected.to include("/#{job_artifact.project_id}/") }
    it { is_expected.to end_with("ci_build_artifacts.zip") }
  end
end
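The store_dir regex pins new artifacts to a hash-prefixed layout: two 2-hex shards, a 64-hex digest, then date, project id, and artifact id. A sketch of a #store_dir that would satisfy those expectations (the digest input is an assumption; the uploader's real helper is not shown in this diff):

require 'digest'

def store_dir
  # Assumption: the 64-hex component is a SHA256 of something project-specific.
  disk_hash = Digest::SHA2.hexdigest(model.project_id.to_s)
  date = model.created_at.utc.strftime('%Y_%m_%d')

  # => "ab/cd/<64-hex>/YYYY_MM_DD/<project_id>/<artifact_id>"
  File.join(disk_hash[0..1], disk_hash[2..3], disk_hash,
            date, model.project_id.to_s, model.id.to_s)
end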
 require 'rails_helper'

-describe ArtifactUploader do
+describe LegacyArtifactUploader do
   let(:store) { described_class::LOCAL_STORE }
   let(:job) { create(:ci_build, artifacts_file_store: store) }
-  let(:uploader) { described_class.new(job, :artifacts_file) }
+  let(:uploader) { described_class.new(job, :legacy_artifacts_file) }
   let(:local_path) { Gitlab.config.artifacts.path }

   describe '.local_store_path' do
@@ -66,11 +66,23 @@ describe ArtifactUploader do
     subject { uploader.filename }

     it { is_expected.to be_nil }
   end

-  context 'with artifacts' do
-    let(:job) { create(:ci_build, :artifacts) }
-
-    it { is_expected.not_to be_nil }
+  context 'file is stored in valid path' do
+    let(:file) do
+      fixture_file_upload(
+        Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
+    end
+
+    before do
+      uploader.store!(file)
+    end
+
+    subject { uploader.file.path }
+
+    it { is_expected.to start_with(local_path) }
+    it { is_expected.to include("/#{job.created_at.utc.strftime('%Y_%m')}/") }
+    it { is_expected.to include("/#{job.project_id}/") }
+    it { is_expected.to end_with("ci_build_artifacts.zip") }
   end
 end
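Worth noting against the JobArtifactUploader spec above: the legacy layout is flatter and coarser-grained, month-level directories with no hash sharding. The two shapes the specs pin down (example paths are illustrative):

# LegacyArtifactUploader: <artifacts.path>/2017_11/<project_id>/.../ci_build_artifacts.zip
# JobArtifactUploader:    <artifacts.path>/ab/cd/<64-hex>/2017_11_28/<project_id>/<artifact_id>/...
Gitlab.config.artifacts.path # both trees share this root, per the specs' local_path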
@@ -4,18 +4,22 @@ require 'carrierwave/storage/fog'
 describe ObjectStoreUploader do
   let(:uploader_class) { Class.new(described_class) }
   let(:object) { double }
-  let(:uploader) { uploader_class.new(object, :artifacts_file) }
+  let(:uploader) { uploader_class.new(object, :file) }
+
+  before do
+    allow(object.class).to receive(:uploader_option).with(:file, :mount_on) { nil }
+  end

   describe '#object_store' do
     it "calls artifacts_file_store on object" do
-      expect(object).to receive(:artifacts_file_store)
+      expect(object).to receive(:file_store)

       uploader.object_store
     end

     context 'when store is null' do
       before do
-        expect(object).to receive(:artifacts_file_store).twice.and_return(nil)
+        expect(object).to receive(:file_store).twice.and_return(nil)
       end

       it "returns LOCAL_STORE" do
@@ -26,7 +30,7 @@ describe ObjectStoreUploader do
     context 'when value is set' do
       before do
-        expect(object).to receive(:artifacts_file_store).twice.and_return(described_class::REMOTE_STORE)
+        expect(object).to receive(:file_store).twice.and_return(described_class::REMOTE_STORE)
       end

       it "returns given value" do
@@ -38,7 +42,7 @@ describe ObjectStoreUploader do
   describe '#object_store=' do
     it "calls artifacts_file_store= on object" do
-      expect(object).to receive(:artifacts_file_store=).with(described_class::REMOTE_STORE)
+      expect(object).to receive(:file_store=).with(described_class::REMOTE_STORE)

       uploader.object_store = described_class::REMOTE_STORE
     end
@@ -47,7 +51,7 @@ describe ObjectStoreUploader do
   describe '#file_storage?' do
     context 'when file storage is used' do
       before do
-        expect(object).to receive(:artifacts_file_store).and_return(described_class::LOCAL_STORE)
+        expect(object).to receive(:file_store).and_return(described_class::LOCAL_STORE)
       end

       it { expect(uploader).to be_file_storage }
@@ -57,7 +61,7 @@ describe ObjectStoreUploader do
       before do
         uploader_class.storage_options double(
           object_store: double(enabled: true))
-        expect(object).to receive(:artifacts_file_store).and_return(described_class::REMOTE_STORE)
+        expect(object).to receive(:file_store).and_return(described_class::REMOTE_STORE)
       end

       it { expect(uploader).not_to be_file_storage }
@@ -82,9 +86,9 @@ describe ObjectStoreUploader do
     end
   end

-  context 'when using ArtifactsUploader' do
-    let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store) }
-    let(:uploader) { job.artifacts_file }
+  context 'when using JobArtifactsUploader' do
+    let(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
+    let(:uploader) { artifact.file }

     context 'checking described_class' do
       let(:store) { described_class::LOCAL_STORE }
@@ -103,7 +107,7 @@ describe ObjectStoreUploader do
       let(:store) { nil }

       it "sets the store to LOCAL_STORE" do
-        expect(job.artifacts_file_store).to eq(described_class::LOCAL_STORE)
+        expect(artifact.file_store).to eq(described_class::LOCAL_STORE)
       end
     end
@@ -130,8 +134,8 @@ describe ObjectStoreUploader do
   end

   describe '#migrate!' do
-    let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store) }
-    let(:uploader) { job.artifacts_file }
+    let(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
+    let(:uploader) { artifact.file }
     let(:store) { described_class::LOCAL_STORE }

     subject { uploader.migrate!(new_store) }
@@ -214,7 +218,7 @@ describe ObjectStoreUploader do
     context 'when subject save fails' do
       before do
-        expect(job).to receive(:save!).and_raise(RuntimeError, "exception")
+        expect(artifact).to receive(:save!).and_raise(RuntimeError, "exception")
       end

       it "does catch an error" do
@@ -272,7 +276,7 @@ describe ObjectStoreUploader do
     context 'when using local storage' do
       before do
-        expect(object).to receive(:artifacts_file_store) { described_class::LOCAL_STORE }
+        expect(object).to receive(:file_store) { described_class::LOCAL_STORE }
       end

       it "does not raise an error" do
@@ -284,7 +288,7 @@ describe ObjectStoreUploader do
       before do
         uploader_class.storage_options double(
           object_store: double(enabled: true))
-        expect(object).to receive(:artifacts_file_store) { described_class::REMOTE_STORE }
+        expect(object).to receive(:file_store) { described_class::REMOTE_STORE }
       end

       context 'feature is not available' do
...
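All the renames in the spec above point the same way: the uploader no longer hardcodes artifacts_file_store but works against a generic file_store on whatever model it is mounted on. A simplified sketch of the delegation being specified (constant values and the base-class wiring are assumptions):

class ObjectStoreUploader < CarrierWave::Uploader::Base
  LOCAL_STORE  = 1 # assumed values, not confirmed by this diff
  REMOTE_STORE = 2

  def object_store
    model.file_store || LOCAL_STORE # a nil store is treated as local
  end

  def object_store=(value)
    model.file_store = value
  end

  def file_storage?
    object_store == LOCAL_STORE
  end
end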
@@ -11,12 +11,8 @@ describe ExpireBuildInstanceArtifactsWorker do
   end

   context 'with expired artifacts' do
-    let(:artifacts_expiry) { { artifacts_expire_at: Time.now - 7.days } }
-
     context 'when associated project is valid' do
-      let(:build) do
-        create(:ci_build, :artifacts, artifacts_expiry)
-      end
+      let(:build) { create(:ci_build, :artifacts, :expired) }

       it 'does expire' do
         expect(build.reload.artifacts_expired?).to be_truthy
@@ -26,14 +22,14 @@ describe ExpireBuildInstanceArtifactsWorker do
         expect(build.reload.artifacts_file.exists?).to be_falsey
       end

-      it 'does nullify artifacts_file column' do
-        expect(build.reload.artifacts_file_identifier).to be_nil
+      it 'does remove the job artifact record' do
+        expect(build.reload.job_artifacts_archive).to be_nil
       end
     end
   end

   context 'with not yet expired artifacts' do
-    let(:build) do
+    set(:build) do
       create(:ci_build, :artifacts, artifacts_expire_at: Time.now + 7.days)
     end
@@ -45,8 +41,8 @@ describe ExpireBuildInstanceArtifactsWorker do
       expect(build.reload.artifacts_file.exists?).to be_truthy
     end

-    it 'does not nullify artifacts_file column' do
-      expect(build.reload.artifacts_file_identifier).not_to be_nil
+    it 'does not remove the job artifact record' do
+      expect(build.reload.job_artifacts_archive).not_to be_nil
     end
   end
@@ -61,13 +57,13 @@ describe ExpireBuildInstanceArtifactsWorker do
       expect(build.reload.artifacts_file.exists?).to be_truthy
     end

-    it 'does not nullify artifacts_file column' do
-      expect(build.reload.artifacts_file_identifier).not_to be_nil
+    it 'does not remove the job artifact record' do
+      expect(build.reload.job_artifacts_archive).not_to be_nil
     end
   end

   context 'for expired artifacts' do
-    let(:build) { create(:ci_build, artifacts_expire_at: Time.now - 7.days) }
+    let(:build) { create(:ci_build, :expired) }

     it 'is still expired' do
       expect(build.reload.artifacts_expired?).to be_truthy
...
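The renamed examples above capture a real behavioural shift: expiry used to nullify the artifacts_file column, whereas job artifacts are deleted as rows, so job_artifacts_archive reloads as nil. A hedged sketch of what erasure now amounts to (the helper name is hypothetical; the worker's body is not in this diff):

def expire_artifacts!(build) # hypothetical helper, not the worker's real code
  build.job_artifacts.destroy_all          # new model: drop the ci_job_artifacts rows
  build.update(artifacts_file: nil,        # legacy model: clear the columns
               artifacts_metadata: nil,
               artifacts_size: nil)
end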
@@ -48,12 +48,12 @@ describe ObjectStorageUploadWorker do
     end
   end

-  context 'for artifacts' do
-    let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store, artifacts_metadata_store: store) }
-    let(:uploader_class) { ArtifactUploader }
+  context 'for legacy artifacts' do
+    let(:build) { create(:ci_build, :legacy_artifacts) }
+    let(:uploader_class) { LegacyArtifactUploader }
     let(:subject_class) { Ci::Build }
     let(:file_field) { :artifacts_file }
-    let(:subject_id) { job.id }
+    let(:subject_id) { build.id }

     context 'when local storage is used' do
       let(:store) { local }
@@ -61,13 +61,12 @@ describe ObjectStorageUploadWorker do
       context 'and remote storage is defined' do
         before do
           stub_artifacts_object_storage
-          job
         end

         it "migrates file to remote storage" do
           perform

-          expect(job.reload.artifacts_file_store).to eq(remote)
+          expect(build.reload.artifacts_file_store).to eq(remote)
         end

         context 'for artifacts_metadata' do
@@ -76,10 +75,34 @@ describe ObjectStorageUploadWorker do
           it 'migrates metadata to remote storage' do
             perform

-            expect(job.reload.artifacts_metadata_store).to eq(remote)
+            expect(build.reload.artifacts_metadata_store).to eq(remote)
           end
         end
       end
     end
   end
+
+  context 'for job artifacts' do
+    let(:artifact) { create(:ci_job_artifact, :archive) }
+    let(:uploader_class) { JobArtifactUploader }
+    let(:subject_class) { Ci::JobArtifact }
+    let(:file_field) { :file }
+    let(:subject_id) { artifact.id }
+
+    context 'when local storage is used' do
+      let(:store) { local }
+
+      context 'and remote storage is defined' do
+        before do
+          stub_artifacts_object_storage
+        end
+
+        it "migrates file to remote storage" do
+          perform
+
+          expect(artifact.reload.file_store).to eq(remote)
+        end
+      end
+    end
+  end
 end
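The shared lets (uploader_class, subject_class, file_field, subject_id) imply the worker's contract: look the record up by class and id, then migrate its mounted file. A sketch consistent with both contexts (the class body is an assumption reconstructed from those lets, not code from this diff):

class ObjectStorageUploadWorker
  include Sidekiq::Worker

  def perform(uploader_class_name, subject_class_name, file_field, subject_id)
    subject = subject_class_name.constantize.find(subject_id)

    # e.g. build.artifacts_file or artifact.file, per the lets above
    subject.public_send(file_field).migrate!(ObjectStoreUploader::REMOTE_STORE)
  end
end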