Commit 003a816a authored by Grzegorz Bizon

Merge branch 'zj-multiple-artifacts' into 'master'

Multiple artifacts

See merge request gitlab-org/gitlab-ce!14367
parents 29be9c1a 9711b344
module Ci
class Build < CommitStatus
prepend ArtifactMigratable
include TokenAuthenticatable
include AfterCommitQueue
include Presentable
......@@ -10,9 +11,14 @@ module Ci
belongs_to :erased_by, class_name: 'User'
has_many :deployments, as: :deployable
has_one :last_deployment, -> { order('deployments.id DESC') }, as: :deployable, class_name: 'Deployment'
has_many :trace_sections, class_name: 'Ci::BuildTraceSection'
has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_one :job_artifacts_archive, -> { where(file_type: Ci::JobArtifact.file_types[:archive]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
has_one :job_artifacts_metadata, -> { where(file_type: Ci::JobArtifact.file_types[:metadata]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
# The "environment" field for builds is a String, and is the unexpanded name
def persisted_environment
@persisted_environment ||= Environment.find_by(
......@@ -31,15 +37,18 @@ module Ci
scope :unstarted, ->() { where(runner_id: nil) }
scope :ignore_failures, ->() { where(allow_failure: false) }
scope :with_artifacts, ->() { where.not(artifacts_file: [nil, '']) }
scope :with_artifacts, ->() do
where('(artifacts_file IS NOT NULL AND artifacts_file <> ?) OR EXISTS (?)',
'', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id'))
end
scope :with_artifacts_not_expired, ->() { with_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
scope :with_expired_artifacts, ->() { with_artifacts.where('artifacts_expire_at < ?', Time.now) }
scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + [:manual]) }
scope :ref_protected, -> { where(protected: true) }
mount_uploader :artifacts_file, ArtifactUploader
mount_uploader :artifacts_metadata, ArtifactUploader
mount_uploader :legacy_artifacts_file, LegacyArtifactUploader, mount_on: :artifacts_file
mount_uploader :legacy_artifacts_metadata, LegacyArtifactUploader, mount_on: :artifacts_metadata
acts_as_taggable
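A rough sketch of what the reworked with_artifacts scope now matches; the SQL in the comment is an approximation of what ActiveRecord generates, not output copied from a database:
Ci::Build.with_artifacts.to_sql
# => "SELECT ... FROM ci_builds WHERE ... (artifacts_file IS NOT NULL AND artifacts_file <> '')
#     OR EXISTS (SELECT 1 FROM ci_job_artifacts WHERE ci_builds.id = ci_job_artifacts.job_id) ..."
# i.e. legacy rows (non-empty artifacts_file) still match, and so do builds whose
# files exist only as ci_job_artifacts rows.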
......@@ -326,14 +335,6 @@ module Ci
project.running_or_pending_build_count(force: true)
end
def artifacts?
!artifacts_expired? && artifacts_file.exists?
end
def artifacts_metadata?
artifacts? && artifacts_metadata.exists?
end
def artifacts_metadata_entry(path, **options)
metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
artifacts_metadata.path,
......@@ -386,6 +387,7 @@ module Ci
def keep_artifacts!
self.update(artifacts_expire_at: nil)
self.job_artifacts.update_all(expire_at: nil)
end
def coverage_regex
......@@ -473,11 +475,7 @@ module Ci
private
def update_artifacts_size
self.artifacts_size = if artifacts_file.exists?
artifacts_file.size
else
nil
end
self.artifacts_size = legacy_artifacts_file&.size
end
def erase_trace!
......
module Ci
class JobArtifact < ActiveRecord::Base
extend Gitlab::Ci::Model
belongs_to :project
belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id
before_save :set_size, if: :file_changed?
mount_uploader :file, JobArtifactUploader
enum file_type: {
archive: 1,
metadata: 2
}
def self.artifacts_size_for(project)
self.where(project: project).sum(:size)
end
def set_size
self.size = file.size
end
def expire_in
expire_at - Time.now if expire_at
end
def expire_in=(value)
self.expire_at =
if value
ChronicDuration.parse(value)&.seconds&.from_now
end
end
end
end
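A minimal console sketch of the expire_in reader/writer defined above, assuming artifact is any persisted Ci::JobArtifact:
artifact.expire_in = '7 days'   # ChronicDuration.parse('7 days').seconds.from_now => sets expire_at
artifact.expire_in              # => ~604800.0 (seconds remaining until expire_at)
artifact.expire_in = nil        # clears expire_at again
artifact.expire_in = '0'        # ChronicDuration.parse('0') is nil, so expire_at stays unset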
# Adapter class to unify the interface between mounted uploaders and the
# Ci::Artifact model
# Meant to be prepended so the interface can stay the same
module ArtifactMigratable
def artifacts_file
job_artifacts_archive&.file || legacy_artifacts_file
end
def artifacts_metadata
job_artifacts_metadata&.file || legacy_artifacts_metadata
end
def artifacts?
!artifacts_expired? && artifacts_file.exists?
end
def artifacts_metadata?
artifacts? && artifacts_metadata.exists?
end
def artifacts_file_changed?
job_artifacts_archive&.file_changed? || attribute_changed?(:artifacts_file)
end
def remove_artifacts_file!
if job_artifacts_archive
job_artifacts_archive.destroy
else
remove_legacy_artifacts_file!
end
end
def remove_artifacts_metadata!
if job_artifacts_metadata
job_artifacts_metadata.destroy
else
remove_legacy_artifacts_metadata!
end
end
def artifacts_size
read_attribute(:artifacts_size).to_i +
job_artifacts_archive&.size.to_i + job_artifacts_metadata&.size.to_i
end
end
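Because Ci::Build prepends this module, existing callers keep the old reader names and transparently prefer the new rows; a hedged sketch of the lookup order for some build job:
job.artifacts_file       # job_artifacts_archive&.file, falling back to legacy_artifacts_file
job.artifacts_metadata   # job_artifacts_metadata&.file, falling back to legacy_artifacts_metadata
job.artifacts?           # true only while not expired and the resolved archive file exists
job.artifacts_size       # legacy column value plus the sizes of any new artifact rows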
......@@ -35,7 +35,9 @@ class ProjectStatistics < ActiveRecord::Base
end
def update_build_artifacts_size
self.build_artifacts_size = project.builds.sum(:artifacts_size)
self.build_artifacts_size =
project.builds.sum(:artifacts_size) +
Ci::JobArtifact.artifacts_size_for(self)
end
def update_storage_size
......
......@@ -18,7 +18,7 @@ module Projects
@status.enqueue!
@status.run!
raise 'missing pages artifacts' unless build.artifacts_file?
raise 'missing pages artifacts' unless build.artifacts?
raise 'pages are outdated' unless latest?
# Create temporary directory in which we will extract the artifacts
......
class JobArtifactUploader < GitlabUploader
storage :file
def self.local_store_path
Gitlab.config.artifacts.path
end
def self.artifacts_upload_path
File.join(self.local_store_path, 'tmp/uploads/')
end
def size
return super if model.size.nil?
model.size
end
def store_dir
default_local_path
end
def cache_dir
File.join(self.class.local_store_path, 'tmp/cache')
end
def work_dir
File.join(self.class.local_store_path, 'tmp/work')
end
private
def default_local_path
File.join(self.class.local_store_path, default_path)
end
def default_path
creation_date = model.created_at.utc.strftime('%Y_%m_%d')
File.join(disk_hash[0..1], disk_hash[2..3], disk_hash,
creation_date, model.job_id.to_s, model.id.to_s)
end
def disk_hash
@disk_hash ||= Digest::SHA2.hexdigest(model.project_id.to_s)
end
end
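For local storage the store_dir above works out to a layout roughly like this (illustrative; the hash segments come from SHA256 of the project id, per #disk_hash):
disk_hash = Digest::SHA2.hexdigest(model.project_id.to_s)
# store_dir => <Gitlab.config.artifacts.path>/<disk_hash[0..1]>/<disk_hash[2..3]>/<disk_hash>/
#              <YYYY_MM_DD>/<job_id>/<artifact_id>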
class ArtifactUploader < GitlabUploader
class LegacyArtifactUploader < GitlabUploader
storage :file
attr_reader :job, :field
def self.local_artifacts_store
def self.local_store_path
Gitlab.config.artifacts.path
end
def self.artifacts_upload_path
File.join(self.local_artifacts_store, 'tmp/uploads/')
File.join(self.local_store_path, 'tmp/uploads/')
end
def initialize(job, field)
@job, @field = job, field
end
def store_dir
......@@ -20,20 +14,20 @@ class ArtifactUploader < GitlabUploader
end
def cache_dir
File.join(self.class.local_artifacts_store, 'tmp/cache')
File.join(self.class.local_store_path, 'tmp/cache')
end
def work_dir
File.join(self.class.local_artifacts_store, 'tmp/work')
File.join(self.class.local_store_path, 'tmp/work')
end
private
def default_local_path
File.join(self.class.local_artifacts_store, default_path)
File.join(self.class.local_store_path, default_path)
end
def default_path
File.join(job.created_at.utc.strftime('%Y_%m'), job.project_id.to_s, job.id.to_s)
File.join(model.created_at.utc.strftime('%Y_%m'), model.project_id.to_s, model.id.to_s)
end
end
......@@ -649,6 +649,8 @@ test:
# user: YOUR_USERNAME
pages:
path: tmp/tests/pages
artifacts:
path: tmp/tests/artifacts
repositories:
storages:
default:
......
......@@ -124,11 +124,11 @@ class Gitlab::Seeder::Pipelines
return unless %w[build test].include?(build.stage)
artifacts_cache_file(artifacts_archive_path) do |file|
build.artifacts_file = file
build.job_artifacts.build(project: build.project, file_type: :archive, file: file)
end
artifacts_cache_file(artifacts_metadata_path) do |file|
build.artifacts_metadata = file
build.job_artifacts.build(project: build.project, file_type: :metadata, file: file)
end
end
......
class CreateJobArtifacts < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
create_table :ci_job_artifacts do |t|
t.belongs_to :project, null: false, index: true, foreign_key: { on_delete: :cascade }
t.integer :job_id, null: false
t.integer :file_type, null: false
t.integer :size, limit: 8
t.datetime_with_timezone :created_at, null: false
t.datetime_with_timezone :updated_at, null: false
t.datetime_with_timezone :expire_at
t.string :file
t.foreign_key :ci_builds, column: :job_id, on_delete: :cascade
t.index [:job_id, :file_type], unique: true
end
end
end
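The unique [job_id, file_type] index means a build can hold at most one archive row and one metadata row, so a duplicate upload has to replace the existing record; an illustrative check (build and file are placeholders):
Ci::JobArtifact.create!(job: build, project: build.project, file_type: :archive, file: file)
Ci::JobArtifact.create!(job: build, project: build.project, file_type: :archive, file: file)
# => the second insert raises ActiveRecord::RecordNotUnique
#    (violates index_ci_job_artifacts_on_job_id_and_file_type)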
......@@ -319,6 +319,20 @@ ActiveRecord::Schema.define(version: 20171124150326) do
add_index "ci_group_variables", ["group_id", "key"], name: "index_ci_group_variables_on_group_id_and_key", unique: true, using: :btree
create_table "ci_job_artifacts", force: :cascade do |t|
t.integer "project_id", null: false
t.integer "job_id", null: false
t.integer "file_type", null: false
t.integer "size", limit: 8
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
t.datetime_with_timezone "expire_at"
t.string "file"
end
add_index "ci_job_artifacts", ["job_id", "file_type"], name: "index_ci_job_artifacts_on_job_id_and_file_type", unique: true, using: :btree
add_index "ci_job_artifacts", ["project_id"], name: "index_ci_job_artifacts_on_project_id", using: :btree
create_table "ci_pipeline_schedule_variables", force: :cascade do |t|
t.string "key", null: false
t.text "value"
......@@ -1909,6 +1923,8 @@ ActiveRecord::Schema.define(version: 20171124150326) do
add_foreign_key "ci_builds", "ci_stages", column: "stage_id", name: "fk_3a9eaa254d", on_delete: :cascade
add_foreign_key "ci_builds", "projects", name: "fk_befce0568a", on_delete: :cascade
add_foreign_key "ci_group_variables", "namespaces", column: "group_id", name: "fk_33ae4d58d8", on_delete: :cascade
add_foreign_key "ci_job_artifacts", "ci_builds", column: "job_id", on_delete: :cascade
add_foreign_key "ci_job_artifacts", "projects", on_delete: :cascade
add_foreign_key "ci_pipeline_schedule_variables", "ci_pipeline_schedules", column: "pipeline_schedule_id", name: "fk_41c35fda51", on_delete: :cascade
add_foreign_key "ci_pipeline_schedules", "projects", name: "fk_8ead60fcc4", on_delete: :cascade
add_foreign_key "ci_pipeline_schedules", "users", column: "owner_id", name: "fk_9ea99f58d2", on_delete: :nullify
......
......@@ -44,8 +44,8 @@ class Spinach::Features::ProjectPages < Spinach::FeatureSteps
project: @project,
pipeline: pipeline,
ref: 'HEAD',
artifacts_file: fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip'),
artifacts_metadata: fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip.meta')
legacy_artifacts_file: fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip'),
legacy_artifacts_metadata: fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip.meta')
)
result = ::Projects::UpdatePagesService.new(@project, build).execute
......
......@@ -37,13 +37,13 @@ module SharedBuilds
step 'recent build has artifacts available' do
artifacts = Rails.root + 'spec/fixtures/ci_build_artifacts.zip'
archive = fixture_file_upload(artifacts, 'application/zip')
@build.update_attributes(artifacts_file: archive)
@build.update_attributes(legacy_artifacts_file: archive)
end
step 'recent build has artifacts metadata available' do
metadata = Rails.root + 'spec/fixtures/ci_build_artifacts_metadata.gz'
gzip = fixture_file_upload(metadata, 'application/x-gzip')
@build.update_attributes(artifacts_metadata: gzip)
@build.update_attributes(legacy_artifacts_metadata: gzip)
end
step 'recent build has a build trace' do
......
......@@ -1050,13 +1050,9 @@ module API
expose :type, :url, :username, :password
end
class ArtifactFile < Grape::Entity
expose :filename, :size
end
class Dependency < Grape::Entity
expose :id, :name, :token
expose :artifacts_file, using: ArtifactFile, if: ->(job, _) { job.artifacts? }
expose :artifacts_file, using: JobArtifactFile, if: ->(job, _) { job.artifacts? }
end
class Response < Grape::Entity
......
......@@ -215,18 +215,20 @@ module API
job = authenticate_job!
forbidden!('Job is not running!') unless job.running?
artifacts_upload_path = ArtifactUploader.artifacts_upload_path
artifacts_upload_path = JobArtifactUploader.artifacts_upload_path
artifacts = uploaded_file(:file, artifacts_upload_path)
metadata = uploaded_file(:metadata, artifacts_upload_path)
bad_request!('Missing artifacts file!') unless artifacts
file_to_large! unless artifacts.size < max_artifacts_size
job.artifacts_file = artifacts
job.artifacts_metadata = metadata
job.artifacts_expire_in = params['expire_in'] ||
expire_in = params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
job.build_job_artifacts_archive(project: job.project, file_type: :archive, file: artifacts, expire_in: expire_in)
job.build_job_artifacts_metadata(project: job.project, file_type: :metadata, file: metadata, expire_in: expire_in) if metadata
job.artifacts_expire_in = expire_in
if job.save
present job, with: Entities::JobRequest::Response
else
......
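build_job_artifacts_archive and build_job_artifacts_metadata are the ordinary has_one builders from the associations added to Ci::Build, so the single job.save in the hunk above also persists the new ci_job_artifacts rows; roughly:
job.build_job_artifacts_archive(project: job.project, file_type: :archive, file: artifacts, expire_in: expire_in)
job.save   # saves the build and, via the has_one associations, the freshly built artifact records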
......@@ -3,7 +3,7 @@ require 'backup/files'
module Backup
class Artifacts < Files
def initialize
super('artifacts', ArtifactUploader.local_artifacts_store)
super('artifacts', LegacyArtifactUploader.local_store_path)
end
def create_files_dir
......
......@@ -58,7 +58,7 @@ module Gitlab
end
def artifact_upload_ok
{ TempPath: ArtifactUploader.artifacts_upload_path }
{ TempPath: JobArtifactUploader.artifacts_upload_path }
end
def send_git_blob(repository, blob)
......
......@@ -154,36 +154,29 @@ FactoryGirl.define do
runner factory: :ci_runner
end
trait :artifacts do
trait :legacy_artifacts do
after(:create) do |build, _|
build.artifacts_file =
fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'),
'application/zip')
build.artifacts_metadata =
fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'),
'application/x-gzip')
build.save!
build.update!(
legacy_artifacts_file: fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip'),
legacy_artifacts_metadata: fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'), 'application/x-gzip')
)
end
end
trait :artifacts_expired do
after(:create) do |build, _|
build.artifacts_file =
fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'),
'application/zip')
build.artifacts_metadata =
fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'),
'application/x-gzip')
build.artifacts_expire_at = 1.minute.ago
build.save!
trait :artifacts do
after(:create) do |build|
create(:ci_job_artifact, :archive, job: build)
create(:ci_job_artifact, :metadata, job: build)
build.reload
end
end
trait :expired do
artifacts_expire_at 1.minute.ago
end
trait :with_commit do
after(:build) do |build|
allow(build).to receive(:commit).and_return build(:commit, :without_author)
......
include ActionDispatch::TestProcess
FactoryGirl.define do
factory :ci_job_artifact, class: Ci::JobArtifact do
job factory: :ci_build
file_type :archive
after :build do |artifact|
artifact.project ||= artifact.job.project
end
trait :archive do
file_type :archive
after(:build) do |artifact, _|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
end
end
trait :metadata do
file_type :metadata
after(:build) do |artifact, _|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'), 'application/x-gzip')
end
end
end
end
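In specs the trait split then reads roughly as follows (a sketch; the builds are illustrative):
create(:ci_build, :artifacts)            # archive + metadata rows in ci_job_artifacts
create(:ci_build, :legacy_artifacts)     # files mounted on the old ci_builds columns
create(:ci_build, :artifacts, :expired)  # new-style artifacts with artifacts_expire_at in the past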
......@@ -89,7 +89,7 @@ describe 'Commits' do
context 'Download artifacts' do
before do
build.update_attributes(artifacts_file: artifacts_file)
build.update_attributes(legacy_artifacts_file: artifacts_file)
end
it do
......@@ -146,7 +146,7 @@ describe 'Commits' do
context "when logged as reporter" do
before do
project.team << [user, :reporter]
build.update_attributes(artifacts_file: artifacts_file)
build.update_attributes(legacy_artifacts_file: artifacts_file)
visit pipeline_path(pipeline)
end
......@@ -168,7 +168,7 @@ describe 'Commits' do
project.update(
visibility_level: Gitlab::VisibilityLevel::INTERNAL,
public_builds: false)
build.update_attributes(artifacts_file: artifacts_file)
build.update_attributes(legacy_artifacts_file: artifacts_file)
visit pipeline_path(pipeline)
end
......
......@@ -28,14 +28,14 @@ feature 'Mini Pipeline Graph', :js do
let(:artifacts_file2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png') }
before do
create(:ci_build, pipeline: pipeline, artifacts_file: artifacts_file1)
create(:ci_build, pipeline: pipeline, legacy_artifacts_file: artifacts_file1)
create(:ci_build, pipeline: pipeline, when: 'manual')
end
it 'avoids repeated database queries' do
before = ActiveRecord::QueryRecorder.new { visit_merge_request(:json) }
create(:ci_build, pipeline: pipeline, artifacts_file: artifacts_file2)
create(:ci_build, pipeline: pipeline, legacy_artifacts_file: artifacts_file2)
create(:ci_build, pipeline: pipeline, when: 'manual')
after = ActiveRecord::QueryRecorder.new { visit_merge_request(:json) }
......
......@@ -187,7 +187,7 @@ feature 'Jobs' do
context "Download artifacts" do
before do
job.update_attributes(artifacts_file: artifacts_file)
job.update_attributes(legacy_artifacts_file: artifacts_file)
visit project_job_path(project, job)
end
......@@ -198,7 +198,7 @@ feature 'Jobs' do
context 'Artifacts expire date' do
before do
job.update_attributes(artifacts_file: artifacts_file,
job.update_attributes(legacy_artifacts_file: artifacts_file,
artifacts_expire_at: expire_at)
visit project_job_path(project, job)
......@@ -422,14 +422,14 @@ feature 'Jobs' do
describe "GET /:project/jobs/:id/download" do
before do
job.update_attributes(artifacts_file: artifacts_file)
job.update_attributes(legacy_artifacts_file: artifacts_file)
visit project_job_path(project, job)
click_link 'Download'
end
context "Build from other project" do
before do
job2.update_attributes(artifacts_file: artifacts_file)
job2.update_attributes(legacy_artifacts_file: artifacts_file)
visit download_project_job_artifacts_path(project, job2)
end
......
......@@ -304,7 +304,7 @@ describe 'Pipelines', :js do
context 'with artifacts expired' do
let!(:with_artifacts_expired) do
create(:ci_build, :artifacts_expired, :success,
create(:ci_build, :expired, :success,
pipeline: pipeline,
name: 'rspec',
stage: 'test')
......
......@@ -16,20 +16,22 @@ describe MigrateOldArtifacts do
end
context 'with migratable data' do
let(:project1) { create(:project, ci_id: 2) }
let(:project2) { create(:project, ci_id: 3) }
let(:project3) { create(:project) }
set(:project1) { create(:project, ci_id: 2) }
set(:project2) { create(:project, ci_id: 3) }
set(:project3) { create(:project) }
let(:pipeline1) { create(:ci_empty_pipeline, project: project1) }
let(:pipeline2) { create(:ci_empty_pipeline, project: project2) }
let(:pipeline3) { create(:ci_empty_pipeline, project: project3) }
set(:pipeline1) { create(:ci_empty_pipeline, project: project1) }
set(:pipeline2) { create(:ci_empty_pipeline, project: project2) }
set(:pipeline3) { create(:ci_empty_pipeline, project: project3) }
let!(:build_with_legacy_artifacts) { create(:ci_build, pipeline: pipeline1) }
let!(:build_without_artifacts) { create(:ci_build, pipeline: pipeline1) }
let!(:build2) { create(:ci_build, :artifacts, pipeline: pipeline2) }
let!(:build3) { create(:ci_build, :artifacts, pipeline: pipeline3) }
let!(:build2) { create(:ci_build, pipeline: pipeline2) }
let!(:build3) { create(:ci_build, pipeline: pipeline3) }
before do
setup_builds(build2, build3)
store_artifacts_in_legacy_path(build_with_legacy_artifacts)
end
......@@ -38,7 +40,7 @@ describe MigrateOldArtifacts do
end
it "legacy artifacts are set" do
expect(build_with_legacy_artifacts.artifacts_file_identifier).not_to be_nil
expect(build_with_legacy_artifacts.legacy_artifacts_file_identifier).not_to be_nil
end
describe '#min_id' do
......@@ -113,5 +115,24 @@ describe MigrateOldArtifacts do
build.project.ci_id.to_s,
build.id.to_s)
end
def new_legacy_path(build)
File.join(directory,
build.created_at.utc.strftime('%Y_%m'),
build.project_id.to_s,
build.id.to_s)
end
def setup_builds(*builds)
builds.each do |build|
FileUtils.mkdir_p(new_legacy_path(build))
build.update_columns(
artifacts_file: 'ci_build_artifacts.zip',
artifacts_metadata: 'ci_build_artifacts_metadata.gz')
build.reload
end
end
end
end
require 'spec_helper'
describe Ci::JobArtifact do
set(:artifact) { create(:ci_job_artifact, :archive) }
describe "Associations" do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:job) }
end
it { is_expected.to respond_to(:file) }
it { is_expected.to respond_to(:created_at) }
it { is_expected.to respond_to(:updated_at) }
describe '#set_size' do
it 'sets the size' do
expect(artifact.size).to eq(106365)
end
end
describe '#file' do
subject { artifact.file }
context 'the uploader api' do
it { is_expected.to respond_to(:store_dir) }
it { is_expected.to respond_to(:cache_dir) }
it { is_expected.to respond_to(:work_dir) }
end
end
describe '#expire_in' do
subject { artifact.expire_in }
it { is_expected.to be_nil }
context 'when expire_at is specified' do
let(:expire_at) { Time.now + 7.days }
before do
artifact.expire_at = expire_at
end
it { is_expected.to be_within(5).of(expire_at - Time.now) }
end
end
describe '#expire_in=' do
subject { artifact.expire_in }
it 'when assigning valid duration' do
artifact.expire_in = '7 days'
is_expected.to be_within(10).of(7.days.to_i)
end
it 'when assigning invalid duration' do
expect { artifact.expire_in = '7 elephants' }.to raise_error(ChronicDuration::DurationParseError)
is_expected.to be_nil
end
it 'when resetting value' do
artifact.expire_in = nil
is_expected.to be_nil
end
it 'when setting to 0' do
artifact.expire_in = '0'
is_expected.to be_nil
end
end
end
......@@ -133,15 +133,29 @@ describe ProjectStatistics do
describe '#update_build_artifacts_size' do
let!(:pipeline) { create(:ci_pipeline, project: project) }
let!(:build1) { create(:ci_build, pipeline: pipeline, artifacts_size: 45.megabytes) }
let!(:build2) { create(:ci_build, pipeline: pipeline, artifacts_size: 56.megabytes) }
before do
statistics.update_build_artifacts_size
context 'when new job artifacts are calculated' do
let(:ci_build) { create(:ci_build, pipeline: pipeline) }
before do
create(:ci_job_artifact, :archive, project: pipeline.project, job: ci_build)
end
it "stores the size of related build artifacts" do
statistics.update_build_artifacts_size
expect(statistics.build_artifacts_size).to be(106365)
end
end
it "stores the size of related build artifacts" do
expect(statistics.build_artifacts_size).to eq 101.megabytes
context 'when legacy artifacts are used' do
let!(:ci_build) { create(:ci_build, pipeline: pipeline, artifacts_size: 10.megabytes) }
it "stores the size of related build artifacts" do
statistics.update_build_artifacts_size
expect(statistics.build_artifacts_size).to eq(10.megabytes)
end
end
end
......
......@@ -945,7 +945,7 @@ describe API::Runner do
context 'when artifacts are being stored inside of tmp path' do
before do
# by configuring this path we allow to pass temp file from any path
allow(ArtifactUploader).to receive(:artifacts_upload_path).and_return('/')
allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return('/')
end
context 'when job has been erased' do
......@@ -985,15 +985,6 @@ describe API::Runner do
it_behaves_like 'successful artifacts upload'
end
context 'when updates artifact' do
before do
upload_artifacts(file_upload2, headers_with_token)
upload_artifacts(file_upload, headers_with_token)
end
it_behaves_like 'successful artifacts upload'
end
context 'when using runners token' do
it 'responds with forbidden' do
upload_artifacts(file_upload, headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.project.runners_token))
......@@ -1106,7 +1097,7 @@ describe API::Runner do
expect(response).to have_gitlab_http_status(201)
expect(stored_artifacts_file.original_filename).to eq(artifacts.original_filename)
expect(stored_metadata_file.original_filename).to eq(metadata.original_filename)
expect(stored_artifacts_size).to eq(71759)
expect(stored_artifacts_size).to eq(72821)
end
end
......@@ -1131,7 +1122,7 @@ describe API::Runner do
# by configuring this path we allow to pass file from @tmpdir only
# but all temporary files are stored in system tmp directory
@tmpdir = Dir.mktmpdir
allow(ArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir)
allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir)
end
after do
......
require 'spec_helper'
describe PipelineSerializer do
let(:user) { create(:user) }
set(:user) { create(:user) }
let(:serializer) do
described_class.new(current_user: user)
......@@ -117,7 +117,7 @@ describe PipelineSerializer do
shared_examples 'no N+1 queries' do
it 'verifies number of queries', :request_store do
recorded = ActiveRecord::QueryRecorder.new { subject }
expect(recorded.count).to be_within(1).of(57)
expect(recorded.count).to be_within(1).of(36)
expect(recorded.cached_count).to eq(0)
end
end
......
......@@ -17,7 +17,7 @@ describe Ci::RetryBuildService do
%i[id status user token coverage trace runner artifacts_expire_at
artifacts_file artifacts_metadata artifacts_size created_at
updated_at started_at finished_at queued_at erased_by
erased_at auto_canceled_by].freeze
erased_at auto_canceled_by job_artifacts job_artifacts_archive job_artifacts_metadata].freeze
IGNORE_ACCESSORS =
%i[type lock_version target_url base_tags trace_sections
......@@ -34,7 +34,7 @@ describe Ci::RetryBuildService do
end
let(:build) do
create(:ci_build, :failed, :artifacts_expired, :erased,
create(:ci_build, :failed, :artifacts, :expired, :erased,
:queued, :coverage, :tags, :allowed_to_fail, :on_tag,
:triggered, :trace, :teardown_environment,
description: 'my-job', stage: 'test', pipeline: pipeline,
......
require "spec_helper"
describe Projects::UpdatePagesService do
let(:project) { create(:project, :repository) }
let(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
let(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') }
set(:project) { create(:project, :repository) }
set(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
set(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') }
let(:invalid_file) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png') }
let(:extension) { 'zip' }
let(:file) { fixture_file_upload(Rails.root + "spec/fixtures/pages.#{extension}") }
let(:empty_file) { fixture_file_upload(Rails.root + "spec/fixtures/pages_empty.#{extension}") }
let(:metadata) do
filename = Rails.root + "spec/fixtures/pages.#{extension}.meta"
fixture_file_upload(filename) if File.exist?(filename)
end
subject { described_class.new(project, build) }
......@@ -12,18 +20,85 @@ describe Projects::UpdatePagesService do
project.remove_pages
end
%w(tar.gz zip).each do |format|
context "for valid #{format}" do
let(:file) { fixture_file_upload(Rails.root + "spec/fixtures/pages.#{format}") }
let(:empty_file) { fixture_file_upload(Rails.root + "spec/fixtures/pages_empty.#{format}") }
let(:metadata) do
filename = Rails.root + "spec/fixtures/pages.#{format}.meta"
fixture_file_upload(filename) if File.exist?(filename)
context 'legacy artifacts' do
%w(tar.gz zip).each do |format|
let(:extension) { format }
context "for valid #{format}" do
before do
build.update_attributes(legacy_artifacts_file: file)
build.update_attributes(legacy_artifacts_metadata: metadata)
end
describe 'pages artifacts' do
context 'with expiry date' do
before do
build.artifacts_expire_in = "2 days"
end
it "doesn't delete artifacts" do
expect(execute).to eq(:success)
expect(build.reload.artifacts?).to eq(true)
end
end
context 'without expiry date' do
it "does delete artifacts" do
expect(execute).to eq(:success)
expect(build.reload.artifacts?).to eq(false)
end
end
end
it 'succeeds' do
expect(project.pages_deployed?).to be_falsey
expect(execute).to eq(:success)
expect(project.pages_deployed?).to be_truthy
# Check that all expected files are extracted
%w[index.html zero .hidden/file].each do |filename|
expect(File.exist?(File.join(project.public_pages_path, filename))).to be_truthy
end
end
it 'limits pages size' do
stub_application_setting(max_pages_size: 1)
expect(execute).not_to eq(:success)
end
it 'removes pages after destroy' do
expect(PagesWorker).to receive(:perform_in)
expect(project.pages_deployed?).to be_falsey
expect(execute).to eq(:success)
expect(project.pages_deployed?).to be_truthy
project.destroy
expect(project.pages_deployed?).to be_falsey
end
it 'fails if sha on branch is not latest' do
build.update_attributes(ref: 'feature')
expect(execute).not_to eq(:success)
end
it 'fails for empty file' do
build.update_attributes(legacy_artifacts_file: empty_file)
expect(execute).not_to eq(:success)
end
end
end
end
context 'for new artifacts' do
context "for a valid job" do
before do
build.update_attributes(artifacts_file: file)
build.update_attributes(artifacts_metadata: metadata)
create(:ci_job_artifact, file: file, job: build)
create(:ci_job_artifact, file_type: :metadata, file: metadata, job: build)
build.reload
end
describe 'pages artifacts' do
......@@ -35,7 +110,7 @@ describe Projects::UpdatePagesService do
it "doesn't delete artifacts" do
expect(execute).to eq(:success)
expect(build.reload.artifacts_file?).to eq(true)
expect(build.artifacts?).to eq(true)
end
end
......@@ -43,7 +118,7 @@ describe Projects::UpdatePagesService do
it "does delete artifacts" do
expect(execute).to eq(:success)
expect(build.reload.artifacts_file?).to eq(false)
expect(build.reload.artifacts?).to eq(false)
end
end
end
......@@ -74,13 +149,14 @@ describe Projects::UpdatePagesService do
end
it 'fails if sha on branch is not latest' do
pipeline.update_attributes(sha: 'old_sha')
build.update_attributes(artifacts_file: file)
build.update_attributes(ref: 'feature')
expect(execute).not_to eq(:success)
end
it 'fails for empty file' do
build.update_attributes(artifacts_file: empty_file)
build.job_artifacts_archive.update_attributes(file: empty_file)
expect(execute).not_to eq(:success)
end
end
......@@ -97,7 +173,7 @@ describe Projects::UpdatePagesService do
end
it 'fails for invalid archive' do
build.update_attributes(artifacts_file: invalid_file)
build.update_attributes(legacy_artifacts_file: invalid_file)
expect(execute).not_to eq(:success)
end
......@@ -108,8 +184,8 @@ describe Projects::UpdatePagesService do
file = fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip')
metafile = fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip.meta')
build.update_attributes(artifacts_file: file)
build.update_attributes(artifacts_metadata: metafile)
build.update_attributes(legacy_artifacts_file: file)
build.update_attributes(legacy_artifacts_metadata: metafile)
allow(build).to receive(:artifacts_metadata_entry)
.and_return(metadata)
......
......@@ -120,6 +120,7 @@ module TestEnv
FileUtils.mkdir_p(repos_path)
FileUtils.mkdir_p(backup_path)
FileUtils.mkdir_p(pages_path)
FileUtils.mkdir_p(artifacts_path)
end
def clean_gitlab_test_path
......@@ -233,6 +234,10 @@ module TestEnv
Gitlab.config.pages.path
end
def artifacts_path
Gitlab.config.artifacts.path
end
# When no cached assets exist, manually hit the root path to create them
#
# Otherwise they'd be created by the first test, often timing out and
......
require 'spec_helper'
describe JobArtifactUploader do
let(:job_artifact) { create(:ci_job_artifact) }
let(:uploader) { described_class.new(job_artifact, :file) }
let(:local_path) { Gitlab.config.artifacts.path }
describe '#store_dir' do
subject { uploader.store_dir }
let(:path) { "#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/#{job_artifact.project_id}/#{job_artifact.id}" }
context 'when using local storage' do
it { is_expected.to start_with(local_path) }
it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
it { is_expected.to end_with(path) }
end
end
describe '#cache_dir' do
subject { uploader.cache_dir }
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('/tmp/cache') }
end
describe '#work_dir' do
subject { uploader.work_dir }
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('/tmp/work') }
end
context 'file is stored in valid local_path' do
let(:file) do
fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
end
before do
uploader.store!(file)
end
subject { uploader.file.path }
it { is_expected.to start_with(local_path) }
it { is_expected.to include("/#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/") }
it { is_expected.to include("/#{job_artifact.project_id}/") }
it { is_expected.to end_with("ci_build_artifacts.zip") }
end
end
require 'rails_helper'
describe ArtifactUploader do
describe LegacyArtifactUploader do
let(:job) { create(:ci_build) }
let(:uploader) { described_class.new(job, :artifacts_file) }
let(:path) { Gitlab.config.artifacts.path }
let(:uploader) { described_class.new(job, :legacy_artifacts_file) }
let(:local_path) { Gitlab.config.artifacts.path }
describe '.local_artifacts_store' do
subject { described_class.local_artifacts_store }
describe '.local_store_path' do
subject { described_class.local_store_path }
it "delegate to artifacts path" do
expect(Gitlab.config.artifacts).to receive(:path)
......@@ -18,28 +18,32 @@ describe ArtifactUploader do
describe '.artifacts_upload_path' do
subject { described_class.artifacts_upload_path }
it { is_expected.to start_with(path) }
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('tmp/uploads/') }
end
describe '#store_dir' do
subject { uploader.store_dir }
it { is_expected.to start_with(path) }
it { is_expected.to end_with("#{job.project_id}/#{job.id}") }
let(:path) { "#{job.created_at.utc.strftime('%Y_%m')}/#{job.project_id}/#{job.id}" }
context 'when using local storage' do
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with(path) }
end
end
describe '#cache_dir' do
subject { uploader.cache_dir }
it { is_expected.to start_with(path) }
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('/tmp/cache') }
end
describe '#work_dir' do
subject { uploader.work_dir }
it { is_expected.to start_with(path) }
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('/tmp/work') }
end
......@@ -51,11 +55,23 @@ describe ArtifactUploader do
subject { uploader.filename }
it { is_expected.to be_nil }
end
context 'with artifacts' do
let(:job) { create(:ci_build, :artifacts) }
context 'file is stored in valid path' do
let(:file) do
fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
end
it { is_expected.not_to be_nil }
before do
uploader.store!(file)
end
subject { uploader.file.path }
it { is_expected.to start_with(local_path) }
it { is_expected.to include("/#{job.created_at.utc.strftime('%Y_%m')}/") }
it { is_expected.to include("/#{job.project_id}/") }
it { is_expected.to end_with("ci_build_artifacts.zip") }
end
end
......@@ -11,12 +11,8 @@ describe ExpireBuildInstanceArtifactsWorker do
end
context 'with expired artifacts' do
let(:artifacts_expiry) { { artifacts_expire_at: Time.now - 7.days } }
context 'when associated project is valid' do
let(:build) do
create(:ci_build, :artifacts, artifacts_expiry)
end
let(:build) { create(:ci_build, :artifacts, :expired) }
it 'does expire' do
expect(build.reload.artifacts_expired?).to be_truthy
......@@ -26,14 +22,14 @@ describe ExpireBuildInstanceArtifactsWorker do
expect(build.reload.artifacts_file.exists?).to be_falsey
end
it 'does nullify artifacts_file column' do
expect(build.reload.artifacts_file_identifier).to be_nil
it 'does remove the job artifact record' do
expect(build.reload.job_artifacts_archive).to be_nil
end
end
end
context 'with not yet expired artifacts' do
let(:build) do
set(:build) do
create(:ci_build, :artifacts, artifacts_expire_at: Time.now + 7.days)
end
......@@ -45,8 +41,8 @@ describe ExpireBuildInstanceArtifactsWorker do
expect(build.reload.artifacts_file.exists?).to be_truthy
end
it 'does not nullify artifacts_file column' do
expect(build.reload.artifacts_file_identifier).not_to be_nil
it 'does not remove the job artifact record' do
expect(build.reload.job_artifacts_archive).not_to be_nil
end
end
......@@ -61,13 +57,13 @@ describe ExpireBuildInstanceArtifactsWorker do
expect(build.reload.artifacts_file.exists?).to be_truthy
end
it 'does not nullify artifacts_file column' do
expect(build.reload.artifacts_file_identifier).not_to be_nil
it 'does not remove the job artifact record' do
expect(build.reload.job_artifacts_archive).not_to be_nil
end
end
context 'for expired artifacts' do
let(:build) { create(:ci_build, artifacts_expire_at: Time.now - 7.days) }
let(:build) { create(:ci_build, :expired) }
it 'is still expired' do
expect(build.reload.artifacts_expired?).to be_truthy
......