Commit 2d9670b5 authored by Matija Čupić

Merge remote-tracking branch 'origin/master' into list-multiple-clusters

parents fc4f3164 8c77ae2d
@@ -586,6 +586,7 @@ codequality:
     paths: [codeclimate.json]
 
 qa:internal:
+  <<: *except-docs
   stage: test
   variables:
     SETUP_DB: "false"
......
@@ -298,18 +298,21 @@ import ProjectVariables from './project_variables';
       break;
     case 'projects:snippets:show':
       initNotes();
+      new ZenMode();
       break;
     case 'projects:snippets:new':
     case 'projects:snippets:edit':
     case 'projects:snippets:create':
     case 'projects:snippets:update':
       new GLForm($('.snippet-form'), true);
+      new ZenMode();
       break;
     case 'snippets:new':
     case 'snippets:edit':
     case 'snippets:create':
     case 'snippets:update':
       new GLForm($('.snippet-form'), false);
+      new ZenMode();
       break;
     case 'projects:releases:edit':
       new ZenMode();
@@ -546,6 +549,7 @@ import ProjectVariables from './project_variables';
       new LineHighlighter();
       new BlobViewer();
       initNotes();
+      new ZenMode();
       break;
     case 'import:fogbugz:new_user_map':
       new UsersSelect();
......
@@ -4,8 +4,8 @@ class Admin::AppearancesController < Admin::ApplicationController
   def show
   end
 
-  def preview
-    render 'preview', layout: 'devise'
+  def preview_sign_in
+    render 'preview_sign_in', layout: 'devise'
   end
 
   def create
@@ -52,7 +52,7 @@ class Admin::AppearancesController < Admin::ApplicationController
   def appearance_params
     params.require(:appearance).permit(
       :title, :description, :logo, :logo_cache, :header_logo, :header_logo_cache,
-      :updated_by
+      :new_project_guidelines, :updated_by
     )
   end
 end
 module AppearancesHelper
   def brand_title
-    if brand_item && brand_item.title
-      brand_item.title
-    else
-      'GitLab Community Edition'
-    end
+    brand_item&.title.presence || 'GitLab Community Edition'
   end
 
   def brand_image
-    if brand_item.logo?
-      image_tag brand_item.logo
-    else
-      nil
-    end
+    image_tag(brand_item.logo) if brand_item&.logo?
   end
 
   def brand_text
     markdown_field(brand_item, :description)
   end
 
+  def brand_new_project_guidelines
+    markdown_field(brand_item, :new_project_guidelines)
+  end
+
   def brand_item
     @appearance ||= Appearance.current
   end
 
   def brand_header_logo
-    if brand_item && brand_item.header_logo?
+    if brand_item&.header_logo?
       image_tag brand_item.header_logo
     else
       render 'shared/logo.svg'
@@ -33,7 +29,7 @@ module AppearancesHelper
   # Skip the 'GitLab' type logo when custom brand logo is set
   def brand_header_logo_type
-    unless brand_item && brand_item.header_logo?
+    unless brand_item&.header_logo?
       render 'shared/logo_type.svg'
     end
   end
......
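The refactored brand_title chains safe navigation with ActiveSupport's presence, so a missing Appearance row and a blank title both fall through to the default. A minimal sketch of the fallback behaviour (return values illustrative):

# nil&.title => nil, and nil.presence / ''.presence => nil under ActiveSupport,
# so "no row", "nil title" and "empty title" all yield the fallback.
Appearance.current&.title.presence || 'GitLab Community Edition'
# => the custom title when set and non-blank, otherwise "GitLab Community Edition"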
@@ -2,9 +2,8 @@ class Appearance < ActiveRecord::Base
   include CacheMarkdownField
 
   cache_markdown_field :description
+  cache_markdown_field :new_project_guidelines
 
-  validates :title, presence: true
-  validates :description, presence: true
   validates :logo, file_size: { maximum: 1.megabyte }
   validates :header_logo, file_size: { maximum: 1.megabyte }
......
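cache_markdown_field stores the rendered HTML alongside the raw Markdown (the new_project_guidelines_html column added by the migration further down). A sketch of the resulting behaviour, assuming an existing Appearance row (the guideline text is illustrative):

appearance = Appearance.current
appearance.update(new_project_guidelines: '**Read** the contribution docs first.')

# CacheMarkdownField renders and caches the HTML on save:
appearance.new_project_guidelines_html
# => "<p><strong>Read</strong> the contribution docs first.</p>" (roughly)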
 module Ci
   class Build < CommitStatus
+    prepend ArtifactMigratable
     include TokenAuthenticatable
     include AfterCommitQueue
     include Presentable
@@ -10,9 +11,14 @@ module Ci
     belongs_to :erased_by, class_name: 'User'
 
     has_many :deployments, as: :deployable
     has_one :last_deployment, -> { order('deployments.id DESC') }, as: :deployable, class_name: 'Deployment'
     has_many :trace_sections, class_name: 'Ci::BuildTraceSection'
+    has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+    has_one :job_artifacts_archive, -> { where(file_type: Ci::JobArtifact.file_types[:archive]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
+    has_one :job_artifacts_metadata, -> { where(file_type: Ci::JobArtifact.file_types[:metadata]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
 
     # The "environment" field for builds is a String, and is the unexpanded name
     def persisted_environment
       @persisted_environment ||= Environment.find_by(
@@ -31,15 +37,37 @@ module Ci
     scope :unstarted, ->() { where(runner_id: nil) }
     scope :ignore_failures, ->() { where(allow_failure: false) }
-    scope :with_artifacts, ->() { where.not(artifacts_file: [nil, '']) }
+    scope :with_artifacts, ->() do
+      where('(artifacts_file IS NOT NULL AND artifacts_file <> ?) OR EXISTS (?)',
+        '', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id'))
+    end
     scope :with_artifacts_not_expired, ->() { with_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
     scope :with_expired_artifacts, ->() { with_artifacts.where('artifacts_expire_at < ?', Time.now) }
     scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
     scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + [:manual]) }
     scope :ref_protected, -> { where(protected: true) }
 
+    scope :matches_tag_ids, -> (tag_ids) do
+      matcher = ::ActsAsTaggableOn::Tagging
+        .where(taggable_type: CommitStatus)
+        .where(context: 'tags')
+        .where('taggable_id = ci_builds.id')
+        .where.not(tag_id: tag_ids).select('1')
+
+      where("NOT EXISTS (?)", matcher)
+    end
+
+    scope :with_any_tags, -> do
+      matcher = ::ActsAsTaggableOn::Tagging
+        .where(taggable_type: CommitStatus)
+        .where(context: 'tags')
+        .where('taggable_id = ci_builds.id').select('1')
+
+      where("EXISTS (?)", matcher)
+    end
+
-    mount_uploader :artifacts_file, ArtifactUploader
-    mount_uploader :artifacts_metadata, ArtifactUploader
+    mount_uploader :legacy_artifacts_file, LegacyArtifactUploader, mount_on: :artifacts_file
+    mount_uploader :legacy_artifacts_metadata, LegacyArtifactUploader, mount_on: :artifacts_metadata
 
     acts_as_taggable
@@ -326,14 +354,6 @@ module Ci
       project.running_or_pending_build_count(force: true)
     end
 
-    def artifacts?
-      !artifacts_expired? && artifacts_file.exists?
-    end
-
-    def artifacts_metadata?
-      artifacts? && artifacts_metadata.exists?
-    end
-
     def artifacts_metadata_entry(path, **options)
       metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
         artifacts_metadata.path,
@@ -386,6 +406,7 @@ module Ci
     def keep_artifacts!
       self.update(artifacts_expire_at: nil)
+      self.job_artifacts.update_all(expire_at: nil)
     end
 
     def coverage_regex
@@ -473,11 +494,7 @@ module Ci
     private
 
     def update_artifacts_size
-      self.artifacts_size = if artifacts_file.exists?
-                              artifacts_file.size
-                            else
-                              nil
-                            end
+      self.artifacts_size = legacy_artifacts_file&.size
    end
 
     def erase_trace!
......
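The two new tag scopes push tag matching into SQL via correlated EXISTS subqueries against the taggings table, instead of loading builds and comparing tag arrays in Ruby. A hedged sketch of how a runner's tags would drive them (the runner/relation setup here is illustrative, not part of this diff):

# Builds whose tags are a subset of the runner's tags; generates roughly:
#   WHERE NOT EXISTS (SELECT 1 FROM taggings
#                     WHERE taggable_type = 'CommitStatus'
#                       AND context = 'tags'
#                       AND taggable_id = ci_builds.id
#                       AND tag_id NOT IN (<runner tag ids>))
candidates = Ci::Build.pending.matches_tag_ids(runner.tags.ids)

# Optionally require at least one tag when the runner refuses untagged jobs:
candidates = candidates.with_any_tags unless runner.run_untagged?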
module Ci
class JobArtifact < ActiveRecord::Base
extend Gitlab::Ci::Model
belongs_to :project
belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id
before_save :set_size, if: :file_changed?
mount_uploader :file, JobArtifactUploader
enum file_type: {
archive: 1,
metadata: 2
}
def self.artifacts_size_for(project)
self.where(project: project).sum(:size)
end
def set_size
self.size = file.size
end
def expire_in
expire_at - Time.now if expire_at
end
def expire_in=(value)
self.expire_at =
if value
ChronicDuration.parse(value)&.seconds&.from_now
end
end
end
end
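expire_in is a virtual attribute on top of expire_at: the writer parses a human-readable duration with ChronicDuration, the reader returns the seconds remaining. A short usage sketch (values illustrative):

artifact = Ci::JobArtifact.new

artifact.expire_in = '7 days'  # ChronicDuration.parse('7 days') => 604800
artifact.expire_at             # => roughly 7.days.from_now
artifact.expire_in             # => seconds remaining until expire_at

artifact.expire_in = nil       # clears expire_at, i.e. never expires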
@@ -112,7 +112,7 @@ module Ci
     def can_pick?(build)
       return false if self.ref_protected? && !build.protected?
 
-      assignable_for?(build.project) && accepting_tags?(build)
+      assignable_for?(build.project_id) && accepting_tags?(build)
     end
 
     def only_for?(project)
@@ -171,8 +171,8 @@ module Ci
       end
     end
 
-    def assignable_for?(project)
-      is_shared? || projects.exists?(id: project.id)
+    def assignable_for?(project_id)
+      is_shared? || projects.exists?(id: project_id)
     end
 
     def accepting_tags?(build)
......
# Adapter class to unify the interface between mounted uploaders and the
# Ci::Artifact model
# Meant to be prepended so the interface can stay the same
module ArtifactMigratable
def artifacts_file
job_artifacts_archive&.file || legacy_artifacts_file
end
def artifacts_metadata
job_artifacts_metadata&.file || legacy_artifacts_metadata
end
def artifacts?
!artifacts_expired? && artifacts_file.exists?
end
def artifacts_metadata?
artifacts? && artifacts_metadata.exists?
end
def artifacts_file_changed?
job_artifacts_archive&.file_changed? || attribute_changed?(:artifacts_file)
end
def remove_artifacts_file!
if job_artifacts_archive
job_artifacts_archive.destroy
else
remove_legacy_artifacts_file!
end
end
def remove_artifacts_metadata!
if job_artifacts_metadata
job_artifacts_metadata.destroy
else
remove_legacy_artifacts_metadata!
end
end
def artifacts_size
read_attribute(:artifacts_size).to_i +
job_artifacts_archive&.size.to_i + job_artifacts_metadata&.size.to_i
end
end
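Because the module is prepended rather than included, its methods come first in the ancestor chain and shadow both the mounted uploaders and Ci::Build's own readers, so existing callers keep using artifacts_file and friends unchanged. A stripped-down sketch of the pattern (the classes here are stand-ins, not GitLab code):

module Migratable
  # Shadows the legacy reader; falls back via super when no
  # new-style artifact record exists.
  def artifact
    new_style_artifact || super
  end
end

class Build
  prepend Migratable

  attr_accessor :new_style_artifact

  def artifact
    :legacy_artifact
  end
end

Build.new.artifact # => :legacy_artifact until new_style_artifact is set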
@@ -2,6 +2,7 @@ require 'carrierwave/orm/activerecord'
 class Group < Namespace
   include Gitlab::ConfigHelper
+  include AfterCommitQueue
   include AccessRequestable
   include Avatarable
   include Referable
......
@@ -2,6 +2,7 @@ require 'digest/md5'
 class Key < ActiveRecord::Base
   include Gitlab::CurrentSettings
+  include AfterCommitQueue
   include Sortable
 
   belongs_to :user
......
 class Member < ActiveRecord::Base
+  include AfterCommitQueue
   include Sortable
   include Importable
   include Expirable
......
@@ -35,7 +35,9 @@ class ProjectStatistics < ActiveRecord::Base
   end
 
   def update_build_artifacts_size
-    self.build_artifacts_size = project.builds.sum(:artifacts_size)
+    self.build_artifacts_size =
+      project.builds.sum(:artifacts_size) +
+      Ci::JobArtifact.artifacts_size_for(self)
   end
 
   def update_storage_size
......
@@ -211,7 +211,7 @@ class Service < ActiveRecord::Base
   def async_execute(data)
     return unless supported_events.include?(data[:object_kind])
 
-    Sidekiq::Client.enqueue(ProjectServiceWorker, id, data)
+    ProjectServiceWorker.perform_async(id, data)
   end
 
   def issue_tracker?
......
@@ -7,6 +7,7 @@ class User < ActiveRecord::Base
   include Gitlab::ConfigHelper
   include Gitlab::CurrentSettings
   include Gitlab::SQL::Pattern
+  include AfterCommitQueue
   include Avatarable
   include Referable
   include Sortable
@@ -903,6 +904,7 @@ class User < ActiveRecord::Base
   def post_destroy_hook
     log_info("User \"#{name}\" (#{email}) was removed")
 
     system_hook_service.execute_hooks_for(self, :destroy)
   end
......
@@ -22,6 +22,16 @@ module Ci
       valid = true
 
+      if Feature.enabled?('ci_job_request_with_tags_matcher')
+        # pick builds that do not have tags other than the runner's
+        builds = builds.matches_tag_ids(runner.tags.ids)
+
+        # pick builds that have at least one tag
+        unless runner.run_untagged?
+          builds = builds.with_any_tags
+        end
+      end
+
       builds.find do |build|
         next unless runner.can_pick?(build)
......
@@ -18,7 +18,7 @@ module Projects
       @status.enqueue!
       @status.run!
 
-      raise 'missing pages artifacts' unless build.artifacts_file?
+      raise 'missing pages artifacts' unless build.artifacts?
       raise 'pages are outdated' unless latest?
 
       # Create temporary directory in which we will extract the artifacts
......
 class SystemHooksService
   def execute_hooks_for(model, event)
-    execute_hooks(build_event_data(model, event))
+    data = build_event_data(model, event)
+
+    model.run_after_commit_or_now do
+      SystemHooksService.new.execute_hooks(data)
+    end
   end
 
   def execute_hooks(data, hooks_scope = :all)
......
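run_after_commit_or_now comes from the AfterCommitQueue concern this commit mixes into User, Group, Key and Member: inside a transaction the block is queued and runs after COMMIT; outside one it runs immediately. A sketch of why that matters here (SystemHookWorker and the payload are illustrative, not part of this diff):

user = User.find(1)                 # illustrative lookup
data = { event: 'user_destroy' }    # illustrative payload

User.transaction do
  user.destroy
  # Queued, not run yet: the worker is enqueued only after COMMIT,
  # so Sidekiq can never observe the uncommitted destroy.
  user.run_after_commit_or_now { SystemHookWorker.perform_async(data) }
end
# the block runs here, once the transaction has committed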
@@ -63,7 +63,7 @@ class WebHookService
   end
 
   def async_execute
-    Sidekiq::Client.enqueue(WebHookWorker, hook.id, data, hook_name)
+    WebHookWorker.perform_async(hook.id, data, hook_name)
   end
 
   private
......
class JobArtifactUploader < GitlabUploader
storage :file
def self.local_store_path
Gitlab.config.artifacts.path
end
def self.artifacts_upload_path
File.join(self.local_store_path, 'tmp/uploads/')
end
def size
return super if model.size.nil?
model.size
end
def store_dir
default_local_path
end
def cache_dir
File.join(self.class.local_store_path, 'tmp/cache')
end
def work_dir
File.join(self.class.local_store_path, 'tmp/work')
end
private
def default_local_path
File.join(self.class.local_store_path, default_path)
end
def default_path
creation_date = model.created_at.utc.strftime('%Y_%m_%d')
File.join(disk_hash[0..1], disk_hash[2..3], disk_hash,
creation_date, model.job_id.to_s, model.id.to_s)
end
def disk_hash
@disk_hash ||= Digest::SHA2.hexdigest(model.project_id.to_s)
end
end
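Given default_path above, artifacts are sharded on disk by a SHA-2 of the project ID, then by creation date, job ID and artifact ID. A worked example (IDs are illustrative; the hash of "1" is the real SHA-256):

require 'digest'

disk_hash = Digest::SHA2.hexdigest('1')
# => "6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b"

File.join(disk_hash[0..1], disk_hash[2..3], disk_hash, '2017_11_28', '123', '45')
# => "6b/86/6b86b2...b7875b4b/2017_11_28/123/45"
# i.e. <artifacts_path>/<hash[0..1]>/<hash[2..3]>/<hash>/<date>/<job_id>/<artifact_id>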
-class ArtifactUploader < GitlabUploader
+class LegacyArtifactUploader < GitlabUploader
   storage :file
 
-  attr_reader :job, :field
-
-  def self.local_artifacts_store
+  def self.local_store_path
     Gitlab.config.artifacts.path
   end
 
   def self.artifacts_upload_path
-    File.join(self.local_artifacts_store, 'tmp/uploads/')
-  end
-
-  def initialize(job, field)
-    @job, @field = job, field
+    File.join(self.local_store_path, 'tmp/uploads/')
   end
 
   def store_dir
@@ -20,20 +14,20 @@ class ArtifactUploader < GitlabUploader
   end
 
   def cache_dir
-    File.join(self.class.local_artifacts_store, 'tmp/cache')
+    File.join(self.class.local_store_path, 'tmp/cache')
   end
 
   def work_dir
-    File.join(self.class.local_artifacts_store, 'tmp/work')
+    File.join(self.class.local_store_path, 'tmp/work')
   end
 
   private
 
   def default_local_path
-    File.join(self.class.local_artifacts_store, default_path)
+    File.join(self.class.local_store_path, default_path)
   end
 
   def default_path
-    File.join(job.created_at.utc.strftime('%Y_%m'), job.project_id.to_s, job.id.to_s)
+    File.join(model.created_at.utc.strftime('%Y_%m'), model.project_id.to_s, model.id.to_s)
   end
 end
 = form_for @appearance, url: admin_appearances_path, html: { class: 'form-horizontal'} do |f|
   = form_errors(@appearance)
 
+  %fieldset.app_logo
+    %legend
+      Navigation bar:
+    .form-group
+      = f.label :header_logo, 'Header logo', class: 'control-label'
+      .col-sm-10
+        - if @appearance.header_logo?
+          = image_tag @appearance.header_logo_url, class: 'appearance-light-logo-preview'
+          - if @appearance.persisted?
+            %br
+            = link_to 'Remove header logo', header_logos_admin_appearances_path, data: { confirm: "Header logo will be removed. Are you sure?"}, method: :delete, class: "btn btn-remove btn-sm remove-logo"
+          %hr
+        = f.hidden_field :header_logo_cache
+        = f.file_field :header_logo, class: ""
+        .hint
+          Maximum file size is 1MB. Pages are optimized for a 28px tall header logo
+
   %fieldset.sign-in
     %legend
       Sign in/Sign up pages:
@@ -28,27 +45,22 @@
       .hint
         Maximum file size is 1MB. Pages are optimized for a 640x360 px logo.
 
-  %fieldset.app_logo
+  %fieldset
     %legend
-      Navigation bar:
+      New project pages:
     .form-group
-      = f.label :header_logo, 'Header logo', class: 'control-label'
+      = f.label :new_project_guidelines, class: 'control-label'
       .col-sm-10
-        - if @appearance.header_logo?
-          = image_tag @appearance.header_logo_url, class: 'appearance-light-logo-preview'
-          - if @appearance.persisted?
-            %br
-            = link_to 'Remove header logo', header_logos_admin_appearances_path, data: { confirm: "Header logo will be removed. Are you sure?"}, method: :delete, class: "btn btn-remove btn-sm remove-logo"
-          %hr
-        = f.hidden_field :header_logo_cache
-        = f.file_field :header_logo, class: ""
+        = f.text_area :new_project_guidelines, class: "form-control", rows: 10
       .hint
-        Maximum file size is 1MB. Pages are optimized for a 28px tall header logo
+        Guidelines parsed with #{link_to "GitLab Flavored Markdown", help_page_path('user/markdown'), target: '_blank'}.
 
   .form-actions
     = f.submit 'Save', class: 'btn btn-save append-right-10'
     - if @appearance.persisted?
-      = link_to 'Preview last save', preview_admin_appearances_path, class: 'btn', target: '_blank', rel: 'noopener noreferrer'
+      Preview last save:
+      = link_to 'Sign-in page', preview_sign_in_admin_appearances_path, class: 'btn', target: '_blank', rel: 'noopener noreferrer'
+      = link_to 'New project page', new_project_path, class: 'btn', target: '_blank', rel: 'noopener noreferrer'
 
     - if @appearance.updated_at
       %span.pull-right
......
@@ -15,8 +15,8 @@
         .col-sm-7.brand-holder.pull-left
           %h1
             = brand_title
+          - if brand_item
            = brand_image
+          - if brand_item&.description?
            = brand_text
           - else
             %h3 Open source software to collaborate on code
......
@@ -25,7 +25,7 @@
       = markdown_toolbar_button({ icon: "list-bulleted", data: { "md-tag" => "* ", "md-prepend" => true }, title: "Add a bullet list" })
       = markdown_toolbar_button({ icon: "list-numbered", data: { "md-tag" => "1. ", "md-prepend" => true }, title: "Add a numbered list" })
       = markdown_toolbar_button({ icon: "task-done", data: { "md-tag" => "* [ ] ", "md-prepend" => true }, title: "Add a task list" })
-      %button.toolbar-btn.toolbar-fullscreen-btn.js-zen-enter.has-tooltip{ type: "button", tabindex: -1, aria: { label: "Go full screen" }, title: "Go full screen", data: { container: "body" } }
+      %button.toolbar-btn.toolbar-fullscreen-btn.js-zen-enter.has-tooltip{ type: "button", tabindex: -1, "aria-label": "Go full screen", title: "Go full screen", data: { container: "body" } }
         = sprite_icon("screen-full")
     .md-write-holder
......
@@ -18,6 +18,7 @@
             A project is where you house your files (repository), plan your work (issues), and publish your documentation (wiki), #{link_to 'among other things', help_page_path("user/project/index.md", anchor: "projects-features"), target: '_blank'}.
           %p
             All features are enabled when you create a project, but you can disable the ones you don’t need in the project settings.
+          = brand_new_project_guidelines
       .col-lg-9.js-toggle-container
         %ul.nav-links.gitlab-tabs{ role: 'tablist' }
           %li.active{ role: 'presentation' }
......
 class AdminEmailWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include CronjobQueue
 
   def perform
......
 class AuthorizedProjectsWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   # Schedules multiple jobs and waits for them to be completed.
   def self.bulk_perform_and_wait(args_list)
@@ -17,11 +16,6 @@ class AuthorizedProjectsWorker
     waiter.wait
   end
 
-  # Schedules multiple jobs to run in sidekiq without waiting for completion
-  def self.bulk_perform_async(args_list)
-    Sidekiq::Client.push_bulk('class' => self, 'queue' => sidekiq_options['queue'], 'args' => args_list)
-  end
-
   # Performs multiple jobs directly. Failed jobs will be put into sidekiq so
   # they can benefit from retries
   def self.bulk_perform_inline(args_list)
......
 class BackgroundMigrationWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
-
-  # Enqueues a number of jobs in bulk.
-  #
-  # The `jobs` argument should be an Array of Arrays, each sub-array must be in
-  # the form:
-  #
-  # [migration-class, [arg1, arg2, ...]]
-  def self.perform_bulk(jobs)
-    Sidekiq::Client.push_bulk('class' => self,
-                              'queue' => sidekiq_options['queue'],
-                              'args' => jobs)
-  end
-
-  # Schedules multiple jobs in bulk, with a delay.
-  #
-  def self.perform_bulk_in(delay, jobs)
-    now = Time.now.to_i
-    schedule = now + delay.to_i
-
-    if schedule <= now
-      raise ArgumentError, 'The schedule time must be in the future!'
-    end
-
-    Sidekiq::Client.push_bulk('class' => self,
-                              'queue' => sidekiq_options['queue'],
-                              'args' => jobs,
-                              'at' => schedule)
-  end
+  include ApplicationWorker
 
   # Performs the background migration.
   #
......
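The removed perform_bulk/perform_bulk_in class methods are superseded by bulk_perform_async/bulk_perform_in on ApplicationWorker (defined below), which wrap Sidekiq's push_bulk so a whole batch is enqueued in one Redis round-trip. A calling sketch (the ID ranges are illustrative; the migration class name appears later in this diff):

# Each element is one job's argument list:
#   [migration class name, [start_id, end_id]]
jobs = [
  ['MigrateEventsToPushEventPayloads', [1, 10_000]],
  ['MigrateEventsToPushEventPayloads', [10_001, 20_000]]
]

BackgroundMigrationWorker.bulk_perform_async(jobs)

# Same batch, scheduled to start five minutes from now:
BackgroundMigrationWorker.bulk_perform_in(5.minutes, jobs)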
 class BuildCoverageWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include PipelineQueue
 
   def perform(build_id)
......
 class BuildFinishedWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include PipelineQueue
 
   enqueue_in group: :processing
......
 class BuildHooksWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include PipelineQueue
 
   enqueue_in group: :hooks
......
 class BuildQueueWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include PipelineQueue
 
   enqueue_in group: :processing
......
 class BuildSuccessWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include PipelineQueue
 
   enqueue_in group: :processing
......
 class BuildTraceSectionsWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include PipelineQueue
 
   def perform(build_id)
......
 class ClusterInstallAppWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include ClusterQueue
   include ClusterApplications
......
 class ClusterProvisionWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include ClusterQueue
 
   def perform(cluster_id)
......
 class ClusterWaitForAppInstallationWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include ClusterQueue
   include ClusterApplications
......
Sidekiq::Worker.extend ActiveSupport::Concern
module ApplicationWorker
extend ActiveSupport::Concern
include Sidekiq::Worker
included do
sidekiq_options queue: base_queue_name
end
module ClassMethods
def base_queue_name
name
.sub(/\AGitlab::/, '')
.sub(/Worker\z/, '')
.underscore
.tr('/', '_')
end
def queue
get_sidekiq_options['queue'].to_s
end
def bulk_perform_async(args_list)
Sidekiq::Client.push_bulk('class' => self, 'args' => args_list)
end
def bulk_perform_in(delay, args_list)
now = Time.now.to_i
schedule = now + delay.to_i
if schedule <= now
raise ArgumentError, 'The schedule time must be in the future!'
end
Sidekiq::Client.push_bulk('class' => self, 'args' => args_list, 'at' => schedule)
end
end
end
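With this concern every worker derives its default queue from its class name: strip a leading Gitlab::, drop the Worker suffix, underscore, and flatten namespaces with "_". A few mappings implied by base_queue_name, plus a hypothetical worker (ExampleWorker is not part of this diff):

#   ProjectCacheWorker                              => "project_cache"
#   RepositoryCheck::BatchWorker                    => "repository_check_batch"
#   Gitlab::GithubImport::Stage::FinishImportWorker => "github_import_stage_finish_import"
class ExampleWorker
  include ApplicationWorker

  def perform(id); end
end

ExampleWorker.queue # => "example"
# Mixins such as CronjobQueue or PipelineQueue can still override the
# queue via sidekiq_options, as the workers above do.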
-# Concern that sets the queue of a Sidekiq worker based on the worker's class
-# name/namespace.
-module DedicatedSidekiqQueue
-  extend ActiveSupport::Concern
-
-  included do
-    sidekiq_options queue: name.sub(/Worker\z/, '').underscore.tr('/', '_')
-  end
-end
@@ -8,7 +8,7 @@ module Gitlab
   extend ActiveSupport::Concern
 
   included do
-    include Sidekiq::Worker
+    include ApplicationWorker
     include GithubImport::Queue
     include ReschedulingMethods
     include NotifyUponDeath
......
 class CreateGpgSignatureWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   def perform(commit_sha, project_id)
     project = Project.find_by(id: project_id)
......
 class CreatePipelineWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include PipelineQueue
 
   enqueue_in group: :creation
......
 class DeleteMergedBranchesWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   def perform(project_id, user_id)
     begin
......
 class DeleteUserWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   def perform(current_user_id, delete_user_id, options = {})
     delete_user = User.find(delete_user_id)
......
 class EmailReceiverWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   def perform(raw)
     return unless Gitlab::IncomingEmail.enabled?
......
 class EmailsOnPushWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   attr_reader :email, :skip_premailer
......
 class ExpireBuildArtifactsWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include CronjobQueue
 
   def perform
@@ -8,6 +8,6 @@ class ExpireBuildArtifactsWorker
     build_ids = Ci::Build.with_expired_artifacts.pluck(:id)
     build_ids = build_ids.map { |build_id| [build_id] }
 
-    Sidekiq::Client.push_bulk('class' => ExpireBuildInstanceArtifactsWorker, 'args' => build_ids )
+    ExpireBuildInstanceArtifactsWorker.bulk_perform_async(build_ids)
   end
 end
 class ExpireBuildInstanceArtifactsWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   def perform(build_id)
     build = Ci::Build
......
 class ExpireJobCacheWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include PipelineQueue
 
   enqueue_in group: :cache
......
 class ExpirePipelineCacheWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include PipelineQueue
 
   enqueue_in group: :cache
......
 class GitGarbageCollectWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
   include Gitlab::CurrentSettings
 
   sidekiq_options retry: false
......
@@ -7,7 +7,7 @@ module Gitlab
   # been completed this worker will advance the import process to the next
   # stage.
   class AdvanceStageWorker
-    include Sidekiq::Worker
+    include ApplicationWorker
 
     sidekiq_options queue: 'github_importer_advance_stage', dead: false
......
@@ -3,7 +3,7 @@
 module Gitlab
   module GithubImport
     class RefreshImportJidWorker
-      include Sidekiq::Worker
+      include ApplicationWorker
       include GithubImport::Queue
 
       # The interval to schedule new instances of this job at.
......
@@ -4,7 +4,7 @@ module Gitlab
   module GithubImport
     module Stage
       class FinishImportWorker
-        include Sidekiq::Worker
+        include ApplicationWorker
         include GithubImport::Queue
         include StageMethods
......
@@ -4,7 +4,7 @@ module Gitlab
   module GithubImport
     module Stage
       class ImportBaseDataWorker
-        include Sidekiq::Worker
+        include ApplicationWorker
         include GithubImport::Queue
         include StageMethods
......
@@ -4,7 +4,7 @@ module Gitlab
   module GithubImport
     module Stage
       class ImportIssuesAndDiffNotesWorker
-        include Sidekiq::Worker
+        include ApplicationWorker
         include GithubImport::Queue
         include StageMethods
......
@@ -4,7 +4,7 @@ module Gitlab
   module GithubImport
     module Stage
       class ImportNotesWorker
-        include Sidekiq::Worker
+        include ApplicationWorker
         include GithubImport::Queue
         include StageMethods
......
@@ -4,7 +4,7 @@ module Gitlab
   module GithubImport
     module Stage
       class ImportPullRequestsWorker
-        include Sidekiq::Worker
+        include ApplicationWorker
         include GithubImport::Queue
         include StageMethods
......
@@ -4,7 +4,7 @@ module Gitlab
   module GithubImport
     module Stage
       class ImportRepositoryWorker
-        include Sidekiq::Worker
+        include ApplicationWorker
         include GithubImport::Queue
         include StageMethods
......
 class GitlabShellWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include Gitlab::ShellAdapter
-  include DedicatedSidekiqQueue
 
   def perform(action, *arg)
     gitlab_shell.__send__(action, *arg) # rubocop:disable GitlabSecurity/PublicSend
......
 class GitlabUsagePingWorker
   LEASE_TIMEOUT = 86400
 
-  include Sidekiq::Worker
+  include ApplicationWorker
   include CronjobQueue
 
   def perform
......
 class GroupDestroyWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
   include ExceptionBacktrace
 
   def perform(group_id, user_id)
......
 class ImportExportProjectCleanupWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include CronjobQueue
 
   def perform
......
 class InvalidGpgSignatureUpdateWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   def perform(gpg_key_id)
     gpg_key = GpgKey.find_by(id: gpg_key_id)
......
@@ -2,8 +2,7 @@ require 'json'
 require 'socket'
 
 class IrkerWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   def perform(project_id, chans, colors, push_data, settings)
     project = Project.find(project_id)
......
 class MergeWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   def perform(merge_request_id, current_user_id, params)
     params = params.with_indifferent_access
......
@@ -5,14 +5,9 @@
 # The worker will reject doing anything for projects that *do* have a
 # namespace. For those use ProjectDestroyWorker instead.
 class NamespacelessProjectDestroyWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
   include ExceptionBacktrace
 
-  def self.bulk_perform_async(args_list)
-    Sidekiq::Client.push_bulk('class' => self, 'queue' => sidekiq_options['queue'], 'args' => args_list)
-  end
-
   def perform(project_id)
     begin
       project = Project.unscoped.find(project_id)
......
 class NewIssueWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
   include NewIssuable
 
   def perform(issue_id, user_id)
......
 class NewMergeRequestWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
   include NewIssuable
 
   def perform(merge_request_id, user_id)
......
 class NewNoteWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   # Keep extra parameter to preserve backwards compatibility with
   # old `NewNoteWorker` jobs (can remove later)
......
 class PagesWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
 
   sidekiq_options queue: :pages, retry: false
......
 class PipelineHooksWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include PipelineQueue
 
   enqueue_in group: :hooks
......
 class PipelineMetricsWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include PipelineQueue
 
   def perform(pipeline_id)
......
 class PipelineNotificationWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include PipelineQueue
 
   def perform(pipeline_id, recipients = nil)
......
 class PipelineProcessWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include PipelineQueue
 
   enqueue_in group: :processing
......
 class PipelineScheduleWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include CronjobQueue
 
   def perform
......
 class PipelineSuccessWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include PipelineQueue
 
   enqueue_in group: :processing
......
 class PipelineUpdateWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include PipelineQueue
 
   enqueue_in group: :processing
......
 class PostReceive
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   def perform(gl_repository, identifier, changes)
     project, is_wiki = Gitlab::GlRepository.parse(gl_repository)
......
@@ -5,8 +5,7 @@
 # Consider using an extra worker if you need to add any extra (and potentially
 # slow) processing of commits.
 class ProcessCommitWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   # project_id - The ID of the project this commit belongs to.
   # user_id - The ID of the user that pushed the commit.
......
 # Worker for updating any project specific caches.
 class ProjectCacheWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   LEASE_TIMEOUT = 15.minutes.to_i
......
 class ProjectDestroyWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
   include ExceptionBacktrace
 
   def perform(project_id, user_id, params)
......
 class ProjectExportWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
   include ExceptionBacktrace
 
   sidekiq_options retry: 3
......
 class ProjectMigrateHashedStorageWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   LEASE_TIMEOUT = 30.seconds.to_i
......
 class ProjectServiceWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   sidekiq_options dead: false
......
 # Worker for updating any project specific caches.
 class PropagateServiceTemplateWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   LEASE_TIMEOUT = 4.hours.to_i
......
 class PruneOldEventsWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include CronjobQueue
 
   def perform
......
 class ReactiveCachingWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   def perform(class_name, id, *args)
     klass = begin
......
 class RemoveExpiredGroupLinksWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include CronjobQueue
 
   def perform
......
 class RemoveExpiredMembersWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include CronjobQueue
 
   def perform
......
 class RemoveOldWebHookLogsWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include CronjobQueue
 
   WEB_HOOK_LOG_LIFETIME = 2.days
......
 class RemoveUnreferencedLfsObjectsWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include CronjobQueue
 
   def perform
......
 class RepositoryArchiveCacheWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include CronjobQueue
 
   def perform
......
 module RepositoryCheck
   class BatchWorker
-    include Sidekiq::Worker
+    include ApplicationWorker
     include CronjobQueue
 
     RUN_TIME = 3600
......
 module RepositoryCheck
   class ClearWorker
-    include Sidekiq::Worker
+    include ApplicationWorker
     include RepositoryCheckQueue
 
     def perform
......
 module RepositoryCheck
   class SingleRepositoryWorker
-    include Sidekiq::Worker
+    include ApplicationWorker
     include RepositoryCheckQueue
 
     def perform(project_id)
......
 class RepositoryForkWorker
   ForkError = Class.new(StandardError)
 
-  include Sidekiq::Worker
+  include ApplicationWorker
   include Gitlab::ShellAdapter
-  include DedicatedSidekiqQueue
   include ProjectStartImport
 
   sidekiq_options status_expiration: StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION
......
 class RepositoryImportWorker
   ImportError = Class.new(StandardError)
 
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
   include ExceptionBacktrace
   include ProjectStartImport
......
 class RequestsProfilesWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include CronjobQueue
 
   def perform
......
 class ScheduleUpdateUserActivityWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include CronjobQueue
 
   def perform(batch_size = 500)
......
 class StageUpdateWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include PipelineQueue
 
   enqueue_in group: :processing
......
 class StorageMigratorWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   BATCH_SIZE = 100
......
 class StuckCiJobsWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include CronjobQueue
 
   EXCLUSIVE_LEASE_KEY = 'stuck_ci_builds_worker_lease'.freeze
......
 class StuckImportJobsWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include CronjobQueue
 
   IMPORT_JOBS_EXPIRATION = 15.hours.to_i
......
 class StuckMergeJobsWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include CronjobQueue
 
   def perform
......
 class SystemHookPushWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   def perform(push_data, hook_id)
     SystemHooksService.new.execute_hooks(push_data, hook_id)
......
 class TrendingProjectsWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include CronjobQueue
 
   def perform
......
 class UpdateMergeRequestsWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   LOG_TIME_THRESHOLD = 90 # seconds
......
 class UpdateUserActivityWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   def perform(pairs)
     pairs = cast_data(pairs)
......
 class UploadChecksumWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   def perform(upload_id)
     upload = Upload.find(upload_id)
......
 class WaitForClusterCreationWorker
-  include Sidekiq::Worker
+  include ApplicationWorker
   include ClusterQueue
 
   def perform(cluster_id)
......
 class WebHookWorker
-  include Sidekiq::Worker
-  include DedicatedSidekiqQueue
+  include ApplicationWorker
 
   sidekiq_options retry: 4, dead: false
......
---
title: Init zen mode in snippets pages
merge_request:
author:
type: fixed
---
title: Add custom brand text on new project pages
merge_request: 15541
author: Markus Koller
type: changed
---
title: Perform SQL matching of Build&Runner tags to greatly speed-up job picking
merge_request:
author:
type: performance
@@ -649,6 +649,8 @@ test:
   #   user: YOUR_USERNAME
   pages:
     path: tmp/tests/pages
+  artifacts:
+    path: tmp/tests/artifacts
   repositories:
     storages:
       default:
......
@@ -13,13 +13,9 @@ module Sidekiq
   module ClassMethods
     module NoSchedulingFromTransactions
-      NESTING = ::Rails.env.test? ? 1 : 0
-
       %i(perform_async perform_at perform_in).each do |name|
         define_method(name) do |*args|
-          return super(*args) if Sidekiq::Worker.skip_transaction_check
-          return super(*args) unless ActiveRecord::Base.connection.open_transactions > NESTING
-
-          raise <<-MSG.strip_heredoc
+          if !Sidekiq::Worker.skip_transaction_check && AfterCommitQueue.inside_transaction?
+            raise <<-MSG.strip_heredoc
             `#{self}.#{name}` cannot be called inside a transaction as this can lead to
             race conditions when the worker runs before the transaction is committed and
@@ -28,6 +24,9 @@ module Sidekiq
             Use an `after_commit` hook, or include `AfterCommitQueue` and use a `run_after_commit` block instead.
           MSG
+          end
 
+          super(*args)
         end
       end
     end
......
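The net effect: scheduling a job inside an open transaction now raises (unless the check is explicitly skipped), with the nesting bookkeeping delegated to AfterCommitQueue.inside_transaction?. A sketch of both sides of the contract (the worker name is reused from this diff; the run_after_commit call assumes the model mixes in AfterCommitQueue):

Project.transaction do
  ProjectCacheWorker.perform_async(project.id)
  # => raises: "`ProjectCacheWorker.perform_async` cannot be called inside
  #    a transaction ..."
end

# The sanctioned pattern, as used by SystemHooksService above:
project.run_after_commit do
  ProjectCacheWorker.perform_async(id) # enqueued only after COMMIT
end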
@@ -64,13 +64,13 @@ end
 # The Sidekiq client API always adds the queue to the Sidekiq queue
 # list, but mail_room and gitlab-shell do not. This is only necessary
 # for monitoring.
-config = YAML.load_file(Rails.root.join('config', 'sidekiq_queues.yml').to_s)
-
 begin
+  queues = Gitlab::SidekiqConfig.worker_queues
+
   Sidekiq.redis do |conn|
     conn.pipelined do
-      config[:queues].each do |queue|
-        conn.sadd('queues', queue[0])
+      queues.each do |queue|
+        conn.sadd('queues', queue)
       end
     end
   end
......
@@ -97,7 +97,7 @@ namespace :admin do
   resource :appearances, only: [:show, :create, :update], path: 'appearance' do
     member do
-      get :preview
+      get :preview_sign_in
       delete :logo
       delete :header_logos
     end
......
@@ -124,11 +124,11 @@ class Gitlab::Seeder::Pipelines
     return unless %w[build test].include?(build.stage)
 
     artifacts_cache_file(artifacts_archive_path) do |file|
-      build.artifacts_file = file
+      build.job_artifacts.build(project: build.project, file_type: :archive, file: file)
     end
 
     artifacts_cache_file(artifacts_metadata_path) do |file|
-      build.artifacts_metadata = file
+      build.job_artifacts.build(project: build.project, file_type: :metadata, file: file)
     end
   end
......
class CreateJobArtifacts < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
create_table :ci_job_artifacts do |t|
t.belongs_to :project, null: false, index: true, foreign_key: { on_delete: :cascade }
t.integer :job_id, null: false
t.integer :file_type, null: false
t.integer :size, limit: 8
t.datetime_with_timezone :created_at, null: false
t.datetime_with_timezone :updated_at, null: false
t.datetime_with_timezone :expire_at
t.string :file
t.foreign_key :ci_builds, column: :job_id, on_delete: :cascade
t.index [:job_id, :file_type], unique: true
end
end
end
class AddNewProjectGuidelinesToAppearances < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
change_table :appearances do |t|
t.text :new_project_guidelines
t.text :new_project_guidelines_html
end
end
end
@@ -25,14 +25,14 @@ class ScheduleEventMigrations < ActiveRecord::Migration
       # We push multiple jobs at a time to reduce the time spent in
       # Sidekiq/Redis operations. We're using this buffer based approach so we
       # don't need to run additional queries for every range.
-      BackgroundMigrationWorker.perform_bulk(jobs)
+      BackgroundMigrationWorker.bulk_perform_async(jobs)
 
       jobs.clear
     end
 
     jobs << ['MigrateEventsToPushEventPayloads', [min, max]]
   end
 
-  BackgroundMigrationWorker.perform_bulk(jobs) unless jobs.empty?
+  BackgroundMigrationWorker.bulk_perform_async(jobs) unless jobs.empty?
 end
 
 def down
......
@@ -19,7 +19,7 @@ class ScheduleCreateGpgKeySubkeysFromGpgKeys < ActiveRecord::Migration
       [MIGRATION, [id]]
     end
 
-    BackgroundMigrationWorker.perform_bulk(jobs)
+    BackgroundMigrationWorker.bulk_perform_async(jobs)
   end
 end
......
@@ -36,6 +36,8 @@ ActiveRecord::Schema.define(version: 20171124150326) do
     t.datetime_with_timezone "updated_at", null: false
     t.text "description_html"
     t.integer "cached_markdown_version"
+    t.text "new_project_guidelines"
+    t.text "new_project_guidelines_html"
   end
 
   create_table "application_settings", force: :cascade do |t|
@@ -319,6 +321,20 @@ ActiveRecord::Schema.define(version: 20171124150326) do
   add_index "ci_group_variables", ["group_id", "key"], name: "index_ci_group_variables_on_group_id_and_key", unique: true, using: :btree
 
+  create_table "ci_job_artifacts", force: :cascade do |t|
+    t.integer "project_id", null: false
+    t.integer "job_id", null: false
+    t.integer "file_type", null: false
+    t.integer "size", limit: 8
+    t.datetime_with_timezone "created_at", null: false
+    t.datetime_with_timezone "updated_at", null: false
+    t.datetime_with_timezone "expire_at"
+    t.string "file"
+  end
+
+  add_index "ci_job_artifacts", ["job_id", "file_type"], name: "index_ci_job_artifacts_on_job_id_and_file_type", unique: true, using: :btree
+  add_index "ci_job_artifacts", ["project_id"], name: "index_ci_job_artifacts_on_project_id", using: :btree
+
   create_table "ci_pipeline_schedule_variables", force: :cascade do |t|
     t.string "key", null: false
     t.text "value"
@@ -1909,6 +1925,8 @@ ActiveRecord::Schema.define(version: 20171124150326) do
   add_foreign_key "ci_builds", "ci_stages", column: "stage_id", name: "fk_3a9eaa254d", on_delete: :cascade
   add_foreign_key "ci_builds", "projects", name: "fk_befce0568a", on_delete: :cascade
   add_foreign_key "ci_group_variables", "namespaces", column: "group_id", name: "fk_33ae4d58d8", on_delete: :cascade
+  add_foreign_key "ci_job_artifacts", "ci_builds", column: "job_id", on_delete: :cascade
+  add_foreign_key "ci_job_artifacts", "projects", on_delete: :cascade
   add_foreign_key "ci_pipeline_schedule_variables", "ci_pipeline_schedules", column: "pipeline_schedule_id", name: "fk_41c35fda51", on_delete: :cascade
   add_foreign_key "ci_pipeline_schedules", "projects", name: "fk_8ead60fcc4", on_delete: :cascade
   add_foreign_key "ci_pipeline_schedules", "users", column: "owner_id", name: "fk_9ea99f58d2", on_delete: :nullify
......
...@@ -189,6 +189,7 @@ have access to GitLab administration tools and settings. ...@@ -189,6 +189,7 @@ have access to GitLab administration tools and settings.
- [Issue closing pattern](administration/issue_closing_pattern.md): Customize how to close an issue from commit messages. - [Issue closing pattern](administration/issue_closing_pattern.md): Customize how to close an issue from commit messages.
- [Libravatar](customization/libravatar.md): Use Libravatar instead of Gravatar for user avatars. - [Libravatar](customization/libravatar.md): Use Libravatar instead of Gravatar for user avatars.
- [Welcome message](customization/welcome_message.md): Add a custom welcome message to the sign-in page. - [Welcome message](customization/welcome_message.md): Add a custom welcome message to the sign-in page.
- [New project page](customization/new_project_page.md): Customize the new project page.
### Admin tools ### Admin tools
......
...@@ -58,7 +58,9 @@ Runs the following rake tasks: ...@@ -58,7 +58,9 @@ Runs the following rake tasks:
It will check that each component was set up according to the installation guide and suggest fixes for issues found. It will check that each component was set up according to the installation guide and suggest fixes for issues found.
You may also have a look at our [Trouble Shooting Guide](https://github.com/gitlabhq/gitlab-public-wiki/wiki/Trouble-Shooting-Guide). You may also have a look at our Troubleshooting Guides:
- [Troubleshooting Guide (GitLab)](http://docs.gitlab.com/ee/README.html#troubleshooting)
- [Troubleshooting Guide (Omnibus GitLab)](http://docs.gitlab.com/omnibus/README.html#troubleshooting)
**Omnibus Installation** **Omnibus Installation**
......
# Customizing the new project page
You can add a Markdown-formatted message to the new project page of your GitLab instance.
By default, the new project page shows a sidebar with general information:
![Default new project page](new_project_page/default_new_project_page.png)
## Changing the appearance of the new project page
Navigate to the **Admin** area and go to the **Appearance** page.
Fill in your project guidelines:
![Appearance settings with project guidelines](new_project_page/appearance_settings.png)
After saving your changes, the new project page shows the guidelines in the sidebar, below the general information:
![Customized new project page](new_project_page/custom_new_project_page.png)
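The guidelines are rendered as regular Markdown, so formatting such as `**bold**`, links, and lists all work; for example, entering `Please add a **short description** to every project` shows the bold text in the sidebar.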
...@@ -68,10 +68,10 @@ BackgroundMigrationWorker.perform_async('BackgroundMigrationClassName', [arg1, a ...@@ -68,10 +68,10 @@ BackgroundMigrationWorker.perform_async('BackgroundMigrationClassName', [arg1, a
``` ```
Usually it's better to enqueue jobs in bulk, for this you can use Usually it's better to enqueue jobs in bulk, for this you can use
`BackgroundMigrationWorker.perform_bulk`: `BackgroundMigrationWorker.bulk_perform_async`:
```ruby ```ruby
BackgroundMigrationWorker.perform_bulk( BackgroundMigrationWorker.bulk_perform_async(
[['BackgroundMigrationClassName', [1]], [['BackgroundMigrationClassName', [1]],
['BackgroundMigrationClassName', [2]]] ['BackgroundMigrationClassName', [2]]]
) )
...@@ -85,13 +85,13 @@ updates. Removals in turn can be handled by simply defining foreign keys with ...@@ -85,13 +85,13 @@ updates. Removals in turn can be handled by simply defining foreign keys with
cascading deletes. cascading deletes.
If you would like to schedule jobs in bulk with a delay, you can use If you would like to schedule jobs in bulk with a delay, you can use
`BackgroundMigrationWorker.perform_bulk_in`: `BackgroundMigrationWorker.bulk_perform_in`:
```ruby ```ruby
jobs = [['BackgroundMigrationClassName', [1]], jobs = [['BackgroundMigrationClassName', [1]],
['BackgroundMigrationClassName', [2]]] ['BackgroundMigrationClassName', [2]]]
BackgroundMigrationWorker.perform_bulk_in(5.minutes, jobs) BackgroundMigrationWorker.bulk_perform_in(5.minutes, jobs)
``` ```
## Cleaning Up ## Cleaning Up
...@@ -201,7 +201,7 @@ class ScheduleExtractServicesUrl < ActiveRecord::Migration ...@@ -201,7 +201,7 @@ class ScheduleExtractServicesUrl < ActiveRecord::Migration
['ExtractServicesUrl', [id]] ['ExtractServicesUrl', [id]]
end end
BackgroundMigrationWorker.perform_bulk(jobs) BackgroundMigrationWorker.bulk_perform_async(jobs)
end end
end end
......
...@@ -3,6 +3,12 @@ ...@@ -3,6 +3,12 @@
This document outlines various guidelines that should be followed when adding or This document outlines various guidelines that should be followed when adding or
modifying Sidekiq workers. modifying Sidekiq workers.
## ApplicationWorker
All workers should include `ApplicationWorker` instead of `Sidekiq::Worker`,
which adds some convenience methods and automatically sets the queue based on
the worker's name.
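For illustration, a minimal worker following this convention could look like the sketch below (the class name and body are made up):

```ruby
class ProcessSomethingWorker
  include ApplicationWorker

  def perform(something_id)
    # do the actual work; the queue name is derived automatically
    # from the class name: "process_something"
  end
end
```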
## Default Queue ## Default Queue
Use of the "default" queue is not allowed. Every worker should use a queue that Use of the "default" queue is not allowed. Every worker should use a queue that
...@@ -13,19 +19,10 @@ A list of all available queues can be found in `config/sidekiq_queues.yml`. ...@@ -13,19 +19,10 @@ A list of all available queues can be found in `config/sidekiq_queues.yml`.
## Dedicated Queues ## Dedicated Queues
Most workers should use their own queue. To ease this process a worker can Most workers should use their own queue, which is automatically set based on the
include the `DedicatedSidekiqQueue` concern as follows: worker class name. For a worker named `ProcessSomethingWorker`, the queue name
would be `process_something`. If you're not sure what a worker's queue name is,
```ruby you can find it using `SomeWorker.queue`.
class ProcessSomethingWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
end
```
This will set the queue name based on the class' name, minus the `Worker`
suffix. In the above example this would lead to the queue being
`process_something`.
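For example, given the sketch above:

```ruby
ProcessSomethingWorker.queue # => "process_something"
```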
In some cases multiple workers do use the same queue. For example, the various In some cases multiple workers do use the same queue. For example, the various
workers for updating CI pipelines all use the `pipeline` queue. Adding workers workers for updating CI pipelines all use the `pipeline` queue. Adding workers
......
...@@ -17,6 +17,9 @@ Taking the trigger term as `project-name`, the commands are: ...@@ -17,6 +17,9 @@ Taking the trigger term as `project-name`, the commands are:
| `/project-name issue search <query>` | Shows up to 5 issues matching `<query>` | | `/project-name issue search <query>` | Shows up to 5 issues matching `<query>` |
| `/project-name deploy <from> to <to>` | Deploy from the `<from>` environment to the `<to>` environment | | `/project-name deploy <from> to <to>` | Deploy from the `<from>` environment to the `<to>` environment |
Note that if you are using the [GitLab Slack application](https://docs.gitlab.com/ee/user/project/integrations/gitlab_slack_application.html) for
your GitLab.com projects, you need to [add the `gitlab` keyword at the beginning of the command](https://docs.gitlab.com/ee/user/project/integrations/gitlab_slack_application.html#usage).
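For example, `/project-name issue search <query>` would become `/gitlab project-name issue search <query>` when issued through the Slack application (illustrative; see the linked usage docs for the exact form).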
## Issue commands ## Issue commands
It is possible to create a new issue, display issue details, and search up to 5 issues. It is possible to create a new issue, display issue details, and search up to 5 issues.
......
...@@ -44,8 +44,8 @@ class Spinach::Features::ProjectPages < Spinach::FeatureSteps ...@@ -44,8 +44,8 @@ class Spinach::Features::ProjectPages < Spinach::FeatureSteps
project: @project, project: @project,
pipeline: pipeline, pipeline: pipeline,
ref: 'HEAD', ref: 'HEAD',
artifacts_file: fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip'), legacy_artifacts_file: fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip'),
artifacts_metadata: fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip.meta') legacy_artifacts_metadata: fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip.meta')
) )
result = ::Projects::UpdatePagesService.new(@project, build).execute result = ::Projects::UpdatePagesService.new(@project, build).execute
......
...@@ -37,13 +37,13 @@ module SharedBuilds ...@@ -37,13 +37,13 @@ module SharedBuilds
step 'recent build has artifacts available' do step 'recent build has artifacts available' do
artifacts = Rails.root + 'spec/fixtures/ci_build_artifacts.zip' artifacts = Rails.root + 'spec/fixtures/ci_build_artifacts.zip'
archive = fixture_file_upload(artifacts, 'application/zip') archive = fixture_file_upload(artifacts, 'application/zip')
@build.update_attributes(artifacts_file: archive) @build.update_attributes(legacy_artifacts_file: archive)
end end
step 'recent build has artifacts metadata available' do step 'recent build has artifacts metadata available' do
metadata = Rails.root + 'spec/fixtures/ci_build_artifacts_metadata.gz' metadata = Rails.root + 'spec/fixtures/ci_build_artifacts_metadata.gz'
gzip = fixture_file_upload(metadata, 'application/x-gzip') gzip = fixture_file_upload(metadata, 'application/x-gzip')
@build.update_attributes(artifacts_metadata: gzip) @build.update_attributes(legacy_artifacts_metadata: gzip)
end end
step 'recent build has a build trace' do step 'recent build has a build trace' do
......
...@@ -6,12 +6,34 @@ module AfterCommitQueue ...@@ -6,12 +6,34 @@ module AfterCommitQueue
after_rollback :_clear_after_commit_queue after_rollback :_clear_after_commit_queue
end end
def run_after_commit(method = nil, &block) def run_after_commit(&block)
_after_commit_queue << proc { self.send(method) } if method # rubocop:disable GitlabSecurity/PublicSend
_after_commit_queue << block if block _after_commit_queue << block if block
true
end
def run_after_commit_or_now(&block)
if AfterCommitQueue.inside_transaction?
run_after_commit(&block)
else
instance_eval(&block)
end
true true
end end
def self.open_transactions_baseline
if ::Rails.env.test?
return DatabaseCleaner.connections.count { |conn| conn.strategy.is_a?(DatabaseCleaner::ActiveRecord::Transaction) }
end
0
end
def self.inside_transaction?
ActiveRecord::Base.connection.open_transactions > open_transactions_baseline
end
protected protected
def _run_after_commit_queue def _run_after_commit_queue
......
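A minimal usage sketch for the new helper, assuming a model that includes `AfterCommitQueue` (the worker call is hypothetical):

```ruby
# The block runs in the context of the record, so `id` below is project.id.
project.run_after_commit_or_now do
  SomethingWorker.perform_async(id) # hypothetical worker
end
# Inside an open transaction the block is queued until commit;
# otherwise it runs immediately.
```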
...@@ -1050,13 +1050,9 @@ module API ...@@ -1050,13 +1050,9 @@ module API
expose :type, :url, :username, :password expose :type, :url, :username, :password
end end
class ArtifactFile < Grape::Entity
expose :filename, :size
end
class Dependency < Grape::Entity class Dependency < Grape::Entity
expose :id, :name, :token expose :id, :name, :token
expose :artifacts_file, using: ArtifactFile, if: ->(job, _) { job.artifacts? } expose :artifacts_file, using: JobArtifactFile, if: ->(job, _) { job.artifacts? }
end end
class Response < Grape::Entity class Response < Grape::Entity
......
...@@ -215,18 +215,20 @@ module API ...@@ -215,18 +215,20 @@ module API
job = authenticate_job! job = authenticate_job!
forbidden!('Job is not running!') unless job.running? forbidden!('Job is not running!') unless job.running?
artifacts_upload_path = ArtifactUploader.artifacts_upload_path artifacts_upload_path = JobArtifactUploader.artifacts_upload_path
artifacts = uploaded_file(:file, artifacts_upload_path) artifacts = uploaded_file(:file, artifacts_upload_path)
metadata = uploaded_file(:metadata, artifacts_upload_path) metadata = uploaded_file(:metadata, artifacts_upload_path)
bad_request!('Missing artifacts file!') unless artifacts bad_request!('Missing artifacts file!') unless artifacts
file_to_large! unless artifacts.size < max_artifacts_size file_to_large! unless artifacts.size < max_artifacts_size
job.artifacts_file = artifacts expire_in = params['expire_in'] ||
job.artifacts_metadata = metadata
job.artifacts_expire_in = params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
job.build_job_artifacts_archive(project: job.project, file_type: :archive, file: artifacts, expire_in: expire_in)
job.build_job_artifacts_metadata(project: job.project, file_type: :metadata, file: metadata, expire_in: expire_in) if metadata
job.artifacts_expire_in = expire_in
if job.save if job.save
present job, with: Entities::JobRequest::Response present job, with: Entities::JobRequest::Response
else else
......
...@@ -3,7 +3,7 @@ require 'backup/files' ...@@ -3,7 +3,7 @@ require 'backup/files'
module Backup module Backup
class Artifacts < Files class Artifacts < Files
def initialize def initialize
super('artifacts', ArtifactUploader.local_artifacts_store) super('artifacts', LegacyArtifactUploader.local_store_path)
end end
def create_files_dir def create_files_dir
......
...@@ -50,6 +50,10 @@ module Gitlab ...@@ -50,6 +50,10 @@ module Gitlab
postgresql? && version.to_f >= 9.3 postgresql? && version.to_f >= 9.3
end end
def self.replication_slots_supported?
postgresql? && version.to_f >= 9.4
end
def self.nulls_last_order(field, direction = 'ASC') def self.nulls_last_order(field, direction = 'ASC')
order = "#{field} #{direction}" order = "#{field} #{direction}"
......
...@@ -703,14 +703,14 @@ into similar problems in the future (e.g. when new tables are created). ...@@ -703,14 +703,14 @@ into similar problems in the future (e.g. when new tables are created).
# We push multiple jobs at a time to reduce the time spent in # We push multiple jobs at a time to reduce the time spent in
# Sidekiq/Redis operations. We're using this buffer-based approach so we # Sidekiq/Redis operations. We're using this buffer-based approach so we
# don't need to run additional queries for every range. # don't need to run additional queries for every range.
BackgroundMigrationWorker.perform_bulk(jobs) BackgroundMigrationWorker.bulk_perform_async(jobs)
jobs.clear jobs.clear
end end
jobs << [job_class_name, [start_id, end_id]] jobs << [job_class_name, [start_id, end_id]]
end end
BackgroundMigrationWorker.perform_bulk(jobs) unless jobs.empty? BackgroundMigrationWorker.bulk_perform_async(jobs) unless jobs.empty?
end end
# Queues background migration jobs for an entire table, batched by ID range. # Queues background migration jobs for an entire table, batched by ID range.
......
...@@ -213,6 +213,10 @@ module Gitlab ...@@ -213,6 +213,10 @@ module Gitlab
end end
def shas_with_signatures(repository, shas) def shas_with_signatures(repository, shas)
GitalyClient.migrate(:filter_shas_with_signatures) do |is_enabled|
if is_enabled
Gitlab::GitalyClient::CommitService.new(repository).filter_shas_with_signatures(shas)
else
shas.select do |sha| shas.select do |sha|
begin begin
Rugged::Commit.extract_signature(repository.rugged, sha) Rugged::Commit.extract_signature(repository.rugged, sha)
...@@ -222,6 +226,8 @@ module Gitlab ...@@ -222,6 +226,8 @@ module Gitlab
end end
end end
end end
end
end
def initialize(repository, raw_commit, head = nil) def initialize(repository, raw_commit, head = nil)
raise "Nil as raw commit passed" unless raw_commit raise "Nil as raw commit passed" unless raw_commit
......
...@@ -250,6 +250,26 @@ module Gitlab ...@@ -250,6 +250,26 @@ module Gitlab
consume_commits_response(response) consume_commits_response(response)
end end
def filter_shas_with_signatures(shas)
request = Gitaly::FilterShasWithSignaturesRequest.new(repository: @gitaly_repo)
enum = Enumerator.new do |y|
shas.each_slice(20) do |revs|
request.shas = GitalyClient.encode_repeated(revs)
y.yield request
request = Gitaly::FilterShasWithSignaturesRequest.new
end
end
response = GitalyClient.call(@repository.storage, :commit_service, :filter_shas_with_signatures, enum)
response.flat_map do |msg|
msg.shas.map { |sha| EncodingHelper.encode!(sha) }
end
end
private private
def call_commit_diff(request_params, options = {}) def call_commit_diff(request_params, options = {})
......
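A usage sketch for the new client method (the repository and SHAs are placeholders):

```ruby
client = Gitlab::GitalyClient::CommitService.new(repository)
signed_shas = client.filter_shas_with_signatures(candidate_shas)
# => only the SHAs whose commits carry a signature, re-encoded via EncodingHelper
```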
require 'yaml'
module Gitlab
module SidekiqConfig
def self.redis_queues
@redis_queues ||= Sidekiq::Queue.all.map(&:name)
end
# This method is called by `bin/sidekiq-cluster` in EE, which runs outside
# of bundler/Rails context, so we cannot use any gem or Rails methods.
def self.config_queues(rails_path = Rails.root.to_s)
@config_queues ||= begin
config = YAML.load_file(File.join(rails_path, 'config', 'sidekiq_queues.yml'))
config[:queues].map(&:first)
end
end
def self.cron_workers
@cron_workers ||= Settings.cron_jobs.map { |job_name, options| options['job_class'].constantize }
end
def self.workers
@workers ||= find_workers(Rails.root.join('app', 'workers'))
end
def self.default_queues
[ActionMailer::DeliveryJob.queue_name, 'default']
end
def self.worker_queues
@worker_queues ||= (workers.map(&:queue) + default_queues).uniq
end
def self.find_workers(root)
concerns = root.join('concerns').to_s
workers = Dir[root.join('**', '*.rb')]
.reject { |path| path.start_with?(concerns) }
workers.map! do |path|
ns = Pathname.new(path).relative_path_from(root).to_s.gsub('.rb', '')
ns.camelize.constantize
end
# Keep only classes that are actually Sidekiq workers
workers.select { |w| w < Sidekiq::Worker }
end
end
end
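In a Rails console, the helpers above can be exercised like this sketch (worker names as used in the spec further down):

```ruby
Gitlab::SidekiqConfig.workers.include?(PostReceive)     # => true
Gitlab::SidekiqConfig.worker_queues.include?('default') # => true
Gitlab::SidekiqConfig.config_queues.first               # first queue in config/sidekiq_queues.yml
```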
...@@ -58,7 +58,7 @@ module Gitlab ...@@ -58,7 +58,7 @@ module Gitlab
end end
def artifact_upload_ok def artifact_upload_ok
{ TempPath: ArtifactUploader.artifacts_upload_path } { TempPath: JobArtifactUploader.artifacts_upload_path }
end end
def send_git_blob(repository, blob) def send_git_blob(repository, blob)
......
...@@ -4,5 +4,6 @@ FactoryGirl.define do ...@@ -4,5 +4,6 @@ FactoryGirl.define do
factory :appearance do factory :appearance do
title "MepMep" title "MepMep"
description "This is my Community Edition instance" description "This is my Community Edition instance"
new_project_guidelines "Custom project guidelines"
end end
end end
...@@ -154,36 +154,29 @@ FactoryGirl.define do ...@@ -154,36 +154,29 @@ FactoryGirl.define do
runner factory: :ci_runner runner factory: :ci_runner
end end
trait :artifacts do trait :legacy_artifacts do
after(:create) do |build, _| after(:create) do |build, _|
build.artifacts_file = build.update!(
fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), legacy_artifacts_file: fixture_file_upload(
'application/zip') Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip'),
legacy_artifacts_metadata: fixture_file_upload(
build.artifacts_metadata = Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'), 'application/x-gzip')
fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'), )
'application/x-gzip')
build.save!
end end
end end
trait :artifacts_expired do trait :artifacts do
after(:create) do |build, _| after(:create) do |build|
build.artifacts_file = create(:ci_job_artifact, :archive, job: build)
fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), create(:ci_job_artifact, :metadata, job: build)
'application/zip') build.reload
build.artifacts_metadata =
fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'),
'application/x-gzip')
build.artifacts_expire_at = 1.minute.ago
build.save!
end end
end end
trait :expired do
artifacts_expire_at 1.minute.ago
end
trait :with_commit do trait :with_commit do
after(:build) do |build| after(:build) do |build|
allow(build).to receive(:commit).and_return build(:commit, :without_author) allow(build).to receive(:commit).and_return build(:commit, :without_author)
......
include ActionDispatch::TestProcess
FactoryGirl.define do
factory :ci_job_artifact, class: Ci::JobArtifact do
job factory: :ci_build
file_type :archive
after :build do |artifact|
artifact.project ||= artifact.job.project
end
trait :archive do
file_type :archive
after(:build) do |artifact, _|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
end
end
trait :metadata do
file_type :metadata
after(:build) do |artifact, _|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'), 'application/x-gzip')
end
end
end
end
...@@ -9,6 +9,7 @@ feature 'Admin Appearance' do ...@@ -9,6 +9,7 @@ feature 'Admin Appearance' do
fill_in 'appearance_title', with: 'MyCompany' fill_in 'appearance_title', with: 'MyCompany'
fill_in 'appearance_description', with: 'dev server' fill_in 'appearance_description', with: 'dev server'
fill_in 'appearance_new_project_guidelines', with: 'Custom project guidelines'
click_button 'Save' click_button 'Save'
expect(current_path).to eq admin_appearances_path expect(current_path).to eq admin_appearances_path
...@@ -16,21 +17,39 @@ feature 'Admin Appearance' do ...@@ -16,21 +17,39 @@ feature 'Admin Appearance' do
expect(page).to have_field('appearance_title', with: 'MyCompany') expect(page).to have_field('appearance_title', with: 'MyCompany')
expect(page).to have_field('appearance_description', with: 'dev server') expect(page).to have_field('appearance_description', with: 'dev server')
expect(page).to have_field('appearance_new_project_guidelines', with: 'Custom project guidelines')
expect(page).to have_content 'Last edit' expect(page).to have_content 'Last edit'
end end
scenario 'Preview appearance' do scenario 'Preview sign-in page appearance' do
sign_in(create(:admin)) sign_in(create(:admin))
visit admin_appearances_path visit admin_appearances_path
click_link "Preview" click_link "Sign-in page"
expect_page_has_custom_appearance(appearance) expect_custom_sign_in_appearance(appearance)
end
scenario 'Preview new project page appearance' do
sign_in(create(:admin))
visit admin_appearances_path
click_link "New project page"
expect_custom_new_project_appearance(appearance)
end end
scenario 'Custom sign-in page' do scenario 'Custom sign-in page' do
visit new_user_session_path visit new_user_session_path
expect_page_has_custom_appearance(appearance)
expect_custom_sign_in_appearance(appearance)
end
scenario 'Custom new project page' do
sign_in create(:user)
visit new_project_path
expect_custom_new_project_appearance(appearance)
end end
scenario 'Appearance logo' do scenario 'Appearance logo' do
...@@ -57,11 +76,15 @@ feature 'Admin Appearance' do ...@@ -57,11 +76,15 @@ feature 'Admin Appearance' do
expect(page).not_to have_css(header_logo_selector) expect(page).not_to have_css(header_logo_selector)
end end
def expect_page_has_custom_appearance(appearance) def expect_custom_sign_in_appearance(appearance)
expect(page).to have_content appearance.title expect(page).to have_content appearance.title
expect(page).to have_content appearance.description expect(page).to have_content appearance.description
end end
def expect_custom_new_project_appearance(appearance)
expect(page).to have_content appearance.new_project_guidelines
end
def logo_selector def logo_selector
'//img[data-src^="/uploads/-/system/appearance/logo"]' '//img[data-src^="/uploads/-/system/appearance/logo"]'
end end
......
...@@ -89,7 +89,7 @@ describe 'Commits' do ...@@ -89,7 +89,7 @@ describe 'Commits' do
context 'Download artifacts' do context 'Download artifacts' do
before do before do
build.update_attributes(artifacts_file: artifacts_file) build.update_attributes(legacy_artifacts_file: artifacts_file)
end end
it do it do
...@@ -146,7 +146,7 @@ describe 'Commits' do ...@@ -146,7 +146,7 @@ describe 'Commits' do
context "when logged as reporter" do context "when logged as reporter" do
before do before do
project.team << [user, :reporter] project.team << [user, :reporter]
build.update_attributes(artifacts_file: artifacts_file) build.update_attributes(legacy_artifacts_file: artifacts_file)
visit pipeline_path(pipeline) visit pipeline_path(pipeline)
end end
...@@ -168,7 +168,7 @@ describe 'Commits' do ...@@ -168,7 +168,7 @@ describe 'Commits' do
project.update( project.update(
visibility_level: Gitlab::VisibilityLevel::INTERNAL, visibility_level: Gitlab::VisibilityLevel::INTERNAL,
public_builds: false) public_builds: false)
build.update_attributes(artifacts_file: artifacts_file) build.update_attributes(legacy_artifacts_file: artifacts_file)
visit pipeline_path(pipeline) visit pipeline_path(pipeline)
end end
......
...@@ -28,14 +28,14 @@ feature 'Mini Pipeline Graph', :js do ...@@ -28,14 +28,14 @@ feature 'Mini Pipeline Graph', :js do
let(:artifacts_file2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png') } let(:artifacts_file2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png') }
before do before do
create(:ci_build, pipeline: pipeline, artifacts_file: artifacts_file1) create(:ci_build, pipeline: pipeline, legacy_artifacts_file: artifacts_file1)
create(:ci_build, pipeline: pipeline, when: 'manual') create(:ci_build, pipeline: pipeline, when: 'manual')
end end
it 'avoids repeated database queries' do it 'avoids repeated database queries' do
before = ActiveRecord::QueryRecorder.new { visit_merge_request(:json) } before = ActiveRecord::QueryRecorder.new { visit_merge_request(:json) }
create(:ci_build, pipeline: pipeline, artifacts_file: artifacts_file2) create(:ci_build, pipeline: pipeline, legacy_artifacts_file: artifacts_file2)
create(:ci_build, pipeline: pipeline, when: 'manual') create(:ci_build, pipeline: pipeline, when: 'manual')
after = ActiveRecord::QueryRecorder.new { visit_merge_request(:json) } after = ActiveRecord::QueryRecorder.new { visit_merge_request(:json) }
......
...@@ -187,7 +187,7 @@ feature 'Jobs' do ...@@ -187,7 +187,7 @@ feature 'Jobs' do
context "Download artifacts" do context "Download artifacts" do
before do before do
job.update_attributes(artifacts_file: artifacts_file) job.update_attributes(legacy_artifacts_file: artifacts_file)
visit project_job_path(project, job) visit project_job_path(project, job)
end end
...@@ -198,7 +198,7 @@ feature 'Jobs' do ...@@ -198,7 +198,7 @@ feature 'Jobs' do
context 'Artifacts expire date' do context 'Artifacts expire date' do
before do before do
job.update_attributes(artifacts_file: artifacts_file, job.update_attributes(legacy_artifacts_file: artifacts_file,
artifacts_expire_at: expire_at) artifacts_expire_at: expire_at)
visit project_job_path(project, job) visit project_job_path(project, job)
...@@ -422,14 +422,14 @@ feature 'Jobs' do ...@@ -422,14 +422,14 @@ feature 'Jobs' do
describe "GET /:project/jobs/:id/download" do describe "GET /:project/jobs/:id/download" do
before do before do
job.update_attributes(artifacts_file: artifacts_file) job.update_attributes(legacy_artifacts_file: artifacts_file)
visit project_job_path(project, job) visit project_job_path(project, job)
click_link 'Download' click_link 'Download'
end end
context "Build from other project" do context "Build from other project" do
before do before do
job2.update_attributes(artifacts_file: artifacts_file) job2.update_attributes(legacy_artifacts_file: artifacts_file)
visit download_project_job_artifacts_path(project, job2) visit download_project_job_artifacts_path(project, job2)
end end
......
...@@ -304,7 +304,7 @@ describe 'Pipelines', :js do ...@@ -304,7 +304,7 @@ describe 'Pipelines', :js do
context 'with artifacts expired' do context 'with artifacts expired' do
let!(:with_artifacts_expired) do let!(:with_artifacts_expired) do
create(:ci_build, :artifacts_expired, :success, create(:ci_build, :expired, :success,
pipeline: pipeline, pipeline: pipeline,
name: 'rspec', name: 'rspec',
stage: 'test') stage: 'test')
......
...@@ -39,6 +39,11 @@ describe 'Project snippets', :js do ...@@ -39,6 +39,11 @@ describe 'Project snippets', :js do
expect(page).to have_selector('.atwho-view') expect(page).to have_selector('.atwho-view')
end end
it 'should have zen mode' do
find('.js-zen-enter').click
expect(page).to have_selector('.fullscreen')
end
end end
end end
end end
...@@ -942,8 +942,8 @@ describe Gitlab::Database::MigrationHelpers do ...@@ -942,8 +942,8 @@ describe Gitlab::Database::MigrationHelpers do
end end
it 'queues jobs in groups of buffer size 1' do it 'queues jobs in groups of buffer size 1' do
expect(BackgroundMigrationWorker).to receive(:perform_bulk).with([['FooJob', [id1, id2]]]) expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([['FooJob', [id1, id2]]])
expect(BackgroundMigrationWorker).to receive(:perform_bulk).with([['FooJob', [id3, id3]]]) expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([['FooJob', [id3, id3]]])
model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2) model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2)
end end
...@@ -960,7 +960,7 @@ describe Gitlab::Database::MigrationHelpers do ...@@ -960,7 +960,7 @@ describe Gitlab::Database::MigrationHelpers do
end end
it 'queues jobs in bulk all at once (big buffer size)' do it 'queues jobs in bulk all at once (big buffer size)' do
expect(BackgroundMigrationWorker).to receive(:perform_bulk).with([['FooJob', [id1, id2]], expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([['FooJob', [id1, id2]],
['FooJob', [id3, id3]]]) ['FooJob', [id3, id3]]])
model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2) model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2)
......
...@@ -73,6 +73,28 @@ describe Gitlab::Database do ...@@ -73,6 +73,28 @@ describe Gitlab::Database do
end end
end end
describe '.replication_slots_supported?' do
it 'returns false when using MySQL' do
allow(described_class).to receive(:postgresql?).and_return(false)
expect(described_class.replication_slots_supported?).to eq(false)
end
it 'returns false when using PostgreSQL 9.3' do
allow(described_class).to receive(:postgresql?).and_return(true)
allow(described_class).to receive(:version).and_return('9.3.1')
expect(described_class.replication_slots_supported?).to eq(false)
end
it 'returns true when using PostgreSQL 9.4.0 or newer' do
allow(described_class).to receive(:postgresql?).and_return(true)
allow(described_class).to receive(:version).and_return('9.4.0')
expect(described_class.replication_slots_supported?).to eq(true)
end
end
describe '.nulls_last_order' do describe '.nulls_last_order' do
context 'when using PostgreSQL' do context 'when using PostgreSQL' do
before do before do
......
...@@ -278,6 +278,35 @@ describe Gitlab::Git::Commit, seed_helper: true do ...@@ -278,6 +278,35 @@ describe Gitlab::Git::Commit, seed_helper: true do
it { is_expected.not_to include(SeedRepo::FirstCommit::ID) } it { is_expected.not_to include(SeedRepo::FirstCommit::ID) }
end end
shared_examples '.shas_with_signatures' do
let(:signed_shas) { %w[5937ac0a7beb003549fc5fd26fc247adbce4a52e 570e7b2abdd848b95f2f578043fc23bd6f6fd24d] }
let(:unsigned_shas) { %w[19e2e9b4ef76b422ce1154af39a91323ccc57434 c642fe9b8b9f28f9225d7ea953fe14e74748d53b] }
let(:first_signed_shas) { %w[5937ac0a7beb003549fc5fd26fc247adbce4a52e c642fe9b8b9f28f9225d7ea953fe14e74748d53b] }
it 'has 2 signed shas' do
ret = described_class.shas_with_signatures(repository, signed_shas)
expect(ret).to eq(signed_shas)
end
it 'has 0 signed shas' do
ret = described_class.shas_with_signatures(repository, unsigned_shas)
expect(ret).to eq([])
end
it 'has 1 signed sha' do
ret = described_class.shas_with_signatures(repository, first_signed_shas)
expect(ret).to contain_exactly(first_signed_shas.first)
end
end
describe '.shas_with_signatures with gitaly on' do
it_should_behave_like '.shas_with_signatures'
end
describe '.shas_with_signatures with gitaly disabled', :disable_gitaly do
it_should_behave_like '.shas_with_signatures'
end
describe '.find_all' do describe '.find_all' do
shared_examples 'finding all commits' do shared_examples 'finding all commits' do
it 'should return a collection of commits' do it 'should return a collection of commits' do
......
require 'rails_helper'
describe Gitlab::SidekiqConfig do
describe '.workers' do
it 'includes all workers' do
workers = described_class.workers
expect(workers).to include(PostReceive)
expect(workers).to include(MergeWorker)
end
end
describe '.worker_queues' do
it 'includes all queues' do
queues = described_class.worker_queues
expect(queues).to include('post_receive')
expect(queues).to include('merge')
expect(queues).to include('cronjob')
expect(queues).to include('mailers')
expect(queues).to include('default')
end
end
end
...@@ -16,20 +16,22 @@ describe MigrateOldArtifacts do ...@@ -16,20 +16,22 @@ describe MigrateOldArtifacts do
end end
context 'with migratable data' do context 'with migratable data' do
let(:project1) { create(:project, ci_id: 2) } set(:project1) { create(:project, ci_id: 2) }
let(:project2) { create(:project, ci_id: 3) } set(:project2) { create(:project, ci_id: 3) }
let(:project3) { create(:project) } set(:project3) { create(:project) }
let(:pipeline1) { create(:ci_empty_pipeline, project: project1) } set(:pipeline1) { create(:ci_empty_pipeline, project: project1) }
let(:pipeline2) { create(:ci_empty_pipeline, project: project2) } set(:pipeline2) { create(:ci_empty_pipeline, project: project2) }
let(:pipeline3) { create(:ci_empty_pipeline, project: project3) } set(:pipeline3) { create(:ci_empty_pipeline, project: project3) }
let!(:build_with_legacy_artifacts) { create(:ci_build, pipeline: pipeline1) } let!(:build_with_legacy_artifacts) { create(:ci_build, pipeline: pipeline1) }
let!(:build_without_artifacts) { create(:ci_build, pipeline: pipeline1) } let!(:build_without_artifacts) { create(:ci_build, pipeline: pipeline1) }
let!(:build2) { create(:ci_build, :artifacts, pipeline: pipeline2) } let!(:build2) { create(:ci_build, pipeline: pipeline2) }
let!(:build3) { create(:ci_build, :artifacts, pipeline: pipeline3) } let!(:build3) { create(:ci_build, pipeline: pipeline3) }
before do before do
setup_builds(build2, build3)
store_artifacts_in_legacy_path(build_with_legacy_artifacts) store_artifacts_in_legacy_path(build_with_legacy_artifacts)
end end
...@@ -38,7 +40,7 @@ describe MigrateOldArtifacts do ...@@ -38,7 +40,7 @@ describe MigrateOldArtifacts do
end end
it "legacy artifacts are set" do it "legacy artifacts are set" do
expect(build_with_legacy_artifacts.artifacts_file_identifier).not_to be_nil expect(build_with_legacy_artifacts.legacy_artifacts_file_identifier).not_to be_nil
end end
describe '#min_id' do describe '#min_id' do
...@@ -113,5 +115,24 @@ describe MigrateOldArtifacts do ...@@ -113,5 +115,24 @@ describe MigrateOldArtifacts do
build.project.ci_id.to_s, build.project.ci_id.to_s,
build.id.to_s) build.id.to_s)
end end
def new_legacy_path(build)
File.join(directory,
build.created_at.utc.strftime('%Y_%m'),
build.project_id.to_s,
build.id.to_s)
end
def setup_builds(*builds)
builds.each do |build|
FileUtils.mkdir_p(new_legacy_path(build))
build.update_columns(
artifacts_file: 'ci_build_artifacts.zip',
artifacts_metadata: 'ci_build_artifacts_metadata.gz')
build.reload
end
end
end end
end end
...@@ -5,9 +5,6 @@ describe Appearance do ...@@ -5,9 +5,6 @@ describe Appearance do
it { is_expected.to be_valid } it { is_expected.to be_valid }
it { is_expected.to validate_presence_of(:title) }
it { is_expected.to validate_presence_of(:description) }
it { is_expected.to have_many(:uploads).dependent(:destroy) } it { is_expected.to have_many(:uploads).dependent(:destroy) }
describe '.current', :use_clean_rails_memory_store_caching do describe '.current', :use_clean_rails_memory_store_caching do
......
...@@ -23,6 +23,8 @@ describe Ci::Build do ...@@ -23,6 +23,8 @@ describe Ci::Build do
it { is_expected.to respond_to(:has_trace?) } it { is_expected.to respond_to(:has_trace?) }
it { is_expected.to respond_to(:trace) } it { is_expected.to respond_to(:trace) }
it { is_expected.to be_a(ArtifactMigratable) }
describe 'callbacks' do describe 'callbacks' do
context 'when running after_create callback' do context 'when running after_create callback' do
it 'triggers asynchronous build hooks worker' do it 'triggers asynchronous build hooks worker' do
...@@ -130,37 +132,58 @@ describe Ci::Build do ...@@ -130,37 +132,58 @@ describe Ci::Build do
end end
describe '#artifacts?' do describe '#artifacts?' do
context 'when new artifacts are used' do
let(:build) { create(:ci_build, :artifacts) }
subject { build.artifacts? } subject { build.artifacts? }
context 'artifacts archive does not exist' do context 'artifacts archive does not exist' do
before do let(:build) { create(:ci_build) }
build.update_attributes(artifacts_file: nil)
end
it { is_expected.to be_falsy } it { is_expected.to be_falsy }
end end
context 'artifacts archive exists' do context 'artifacts archive exists' do
let(:build) { create(:ci_build, :artifacts) }
it { is_expected.to be_truthy } it { is_expected.to be_truthy }
context 'is expired' do context 'is expired' do
before do let!(:build) { create(:ci_build, :artifacts, :expired) }
build.update(artifacts_expire_at: Time.now - 7.days)
end
it { is_expected.to be_falsy } it { is_expected.to be_falsy }
end end
context 'is not expired' do context 'is not expired' do
before do it { is_expected.to be_truthy }
build.update(artifacts_expire_at: Time.now + 7.days) end
end
end
context 'when legacy artifacts are used' do
let(:build) { create(:ci_build, :legacy_artifacts) }
subject { build.artifacts? }
context 'artifacts archive does not exist' do
let(:build) { create(:ci_build) }
it { is_expected.to be_falsy }
end
context 'artifacts archive exists' do
it { is_expected.to be_truthy }
context 'is expired' do
let!(:build) { create(:ci_build, :legacy_artifacts, :expired) }
it { is_expected.to be_falsy }
end end
context 'is not expired' do
it { is_expected.to be_truthy } it { is_expected.to be_truthy }
end end
end end
end end
end
describe '#artifacts_expired?' do describe '#artifacts_expired?' do
subject { build.artifacts_expired? } subject { build.artifacts_expired? }
...@@ -612,11 +635,13 @@ describe Ci::Build do ...@@ -612,11 +635,13 @@ describe Ci::Build do
describe '#erasable?' do describe '#erasable?' do
subject { build.erasable? } subject { build.erasable? }
it { is_expected.to eq false } it { is_expected.to eq false }
end end
end end
context 'build is erasable' do context 'build is erasable' do
context 'new artifacts' do
let!(:build) { create(:ci_build, :trace, :success, :artifacts) } let!(:build) { create(:ci_build, :trace, :success, :artifacts) }
describe '#erase' do describe '#erase' do
...@@ -683,6 +708,77 @@ describe Ci::Build do ...@@ -683,6 +708,77 @@ describe Ci::Build do
end end
end end
context 'old artifacts' do
context 'build is erasable' do
context 'new artifacts' do
let!(:build) { create(:ci_build, :trace, :success, :legacy_artifacts) }
describe '#erase' do
before do
build.erase(erased_by: user)
end
context 'erased by user' do
let!(:user) { create(:user, username: 'eraser') }
include_examples 'erasable'
it 'records user who erased a build' do
expect(build.erased_by).to eq user
end
end
context 'erased by system' do
let(:user) { nil }
include_examples 'erasable'
it 'does not set user who erased a build' do
expect(build.erased_by).to be_nil
end
end
end
describe '#erasable?' do
subject { build.erasable? }
it { is_expected.to be_truthy }
end
describe '#erased?' do
let!(:build) { create(:ci_build, :trace, :success, :legacy_artifacts) }
subject { build.erased? }
context 'job has not been erased' do
it { is_expected.to be_falsey }
end
context 'job has been erased' do
before do
build.erase
end
it { is_expected.to be_truthy }
end
end
context 'metadata and build trace are not available' do
let!(:build) { create(:ci_build, :success, :legacy_artifacts) }
before do
build.remove_artifacts_metadata!
end
describe '#erase' do
it 'does not raise error' do
expect { build.erase }.not_to raise_error
end
end
end
end
end
end
end
describe '#first_pending' do describe '#first_pending' do
let!(:first) { create(:ci_build, pipeline: pipeline, status: 'pending', created_at: Date.yesterday) } let!(:first) { create(:ci_build, pipeline: pipeline, status: 'pending', created_at: Date.yesterday) }
let!(:second) { create(:ci_build, pipeline: pipeline, status: 'pending') } let!(:second) { create(:ci_build, pipeline: pipeline, status: 'pending') }
...@@ -912,11 +1008,23 @@ describe Ci::Build do ...@@ -912,11 +1008,23 @@ describe Ci::Build do
describe '#keep_artifacts!' do describe '#keep_artifacts!' do
let(:build) { create(:ci_build, artifacts_expire_at: Time.now + 7.days) } let(:build) { create(:ci_build, artifacts_expire_at: Time.now + 7.days) }
subject { build.keep_artifacts! }
it 'resets expire_at' do it 'resets expire_at' do
build.keep_artifacts! subject
expect(build.artifacts_expire_at).to be_nil expect(build.artifacts_expire_at).to be_nil
end end
context 'when having artifacts files' do
let!(:artifact) { create(:ci_job_artifact, job: build, expire_in: '7 days') }
it 'resets the expiry of dependent objects' do
subject
expect(artifact.reload.expire_at).to be_nil
end
end
end end
describe '#merge_request' do describe '#merge_request' do
...@@ -1813,4 +1921,77 @@ describe Ci::Build do ...@@ -1813,4 +1921,77 @@ describe Ci::Build do
end end
end end
end end
describe '.matches_tag_ids' do
set(:build) { create(:ci_build, project: project, user: user) }
let(:tag_ids) { ::ActsAsTaggableOn::Tag.named_any(tag_list).ids }
subject { described_class.where(id: build).matches_tag_ids(tag_ids) }
before do
build.update(tag_list: build_tag_list)
end
context 'when the build has different tags' do
let(:build_tag_list) { %w(A B) }
let(:tag_list) { %w(C D) }
it "does not match a build" do
is_expected.not_to contain_exactly(build)
end
end
context 'when the build tags are a subset of the given tags' do
let(:build_tag_list) { %w(A B) }
let(:tag_list) { %w(A B C D) }
it "does match a build" do
is_expected.to contain_exactly(build)
end
end
context 'when build does not have tags' do
let(:build_tag_list) { [] }
let(:tag_list) { %w(C D) }
it "does match a build" do
is_expected.to contain_exactly(build)
end
end
context 'when the build tags are not a subset of the given tags' do
let(:build_tag_list) { %w(A B C) }
let(:tag_list) { %w(C D) }
it "does not match a build" do
is_expected.not_to contain_exactly(build)
end
end
end
describe '.matches_tags' do
set(:build) { create(:ci_build, project: project, user: user) }
subject { described_class.where(id: build).with_any_tags }
before do
build.update(tag_list: tag_list)
end
context 'when the build has tags' do
let(:tag_list) { %w(A B) }
it "does match a build" do
is_expected.to contain_exactly(build)
end
end
context 'when the build has no tags' do
let(:tag_list) { [] }
it "does not match a build" do
is_expected.not_to contain_exactly(build)
end
end
end
end end
require 'spec_helper'
describe Ci::JobArtifact do
set(:artifact) { create(:ci_job_artifact, :archive) }
describe "Associations" do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:job) }
end
it { is_expected.to respond_to(:file) }
it { is_expected.to respond_to(:created_at) }
it { is_expected.to respond_to(:updated_at) }
describe '#set_size' do
it 'sets the size' do
expect(artifact.size).to eq(106365)
end
end
describe '#file' do
subject { artifact.file }
context 'the uploader api' do
it { is_expected.to respond_to(:store_dir) }
it { is_expected.to respond_to(:cache_dir) }
it { is_expected.to respond_to(:work_dir) }
end
end
describe '#expire_in' do
subject { artifact.expire_in }
it { is_expected.to be_nil }
context 'when expire_at is specified' do
let(:expire_at) { Time.now + 7.days }
before do
artifact.expire_at = expire_at
end
it { is_expected.to be_within(5).of(expire_at - Time.now) }
end
end
describe '#expire_in=' do
subject { artifact.expire_in }
it 'accepts a valid duration' do
artifact.expire_in = '7 days'
is_expected.to be_within(10).of(7.days.to_i)
end
it 'raises an error for an invalid duration' do
expect { artifact.expire_in = '7 elephants' }.to raise_error(ChronicDuration::DurationParseError)
is_expected.to be_nil
end
it 'can be reset to nil' do
artifact.expire_in = nil
is_expected.to be_nil
end
it 'treats 0 as no expiry' do
artifact.expire_in = '0'
is_expected.to be_nil
end
end
end
...@@ -133,15 +133,29 @@ describe ProjectStatistics do ...@@ -133,15 +133,29 @@ describe ProjectStatistics do
describe '#update_build_artifacts_size' do describe '#update_build_artifacts_size' do
let!(:pipeline) { create(:ci_pipeline, project: project) } let!(:pipeline) { create(:ci_pipeline, project: project) }
let!(:build1) { create(:ci_build, pipeline: pipeline, artifacts_size: 45.megabytes) }
let!(:build2) { create(:ci_build, pipeline: pipeline, artifacts_size: 56.megabytes) } context 'when new job artifacts are calculated' do
let(:ci_build) { create(:ci_build, pipeline: pipeline) }
before do before do
create(:ci_job_artifact, :archive, project: pipeline.project, job: ci_build)
end
it "stores the size of related build artifacts" do
statistics.update_build_artifacts_size statistics.update_build_artifacts_size
expect(statistics.build_artifacts_size).to be(106365)
end end
end
context 'when legacy artifacts are used' do
let!(:ci_build) { create(:ci_build, pipeline: pipeline, artifacts_size: 10.megabytes) }
it "stores the size of related build artifacts" do it "stores the size of related build artifacts" do
expect(statistics.build_artifacts_size).to eq 101.megabytes statistics.update_build_artifacts_size
expect(statistics.build_artifacts_size).to eq(10.megabytes)
end
end end
end end
......
...@@ -945,7 +945,7 @@ describe API::Runner do ...@@ -945,7 +945,7 @@ describe API::Runner do
context 'when artifacts are being stored inside of tmp path' do context 'when artifacts are being stored inside of tmp path' do
before do before do
# by configuring this path we allow to pass temp file from any path # by configuring this path we allow to pass temp file from any path
allow(ArtifactUploader).to receive(:artifacts_upload_path).and_return('/') allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return('/')
end end
context 'when job has been erased' do context 'when job has been erased' do
...@@ -985,15 +985,6 @@ describe API::Runner do ...@@ -985,15 +985,6 @@ describe API::Runner do
it_behaves_like 'successful artifacts upload' it_behaves_like 'successful artifacts upload'
end end
context 'when updates artifact' do
before do
upload_artifacts(file_upload2, headers_with_token)
upload_artifacts(file_upload, headers_with_token)
end
it_behaves_like 'successful artifacts upload'
end
context 'when using runners token' do context 'when using runners token' do
it 'responds with forbidden' do it 'responds with forbidden' do
upload_artifacts(file_upload, headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.project.runners_token)) upload_artifacts(file_upload, headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.project.runners_token))
...@@ -1106,7 +1097,7 @@ describe API::Runner do ...@@ -1106,7 +1097,7 @@ describe API::Runner do
expect(response).to have_gitlab_http_status(201) expect(response).to have_gitlab_http_status(201)
expect(stored_artifacts_file.original_filename).to eq(artifacts.original_filename) expect(stored_artifacts_file.original_filename).to eq(artifacts.original_filename)
expect(stored_metadata_file.original_filename).to eq(metadata.original_filename) expect(stored_metadata_file.original_filename).to eq(metadata.original_filename)
expect(stored_artifacts_size).to eq(71759) expect(stored_artifacts_size).to eq(72821)
end end
end end
...@@ -1131,7 +1122,7 @@ describe API::Runner do ...@@ -1131,7 +1122,7 @@ describe API::Runner do
# by configuring this path we allow to pass file from @tmpdir only # by configuring this path we allow to pass file from @tmpdir only
# but all temporary files are stored in system tmp directory # but all temporary files are stored in system tmp directory
@tmpdir = Dir.mktmpdir @tmpdir = Dir.mktmpdir
allow(ArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir) allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir)
end end
after do after do
......
require 'spec_helper' require 'spec_helper'
describe PipelineSerializer do describe PipelineSerializer do
let(:user) { create(:user) } set(:user) { create(:user) }
let(:serializer) do let(:serializer) do
described_class.new(current_user: user) described_class.new(current_user: user)
...@@ -117,7 +117,7 @@ describe PipelineSerializer do ...@@ -117,7 +117,7 @@ describe PipelineSerializer do
shared_examples 'no N+1 queries' do shared_examples 'no N+1 queries' do
it 'verifies number of queries', :request_store do it 'verifies number of queries', :request_store do
recorded = ActiveRecord::QueryRecorder.new { subject } recorded = ActiveRecord::QueryRecorder.new { subject }
expect(recorded.count).to be_within(1).of(57) expect(recorded.count).to be_within(1).of(36)
expect(recorded.cached_count).to eq(0) expect(recorded.cached_count).to eq(0)
end end
end end
......
...@@ -15,16 +15,14 @@ module Ci ...@@ -15,16 +15,14 @@ module Ci
describe '#execute' do describe '#execute' do
context 'runner follow tag list' do context 'runner follow tag list' do
it "picks build with the same tag" do it "picks build with the same tag" do
pending_job.tag_list = ["linux"] pending_job.update(tag_list: ["linux"])
pending_job.save specific_runner.update(tag_list: ["linux"])
specific_runner.tag_list = ["linux"]
expect(execute(specific_runner)).to eq(pending_job) expect(execute(specific_runner)).to eq(pending_job)
end end
it "does not pick build with different tag" do it "does not pick build with different tag" do
pending_job.tag_list = ["linux"] pending_job.update(tag_list: ["linux"])
pending_job.save specific_runner.update(tag_list: ["win32"])
specific_runner.tag_list = ["win32"]
expect(execute(specific_runner)).to be_falsey expect(execute(specific_runner)).to be_falsey
end end
...@@ -33,13 +31,12 @@ module Ci ...@@ -33,13 +31,12 @@ module Ci
end end
it "does not pick build with tag" do it "does not pick build with tag" do
pending_job.tag_list = ["linux"] pending_job.update(tag_list: ["linux"])
pending_job.save
expect(execute(specific_runner)).to be_falsey expect(execute(specific_runner)).to be_falsey
end end
it "pick build without tag" do it "pick build without tag" do
specific_runner.tag_list = ["win32"] specific_runner.update(tag_list: ["win32"])
expect(execute(specific_runner)).to eq(pending_job) expect(execute(specific_runner)).to eq(pending_job)
end end
end end
...@@ -172,7 +169,7 @@ module Ci ...@@ -172,7 +169,7 @@ module Ci
context 'when first build is stalled' do context 'when first build is stalled' do
before do before do
pending_job.lock_version = 10 pending_job.update(lock_version: 0)
end end
subject { described_class.new(specific_runner).execute } subject { described_class.new(specific_runner).execute }
...@@ -182,7 +179,7 @@ module Ci ...@@ -182,7 +179,7 @@ module Ci
before do before do
allow_any_instance_of(Ci::RegisterJobService).to receive(:builds_for_specific_runner) allow_any_instance_of(Ci::RegisterJobService).to receive(:builds_for_specific_runner)
.and_return([pending_job, other_build]) .and_return(Ci::Build.where(id: [pending_job, other_build]))
end end
it "receives second build from the queue" do it "receives second build from the queue" do
...@@ -194,7 +191,7 @@ module Ci ...@@ -194,7 +191,7 @@ module Ci
context 'when single build is in queue' do context 'when single build is in queue' do
before do before do
allow_any_instance_of(Ci::RegisterJobService).to receive(:builds_for_specific_runner) allow_any_instance_of(Ci::RegisterJobService).to receive(:builds_for_specific_runner)
.and_return([pending_job]) .and_return(Ci::Build.where(id: pending_job))
end end
it "does not receive any valid result" do it "does not receive any valid result" do
...@@ -205,7 +202,7 @@ module Ci ...@@ -205,7 +202,7 @@ module Ci
context 'when there is no build in queue' do context 'when there is no build in queue' do
before do before do
allow_any_instance_of(Ci::RegisterJobService).to receive(:builds_for_specific_runner) allow_any_instance_of(Ci::RegisterJobService).to receive(:builds_for_specific_runner)
.and_return([]) .and_return(Ci::Build.none)
end end
it "does not receive builds but result is valid" do it "does not receive builds but result is valid" do
......
...@@ -17,7 +17,7 @@ describe Ci::RetryBuildService do ...@@ -17,7 +17,7 @@ describe Ci::RetryBuildService do
%i[id status user token coverage trace runner artifacts_expire_at %i[id status user token coverage trace runner artifacts_expire_at
artifacts_file artifacts_metadata artifacts_size created_at artifacts_file artifacts_metadata artifacts_size created_at
updated_at started_at finished_at queued_at erased_by updated_at started_at finished_at queued_at erased_by
erased_at auto_canceled_by].freeze erased_at auto_canceled_by job_artifacts job_artifacts_archive job_artifacts_metadata].freeze
IGNORE_ACCESSORS = IGNORE_ACCESSORS =
%i[type lock_version target_url base_tags trace_sections %i[type lock_version target_url base_tags trace_sections
...@@ -34,7 +34,7 @@ describe Ci::RetryBuildService do ...@@ -34,7 +34,7 @@ describe Ci::RetryBuildService do
end end
let(:build) do let(:build) do
create(:ci_build, :failed, :artifacts_expired, :erased, create(:ci_build, :failed, :artifacts, :expired, :erased,
:queued, :coverage, :tags, :allowed_to_fail, :on_tag, :queued, :coverage, :tags, :allowed_to_fail, :on_tag,
:triggered, :trace, :teardown_environment, :triggered, :trace, :teardown_environment,
description: 'my-job', stage: 'test', pipeline: pipeline, description: 'my-job', stage: 'test', pipeline: pipeline,
......
require "spec_helper" require "spec_helper"
describe Projects::UpdatePagesService do describe Projects::UpdatePagesService do
let(:project) { create(:project, :repository) } set(:project) { create(:project, :repository) }
let(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) } set(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
let(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') } set(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') }
let(:invalid_file) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png') } let(:invalid_file) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png') }
let(:extension) { 'zip' }
let(:file) { fixture_file_upload(Rails.root + "spec/fixtures/pages.#{extension}") }
let(:empty_file) { fixture_file_upload(Rails.root + "spec/fixtures/pages_empty.#{extension}") }
let(:metadata) do
filename = Rails.root + "spec/fixtures/pages.#{extension}.meta"
fixture_file_upload(filename) if File.exist?(filename)
end
subject { described_class.new(project, build) } subject { described_class.new(project, build) }
...@@ -12,18 +20,85 @@ describe Projects::UpdatePagesService do ...@@ -12,18 +20,85 @@ describe Projects::UpdatePagesService do
project.remove_pages project.remove_pages
end end
context 'legacy artifacts' do
%w(tar.gz zip).each do |format| %w(tar.gz zip).each do |format|
let(:extension) { format }
context "for valid #{format}" do context "for valid #{format}" do
let(:file) { fixture_file_upload(Rails.root + "spec/fixtures/pages.#{format}") } before do
let(:empty_file) { fixture_file_upload(Rails.root + "spec/fixtures/pages_empty.#{format}") } build.update_attributes(legacy_artifacts_file: file)
let(:metadata) do build.update_attributes(legacy_artifacts_metadata: metadata)
filename = Rails.root + "spec/fixtures/pages.#{format}.meta"
fixture_file_upload(filename) if File.exist?(filename)
end end
describe 'pages artifacts' do
context 'with expiry date' do
before do before do
build.update_attributes(artifacts_file: file) build.artifacts_expire_in = "2 days"
build.update_attributes(artifacts_metadata: metadata) end
it "doesn't delete artifacts" do
expect(execute).to eq(:success)
expect(build.reload.artifacts?).to eq(true)
end
end
context 'without expiry date' do
it "does delete artifacts" do
expect(execute).to eq(:success)
expect(build.reload.artifacts?).to eq(false)
end
end
end
it 'succeeds' do
expect(project.pages_deployed?).to be_falsey
expect(execute).to eq(:success)
expect(project.pages_deployed?).to be_truthy
# Check that all expected files are extracted
%w[index.html zero .hidden/file].each do |filename|
expect(File.exist?(File.join(project.public_pages_path, filename))).to be_truthy
end
end
it 'limits pages size' do
stub_application_setting(max_pages_size: 1)
expect(execute).not_to eq(:success)
end
it 'removes pages after destroy' do
expect(PagesWorker).to receive(:perform_in)
expect(project.pages_deployed?).to be_falsey
expect(execute).to eq(:success)
expect(project.pages_deployed?).to be_truthy
project.destroy
expect(project.pages_deployed?).to be_falsey
end
it 'fails if sha on branch is not latest' do
build.update_attributes(ref: 'feature')
expect(execute).not_to eq(:success)
end
it 'fails for empty file fails' do
build.update_attributes(legacy_artifacts_file: empty_file)
expect(execute).not_to eq(:success)
end
end
end
end
context 'for new artifacts' do
context "for a valid job" do
before do
create(:ci_job_artifact, file: file, job: build)
create(:ci_job_artifact, file_type: :metadata, file: metadata, job: build)
build.reload
end end
describe 'pages artifacts' do describe 'pages artifacts' do
...@@ -35,7 +110,7 @@ describe Projects::UpdatePagesService do ...@@ -35,7 +110,7 @@ describe Projects::UpdatePagesService do
it "doesn't delete artifacts" do it "doesn't delete artifacts" do
expect(execute).to eq(:success) expect(execute).to eq(:success)
expect(build.reload.artifacts_file?).to eq(true) expect(build.artifacts?).to eq(true)
end end
end end
...@@ -43,7 +118,7 @@ describe Projects::UpdatePagesService do ...@@ -43,7 +118,7 @@ describe Projects::UpdatePagesService do
it "does delete artifacts" do it "does delete artifacts" do
expect(execute).to eq(:success) expect(execute).to eq(:success)
expect(build.reload.artifacts_file?).to eq(false) expect(build.reload.artifacts?).to eq(false)
end end
end end
end end
...@@ -74,13 +149,14 @@ describe Projects::UpdatePagesService do ...@@ -74,13 +149,14 @@ describe Projects::UpdatePagesService do
end end
it 'fails if sha on branch is not latest' do it 'fails if sha on branch is not latest' do
pipeline.update_attributes(sha: 'old_sha') build.update_attributes(ref: 'feature')
build.update_attributes(artifacts_file: file)
expect(execute).not_to eq(:success) expect(execute).not_to eq(:success)
end end
it 'fails for empty file fails' do it 'fails for empty file fails' do
build.update_attributes(artifacts_file: empty_file) build.job_artifacts_archive.update_attributes(file: empty_file)
expect(execute).not_to eq(:success) expect(execute).not_to eq(:success)
end end
end end
...@@ -97,7 +173,7 @@ describe Projects::UpdatePagesService do ...@@ -97,7 +173,7 @@ describe Projects::UpdatePagesService do
end end
it 'fails for invalid archive' do it 'fails for invalid archive' do
build.update_attributes(artifacts_file: invalid_file) build.update_attributes(legacy_artifacts_file: invalid_file)
expect(execute).not_to eq(:success) expect(execute).not_to eq(:success)
end end
...@@ -108,8 +184,8 @@ describe Projects::UpdatePagesService do ...@@ -108,8 +184,8 @@ describe Projects::UpdatePagesService do
file = fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip') file = fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip')
metafile = fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip.meta') metafile = fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip.meta')
build.update_attributes(artifacts_file: file) build.update_attributes(legacy_artifacts_file: file)
build.update_attributes(artifacts_metadata: metafile) build.update_attributes(legacy_artifacts_metadata: metafile)
allow(build).to receive(:artifacts_metadata_entry) allow(build).to receive(:artifacts_metadata_entry)
.and_return(metadata) .and_return(metadata)
......
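The rewritten spec exercises two storage paths: legacy artifacts mounted directly on the build (legacy_artifacts_file) and the new per-type Ci::JobArtifact rows (job_artifacts_archive). A minimal sketch of how the archive might be resolved from either location; the helper and its name are assumptions, not taken from this diff:

# Hypothetical helper; the actual Projects::UpdatePagesService internals
# are not shown in this excerpt.
def artifacts_archive_path(build)
  if build.job_artifacts_archive      # new storage: a Ci::JobArtifact row
    build.job_artifacts_archive.file.path
  elsif build.legacy_artifacts_file?  # legacy storage: file mounted on ci_builds
    build.legacy_artifacts_file.path
  end
end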
@@ -146,7 +146,7 @@ describe WebHookService do
    let(:system_hook) { create(:system_hook) }

    it 'enqueue WebHookWorker' do
-      expect(Sidekiq::Client).to receive(:enqueue).with(WebHookWorker, project_hook.id, data, 'push_hooks')
+      expect(WebHookWorker).to receive(:perform_async).with(project_hook.id, data, 'push_hooks')

      described_class.new(project_hook, data, 'push_hooks').async_execute
    end
...
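The expectation moves from the low-level Sidekiq::Client.enqueue API to the worker's own perform_async. A minimal sketch of the service side this expectation implies; the instance variable names are assumptions:

# Hypothetical sketch of the method the spec above exercises.
def async_execute
  # Enqueue through the worker class itself rather than Sidekiq::Client,
  # so the queue configured via ApplicationWorker is honoured.
  WebHookWorker.perform_async(hook.id, data, hook_name)
end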
@@ -120,6 +120,7 @@ module TestEnv
    FileUtils.mkdir_p(repos_path)
    FileUtils.mkdir_p(backup_path)
    FileUtils.mkdir_p(pages_path)
+    FileUtils.mkdir_p(artifacts_path)
  end

  def clean_gitlab_test_path
@@ -233,6 +234,10 @@ module TestEnv
    Gitlab.config.pages.path
  end

+  def artifacts_path
+    Gitlab.config.artifacts.path
+  end

  # When no cached assets exist, manually hit the root path to create them
  #
  # Otherwise they'd be created by the first test, often timing out and
...
require 'spec_helper'

describe JobArtifactUploader do
  let(:job_artifact) { create(:ci_job_artifact) }
  let(:uploader) { described_class.new(job_artifact, :file) }
  let(:local_path) { Gitlab.config.artifacts.path }

  describe '#store_dir' do
    subject { uploader.store_dir }

    let(:path) { "#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/#{job_artifact.project_id}/#{job_artifact.id}" }

    context 'when using local storage' do
      it { is_expected.to start_with(local_path) }
      it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
      it { is_expected.to end_with(path) }
    end
  end

  describe '#cache_dir' do
    subject { uploader.cache_dir }

    it { is_expected.to start_with(local_path) }
    it { is_expected.to end_with('/tmp/cache') }
  end

  describe '#work_dir' do
    subject { uploader.work_dir }

    it { is_expected.to start_with(local_path) }
    it { is_expected.to end_with('/tmp/work') }
  end

  context 'file is stored in valid local_path' do
    let(:file) do
      fixture_file_upload(
        Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
    end

    before do
      uploader.store!(file)
    end

    subject { uploader.file.path }

    it { is_expected.to start_with(local_path) }
    it { is_expected.to include("/#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/") }
    it { is_expected.to include("/#{job_artifact.project_id}/") }
    it { is_expected.to end_with("ci_build_artifacts.zip") }
  end
end
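The expectations pin down the new hashed layout: <artifacts_path>/<hh>/<hh>/<64-hex>/<YYYY_MM_DD>/<project_id>/<artifact_id>. A sketch of a store_dir matching that shape; the input to the hash is an assumption, since only the resulting path format is asserted above:

# Hypothetical reconstruction of the store_dir the spec describes.
require 'digest'

class JobArtifactUploader < GitlabUploader
  def store_dir
    hash = Digest::SHA2.hexdigest(model.project_id.to_s) # assumed hash input
    date = model.created_at.utc.strftime('%Y_%m_%d')

    File.join(Gitlab.config.artifacts.path,
              hash[0, 2], hash[2, 2], hash,
              date, model.project_id.to_s, model.id.to_s)
  end
end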
require 'rails_helper'

-describe ArtifactUploader do
+describe LegacyArtifactUploader do
  let(:job) { create(:ci_build) }
-  let(:uploader) { described_class.new(job, :artifacts_file) }
-  let(:path) { Gitlab.config.artifacts.path }
+  let(:uploader) { described_class.new(job, :legacy_artifacts_file) }
+  let(:local_path) { Gitlab.config.artifacts.path }

-  describe '.local_artifacts_store' do
-    subject { described_class.local_artifacts_store }
+  describe '.local_store_path' do
+    subject { described_class.local_store_path }

    it "delegate to artifacts path" do
      expect(Gitlab.config.artifacts).to receive(:path)
@@ -18,28 +18,32 @@ describe ArtifactUploader do
  describe '.artifacts_upload_path' do
    subject { described_class.artifacts_upload_path }

-    it { is_expected.to start_with(path) }
+    it { is_expected.to start_with(local_path) }
    it { is_expected.to end_with('tmp/uploads/') }
  end

  describe '#store_dir' do
    subject { uploader.store_dir }

-    it { is_expected.to start_with(path) }
-    it { is_expected.to end_with("#{job.project_id}/#{job.id}") }
+    let(:path) { "#{job.created_at.utc.strftime('%Y_%m')}/#{job.project_id}/#{job.id}" }

+    context 'when using local storage' do
+      it { is_expected.to start_with(local_path) }
+      it { is_expected.to end_with(path) }
+    end
  end

  describe '#cache_dir' do
    subject { uploader.cache_dir }

-    it { is_expected.to start_with(path) }
+    it { is_expected.to start_with(local_path) }
    it { is_expected.to end_with('/tmp/cache') }
  end

  describe '#work_dir' do
    subject { uploader.work_dir }

-    it { is_expected.to start_with(path) }
+    it { is_expected.to start_with(local_path) }
    it { is_expected.to end_with('/tmp/work') }
  end
@@ -51,11 +55,23 @@ describe ArtifactUploader do
    subject { uploader.filename }

    it { is_expected.to be_nil }
  end

-  context 'with artifacts' do
-    let(:job) { create(:ci_build, :artifacts) }
-
-    it { is_expected.not_to be_nil }
-  end
+  context 'file is stored in valid path' do
+    let(:file) do
+      fixture_file_upload(
+        Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
+    end

+    before do
+      uploader.store!(file)
+    end

+    subject { uploader.file.path }

+    it { is_expected.to start_with(local_path) }
+    it { is_expected.to include("/#{job.created_at.utc.strftime('%Y_%m')}/") }
+    it { is_expected.to include("/#{job.project_id}/") }
+    it { is_expected.to end_with("ci_build_artifacts.zip") }
+  end
end
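By contrast, the legacy uploader keeps a flat <year_month>/<project_id>/<job_id> layout under the artifacts path. A sketch consistent with the expectations above; only local_store_path's delegation to Gitlab.config.artifacts.path is confirmed by the spec, the store_dir body is inferred:

# Hypothetical sketch matching the #store_dir expectations above.
class LegacyArtifactUploader < GitlabUploader
  def self.local_store_path
    Gitlab.config.artifacts.path
  end

  def store_dir
    default_path = "#{model.created_at.utc.strftime('%Y_%m')}/#{model.project_id}/#{model.id}"
    File.join(self.class.local_store_path, default_path)
  end
end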
@@ -65,7 +65,6 @@ describe AuthorizedProjectsWorker do
      args_list = build_args_list(project.owner.id)

      push_bulk_args = {
        'class' => described_class,
-        'queue' => described_class.sidekiq_options['queue'],
        'args' => args_list
      }
...
@@ -10,35 +10,4 @@ describe BackgroundMigrationWorker, :sidekiq do
      described_class.new.perform('Foo', [10, 20])
    end
  end
-
-  describe '.perform_bulk' do
-    it 'enqueues background migrations in bulk' do
-      Sidekiq::Testing.fake! do
-        described_class.perform_bulk([['Foo', [1]], ['Foo', [2]]])
-
-        expect(described_class.jobs.count).to eq 2
-        expect(described_class.jobs).to all(include('enqueued_at'))
-      end
-    end
-  end
-
-  describe '.perform_bulk_in' do
-    context 'when delay is valid' do
-      it 'correctly schedules background migrations' do
-        Sidekiq::Testing.fake! do
-          described_class.perform_bulk_in(1.minute, [['Foo', [1]], ['Foo', [2]]])
-
-          expect(described_class.jobs.count).to eq 2
-          expect(described_class.jobs).to all(include('at'))
-        end
-      end
-    end
-
-    context 'when delay is invalid' do
-      it 'raises an ArgumentError exception' do
-        expect { described_class.perform_bulk_in(-60, [['Foo']]) }
-          .to raise_error(ArgumentError)
-      end
-    end
-  end
end
require 'spec_helper'

describe ApplicationWorker do
  let(:worker) do
    Class.new do
      def self.name
        'Gitlab::Foo::Bar::DummyWorker'
      end

      include ApplicationWorker
    end
  end

  describe 'Sidekiq options' do
    it 'sets the queue name based on the class name' do
      expect(worker.sidekiq_options['queue']).to eq('foo_bar_dummy')
    end
  end

  describe '.queue' do
    it 'returns the queue name' do
      worker.sidekiq_options queue: :some_queue

      expect(worker.queue).to eq('some_queue')
    end
  end

  describe '.bulk_perform_async' do
    it 'enqueues jobs in bulk' do
      Sidekiq::Testing.fake! do
        worker.bulk_perform_async([['Foo', [1]], ['Foo', [2]]])

        expect(worker.jobs.count).to eq 2
        expect(worker.jobs).to all(include('enqueued_at'))
      end
    end
  end

  describe '.bulk_perform_in' do
    context 'when delay is valid' do
      it 'correctly schedules jobs' do
        Sidekiq::Testing.fake! do
          worker.bulk_perform_in(1.minute, [['Foo', [1]], ['Foo', [2]]])

          expect(worker.jobs.count).to eq 2
          expect(worker.jobs).to all(include('at'))
        end
      end
    end

    context 'when delay is invalid' do
      it 'raises an ArgumentError exception' do
        expect { worker.bulk_perform_in(-60, [['Foo']]) }
          .to raise_error(ArgumentError)
      end
    end
  end
end
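A minimal sketch of the ApplicationWorker concern these expectations describe. The queue-name derivation is inferred from 'Gitlab::Foo::Bar::DummyWorker' mapping to 'foo_bar_dummy'; the exact implementation is not part of this excerpt:

# Hypothetical sketch of the concern the spec above exercises.
module ApplicationWorker
  extend ActiveSupport::Concern

  included do
    include Sidekiq::Worker

    # 'Gitlab::Foo::Bar::DummyWorker' -> 'foo_bar_dummy' (inferred from the spec)
    sidekiq_options queue: name.sub(/\AGitlab::/, '').sub(/Worker\z/, '').underscore.tr('/', '_')
  end

  class_methods do
    def queue
      sidekiq_options['queue'].to_s
    end

    def bulk_perform_async(args_list)
      Sidekiq::Client.push_bulk('class' => self, 'args' => args_list)
    end

    def bulk_perform_in(delay, args_list)
      now = Time.now.to_i
      schedule = now + delay.to_i

      raise ArgumentError, 'The schedule time must be in the future!' if schedule <= now

      Sidekiq::Client.push_bulk('class' => self, 'args' => args_list, 'at' => schedule)
    end
  end
end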
@@ -3,7 +3,11 @@ require 'spec_helper'
describe ClusterQueue do
  let(:worker) do
    Class.new do
-      include Sidekiq::Worker
+      def self.name
+        'DummyWorker'
+      end
+
+      include ApplicationWorker
      include ClusterQueue
    end
  end
...
@@ -3,7 +3,11 @@ require 'spec_helper'
describe CronjobQueue do
  let(:worker) do
    Class.new do
-      include Sidekiq::Worker
+      def self.name
+        'DummyWorker'
+      end
+
+      include ApplicationWorker
      include CronjobQueue
    end
  end
...
require 'spec_helper'

describe DedicatedSidekiqQueue do
  let(:worker) do
    Class.new do
      def self.name
        'Foo::Bar::DummyWorker'
      end

      include Sidekiq::Worker
      include DedicatedSidekiqQueue
    end
  end

  describe 'queue names' do
    it 'sets the queue name based on the class name' do
      expect(worker.sidekiq_options['queue']).to eq('foo_bar_dummy')
    end
  end
end
@@ -3,6 +3,10 @@ require 'spec_helper'
describe Gitlab::GithubImport::ObjectImporter do
  let(:worker) do
    Class.new do
+      def self.name
+        'DummyWorker'
+      end
+
      include(Gitlab::GithubImport::ObjectImporter)

      def counter_name
...
@@ -3,7 +3,11 @@ require 'spec_helper'
describe Gitlab::GithubImport::Queue do
  it 'sets the Sidekiq options for the worker' do
    worker = Class.new do
-      include Sidekiq::Worker
+      def self.name
+        'DummyWorker'
+      end
+
+      include ApplicationWorker
      include Gitlab::GithubImport::Queue
    end
...
@@ -3,7 +3,11 @@ require 'spec_helper'
describe PipelineQueue do
  let(:worker) do
    Class.new do
-      include Sidekiq::Worker
+      def self.name
+        'DummyWorker'
+      end
+
+      include ApplicationWorker
      include PipelineQueue
    end
  end
...
@@ -3,7 +3,11 @@ require 'spec_helper'
describe RepositoryCheckQueue do
  let(:worker) do
    Class.new do
-      include Sidekiq::Worker
+      def self.name
+        'DummyWorker'
+      end
+
+      include ApplicationWorker
      include RepositoryCheckQueue
    end
  end
...
require 'spec_helper'

describe 'Every Sidekiq worker' do
-  let(:workers) do
-    root = Rails.root.join('app', 'workers')
-    concerns = root.join('concerns').to_s
-
-    workers = Dir[root.join('**', '*.rb')]
-      .reject { |path| path.start_with?(concerns) }
-
-    workers.map do |path|
-      ns = Pathname.new(path).relative_path_from(root).to_s.gsub('.rb', '')
-
-      ns.camelize.constantize
-    end
+  it 'includes ApplicationWorker' do
+    expect(Gitlab::SidekiqConfig.workers).to all(include(ApplicationWorker))
  end

  it 'does not use the default queue' do
-    workers.each do |worker|
-      expect(worker.sidekiq_options['queue'].to_s).not_to eq('default')
-    end
+    expect(Gitlab::SidekiqConfig.workers.map(&:queue)).not_to include('default')
  end

  it 'uses the cronjob queue when the worker runs as a cronjob' do
-    cron_workers = Settings.cron_jobs
-      .map { |job_name, options| options['job_class'].constantize }
-      .to_set
-
-    workers.each do |worker|
-      next unless cron_workers.include?(worker)
-
-      expect(worker.sidekiq_options['queue'].to_s).to eq('cronjob')
-    end
+    expect(Gitlab::SidekiqConfig.cron_workers.map(&:queue)).to all(eq('cronjob'))
  end

  it 'defines the queue in the Sidekiq configuration file' do
-    config = YAML.load_file(Rails.root.join('config', 'sidekiq_queues.yml').to_s)
-    queue_names = config[:queues].map { |(queue, _)| queue }.to_set
-
-    workers.each do |worker|
-      expect(queue_names).to include(worker.sidekiq_options['queue'].to_s)
-    end
+    config_queue_names = Gitlab::SidekiqConfig.config_queues.to_set
+
+    expect(Gitlab::SidekiqConfig.worker_queues).to all(be_in(config_queue_names))
  end
end
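The per-spec filesystem scan moves into a shared helper. A sketch of what a Gitlab::SidekiqConfig along these lines could look like; the method bodies are assumptions, inferred from the removed inline logic and the expectations above:

# Hypothetical sketch of the helper module the rewritten spec relies on.
module Gitlab
  module SidekiqConfig
    # All worker classes under app/workers, skipping the concerns directory.
    def self.workers
      root = Rails.root.join('app', 'workers')

      Dir[root.join('**', '*.rb')]
        .reject { |path| path.include?('/concerns/') }
        .map { |path| Pathname.new(path).relative_path_from(root).to_s.sub('.rb', '').camelize.constantize }
    end

    # Workers scheduled via Settings.cron_jobs.
    def self.cron_workers
      Settings.cron_jobs.map { |_, options| options['job_class'].constantize }
    end

    def self.worker_queues
      workers.map(&:queue)
    end

    # Queue names declared in config/sidekiq_queues.yml.
    def self.config_queues
      config = YAML.load_file(Rails.root.join('config', 'sidekiq_queues.yml'))
      config[:queues].map { |queue, _| queue }
    end
  end
end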
@@ -11,12 +11,8 @@ describe ExpireBuildInstanceArtifactsWorker do
  end

  context 'with expired artifacts' do
-    let(:artifacts_expiry) { { artifacts_expire_at: Time.now - 7.days } }
-
    context 'when associated project is valid' do
-      let(:build) do
-        create(:ci_build, :artifacts, artifacts_expiry)
-      end
+      let(:build) { create(:ci_build, :artifacts, :expired) }

      it 'does expire' do
        expect(build.reload.artifacts_expired?).to be_truthy
@@ -26,14 +22,14 @@ describe ExpireBuildInstanceArtifactsWorker do
        expect(build.reload.artifacts_file.exists?).to be_falsey
      end

-      it 'does nullify artifacts_file column' do
-        expect(build.reload.artifacts_file_identifier).to be_nil
+      it 'does remove the job artifact record' do
+        expect(build.reload.job_artifacts_archive).to be_nil
      end
    end
  end

  context 'with not yet expired artifacts' do
-    let(:build) do
+    set(:build) do
      create(:ci_build, :artifacts, artifacts_expire_at: Time.now + 7.days)
    end
@@ -45,8 +41,8 @@ describe ExpireBuildInstanceArtifactsWorker do
      expect(build.reload.artifacts_file.exists?).to be_truthy
    end

-    it 'does not nullify artifacts_file column' do
-      expect(build.reload.artifacts_file_identifier).not_to be_nil
+    it 'does not remove the job artifact record' do
+      expect(build.reload.job_artifacts_archive).not_to be_nil
    end
  end
@@ -61,13 +57,13 @@ describe ExpireBuildInstanceArtifactsWorker do
      expect(build.reload.artifacts_file.exists?).to be_truthy
    end

-    it 'does not nullify artifacts_file column' do
-      expect(build.reload.artifacts_file_identifier).not_to be_nil
+    it 'does not remove the job artifact record' do
+      expect(build.reload.job_artifacts_archive).not_to be_nil
    end
  end

  context 'for expired artifacts' do
-    let(:build) { create(:ci_build, artifacts_expire_at: Time.now - 7.days) }
+    let(:build) { create(:ci_build, :expired) }

    it 'is still expired' do
      expect(build.reload.artifacts_expired?).to be_truthy
...
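The inline artifacts_expire_at attribute used by the old spec is replaced with an :expired factory trait. A plausible trait definition, inferred from the removed inline attribute; the factory change itself is not part of this excerpt:

# Hypothetical trait on the ci_build factory, inferred from the
# `artifacts_expire_at: Time.now - 7.days` attribute it replaces above.
trait :expired do
  artifacts_expire_at { Time.now - 7.days }
end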