Commit 0b1ec5c0 authored by GitLab Bot

Merge remote-tracking branch 'upstream/master' into ce-to-ee-2018-04-06

# Conflicts:
#	doc/user/admin_area/settings/visibility_and_access_controls.md

[ci skip]
parents 89aca0bf b9c62c21
@@ -17,6 +17,7 @@
 * "text": "passed",
 * "label": "passed",
 * "group": "success",
+* "tooltip": "passed",
 * "details_path": "/root/ci-mock/builds/4256",
 * "action": {
 * "icon": "retry",
@@ -69,12 +70,12 @@
 textBuilder.push(this.job.name);
 }
-if (this.job.name && this.status.label) {
+if (this.job.name && this.status.tooltip) {
 textBuilder.push('-');
 }
-if (this.status.label) {
-  textBuilder.push(`${this.job.status.label}`);
+if (this.status.tooltip) {
+  textBuilder.push(`${this.job.status.tooltip}`);
 }
 return textBuilder.join(' ');
@@ -100,6 +101,7 @@
 :title="tooltipText"
 :class="cssClassJobName"
 data-container="body"
+data-html="true"
 class="js-pipeline-graph-job-link"
 >
@@ -115,6 +117,7 @@
 class="js-job-component-tooltip"
 :title="tooltipText"
 :class="cssClassJobName"
+data-html="true"
 data-container="body"
 >
......
@@ -391,7 +391,7 @@
 }
 &:hover {
-  background-color: $row-hover;
+  background-color: $dropdown-item-hover-bg;
 }
 .icon-retry {
......
@@ -2,7 +2,6 @@ class Projects::JobsController < Projects::ApplicationController
 include SendFileUpload
 before_action :build, except: [:index, :cancel_all]
 before_action :authorize_read_build!,
 only: [:index, :show, :status, :raw, :trace]
 before_action :authorize_update_build!,
@@ -45,8 +44,11 @@ class Projects::JobsController < Projects::ApplicationController
 end
 def show
-  @builds = @project.pipelines.find_by_sha(@build.sha).builds.order('id DESC')
-  @builds = @builds.where("id not in (?)", @build.id)
+  @builds = @project.pipelines
+    .find_by_sha(@build.sha)
+    .builds
+    .order('id DESC')
+    .present(current_user: current_user)
 @pipeline = @build.pipeline
 respond_to do |format|
@@ -128,7 +130,7 @@ class Projects::JobsController < Projects::ApplicationController
 if stream.file?
 send_file stream.path, type: 'text/plain; charset=utf-8', disposition: 'inline'
 else
-  render_404
+  send_data stream.raw, type: 'text/plain; charset=utf-8', disposition: 'inline', filename: 'job.log'
 end
 end
 end
......
@@ -31,7 +31,9 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
 render plain: 'Unprocessable entity', status: 422
 end
 rescue ActiveRecord::RecordInvalid
-  render_400
+  render_lfs_forbidden
+rescue UploadedFile::InvalidPathError
+  render_lfs_forbidden
 rescue ObjectStorage::RemoteStoreError
 render_lfs_forbidden
 end
@@ -66,10 +68,11 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
 end
 def create_file!(oid, size)
-  LfsObject.new(oid: oid, size: size).tap do |object|
-    object.file.store_workhorse_file!(params, :file)
-    object.save!
-  end
+  uploaded_file = UploadedFile.from_params(
+    params, :file, LfsObjectUploader.workhorse_local_upload_path)
+  return unless uploaded_file
+
+  LfsObject.create!(oid: oid, size: size, file: uploaded_file)
 end
 def link_to_project!(object)
@@ -8,6 +8,7 @@ module Ci
 belongs_to :project
 belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id
+before_save :update_file_store
 before_save :set_size, if: :file_changed?
 scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
@@ -23,6 +24,10 @@ module Ci
 trace: 3
 }
+def update_file_store
+  self.file_store = file.object_store
+end
+
 def self.artifacts_size_for(project)
 self.where(project: project).sum(:size)
 end
......
 module Presentable
+  extend ActiveSupport::Concern
+
+  class_methods do
+    def present(attributes)
+      all.map { |klass_object| klass_object.present(attributes) }
+    end
+  end
+
   def present(**attributes)
     Gitlab::View::Presenter::Factory
       .new(self, attributes)
......
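The `class_methods` block added to `Presentable` above is what allows `Projects::JobsController#show` (earlier in this diff) to call `.present(current_user: current_user)` directly on an ActiveRecord relation. A minimal sketch of that call chain, assuming application context; the local variables (`project`, `build`, `current_user`) are illustrative only:

```ruby
# Sketch only (not runnable outside the Rails app): `present` on a relation now
# maps every record through its own instance-level presenter.
builds = project.pipelines
           .find_by_sha(build.sha)
           .builds
           .order('id DESC')
           .present(current_user: current_user)

builds.size # one presented build per record in the relation
```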
@@ -15,6 +15,8 @@ module Ci
 def status_title
 if auto_canceled?
 "Job is redundant and is auto-canceled by Pipeline ##{auto_canceled_by_id}"
+else
+  tooltip_for_badge
 end
 end
@@ -28,5 +30,19 @@ module Ci
 trigger_request.user_variables
 end
 end
+
+def tooltip_message
+  "#{subject.name} - #{detailed_status.status_tooltip}"
+end
+
+private
+
+def tooltip_for_badge
+  detailed_status.badge_tooltip.capitalize
+end
+
+def detailed_status
+  @detailed_status ||= subject.detailed_status(user)
+end
 end
 end
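With `tooltip_message` and the badge tooltip fallback added to the presenter above, a failed build reports a combined name/status hint. A hedged sketch of the expected values, taken from the feature specs further down in this diff (the `build` and `user` variables are illustrative):

```ruby
# Sketch only, assuming a build named 'test' that failed with an unknown failure
# and is presented through the Presentable concern shown earlier.
presented = build.present(current_user: user)

presented.tooltip_message
# => "test - failed <br> (unknown failure)"

presented.status_title
# => "Failed <br> (unknown failure)"  (capitalized badge tooltip via #tooltip_for_badge)
```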
@@ -2,7 +2,7 @@ class StatusEntity < Grape::Entity
 include RequestAwareEntity
 expose :icon, :text, :label, :group
+expose :status_tooltip, as: :tooltip
 expose :has_details?, as: :has_details
 expose :details_path
......
@@ -6,6 +6,9 @@ module Ci
 attr_reader :runner
+JOB_QUEUE_DURATION_SECONDS_BUCKETS = [1, 3, 10, 30].freeze
+JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET = 5.freeze
+
 Result = Struct.new(:build, :valid?)
 def initialize(runner)
@@ -106,10 +109,22 @@ module Ci
 end
 def register_success(job)
-  job_queue_duration_seconds.observe({ shared_runner: @runner.shared? }, Time.now - job.created_at)
+  labels = { shared_runner: runner.shared?,
+             jobs_running_for_project: jobs_running_for_project(job) }
+
+  job_queue_duration_seconds.observe(labels, Time.now - job.queued_at)
 attempt_counter.increment
 end
+def jobs_running_for_project(job)
+  return '+Inf' unless runner.shared?
+
+  # excluding currently started job
+  running_jobs_count = job.project.builds.running.where(runner: Ci::Runner.shared)
+                          .limit(JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET + 1).count - 1
+  running_jobs_count < JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET ? running_jobs_count : "#{JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET}+"
+end
+
 def failed_attempt_counter
 @failed_attempt_counter ||= Gitlab::Metrics.counter(:job_register_attempts_failed_total, "Counts the times a runner tries to register a job")
 end
@@ -119,7 +134,7 @@ module Ci
 end
 def job_queue_duration_seconds
-  @job_queue_duration_seconds ||= Gitlab::Metrics.histogram(:job_queue_duration_seconds, 'Request handling execution time')
+  @job_queue_duration_seconds ||= Gitlab::Metrics.histogram(:job_queue_duration_seconds, 'Request handling execution time', {}, JOB_QUEUE_DURATION_SECONDS_BUCKETS)
 end
 end
 end
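The `jobs_running_for_project` value above becomes a label on the `job_queue_duration_seconds` histogram, so the count of running shared-runner builds has to be capped to keep label cardinality bounded. A standalone illustration of that capping logic (the `bucket` helper is illustrative, not part of the diff):

```ruby
# Counts below the limit are reported as-is; anything at or above it collapses
# into a single "5+" bucket, mirroring Ci::RegisterJobService#jobs_running_for_project.
JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET = 5

def bucket(running_jobs_count)
  if running_jobs_count < JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET
    running_jobs_count
  else
    "#{JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET}+"
  end
end

[0, 3, 5, 12].map { |count| bucket(count) }
# => [0, 3, "5+", "5+"]
```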
@@ -2,6 +2,8 @@ class JobArtifactUploader < GitlabUploader
 extend Workhorse::UploadPath
 include ObjectStorage::Concern
+ObjectNotReadyError = Class.new(StandardError)
+
 storage_options Gitlab.config.artifacts
 def size
@@ -25,6 +27,8 @@ class JobArtifactUploader < GitlabUploader
 private
 def dynamic_segment
+  raise ObjectNotReadyError, 'JobArtifact is not ready' unless model.id
+
 creation_date = model.created_at.utc.strftime('%Y_%m_%d')
 File.join(disk_hash[0..1], disk_hash[2..3], disk_hash,
......
@@ -2,6 +2,8 @@ class LegacyArtifactUploader < GitlabUploader
 extend Workhorse::UploadPath
 include ObjectStorage::Concern
+ObjectNotReadyError = Class.new(StandardError)
+
 storage_options Gitlab.config.artifacts
 def store_dir
@@ -11,6 +13,8 @@ class LegacyArtifactUploader < GitlabUploader
 private
 def dynamic_segment
+  raise ObjectNotReadyError, 'Build is not ready' unless model.id
+
 File.join(model.created_at.utc.strftime('%Y_%m'), model.project_id.to_s, model.id.to_s)
 end
 end
@@ -156,11 +156,10 @@ module ObjectStorage
 end
 def workhorse_authorize
-  if options = workhorse_remote_upload_options
-    { RemoteObject: options }
-  else
-    { TempPath: workhorse_local_upload_path }
-  end
+  {
+    RemoteObject: workhorse_remote_upload_options,
+    TempPath: workhorse_local_upload_path
+  }.compact
 end
 def workhorse_local_upload_path
@@ -293,16 +292,14 @@ module ObjectStorage
 }
 end
-def store_workhorse_file!(params, identifier)
-  filename = params["#{identifier}.name"]
-
-  if remote_object_id = params["#{identifier}.remote_id"]
-    store_remote_file!(remote_object_id, filename)
-  elsif local_path = params["#{identifier}.path"]
-    store_local_file!(local_path, filename)
-  else
-    raise RemoteStoreError, 'Bad file'
-  end
+def cache!(new_file = sanitized_file)
+  # We intercept ::UploadedFile which might be stored on remote storage
+  # We use that for "accelerated" uploads, where we store result on remote storage
+  if new_file.is_a?(::UploadedFile) && new_file.remote_id
+    return cache_remote_file!(new_file.remote_id, new_file.original_filename)
+  end
+
+  super
 end
 private
@@ -313,36 +310,29 @@ module ObjectStorage
 self.file_storage?
 end
-def store_remote_file!(remote_object_id, filename)
-  raise RemoteStoreError, 'Missing filename' unless filename
+def cache_remote_file!(remote_object_id, original_filename)
   file_path = File.join(TMP_UPLOAD_PATH, remote_object_id)
   file_path = Pathname.new(file_path).cleanpath.to_s
   raise RemoteStoreError, 'Bad file path' unless file_path.start_with?(TMP_UPLOAD_PATH + '/')
-  self.object_store = Store::REMOTE
   # TODO:
   # This should be changed to make use of `tmp/cache` mechanism
   # instead of using custom upload directory,
   # using tmp/cache makes this implementation way easier than it is today
-  CarrierWave::Storage::Fog::File.new(self, storage, file_path).tap do |file|
+  CarrierWave::Storage::Fog::File.new(self, storage_for(Store::REMOTE), file_path).tap do |file|
     raise RemoteStoreError, 'Missing file' unless file.exists?
-    self.filename = filename
-    self.file = storage.store!(file)
-  end
-end
-
-def store_local_file!(local_path, filename)
-  raise RemoteStoreError, 'Missing filename' unless filename
-
-  root_path = File.realpath(self.class.workhorse_local_upload_path)
-  file_path = File.realpath(local_path)
-  raise RemoteStoreError, 'Bad file path' unless file_path.start_with?(root_path)
-
-  self.object_store = Store::LOCAL
-  self.store!(UploadedFile.new(file_path, filename))
+    # Remote stored file, we force to store on remote storage
+    self.object_store = Store::REMOTE
+
+    # TODO:
+    # We store file internally and force it to be considered as `cached`
+    # This makes CarrierWave to store file in permament location (copy/delete)
+    # once this object is saved, but not sooner
+    @cache_id = "force-to-use-cache" # rubocop:disable Gitlab/ModuleWithInstanceVariables
+    @file = file # rubocop:disable Gitlab/ModuleWithInstanceVariables
+    @filename = original_filename # rubocop:disable Gitlab/ModuleWithInstanceVariables
+  end
 end
 # this is a hack around CarrierWave. The #migrate method needs to be
......
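The reworked `workhorse_authorize` above returns both storage targets and relies on `Hash#compact` to drop whichever one is unavailable. A small illustration with made-up values (the path is hypothetical):

```ruby
# When no remote upload options are configured, only the local TempPath survives.
remote_options = nil
{
  RemoteObject: remote_options,
  TempPath: '/shared/artifacts/tmp/uploads' # hypothetical local upload path
}.compact
# => { TempPath: "/shared/artifacts/tmp/uploads" }
```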
@@ -4,10 +4,10 @@
 - css_classes = "ci-status ci-#{status.group} #{'has-tooltip' if title.present?}"
 - if link && status.has_details?
-  = link_to status.details_path, class: css_classes, title: title do
+  = link_to status.details_path, class: css_classes, title: title, data: { html: title.present? } do
 = sprite_icon(status.icon)
 = status.text
 - else
-  %span{ class: css_classes, title: title }
+  %span{ class: css_classes, title: title, data: { html: title.present? } }
 = sprite_icon(status.icon)
 = status.text
@@ -3,14 +3,15 @@
 - subject = local_assigns.fetch(:subject)
 - status = subject.detailed_status(current_user)
 - klass = "ci-status-icon ci-status-icon-#{status.group}"
-- tooltip = "#{subject.name} - #{status.label}"
+- tooltip = "#{subject.name} - #{status.status_tooltip}"
 - if status.has_details?
-  = link_to status.details_path, class: 'mini-pipeline-graph-dropdown-item', data: { toggle: 'tooltip', title: tooltip, container: 'body' } do
+  = link_to status.details_path, class: 'mini-pipeline-graph-dropdown-item', data: { toggle: 'tooltip', title: tooltip, html: true, container: 'body' } do
 %span{ class: klass }= sprite_icon(status.icon)
 %span.ci-build-text= subject.name
 - else
-  .menu-item.mini-pipeline-graph-dropdown-item{ data: { toggle: 'tooltip', title: tooltip, container: 'body' } }
+  .menu-item.mini-pipeline-graph-dropdown-item{ data: { toggle: 'tooltip', html: true, title: tooltip, container: 'body' } }
 %span{ class: klass }= sprite_icon(status.icon)
 %span.ci-build-text= subject.name
......
-- builds = @build.pipeline.builds.to_a
 %aside.right-sidebar.right-sidebar-expanded.build-sidebar.js-build-sidebar.js-right-sidebar{ data: { "offset-top" => "101", "spy" => "affix" } }
 .sidebar-container
 .blocks-container
@@ -91,7 +89,8 @@
 - HasStatus::ORDERED_STATUSES.each do |build_status|
 - builds.select{|build| build.status == build_status}.each do |build|
 .build-job{ class: sidebar_build_class(build, @build), data: { stage: build.stage } }
-  = link_to project_job_path(@project, build) do
+  - tooltip = build.tooltip_message
+  = link_to(project_job_path(@project, build), data: { toggle: 'tooltip', html: true, title: tooltip, container: 'body' }) do
 = sprite_icon('arrow-right', size:16, css_class: 'icon-arrow-right')
 %span{ class: "ci-status-icon-#{build.status}" }
 = ci_icon_for_status(build.status)
@@ -101,5 +100,4 @@
 - else
 = build.id
 - if build.retried?
-  %span.has-tooltip{ data: { container: 'body', placement: 'bottom' }, title: 'Job was retried' }
 = sprite_icon('retry', size:16, css_class: 'icon-retry')
@@ -109,7 +109,7 @@
 illustration_size: 'svg-430',
 title: _('This job has not started yet'),
 content: _('This job is in pending state and is waiting to be picked by a runner')
-= render "sidebar"
+= render "sidebar", builds: @builds
 .js-build-options{ data: javascript_build_options }
......
---
title: Display the error message in the job's tooltip when the job fails
merge_request: 17782
author:
type: added
---
title: Fix `JobsController#raw` endpoint not being able to read traces stored in the database
merge_request: 18101
author:
type: fixed
---
title: Partition job_queue_duration_seconds with jobs_running_for_project
merge_request: 17730
author:
type: changed
@@ -350,6 +350,7 @@ Settings.artifacts['max_size'] ||= 100 # in megabytes
 Settings.artifacts['object_store'] ||= Settingslogic.new({})
 Settings.artifacts['object_store']['enabled'] = false if Settings.artifacts['object_store']['enabled'].nil?
 Settings.artifacts['object_store']['remote_directory'] ||= nil
+Settings.artifacts['object_store']['direct_upload'] = false if Settings.artifacts['object_store']['direct_upload'].nil?
 Settings.artifacts['object_store']['background_upload'] = true if Settings.artifacts['object_store']['background_upload'].nil?
 Settings.artifacts['object_store']['proxy_download'] = false if Settings.artifacts['object_store']['proxy_download'].nil?
 # Convert upload connection settings to use string keys, to make Fog happy
......
artifacts_object_store = Gitlab.config.artifacts.object_store
if artifacts_object_store.enabled &&
artifacts_object_store.direct_upload &&
artifacts_object_store.connection&.provider.to_s != 'Google'
raise "Only 'Google' is supported as a object storage provider when 'direct_upload' of artifacts is used"
end
@@ -108,6 +108,7 @@ For source installations the following settings are nested under `artifacts:` an
 |---------|-------------|---------|
 | `enabled` | Enable/disable object storage | `false` |
 | `remote_directory` | The bucket name where artifacts will be stored | |
+| `direct_upload` | Set to `true` to enable direct upload of artifacts without the need for local shared storage. This option may be removed once we decide to support only a single storage for all files. Currently only the `Google` provider is supported | `false` |
 | `background_upload` | Set to `false` to disable automatic upload. This option may be removed once uploads go directly to S3 | `true` |
 | `proxy_download` | Set to `true` to proxy all files served. When disabled, clients download directly from remote storage instead of having all data proxied, which reduces egress traffic | `false` |
 | `connection` | Various connection options described below | |
......
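Together with the initializer added a few files above, the new `direct_upload` flag is only accepted with the `Google` provider. A console-style sketch of the failure mode, with illustrative values:

```ruby
# Illustrative only: with direct_upload enabled and a non-Google provider,
# loading the new initializer raises during boot.
Gitlab.config.artifacts.object_store.enabled              # => true
Gitlab.config.artifacts.object_store.direct_upload        # => true
Gitlab.config.artifacts.object_store.connection.provider  # => "AWS"

load Rails.root.join('config/initializers/artifacts_direct_upload_support.rb')
# RuntimeError: Only 'Google' is supported as a object storage provider
#               when 'direct_upload' of artifacts is used
```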
@@ -41,6 +41,7 @@ _Only SSH_ was selected.
 block access to the server itself. The ports used for the protocol, be it SSH or
 HTTP, will still be accessible. What GitLab does is restrict access on the
 application level.
+<<<<<<< HEAD
 ## Allow mirrors to be setup for projects
@@ -53,4 +54,8 @@ work in every repository and can only be re-enabled on a per-project basis by an
 [ce-4696]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/4696
 [ee-3586]: https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/3586
+=======
+[ce-4696]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/4696
+>>>>>>> upstream/master
 [ce-18021]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/18021
@@ -89,6 +89,14 @@ module API
 rack_response({ 'message' => '404 Not found' }.to_json, 404)
 end
+rescue_from UploadedFile::InvalidPathError do |e|
+  rack_response({ 'message' => e.message }.to_json, 400)
+end
+
+rescue_from ObjectStorage::RemoteStoreError do |e|
+  rack_response({ 'message' => e.message }.to_json, 500)
+end
+
 # Retain 405 error rather than a 500 error for Grape 0.15.0+.
 # https://github.com/ruby-grape/grape/blob/a3a28f5b5dfbb2797442e006dbffd750b27f2a76/UPGRADING.md#changes-to-method-not-allowed-routes
 rescue_from Grape::Exceptions::MethodNotAllowed do |e|
......
@@ -402,28 +402,6 @@ module API
 # file helpers
-def uploaded_file(field, uploads_path)
-  if params[field]
-    bad_request!("#{field} is not a file") unless params[field][:filename]
-    return params[field]
-  end
-
-  return nil unless params["#{field}.path"] && params["#{field}.name"]
-
-  # sanitize file paths
-  # this requires all paths to exist
-  required_attributes! %W(#{field}.path)
-  uploads_path = File.realpath(uploads_path)
-  file_path = File.realpath(params["#{field}.path"])
-  bad_request!('Bad file path') unless file_path.start_with?(uploads_path)
-
-  UploadedFile.new(
-    file_path,
-    params["#{field}.name"],
-    params["#{field}.type"] || 'application/octet-stream'
-  )
-end
-
 def present_disk_file!(path, filename, content_type = 'application/octet-stream')
 filename ||= File.basename(path)
 header['Content-Disposition'] = "attachment; filename=#{filename}"
......
@@ -186,7 +186,7 @@ module API
 status 200
 content_type Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
-Gitlab::Workhorse.artifact_upload_ok
+JobArtifactUploader.workhorse_authorize
 end
 desc 'Upload artifacts for job' do
@@ -201,14 +201,15 @@ module API
 requires :id, type: Integer, desc: %q(Job's ID)
 optional :token, type: String, desc: %q(Job's authentication token)
 optional :expire_in, type: String, desc: %q(Specify when artifacts should expire)
-optional :file, type: File, desc: %q(Artifact's file)
 optional 'file.path', type: String, desc: %q(path to locally stored body (generated by Workhorse))
 optional 'file.name', type: String, desc: %q(real filename as sent in Content-Disposition (generated by Workhorse))
 optional 'file.type', type: String, desc: %q(real content type as sent in Content-Type (generated by Workhorse))
-optional 'file.sha256', type: String, desc: %q(sha256 checksum of the file)
+optional 'file.size', type: Integer, desc: %q(real size of file (generated by Workhorse))
+optional 'file.sha256', type: String, desc: %q(sha256 checksum of the file (generated by Workhorse))
 optional 'metadata.path', type: String, desc: %q(path to locally stored body (generated by Workhorse))
 optional 'metadata.name', type: String, desc: %q(filename (generated by Workhorse))
-optional 'metadata.sha256', type: String, desc: %q(sha256 checksum of the file)
+optional 'metadata.size', type: Integer, desc: %q(real size of metadata (generated by Workhorse))
+optional 'metadata.sha256', type: String, desc: %q(sha256 checksum of metadata (generated by Workhorse))
 end
 post '/:id/artifacts' do
 not_allowed! unless Gitlab.config.artifacts.enabled
@@ -217,21 +218,34 @@ module API
 job = authenticate_job!
 forbidden!('Job is not running!') unless job.running?
-workhorse_upload_path = JobArtifactUploader.workhorse_upload_path
-artifacts = uploaded_file(:file, workhorse_upload_path)
-metadata = uploaded_file(:metadata, workhorse_upload_path)
+artifacts = UploadedFile.from_params(params, :file, JobArtifactUploader.workhorse_local_upload_path)
+metadata = UploadedFile.from_params(params, :metadata, JobArtifactUploader.workhorse_local_upload_path)
 bad_request!('Missing artifacts file!') unless artifacts
 file_to_large! unless artifacts.size < max_artifacts_size
+bad_request!("Already uploaded") if job.job_artifacts_archive
+
 expire_in = params['expire_in'] ||
   Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
-job.build_job_artifacts_archive(project: job.project, file_type: :archive, file: artifacts, file_sha256: params['file.sha256'], expire_in: expire_in)
-job.build_job_artifacts_metadata(project: job.project, file_type: :metadata, file: metadata, file_sha256: params['metadata.sha256'], expire_in: expire_in) if metadata
-job.artifacts_expire_in = expire_in
+job.build_job_artifacts_archive(
+  project: job.project,
+  file: artifacts,
+  file_type: :archive,
+  file_sha256: artifacts.sha256,
+  expire_in: expire_in)
+
+if metadata
+  job.build_job_artifacts_metadata(
+    project: job.project,
+    file: metadata,
+    file_type: :metadata,
+    file_sha256: metadata.sha256,
+    expire_in: expire_in)
+end
-if job.save
+if job.update(artifacts_expire_in: expire_in)
 present job, with: Entities::JobRequest::Response
 else
 render_validation_error!(job)
......
@@ -6,10 +6,12 @@ module Gitlab
 def self.extended_statuses
 [[Status::Build::Cancelable,
 Status::Build::Retryable],
+[Status::Build::Failed],
 [Status::Build::FailedAllowed,
 Status::Build::Play,
 Status::Build::Stop],
-[Status::Build::Action]]
+[Status::Build::Action],
+[Status::Build::Retried]]
 end
 def self.common_helpers
......
module Gitlab
module Ci
module Status
module Build
class Failed < Status::Extended
REASONS = {
'unknown_failure' => 'unknown failure',
'script_failure' => 'script failure',
'api_failure' => 'API failure',
'stuck_or_timeout_failure' => 'stuck or timeout failure',
'runner_system_failure' => 'runner system failure',
'missing_dependency_failure' => 'missing dependency failure'
}.freeze
def status_tooltip
base_message
end
def badge_tooltip
base_message
end
def self.matches?(build, user)
build.failed?
end
private
def base_message
"#{s_('CiStatusLabel|failed')} #{description}"
end
def description
"<br> (#{REASONS[subject.failure_reason]})"
end
end
end
end
end
end
@@ -4,7 +4,7 @@ module Gitlab
 module Build
 class FailedAllowed < Status::Extended
 def label
-  'failed (allowed to fail)'
+  "failed #{allowed_to_fail_title}"
 end
 def icon
@@ -15,9 +15,19 @@ module Gitlab
 'failed_with_warnings'
 end
+def status_tooltip
+  "#{@status.status_tooltip} #{allowed_to_fail_title}"
+end
+
 def self.matches?(build, user)
 build.failed? && build.allow_failure?
 end
+
+private
+
+def allowed_to_fail_title
+  "(allowed to fail)"
+end
 end
 end
 end
......
module Gitlab
module Ci
module Status
module Build
class Retried < Status::Extended
def status_tooltip
@status.status_tooltip + " (retried)"
end
def self.matches?(build, user)
build.retried?
end
end
end
end
end
end
@@ -57,6 +57,16 @@ module Gitlab
 def action_title
 raise NotImplementedError
 end
+
+# Hint that appears on all the pipeline graph tooltips and builds on the right sidebar in Job detail view
+def status_tooltip
+  label
+end
+
+# Hint that appears on the build badges
+def badge_tooltip
+  subject.status
+end
 end
 end
 end
......
@@ -8,7 +8,7 @@ module Gitlab
 attr_reader :stream
-delegate :close, :tell, :seek, :size, :path, :url, :truncate, to: :stream, allow_nil: true
+delegate :close, :tell, :seek, :size, :url, :truncate, to: :stream, allow_nil: true
 delegate :valid?, to: :stream, as: :present?, allow_nil: true
@@ -25,6 +25,10 @@ module Gitlab
 self.path.present?
 end
+def path
+  self.stream.path if self.stream.respond_to?(:path)
+end
+
 def limit(last_bytes = LIMIT_SIZE)
 if last_bytes < size
 stream.seek(-last_bytes, IO::SEEK_END)
......
@@ -134,6 +134,8 @@ excluded_attributes:
 - :trace
 - :token
 - :when
+- :artifacts_file
+- :artifacts_metadata
 push_event_payload:
 - :event_id
 project_badges:
......
@@ -82,7 +82,7 @@ module Gitlab
 end
 def open_file(path, name)
-  ::UploadedFile.new(path, name || File.basename(path), 'application/octet-stream')
+  ::UploadedFile.new(path, filename: name || File.basename(path), content_type: 'application/octet-stream')
 end
 end
......
@@ -36,10 +36,6 @@ module Gitlab
 }
 end
-def artifact_upload_ok
-  { TempPath: JobArtifactUploader.workhorse_upload_path }
-end
-
 def send_git_blob(repository, blob)
 params = if Gitlab::GitalyClient.feature_enabled?(:workhorse_raw_show, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT)
 {
......
require "tempfile" require "tempfile"
require "tmpdir"
require "fileutils" require "fileutils"
# Taken from: Rack::Test::UploadedFile
class UploadedFile class UploadedFile
InvalidPathError = Class.new(StandardError)
# The filename, *not* including the path, of the "uploaded" file # The filename, *not* including the path, of the "uploaded" file
attr_reader :original_filename attr_reader :original_filename
...@@ -12,14 +14,46 @@ class UploadedFile ...@@ -12,14 +14,46 @@ class UploadedFile
# The content type of the "uploaded" file # The content type of the "uploaded" file
attr_accessor :content_type attr_accessor :content_type
def initialize(path, filename, content_type = "text/plain") attr_reader :remote_id
raise "#{path} file does not exist" unless ::File.exist?(path) attr_reader :sha256
def initialize(path, filename: nil, content_type: "application/octet-stream", sha256: nil, remote_id: nil)
raise InvalidPathError, "#{path} file does not exist" unless ::File.exist?(path)
@content_type = content_type @content_type = content_type
@original_filename = filename || ::File.basename(path) @original_filename = filename || ::File.basename(path)
@content_type = content_type
@sha256 = sha256
@remote_id = remote_id
@tempfile = File.new(path, 'rb') @tempfile = File.new(path, 'rb')
end end
def self.from_params(params, field, upload_path)
unless params["#{field}.path"]
raise InvalidPathError, "file is invalid" if params["#{field}.remote_id"]
return
end
file_path = File.realpath(params["#{field}.path"])
unless self.allowed_path?(file_path, [upload_path, Dir.tmpdir].compact)
raise InvalidPathError, "insecure path used '#{file_path}'"
end
UploadedFile.new(file_path,
filename: params["#{field}.name"],
content_type: params["#{field}.type"] || 'application/octet-stream',
sha256: params["#{field}.sha256"],
remote_id: params["#{field}.remote_id"])
end
def self.allowed_path?(file_path, paths)
paths.any? do |path|
File.exist?(path) && file_path.start_with?(File.realpath(path))
end
end
def path def path
@tempfile.path @tempfile.path
end end
......
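The new `UploadedFile.from_params` only accepts files that live under the given upload path or the system temporary directory. A minimal sketch based on the spec that follows, assuming the `UploadedFile` class above is loaded:

```ruby
require 'tempfile'

# A Workhorse-style params hash pointing at a file inside Dir.tmpdir passes the
# allowed_path? check even when no explicit upload path is given.
temp_file = Tempfile.new('artifact')
params = { 'file.path' => temp_file.path, 'file.name' => 'artifacts.zip' }

file = UploadedFile.from_params(params, :file, nil)
file.original_filename  # => "artifacts.zip"
file.content_type       # => "application/octet-stream"
```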
...@@ -513,13 +513,30 @@ describe Projects::JobsController do ...@@ -513,13 +513,30 @@ describe Projects::JobsController do
end end
end end
context 'when job has a trace in database' do
let(:job) { create(:ci_build, pipeline: pipeline) }
before do
job.update_column(:trace, 'Sample trace')
end
it 'send a trace file' do
response = subject
expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'text/plain; charset=utf-8'
expect(response.body).to eq 'Sample trace'
end
end
context 'when job does not have a trace file' do context 'when job does not have a trace file' do
let(:job) { create(:ci_build, pipeline: pipeline) } let(:job) { create(:ci_build, pipeline: pipeline) }
it 'returns not_found' do it 'returns not_found' do
response = subject response = subject
expect(response).to have_gitlab_http_status(:not_found) expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to eq ''
end end
end end
......
...@@ -62,6 +62,7 @@ FactoryBot.define do ...@@ -62,6 +62,7 @@ FactoryBot.define do
end end
trait :pending do trait :pending do
queued_at 'Di 29. Okt 09:50:59 CET 2013'
status 'pending' status 'pending'
end end
...@@ -237,5 +238,10 @@ FactoryBot.define do ...@@ -237,5 +238,10 @@ FactoryBot.define do
trait :protected do trait :protected do
protected true protected true
end end
trait :script_failure do
failed
failure_reason 1
end
end end
end end
...@@ -34,4 +34,26 @@ describe 'User browses a job', :js do ...@@ -34,4 +34,26 @@ describe 'User browses a job', :js do
expect(build.project.running_or_pending_build_count).to eq(build.project.builds.running_or_pending.count(:all)) expect(build.project.running_or_pending_build_count).to eq(build.project.builds.running_or_pending.count(:all))
end end
context 'with a failed job' do
let!(:build) { create(:ci_build, :failed, pipeline: pipeline) }
it 'displays the failure reason' do
within('.builds-container') do
build_link = first('.build-job > a')
expect(build_link['data-title']).to eq('test - failed <br> (unknown failure)')
end
end
end
context 'when a failed job has been retried' do
let!(:build) { create(:ci_build, :failed, :retried, pipeline: pipeline) }
it 'displays the failure reason and retried label' do
within('.builds-container') do
build_link = first('.build-job > a')
expect(build_link['data-title']).to eq('test - failed <br> (unknown failure) (retried)')
end
end
end
end end
...@@ -29,4 +29,15 @@ describe 'User browses jobs' do ...@@ -29,4 +29,15 @@ describe 'User browses jobs' do
expect(ci_lint_tool_link[:href]).to end_with(ci_lint_path) expect(ci_lint_tool_link[:href]).to end_with(ci_lint_path)
end end
end end
context 'with a failed job' do
let!(:build) { create(:ci_build, :coverage, :failed, pipeline: pipeline) }
it 'displays a tooltip with the failure reason' do
page.within('.ci-table') do
failed_job_link = page.find('.ci-failed')
expect(failed_job_link[:title]).to eq('Failed <br> (unknown failure)')
end
end
end
end end
...@@ -115,6 +115,13 @@ describe 'Pipeline', :js do ...@@ -115,6 +115,13 @@ describe 'Pipeline', :js do
expect(page).not_to have_content('Retry job') expect(page).not_to have_content('Retry job')
end end
it 'should include the failure reason' do
page.within('#ci-badge-test') do
build_link = page.find('.js-pipeline-graph-job-link')
expect(build_link['data-original-title']).to eq('test - failed <br> (unknown failure)')
end
end
end end
context 'when pipeline has manual jobs' do context 'when pipeline has manual jobs' do
...@@ -289,6 +296,15 @@ describe 'Pipeline', :js do ...@@ -289,6 +296,15 @@ describe 'Pipeline', :js do
it { expect(build_manual.reload).to be_pending } it { expect(build_manual.reload).to be_pending }
end end
context 'failed jobs' do
it 'displays a tooltip with the failure reason' do
page.within('.ci-table') do
failed_job_link = page.find('.ci-failed')
expect(failed_job_link[:title]).to eq('Failed <br> (unknown failure)')
end
end
end
end end
describe 'GET /:project/pipelines/:id/failures' do describe 'GET /:project/pipelines/:id/failures' do
......
...@@ -394,6 +394,23 @@ describe 'Pipelines', :js do ...@@ -394,6 +394,23 @@ describe 'Pipelines', :js do
expect(build.reload).to be_canceled expect(build.reload).to be_canceled
end end
end end
context 'for a failed pipeline' do
let!(:build) do
create(:ci_build, :failed, pipeline: pipeline,
stage: 'build',
name: 'build')
end
it 'should display the failure reason' do
find('.js-builds-dropdown-button').click
within('.js-builds-dropdown-list') do
build_element = page.find('.mini-pipeline-graph-dropdown-item')
expect(build_element['data-title']).to eq('build - failed <br> (unknown failure)')
end
end
end
end end
context 'with pagination' do context 'with pagination' do
......
require 'spec_helper'
describe 'Artifacts direct upload support' do
subject do
load Rails.root.join('config/initializers/artifacts_direct_upload_support.rb')
end
let(:connection) do
{ provider: provider }
end
before do
stub_artifacts_setting(
object_store: {
enabled: enabled,
direct_upload: direct_upload,
connection: connection
})
end
context 'when object storage is enabled' do
let(:enabled) { true }
context 'when direct upload is enabled' do
let(:direct_upload) { true }
context 'when provider is Google' do
let(:provider) { 'Google' }
it 'succeeds' do
expect { subject }.not_to raise_error
end
end
context 'when connection is empty' do
let(:connection) { nil }
it 'raises an error' do
expect { subject }.to raise_error /object storage provider when 'direct_upload' of artifacts is used/
end
end
context 'when other provider is used' do
let(:provider) { 'AWS' }
it 'raises an error' do
expect { subject }.to raise_error /object storage provider when 'direct_upload' of artifacts is used/
end
end
end
context 'when direct upload is disabled' do
let(:direct_upload) { false }
let(:provider) { 'AWS' }
it 'succeeds' do
expect { subject }.not_to raise_error
end
end
end
context 'when object storage is disabled' do
let(:enabled) { false }
let(:direct_upload) { false }
let(:provider) { 'AWS' }
it 'succeeds' do
expect { subject }.not_to raise_error
end
end
end
...@@ -13,6 +13,7 @@ describe('pipeline graph job component', () => { ...@@ -13,6 +13,7 @@ describe('pipeline graph job component', () => {
icon: 'icon_status_success', icon: 'icon_status_success',
text: 'passed', text: 'passed',
label: 'passed', label: 'passed',
tooltip: 'passed',
group: 'success', group: 'success',
details_path: '/root/ci-mock/builds/4256', details_path: '/root/ci-mock/builds/4256',
has_details: true, has_details: true,
...@@ -137,6 +138,7 @@ describe('pipeline graph job component', () => { ...@@ -137,6 +138,7 @@ describe('pipeline graph job component', () => {
status: { status: {
icon: 'icon_status_success', icon: 'icon_status_success',
label: 'success', label: 'success',
tooltip: 'success',
}, },
}, },
}); });
......
...@@ -53,4 +53,14 @@ describe Gitlab::Ci::Status::Build::Action do ...@@ -53,4 +53,14 @@ describe Gitlab::Ci::Status::Build::Action do
end end
end end
end end
describe '#badge_tooltip' do
let(:user) { create(:user) }
let(:build) { create(:ci_build, :non_playable) }
let(:status) { Gitlab::Ci::Status::Core.new(build, user) }
it 'returns the status' do
expect(subject.badge_tooltip).to eq('created')
end
end
end end
...@@ -40,6 +40,24 @@ describe Gitlab::Ci::Status::Build::Cancelable do ...@@ -40,6 +40,24 @@ describe Gitlab::Ci::Status::Build::Cancelable do
end end
end end
describe '#status_tooltip' do
it 'does not override status status_tooltip' do
expect(status).to receive(:status_tooltip)
subject.status_tooltip
end
end
describe '#badge_tooltip' do
let(:user) { create(:user) }
let(:build) { create(:ci_build) }
let(:status) { Gitlab::Ci::Status::Core.new(build, user) }
it 'returns the status' do
expect(subject.badge_tooltip).to eq('pending')
end
end
describe 'action details' do describe 'action details' do
let(:user) { create(:user) } let(:user) { create(:user) }
let(:build) { create(:ci_build) } let(:build) { create(:ci_build) }
......
...@@ -48,11 +48,11 @@ describe Gitlab::Ci::Status::Build::Factory do ...@@ -48,11 +48,11 @@ describe Gitlab::Ci::Status::Build::Factory do
it 'matches correct extended statuses' do it 'matches correct extended statuses' do
expect(factory.extended_statuses) expect(factory.extended_statuses)
.to eq [Gitlab::Ci::Status::Build::Retryable] .to eq [Gitlab::Ci::Status::Build::Retryable, Gitlab::Ci::Status::Build::Failed]
end end
it 'fabricates a retryable build status' do it 'fabricates a failed build status' do
expect(status).to be_a Gitlab::Ci::Status::Build::Retryable expect(status).to be_a Gitlab::Ci::Status::Build::Failed
end end
it 'fabricates status with correct details' do it 'fabricates status with correct details' do
...@@ -60,6 +60,7 @@ describe Gitlab::Ci::Status::Build::Factory do ...@@ -60,6 +60,7 @@ describe Gitlab::Ci::Status::Build::Factory do
expect(status.icon).to eq 'status_failed' expect(status.icon).to eq 'status_failed'
expect(status.favicon).to eq 'favicon_status_failed' expect(status.favicon).to eq 'favicon_status_failed'
expect(status.label).to eq 'failed' expect(status.label).to eq 'failed'
expect(status.status_tooltip).to eq 'failed <br> (unknown failure)'
expect(status).to have_details expect(status).to have_details
expect(status).to have_action expect(status).to have_action
end end
...@@ -75,6 +76,7 @@ describe Gitlab::Ci::Status::Build::Factory do ...@@ -75,6 +76,7 @@ describe Gitlab::Ci::Status::Build::Factory do
it 'matches correct extended statuses' do it 'matches correct extended statuses' do
expect(factory.extended_statuses) expect(factory.extended_statuses)
.to eq [Gitlab::Ci::Status::Build::Retryable, .to eq [Gitlab::Ci::Status::Build::Retryable,
Gitlab::Ci::Status::Build::Failed,
Gitlab::Ci::Status::Build::FailedAllowed] Gitlab::Ci::Status::Build::FailedAllowed]
end end
......
...@@ -3,6 +3,7 @@ require 'spec_helper' ...@@ -3,6 +3,7 @@ require 'spec_helper'
describe Gitlab::Ci::Status::Build::FailedAllowed do describe Gitlab::Ci::Status::Build::FailedAllowed do
let(:status) { double('core status') } let(:status) { double('core status') }
let(:user) { double('user') } let(:user) { double('user') }
let(:build) { create(:ci_build, :failed, :allowed_to_fail) }
subject do subject do
described_class.new(status) described_class.new(status)
...@@ -68,6 +69,28 @@ describe Gitlab::Ci::Status::Build::FailedAllowed do ...@@ -68,6 +69,28 @@ describe Gitlab::Ci::Status::Build::FailedAllowed do
end end
end end
describe '#badge_tooltip' do
let(:user) { create(:user) }
let(:failed_status) { Gitlab::Ci::Status::Failed.new(build, user) }
let(:build_status) { Gitlab::Ci::Status::Build::Failed.new(failed_status) }
let(:status) { described_class.new(build_status) }
it 'does override badge_tooltip' do
expect(status.badge_tooltip).to eq('failed <br> (unknown failure)')
end
end
describe '#status_tooltip' do
let(:user) { create(:user) }
let(:failed_status) { Gitlab::Ci::Status::Failed.new(build, user) }
let(:build_status) { Gitlab::Ci::Status::Build::Failed.new(failed_status) }
let(:status) { described_class.new(build_status) }
it 'does override status_tooltip' do
expect(status.status_tooltip).to eq 'failed <br> (unknown failure) (allowed to fail)'
end
end
describe '.matches?' do describe '.matches?' do
subject { described_class.matches?(build, user) } subject { described_class.matches?(build, user) }
......
require 'spec_helper'
describe Gitlab::Ci::Status::Build::Failed do
let(:build) { create(:ci_build, :script_failure) }
let(:status) { double('core status') }
let(:user) { double('user') }
subject { described_class.new(status) }
describe '#text' do
it 'does not override status text' do
expect(status).to receive(:text)
subject.text
end
end
describe '#icon' do
it 'does not override status icon' do
expect(status).to receive(:icon)
subject.icon
end
end
describe '#group' do
it 'does not override status group' do
expect(status).to receive(:group)
subject.group
end
end
describe '#favicon' do
it 'does not override status label' do
expect(status).to receive(:favicon)
subject.favicon
end
end
describe '#label' do
it 'does not override label' do
expect(status).to receive(:label)
subject.label
end
end
describe '#badge_tooltip' do
let(:user) { create(:user) }
let(:status) { Gitlab::Ci::Status::Failed.new(build, user) }
it 'does override badge_tooltip' do
expect(subject.badge_tooltip).to eq 'failed <br> (script failure)'
end
end
describe '#status_tooltip' do
let(:user) { create(:user) }
let(:status) { Gitlab::Ci::Status::Failed.new(build, user) }
it 'does override status_tooltip' do
expect(subject.status_tooltip).to eq 'failed <br> (script failure)'
end
end
describe '.matches?' do
context 'with a failed build' do
it 'returns true' do
expect(described_class.matches?(build, user)).to be_truthy
end
end
context 'with any other type of build' do
let(:build) { create(:ci_build, :success) }
it 'returns false' do
expect(described_class.matches?(build, user)).to be_falsy
end
end
end
end
...@@ -14,6 +14,22 @@ describe Gitlab::Ci::Status::Build::Play do ...@@ -14,6 +14,22 @@ describe Gitlab::Ci::Status::Build::Play do
end end
end end
describe '#status_tooltip' do
it 'does not override status status_tooltip' do
expect(status).to receive(:status_tooltip)
subject.status_tooltip
end
end
describe '#badge_tooltip' do
it 'does not override status badge_tooltip' do
expect(status).to receive(:badge_tooltip)
subject.badge_tooltip
end
end
describe '#has_action?' do describe '#has_action?' do
context 'when user is allowed to update build' do context 'when user is allowed to update build' do
context 'when user is allowed to trigger protected action' do context 'when user is allowed to trigger protected action' do
......
require 'spec_helper'
describe Gitlab::Ci::Status::Build::Retried do
let(:build) { create(:ci_build, :retried) }
let(:status) { double('core status') }
let(:user) { double('user') }
subject { described_class.new(status) }
describe '#text' do
it 'does not override status text' do
expect(status).to receive(:text)
subject.text
end
end
describe '#icon' do
it 'does not override status icon' do
expect(status).to receive(:icon)
subject.icon
end
end
describe '#group' do
it 'does not override status group' do
expect(status).to receive(:group)
subject.group
end
end
describe '#favicon' do
it 'does not override status label' do
expect(status).to receive(:favicon)
subject.favicon
end
end
describe '#label' do
it 'does not override status label' do
expect(status).to receive(:label)
subject.label
end
end
describe '#badge_tooltip' do
let(:user) { create(:user) }
let(:build) { create(:ci_build, :retried) }
let(:status) { Gitlab::Ci::Status::Success.new(build, user) }
it 'returns status' do
expect(status.badge_tooltip).to eq('pending')
end
end
describe '#status_tooltip' do
let(:user) { create(:user) }
context 'with a failed build' do
let(:build) { create(:ci_build, :failed, :retried) }
let(:failed_status) { Gitlab::Ci::Status::Failed.new(build, user) }
let(:status) { Gitlab::Ci::Status::Build::Failed.new(failed_status) }
it 'does override status_tooltip' do
expect(subject.status_tooltip).to eq 'failed <br> (unknown failure) (retried)'
end
end
context 'with another build' do
let(:build) { create(:ci_build, :retried) }
let(:status) { Gitlab::Ci::Status::Success.new(build, user) }
it 'does override status_tooltip' do
expect(subject.status_tooltip).to eq 'passed (retried)'
end
end
end
describe '.matches?' do
subject { described_class.matches?(build, user) }
context 'with a retried build' do
it { is_expected.to be_truthy }
end
context 'with a build that has not been retried' do
let(:build) { create(:ci_build, :success) }
it { is_expected.to be_falsy }
end
end
end
...@@ -40,6 +40,24 @@ describe Gitlab::Ci::Status::Build::Retryable do ...@@ -40,6 +40,24 @@ describe Gitlab::Ci::Status::Build::Retryable do
end end
end end
describe '#status_tooltip' do
it 'does not override status status_tooltip' do
expect(status).to receive(:status_tooltip)
subject.status_tooltip
end
end
describe '#badge_tooltip' do
let(:user) { create(:user) }
let(:build) { create(:ci_build) }
let(:status) { Gitlab::Ci::Status::Core.new(build, user) }
it 'does return status' do
expect(status.badge_tooltip).to eq('pending')
end
end
describe 'action details' do describe 'action details' do
let(:user) { create(:user) } let(:user) { create(:user) }
let(:build) { create(:ci_build) } let(:build) { create(:ci_build) }
......
...@@ -77,4 +77,24 @@ describe Gitlab::Ci::Status::Build::Stop do ...@@ -77,4 +77,24 @@ describe Gitlab::Ci::Status::Build::Stop do
end end
end end
end end
describe '#status_tooltip' do
it 'does not override status status_tooltip' do
expect(status).to receive(:status_tooltip)
subject.status_tooltip
end
end
describe '#badge_tooltip' do
let(:user) { create(:user) }
let(:build) { create(:ci_build, :playable) }
let(:status) { Gitlab::Ci::Status::Core.new(build, user) }
it 'does not override status badge_tooltip' do
expect(status).to receive(:badge_tooltip)
subject.badge_tooltip
end
end
end end
require 'spec_helper' require 'spec_helper'
describe Gitlab::Ci::Status::SuccessWarning do describe Gitlab::Ci::Status::SuccessWarning do
let(:status) { double('status') }
subject do subject do
described_class.new(double('status')) described_class.new(status)
end end
describe '#test' do describe '#test' do
......
...@@ -6181,12 +6181,6 @@ ...@@ -6181,12 +6181,6 @@
"user_id": null, "user_id": null,
"target_url": null, "target_url": null,
"description": null, "description": null,
"artifacts_file": {
"url": null
},
"artifacts_metadata": {
"url": null
},
"erased_by_id": null, "erased_by_id": null,
"erased_at": null, "erased_at": null,
"type": "Ci::Build", "type": "Ci::Build",
...@@ -6219,12 +6213,6 @@ ...@@ -6219,12 +6213,6 @@
"user_id": null, "user_id": null,
"target_url": null, "target_url": null,
"description": null, "description": null,
"artifacts_file": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/72/p5_build_artifacts.zip"
},
"artifacts_metadata": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/72/p5_build_artifacts_metadata.gz"
},
"erased_by_id": null, "erased_by_id": null,
"erased_at": null "erased_at": null
} }
...@@ -6293,12 +6281,6 @@ ...@@ -6293,12 +6281,6 @@
"user_id": null, "user_id": null,
"target_url": null, "target_url": null,
"description": null, "description": null,
"artifacts_file": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/74/p5_build_artifacts.zip"
},
"artifacts_metadata": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/74/p5_build_artifacts_metadata.gz"
},
"erased_by_id": null, "erased_by_id": null,
"erased_at": null "erased_at": null
}, },
...@@ -6328,12 +6310,6 @@ ...@@ -6328,12 +6310,6 @@
"user_id": null, "user_id": null,
"target_url": null, "target_url": null,
"description": null, "description": null,
"artifacts_file": {
"url": null
},
"artifacts_metadata": {
"url": null
},
"erased_by_id": null, "erased_by_id": null,
"erased_at": null "erased_at": null
} }
...@@ -6393,12 +6369,6 @@ ...@@ -6393,12 +6369,6 @@
"user_id": null, "user_id": null,
"target_url": null, "target_url": null,
"description": null, "description": null,
"artifacts_file": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/76/p5_build_artifacts.zip"
},
"artifacts_metadata": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/76/p5_build_artifacts_metadata.gz"
},
"erased_by_id": null, "erased_by_id": null,
"erased_at": null "erased_at": null
}, },
...@@ -6428,12 +6398,6 @@ ...@@ -6428,12 +6398,6 @@
"user_id": null, "user_id": null,
"target_url": null, "target_url": null,
"description": null, "description": null,
"artifacts_file": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/75/p5_build_artifacts.zip"
},
"artifacts_metadata": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/75/p5_build_artifacts_metadata.gz"
},
"erased_by_id": null, "erased_by_id": null,
"erased_at": null "erased_at": null
} }
...@@ -6493,12 +6457,6 @@ ...@@ -6493,12 +6457,6 @@
"user_id": null, "user_id": null,
"target_url": null, "target_url": null,
"description": null, "description": null,
"artifacts_file": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/78/p5_build_artifacts.zip"
},
"artifacts_metadata": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/78/p5_build_artifacts_metadata.gz"
},
"erased_by_id": null, "erased_by_id": null,
"erased_at": null "erased_at": null
}, },
...@@ -6528,12 +6486,6 @@ ...@@ -6528,12 +6486,6 @@
"user_id": null, "user_id": null,
"target_url": null, "target_url": null,
"description": null, "description": null,
"artifacts_file": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/77/p5_build_artifacts.zip"
},
"artifacts_metadata": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/77/p5_build_artifacts_metadata.gz"
},
"erased_by_id": null, "erased_by_id": null,
"erased_at": null "erased_at": null
} }
...@@ -6593,12 +6545,6 @@ ...@@ -6593,12 +6545,6 @@
"user_id": null, "user_id": null,
"target_url": null, "target_url": null,
"description": null, "description": null,
"artifacts_file": {
"url": null
},
"artifacts_metadata": {
"url": null
},
"erased_by_id": null, "erased_by_id": null,
"erased_at": null "erased_at": null
}, },
...@@ -6628,12 +6574,6 @@ ...@@ -6628,12 +6574,6 @@
"user_id": null, "user_id": null,
"target_url": null, "target_url": null,
"description": null, "description": null,
"artifacts_file": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/80/p5_build_artifacts.zip"
},
"artifacts_metadata": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/80/p5_build_artifacts_metadata.gz"
},
"erased_by_id": null, "erased_by_id": null,
"erased_at": null "erased_at": null
} }
......
require 'spec_helper'
describe UploadedFile do
describe ".from_params" do
let(:temp_dir) { Dir.tmpdir }
let(:temp_file) { Tempfile.new("test", temp_dir) }
let(:upload_path) { nil }
subject do
described_class.from_params(params, :file, upload_path)
end
before do
FileUtils.touch(temp_file)
end
after do
FileUtils.rm_f(temp_file)
FileUtils.rm_r(upload_path) if upload_path
end
context 'when valid file is specified' do
context 'only local path is specified' do
let(:params) do
{ 'file.path' => temp_file.path }
end
it "succeeds" do
is_expected.not_to be_nil
end
it "generates filename from path" do
expect(subject.original_filename).to eq(::File.basename(temp_file.path))
end
end
context 'all parameters are specified' do
let(:params) do
{ 'file.path' => temp_file.path,
'file.name' => 'my_file.txt',
'file.type' => 'my/type',
'file.sha256' => 'sha256',
'file.remote_id' => 'remote_id' }
end
it "succeeds" do
is_expected.not_to be_nil
end
it "generates filename from path" do
expect(subject.original_filename).to eq('my_file.txt')
expect(subject.content_type).to eq('my/type')
expect(subject.sha256).to eq('sha256')
expect(subject.remote_id).to eq('remote_id')
end
end
end
context 'when no params are specified' do
let(:params) do
{}
end
it "does not return an object" do
is_expected.to be_nil
end
end
context 'when only remote id is specified' do
let(:params) do
{ 'file.remote_id' => 'remote_id' }
end
it "raises an error" do
expect { subject }.to raise_error(UploadedFile::InvalidPathError, /file is invalid/)
end
end
context 'when verifying allowed paths' do
let(:params) do
{ 'file.path' => temp_file.path }
end
context 'when file is stored in system temporary folder' do
let(:temp_dir) { Dir.tmpdir }
it "succeeds" do
is_expected.not_to be_nil
end
end
context 'when file is stored in user provided upload path' do
let(:upload_path) { Dir.mktmpdir }
let(:temp_dir) { upload_path }
it "succeeds" do
is_expected.not_to be_nil
end
end
context 'when file is stored outside of user provided upload path' do
let!(:generated_dir) { Dir.mktmpdir }
let!(:temp_dir) { Dir.mktmpdir }
before do
# We overwrite default temporary path
allow(Dir).to receive(:tmpdir).and_return(generated_dir)
end
it "raises an error" do
expect { subject }.to raise_error(UploadedFile::InvalidPathError, /insecure path used/)
end
end
end
end
end
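# Rough usage sketch (assumed values, not taken from this diff) of the Workhorse-style
# multipart fields that UploadedFile.from_params consumes:
params = {
'file.path' => '/tmp/workhorse-upload-example', # assumed path written by Workhorse
'file.name' => 'artifacts.zip',
'file.sha256' => 'example-sha256'
}
file = UploadedFile.from_params(params, :file, nil)
file&.original_filename # => "artifacts.zip" when the path is allowed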
...@@ -72,13 +72,44 @@ describe Ci::BuildPresenter do
end
end
context 'when build failed' do
let(:build) { create(:ci_build, :failed, pipeline: pipeline) }
it 'returns the reason of failure' do
status_title = presenter.status_title
expect(status_title).to eq('Failed <br> (unknown failure)')
end
end
context 'when build has failed && retried' do
let(:build) { create(:ci_build, :failed, :retried, pipeline: pipeline) }
it 'does not include retried title' do
status_title = presenter.status_title
expect(status_title).not_to include('(retried)')
expect(status_title).to eq('Failed <br> (unknown failure)')
end
end
context 'when build has failed and is allowed to' do
let(:build) { create(:ci_build, :failed, :allowed_to_fail, pipeline: pipeline) }
it 'returns the reason of failure' do
status_title = presenter.status_title
expect(status_title).to eq('Failed <br> (unknown failure)')
end
end
context 'For any other build' do
let(:build) { create(:ci_build, :success, pipeline: pipeline) }
it 'returns the status' do
tooltip_description = presenter.status_title
expect(tooltip_description).to eq('Success')
end
end
end
...@@ -134,4 +165,56 @@ describe Ci::BuildPresenter do
end
end
end
describe '#tooltip_message' do
context 'When build has failed' do
let(:build) { create(:ci_build, :script_failure, pipeline: pipeline) }
it 'returns the reason of failure' do
tooltip = subject.tooltip_message
expect(tooltip).to eq("#{build.name} - failed <br> (script failure)")
end
end
context 'When build has failed and retried' do
let(:build) { create(:ci_build, :script_failure, :retried, pipeline: pipeline) }
it 'should include the reason of failure and the retried title' do
tooltip = subject.tooltip_message
expect(tooltip).to eq("#{build.name} - failed <br> (script failure) (retried)")
end
end
context 'When build has failed and is allowed to' do
let(:build) { create(:ci_build, :script_failure, :allowed_to_fail, pipeline: pipeline) }
it 'should include the reason of failure' do
tooltip = subject.tooltip_message
expect(tooltip).to eq("#{build.name} - failed <br> (script failure) (allowed to fail)")
end
end
context 'For any other build (no retried)' do
let(:build) { create(:ci_build, :success, pipeline: pipeline) }
it 'should include build name and status' do
tooltip = subject.tooltip_message
expect(tooltip).to eq("#{build.name} - passed")
end
end
context 'For any other build (retried)' do
let(:build) { create(:ci_build, :success, :retried, pipeline: pipeline) }
it 'should include build name and status' do
tooltip = subject.tooltip_message
expect(tooltip).to eq("#{build.name} - passed (retried)")
end
end
end
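# Hypothetical helper sketch (not the actual Ci::BuildPresenter code) of how a tooltip
# like the ones asserted above could be composed from a build and the tooltip text of
# its detailed status:
def build_tooltip_sketch(build, status_tooltip)
tooltip = "#{build.name} - #{status_tooltip}"
tooltip += ' (retried)' if build.retried?
tooltip
end
# build_tooltip_sketch(build, 'passed') # => "#{build.name} - passed"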
end
...@@ -952,12 +952,53 @@ describe API::Runner do
describe 'POST /api/v4/jobs/:id/artifacts/authorize' do
context 'when using token as parameter' do
context 'posting artifacts to running job' do
subject do
authorize_artifacts_with_token_in_params
end
shared_examples 'authorizes local file' do
it 'succeeds' do
subject
expect(response).to have_gitlab_http_status(200)
expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path)
expect(json_response['RemoteObject']).to be_nil
end
end
context 'when using local storage' do
it_behaves_like 'authorizes local file'
end
context 'when using remote storage' do
context 'when direct upload is enabled' do
before do
stub_artifacts_object_storage(enabled: true, direct_upload: true)
end
it 'succeeds' do
subject
expect(response).to have_gitlab_http_status(200)
expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path)
expect(json_response['RemoteObject']).to have_key('ID')
expect(json_response['RemoteObject']).to have_key('GetURL')
expect(json_response['RemoteObject']).to have_key('StoreURL')
expect(json_response['RemoteObject']).to have_key('DeleteURL')
end
end
context 'when direct upload is disabled' do
before do
stub_artifacts_object_storage(enabled: true, direct_upload: false)
end
it_behaves_like 'authorizes local file'
end
end
end
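# Illustrative payload shapes implied by the assertions above (all values are placeholders):
local_authorize_response = {
'TempPath' => '/shared/artifacts/tmp/uploads',
'RemoteObject' => nil
}
direct_upload_authorize_response = {
'TempPath' => '/shared/artifacts/tmp/uploads',
'RemoteObject' => {
'ID' => 'upload-id-placeholder',
'GetURL' => 'https://object-storage.example/get',
'StoreURL' => 'https://object-storage.example/put',
'DeleteURL' => 'https://object-storage.example/delete'
}
}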
it 'fails to post too large artifact' do
...@@ -1053,22 +1094,47 @@ describe API::Runner do
end
end
context 'when uses accelerated file post' do
context 'for file stored locally' do
before do
upload_artifacts(file_upload, headers_with_token)
end
it_behaves_like 'successful artifacts upload'
end
context 'for file stored remotely' do
let!(:fog_connection) do
stub_artifacts_object_storage(direct_upload: true)
end
before do
fog_connection.directories.get('artifacts').files.create(
key: 'tmp/upload/12312300',
body: 'content'
)
upload_artifacts(file_upload, headers_with_token,
{ 'file.remote_id' => remote_id })
end
context 'when valid remote_id is used' do
let(:remote_id) { '12312300' }
it_behaves_like 'successful artifacts upload'
end
context 'when invalid remote_id is used' do
let(:remote_id) { 'invalid id' }
it 'responds with bad request' do
expect(response).to have_gitlab_http_status(500)
expect(json_response['message']).to eq("Missing file")
end
end
end
end
context 'when using runners token' do
it 'responds with forbidden' do
upload_artifacts(file_upload, headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.project.runners_token))
...@@ -1210,15 +1276,19 @@ describe API::Runner do
end
end
context 'when artifacts are being stored outside of tmp path' do
let(:new_tmpdir) { Dir.mktmpdir }
before do
# init before overwriting tmp dir
file_upload
# by configuring this path we allow to pass file from @tmpdir only
# but all temporary files are stored in system tmp directory
allow(Dir).to receive(:tmpdir).and_return(new_tmpdir)
end
after do
FileUtils.remove_entry(new_tmpdir)
end
it "fails to post artifacts for outside of tmp path" do
...@@ -1228,12 +1298,11 @@ describe API::Runner do
end
end
def upload_artifacts(file, headers = {}, params = {})
params = params.merge({
'file.path' => file.path,
'file.name' => file.original_filename
})
post api("/jobs/#{job.id}/artifacts"), params, headers
end
...
...@@ -1044,7 +1044,7 @@ describe 'Git LFS API and storage' do
it_behaves_like 'a valid response' do
it 'responds with status 200, location of lfs remote store and object details' do
expect(json_response['TempPath']).to eq(LfsObjectUploader.workhorse_local_upload_path)
expect(json_response['RemoteObject']).to have_key('ID')
expect(json_response['RemoteObject']).to have_key('GetURL')
expect(json_response['RemoteObject']).to have_key('StoreURL')
...@@ -1101,7 +1101,9 @@ describe 'Git LFS API and storage' do
['123123', '../../123123'].each do |remote_id|
context "with invalid remote_id: #{remote_id}" do
subject do
put_finalize(with_tempfile: true, args: {
'file.remote_id' => remote_id
})
end
it 'responds with status 403' do
...@@ -1121,9 +1123,10 @@ describe 'Git LFS API and storage' do
end
subject do
put_finalize(with_tempfile: true, args: {
'file.remote_id' => '12312300',
'file.name' => 'name'
})
end
it 'responds with status 200' do
...@@ -1373,7 +1376,7 @@ describe 'Git LFS API and storage' do
put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}/authorize", nil, authorize_headers
end
def put_finalize(lfs_tmp = lfs_tmp_file, with_tempfile: false, args: {})
upload_path = LfsObjectUploader.workhorse_local_upload_path
file_path = upload_path + '/' + lfs_tmp if lfs_tmp
...@@ -1382,12 +1385,12 @@ describe 'Git LFS API and storage' do
FileUtils.touch(file_path)
end
extra_args = {
'file.path' => file_path,
'file.name' => File.basename(file_path)
}
put_finalize_with_args(args.merge(extra_args).compact)
end
def put_finalize_with_args(args)
...
...@@ -28,15 +28,31 @@ describe BuildSerializer do
end
describe '#represent_status' do
context 'for a failed build' do
let(:resource) { create(:ci_build, :failed) }
let(:status) { resource.detailed_status(double('user')) }
subject { serializer.represent_status(resource) }
it 'serializes only status' do
expect(subject[:text]).to eq(status.text)
expect(subject[:label]).to eq('failed')
expect(subject[:tooltip]).to eq('failed <br> (unknown failure)')
expect(subject[:icon]).to eq(status.icon)
expect(subject[:favicon]).to match_asset_path("/assets/ci_favicons/#{status.favicon}.ico")
end
end
context 'for any other type of build' do
let(:resource) { create(:ci_build, :success) }
let(:status) { resource.detailed_status(double('user')) }
subject { serializer.represent_status(resource) }
it 'serializes only status' do
expect(subject[:text]).to eq(status.text)
expect(subject[:label]).to eq('passed')
expect(subject[:tooltip]).to eq('passed')
expect(subject[:icon]).to eq(status.icon)
expect(subject[:favicon]).to match_asset_path("/assets/ci_favicons/#{status.favicon}.ico")
end
...
...@@ -38,7 +38,7 @@ describe JobEntity do
it 'contains details' do
expect(subject).to include :status
expect(subject[:status]).to include :icon, :favicon, :text, :label, :tooltip
end
context 'when job is retryable' do
...@@ -126,7 +126,29 @@ describe JobEntity do
it 'contains details' do
expect(subject).to include :status
expect(subject[:status]).to include :icon, :favicon, :text, :label, :tooltip
end
end
context 'when job failed' do
let(:job) { create(:ci_build, :script_failure) }
describe 'status' do
it 'should contain the failure reason inside label' do
expect(subject[:status]).to include :icon, :favicon, :text, :label, :tooltip
expect(subject[:status][:label]).to eq('failed')
expect(subject[:status][:tooltip]).to eq('failed <br> (script failure)')
end
end
end
context 'when job passed' do
let(:job) { create(:ci_build, :success) }
describe 'status' do
it 'should not contain the failure reason inside label' do
expect(subject[:status][:label]).to eq('passed')
end
end
end
end
...@@ -30,7 +30,7 @@ describe PipelineEntity do
expect(subject).to include :details
expect(subject[:details])
.to include :duration, :finished_at
expect(subject[:details][:status]).to include :icon, :favicon, :text, :label, :tooltip
end
it 'contains flags' do
...
...@@ -26,7 +26,7 @@ describe StageEntity do
end
it 'contains detailed status' do
expect(subject[:status]).to include :text, :label, :group, :icon, :tooltip
expect(subject[:status][:label]).to eq 'passed'
end
...
...@@ -16,7 +16,7 @@ describe StatusEntity do
subject { entity.as_json }
it 'contains status details' do
expect(subject).to include :text, :icon, :favicon, :label, :group, :tooltip
expect(subject).to include :has_details, :details_path
expect(subject[:favicon]).to match_asset_path('/assets/ci_favicons/favicon_status_success.ico')
end
...
...@@ -370,10 +370,89 @@ module Ci
it_behaves_like 'validation is not active'
end
end
end
describe '#register_success' do
let!(:current_time) { Time.new(2018, 4, 5, 14, 0, 0) }
let!(:attempt_counter) { double('Gitlab::Metrics::NullMetric') }
let!(:job_queue_duration_seconds) { double('Gitlab::Metrics::NullMetric') }
before do
allow(Time).to receive(:now).and_return(current_time)
# Stub defaults for any metrics other than the ones we're testing
allow(Gitlab::Metrics).to receive(:counter)
.with(any_args)
.and_return(Gitlab::Metrics::NullMetric.instance)
allow(Gitlab::Metrics).to receive(:histogram)
.with(any_args)
.and_return(Gitlab::Metrics::NullMetric.instance)
# Stub tested metrics
allow(Gitlab::Metrics).to receive(:counter)
.with(:job_register_attempts_total, anything)
.and_return(attempt_counter)
allow(Gitlab::Metrics).to receive(:histogram)
.with(:job_queue_duration_seconds, anything, anything, anything)
.and_return(job_queue_duration_seconds)
project.update(shared_runners_enabled: true)
pending_job.update(created_at: current_time - 3600, queued_at: current_time - 1800)
end
shared_examples 'metrics collector' do
it 'increments attempt counter' do
allow(job_queue_duration_seconds).to receive(:observe)
expect(attempt_counter).to receive(:increment)
execute(runner)
end
it 'counts job queuing time histogram with expected labels' do
allow(attempt_counter).to receive(:increment)
expect(job_queue_duration_seconds).to receive(:observe)
.with({ shared_runner: expected_shared_runner,
jobs_running_for_project: expected_jobs_running_for_project_first_job }, 1800)
execute(runner)
end
context 'when project already has running jobs' do
let!(:build2) { create( :ci_build, :running, pipeline: pipeline, runner: shared_runner) }
let!(:build3) { create( :ci_build, :running, pipeline: pipeline, runner: shared_runner) }
it 'counts job queuing time histogram with expected labels' do
allow(attempt_counter).to receive(:increment)
expect(job_queue_duration_seconds).to receive(:observe)
.with({ shared_runner: expected_shared_runner,
jobs_running_for_project: expected_jobs_running_for_project_third_job }, 1800)
execute(runner)
end
end
end
context 'when shared runner is used' do
let(:runner) { shared_runner }
let(:expected_shared_runner) { true }
let(:expected_jobs_running_for_project_first_job) { 0 }
let(:expected_jobs_running_for_project_third_job) { 2 }
it_behaves_like 'metrics collector'
end
context 'when specific runner is used' do
let(:runner) { specific_runner }
let(:expected_shared_runner) { false }
let(:expected_jobs_running_for_project_first_job) { '+Inf' }
let(:expected_jobs_running_for_project_third_job) { '+Inf' }
it_behaves_like 'metrics collector'
end
end
def execute(runner)
described_class.new(runner).execute.build
end
end
end
end
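# Rough sketch of the instrumentation these examples stub; the metric names come from
# the stubs above, while the docstrings, buckets, and observed values are assumptions:
attempt_counter = Gitlab::Metrics.counter(:job_register_attempts_total, 'Counts attempts to register a job')
queue_duration = Gitlab::Metrics.histogram(:job_queue_duration_seconds, 'Time a pending job waited for a runner', {}, [1, 10, 60, 300, 900, 1800])
attempt_counter.increment
queue_duration.observe({ shared_runner: true, jobs_running_for_project: 0 }, 1800)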
...@@ -59,6 +59,10 @@ module StubConfiguration
allow(Gitlab.config.lfs).to receive_messages(to_settings(messages))
end
def stub_artifacts_setting(messages)
allow(Gitlab.config.artifacts).to receive_messages(to_settings(messages))
end
def stub_storage_settings(messages)
messages.deep_stringify_keys!
...
...@@ -528,108 +528,46 @@ describe ObjectStorage do
end
end
describe '#cache!' do
subject do
uploader.cache!(uploaded_file)
end
context 'when local file is used' do
context 'when valid file is used' do
let(:uploaded_file) do
fixture_file_upload(Rails.root + 'spec/fixtures/rails_sample.jpg', 'image/jpg')
end
it "properly caches the file" do
subject
expect(uploader).to be_exists
expect(uploader.path).to start_with(uploader_class.root)
expect(uploader.filename).to eq('rails_sample.jpg')
end
end
end
context 'when remote file is used' do
let(:temp_file) { Tempfile.new("test") }
let!(:fog_connection) do
stub_uploads_object_storage(uploader_class)
end
before do
FileUtils.touch(temp_file)
end
after do
FileUtils.rm_f(temp_file)
end
context 'when valid file is used' do
context 'when invalid file is specified' do
let(:uploaded_file) do
UploadedFile.new(temp_file.path, remote_id: "../test/123123")
end
it 'raises an error' do
...@@ -638,9 +576,8 @@ describe ObjectStorage do
end
context 'when non existing file is specified' do
let(:uploaded_file) do
UploadedFile.new(temp_file.path, remote_id: "test/123123")
end
it 'raises an error' do
...@@ -648,10 +585,9 @@ describe ObjectStorage do
end
end
context 'when valid file is specified' do
let(:uploaded_file) do
UploadedFile.new(temp_file.path, filename: "my_file.txt", remote_id: "test/123123")
end
let!(:fog_file) do
...@@ -661,36 +597,37 @@ describe ObjectStorage do
)
end
it 'file to be cached and remote stored' do
expect { subject }.not_to raise_error
expect(uploader).to be_exists
expect(uploader).to be_cached
expect(uploader.path).not_to be_nil
expect(uploader.path).not_to include('tmp/cache')
expect(uploader.url).not_to be_nil
expect(uploader.path).not_to include('tmp/cache')
expect(uploader.object_store).to eq(described_class::Store::REMOTE)
end
context 'when file is stored' do
subject do
uploader.store!(uploaded_file)
end
it 'file to be remotely stored in permanent location' do
subject
expect(uploader).to be_exists
expect(uploader).not_to be_cached
expect(uploader.path).not_to be_nil
expect(uploader.path).not_to include('tmp/upload')
expect(uploader.path).not_to include('tmp/cache')
expect(uploader.url).to include('/my_file.txt')
expect(uploader.object_store).to eq(described_class::Store::REMOTE)
end
end
end
end
end
end
end
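# Usage sketch of the flow exercised above (assumed setup; `uploader` stands for any
# ObjectStorage-backed CarrierWave uploader and the local path is a placeholder):
uploaded_file = UploadedFile.new('/tmp/placeholder', filename: 'my_file.txt', remote_id: 'test/123123')
uploader.cache!(uploaded_file) # cached: uploader.cached? => true, served from the remote store
uploader.store!(uploaded_file) # stored permanently: path no longer under tmp/upload or tmp/cache
uploader.object_store          # => ObjectStorage::Store::REMOTE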
require 'spec_helper'
describe 'projects/jobs/show' do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:build) { create(:ci_build, pipeline: pipeline) }
let(:builds) { project.builds.present(current_user: user) }
let(:pipeline) do
create(:ci_pipeline, project: project, sha: project.commit.id)
...@@ -11,6 +13,7 @@ describe 'projects/jobs/show' do
before do
assign(:build, build.present)
assign(:project, project)
assign(:builds, builds)
allow(view).to receive(:can?).and_return(true)
end
...