Commit 53353c84 authored by Grzegorz Bizon

Merge branch 'master' into backstage/gb/after-save-asynchronous-job-hooks

* master: (115 commits)
  Use event-based waiting in Gitlab::JobWaiter
  Make sure repository's removal work for legacy and hashed storages
  Use `@hashed` prefix for hashed paths on disk, to avoid collision with existing ones
  Refactor project and storage types
  Prevent using gitlab import task when hashed storage is enabled
  Some codestyle changes and fixes for GitLab pages
  Removed some useless code, codestyle changes and removed an index
  Fix repository reloading in some specs
  Changelog
  Moving away from the "extend" based factory to a more traditional one.
  Enable automatic hashed storage for new projects by application settings
  New storage is now "Hashed" instead of "UUID"
  Add UUID Storage to Project
  Move create_repository back to project model as we can use disk_path and share it
  Codestyle: move hooks to the same place and move dependent methods to private
  Use non-i18n values for setting new group-level issue/MR button text
  indexes external issue tracker
  copyedit
  indexes user/search/ from /user/index
  Correctly encode string params for Gitaly's TreeEntries RPC
  ...
parents 45d1c9a4 78a0d27e
@@ -152,7 +152,7 @@ gem 'acts-as-taggable-on', '~> 4.0'
 gem 'sidekiq', '~> 5.0'
 gem 'sidekiq-cron', '~> 0.6.0'
 gem 'redis-namespace', '~> 1.5.2'
-gem 'sidekiq-limit_fetch', '~> 3.4'
+gem 'sidekiq-limit_fetch', '~> 3.4', require: false

 # Cron Parser
 gem 'rufus-scheduler', '~> 3.4'
...
@@ -4,10 +4,10 @@ export default class ProjectSelectComboButton {
   constructor(select) {
     this.projectSelectInput = $(select);
     this.newItemBtn = $('.new-project-item-link');
-    this.newItemBtnBaseText = this.newItemBtn.data('label');
-    this.itemType = this.deriveItemTypeFromLabel();
+    this.resourceType = this.newItemBtn.data('type');
+    this.resourceLabel = this.newItemBtn.data('label');
+    this.formattedText = this.deriveTextVariants();
     this.groupId = this.projectSelectInput.data('groupId');
     this.bindEvents();
     this.initLocalStorage();
   }
@@ -23,9 +23,7 @@ export default class ProjectSelectComboButton {
     const localStorageIsSafe = AccessorUtilities.isLocalStorageAccessSafe();

     if (localStorageIsSafe) {
-      const itemTypeKebabed = this.newItemBtnBaseText.toLowerCase().split(' ').join('-');
-      this.localStorageKey = ['group', this.groupId, itemTypeKebabed, 'recent-project'].join('-');
+      this.localStorageKey = ['group', this.groupId, this.formattedText.localStorageItemType, 'recent-project'].join('-');
       this.setBtnTextFromLocalStorage();
     }
   }
@@ -57,19 +55,14 @@ export default class ProjectSelectComboButton {
   setNewItemBtnAttributes(project) {
     if (project) {
       this.newItemBtn.attr('href', project.url);
-      this.newItemBtn.text(`${this.newItemBtnBaseText} in ${project.name}`);
+      this.newItemBtn.text(`${this.formattedText.defaultTextPrefix} in ${project.name}`);
       this.newItemBtn.enable();
     } else {
-      this.newItemBtn.text(`Select project to create ${this.itemType}`);
+      this.newItemBtn.text(`Select project to create ${this.formattedText.presetTextSuffix}`);
       this.newItemBtn.disable();
     }
   }

-  deriveItemTypeFromLabel() {
-    // label is either 'New issue' or 'New merge request'
-    return this.newItemBtnBaseText.split(' ').slice(1).join(' ');
-  }
-
   getProjectFromLocalStorage() {
     const projectString = localStorage.getItem(this.localStorageKey);
@@ -81,5 +74,19 @@ export default class ProjectSelectComboButton {
     localStorage.setItem(this.localStorageKey, projectString);
   }
+
+  deriveTextVariants() {
+    const defaultTextPrefix = this.resourceLabel;
+
+    // the trailing slice call depluralizes each of these strings (e.g. new-issues -> new-issue)
+    const localStorageItemType = `new-${this.resourceType.split('_').join('-').slice(0, -1)}`;
+    const presetTextSuffix = this.resourceType.split('_').join(' ').slice(0, -1);
+
+    return {
+      localStorageItemType, // new-issue / new-merge-request
+      defaultTextPrefix, // New issue / New merge request
+      presetTextSuffix, // issue / merge request
+    };
+  }
 }
@@ -761,7 +761,7 @@
   &:hover,
   &:active,
   &:focus {
-    background-color: $gray-darker;
+    background-color: $dropdown-item-hover-bg;
     color: $gl-text-color;
   }
...
@@ -264,3 +264,41 @@
 .ajax-users-dropdown {
   min-width: 250px !important;
 }
+
+// TODO: change global style
+.ajax-project-dropdown {
+  &.select2-drop {
+    color: $gl-text-color;
+  }
+
+  .select2-results {
+    .select2-no-results,
+    .select2-searching,
+    .select2-ajax-error,
+    .select2-selection-limit {
+      background: transparent;
+    }
+
+    .select2-result {
+      padding: 0 1px;
+
+      .select2-match {
+        font-weight: bold;
+        text-decoration: none;
+      }
+
+      .select2-result-label {
+        padding: #{$gl-padding / 2} $gl-padding;
+      }
+
+      &.select2-highlighted {
+        background-color: transparent !important;
+        color: $gl-text-color;
+
+        .select2-result-label {
+          background-color: $dropdown-item-hover-bg;
+        }
+      }
+    }
+  }
+}
@@ -294,7 +294,7 @@ $dropdown-input-focus-shadow: rgba($dropdown-input-focus-border, .4);
 $dropdown-loading-bg: rgba(#fff, .6);
 $dropdown-chevron-size: 10px;
 $dropdown-toggle-active-border-color: darken($border-color, 14%);
+$dropdown-item-hover-bg: $gray-darker;

 /*
  * Filtered Search
...
@@ -26,6 +26,13 @@ class GroupsController < Groups::ApplicationController
   def new
     @group = Group.new
+
+    if params[:parent_id].present?
+      parent = Group.find_by(id: params[:parent_id])
+
+      if can?(current_user, :create_subgroup, parent)
+        @group.parent = parent
+      end
+    end
   end

   def create
...
@@ -116,6 +116,7 @@ module ApplicationSettingsHelper
   :email_author_in_body,
   :enabled_git_access_protocol,
   :gravatar_enabled,
+  :hashed_storage_enabled,
   :help_page_hide_commercial_content,
   :help_page_support_url,
   :help_page_text,
...
@@ -176,7 +176,7 @@ module EventsHelper
   sanitize(
     text,
     tags: %w(a img gl-emoji b pre code p span),
-    attributes: Rails::Html::WhiteListSanitizer.allowed_attributes + ['style', 'data-name', 'data-unicode-version']
+    attributes: Rails::Html::WhiteListSanitizer.allowed_attributes + ['style', 'data-src', 'data-name', 'data-unicode-version']
   )
 end
...
@@ -9,7 +9,7 @@ module Ci
   belongs_to :owner, class_name: 'User'

   has_one :last_pipeline, -> { order(id: :desc) }, class_name: 'Ci::Pipeline'
   has_many :pipelines
-  has_many :variables, class_name: 'Ci::PipelineScheduleVariable'
+  has_many :variables, class_name: 'Ci::PipelineScheduleVariable', validate: false

   validates :cron, unless: :importing?, cron: true, presence: { unless: :importing? }
   validates :cron_timezone, cron_timezone: true, presence: { unless: :importing? }
...
@@ -4,5 +4,7 @@ module Ci
     include HasVariable

     belongs_to :pipeline_schedule
+
+    validates :key, uniqueness: { scope: :pipeline_schedule_id }
   end
 end
 module Ci
   class Stage < ActiveRecord::Base
     extend Ci::Model
+    include Importable
+    include HasStatus
+    include Gitlab::OptimisticLocking
+
+    enum status: HasStatus::STATUSES_ENUM

     belongs_to :project
     belongs_to :pipeline

-    has_many :statuses, class_name: 'CommitStatus', foreign_key: :commit_id
-    has_many :builds, foreign_key: :commit_id
+    has_many :statuses, class_name: 'CommitStatus', foreign_key: :stage_id
+    has_many :builds, foreign_key: :stage_id
+
+    validates :project, presence: true, unless: :importing?
+    validates :pipeline, presence: true, unless: :importing?
+    validates :name, presence: true, unless: :importing?
+
+    state_machine :status, initial: :created do
+      event :enqueue do
+        transition created: :pending
+        transition [:success, :failed, :canceled, :skipped] => :running
+      end
+
+      event :run do
+        transition any - [:running] => :running
+      end
+
+      event :skip do
+        transition any - [:skipped] => :skipped
+      end
+
+      event :drop do
+        transition any - [:failed] => :failed
+      end
+
+      event :succeed do
+        transition any - [:success] => :success
+      end
+
+      event :cancel do
+        transition any - [:canceled] => :canceled
+      end
+
+      event :block do
+        transition any - [:manual] => :manual
+      end
+    end
+
+    def update_status
+      retry_optimistic_lock(self) do
+        case statuses.latest.status
+        when 'pending' then enqueue
+        when 'running' then run
+        when 'success' then succeed
+        when 'failed' then drop
+        when 'canceled' then cancel
+        when 'manual' then block
+        when 'skipped' then skip
+        else skip
+        end
+      end
+    end
   end
 end
@@ -39,14 +39,14 @@ class CommitStatus < ActiveRecord::Base
   scope :after_stage, -> (index) { where('stage_idx > ?', index) }

   state_machine :status do
+    event :enqueue do
+      transition [:created, :skipped, :manual] => :pending
+    end
+
     event :process do
       transition [:skipped, :manual] => :created
     end

-    event :enqueue do
-      transition [:created, :skipped, :manual] => :pending
-    end
-
     event :run do
       transition pending: :running
     end
@@ -91,6 +91,7 @@ class CommitStatus < ActiveRecord::Base
       end
     end

+      StageUpdateWorker.perform_async(commit_status.stage_id)
       ExpireJobCacheWorker.perform_async(commit_status.id)
     end
   end
...
@@ -8,6 +8,8 @@ module HasStatus
   ACTIVE_STATUSES = %w[pending running].freeze
   COMPLETED_STATUSES = %w[success failed canceled skipped].freeze
   ORDERED_STATUSES = %w[failed pending running manual canceled success skipped created].freeze
+  STATUSES_ENUM = { created: 0, pending: 1, running: 2, success: 3,
+                    failed: 4, canceled: 5, skipped: 6, manual: 7 }.freeze

   class_methods do
     def status_sql
...
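Since `Ci::Stage` above declares `enum status: HasStatus::STATUSES_ENUM`, the integer column added by this branch maps onto these status names through the standard Rails enum API. A minimal sketch of what that declaration provides (plain ActiveRecord behavior, not GitLab-specific code):

```ruby
# Standard Rails enum behavior implied by `enum status: HasStatus::STATUSES_ENUM`.
Ci::Stage.statuses['running'] # => 2, the integer stored in the new ci_stages.status column

stage = Ci::Stage.new
stage.status = :pending
stage.pending? # => true, a generated predicate method

Ci::Stage.skipped # generated scope: stages whose status column equals 6
```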
module Storage
  module LegacyProject
    extend ActiveSupport::Concern

    def disk_path
      full_path
    end

    def ensure_storage_path_exist
      gitlab_shell.add_namespace(repository_storage_path, namespace.full_path)
    end

    def rename_repo
      path_was = previous_changes['path'].first
      old_path_with_namespace = File.join(namespace.full_path, path_was)
      new_path_with_namespace = File.join(namespace.full_path, path)

      Rails.logger.error "Attempting to rename #{old_path_with_namespace} -> #{new_path_with_namespace}"

      if has_container_registry_tags?
        Rails.logger.error "Project #{old_path_with_namespace} cannot be renamed because container registry tags are present!"

        # we currently don't support renaming a repository if it contains images in the container registry
        raise StandardError.new('Project cannot be renamed, because images are present in its container registry')
      end

      expire_caches_before_rename(old_path_with_namespace)

      if gitlab_shell.mv_repository(repository_storage_path, old_path_with_namespace, new_path_with_namespace)
        # If the repository moved successfully we need to send update instructions to users.
        # However we cannot allow rollback since we moved the repository,
        # so we basically mute exceptions in the next actions.
        begin
          gitlab_shell.mv_repository(repository_storage_path, "#{old_path_with_namespace}.wiki", "#{new_path_with_namespace}.wiki")
          send_move_instructions(old_path_with_namespace)
          expires_full_path_cache

          @old_path_with_namespace = old_path_with_namespace

          SystemHooksService.new.execute_hooks_for(self, :rename)

          @repository = nil
        rescue => e
          Rails.logger.error "Exception renaming #{old_path_with_namespace} -> #{new_path_with_namespace}: #{e}"
          # Returning false does not roll back the after_* transaction but gives
          # us information that some of the tasks failed
          false
        end
      else
        Rails.logger.error "Repository could not be renamed: #{old_path_with_namespace} -> #{new_path_with_namespace}"

        # if we cannot move the namespace directory we should roll back
        # db changes in order to prevent the db and fs from getting out of sync
        raise StandardError.new('repository cannot be renamed')
      end

      Gitlab::AppLogger.info "Project was renamed: #{old_path_with_namespace} -> #{new_path_with_namespace}"

      Gitlab::UploadsTransfer.new.rename_project(path_was, path, namespace.full_path)
      Gitlab::PagesTransfer.new.rename_project(path_was, path, namespace.full_path)
    end

    def create_repository(force: false)
      # Forked import is handled asynchronously
      return if forked? && !force

      if gitlab_shell.add_repository(repository_storage_path, path_with_namespace)
        repository.after_create

        true
      else
        errors.add(:base, 'Failed to create repository via gitlab-shell')

        false
      end
    end
  end
end
@@ -83,6 +83,10 @@ class Event < ActiveRecord::Base
   self.inheritance_column = 'action'

   class << self
+    def model_name
+      ActiveModel::Name.new(self, nil, 'event')
+    end
+
     def find_sti_class(action)
       if action.to_i == PUSHED
         PushEvent
@@ -438,6 +442,12 @@ class Event < ActiveRecord::Base
     EventForMigration.create!(new_attributes)
   end

+  def to_partial_path
+    # We are intentionally using `Event` rather than `self.class` so that
+    # subclasses also use the `Event` implementation.
+    Event._to_partial_path
+  end
+
   private

   def recent_update?
...
@@ -9,11 +9,8 @@ class Issue < ActiveRecord::Base
   include Spammable
   include FasterCacheKeys
   include RelativePositioning
-  include IgnorableColumn
   include CreatedAtFilterable

-  ignore_column :position
-
   DueDateStruct = Struct.new(:title, :name).freeze
   NoDueDate = DueDateStruct.new('No Due Date', '0').freeze
   AnyDueDate = DueDateStruct.new('Any Due Date', '').freeze
...
@@ -7,7 +7,6 @@ class MergeRequest < ActiveRecord::Base
   include IgnorableColumn
   include CreatedAtFilterable

-  ignore_column :position
   ignore_column :locked_at

   belongs_to :target_project, class_name: "Project"
...
@@ -17,7 +17,6 @@ class Project < ActiveRecord::Base
   include ProjectFeaturesCompatibility
   include SelectForProjectAuthorization
   include Routable
-  include Storage::LegacyProject

   extend Gitlab::ConfigHelper
@@ -25,6 +24,7 @@ class Project < ActiveRecord::Base
   NUMBER_OF_PERMITTED_BOARDS = 1
   UNKNOWN_IMPORT_URL = 'http://unknown.git'.freeze
+  LATEST_STORAGE_VERSION = 1

   cache_markdown_field :description, pipeline: :description
@@ -32,6 +32,8 @@ class Project < ActiveRecord::Base
     :merge_requests_enabled?, :issues_enabled?, to: :project_feature,
     allow_nil: true

+  delegate :base_dir, :disk_path, :ensure_storage_path_exists, to: :storage
+
   default_value_for :archived, false
   default_value_for :visibility_level, gitlab_config_features.visibility_level
   default_value_for :container_registry_enabled, gitlab_config_features.container_registry
@@ -44,32 +46,24 @@ class Project < ActiveRecord::Base
   default_value_for :snippets_enabled, gitlab_config_features.snippets
   default_value_for :only_allow_merge_if_all_discussions_are_resolved, false

-  after_create :ensure_storage_path_exist
-  after_create :create_project_feature, unless: :project_feature
-  after_save :update_project_statistics, if: :namespace_id_changed?
+  add_authentication_token_field :runners_token

-  # set last_activity_at to the same as created_at
+  before_save :ensure_runners_token
+
+  after_save :update_project_statistics, if: :namespace_id_changed?
+
+  after_create :create_project_feature, unless: :project_feature
   after_create :set_last_activity_at
-  def set_last_activity_at
-    update_column(:last_activity_at, self.created_at)
-  end
-
   after_create :set_last_repository_updated_at
-  def set_last_repository_updated_at
-    update_column(:last_repository_updated_at, self.created_at)
-  end
+  after_update :update_forks_visibility_level

   before_destroy :remove_private_deploy_keys

   after_destroy -> { run_after_commit { remove_pages } }

-  # update visibility_level of forks
-  after_update :update_forks_visibility_level
-
   after_validation :check_pending_delete

-  # Legacy Storage specific hooks
-  after_save :ensure_storage_path_exist, if: :namespace_id_changed?
+  # Storage specific hooks
+  after_initialize :use_hashed_storage
+
+  after_create :ensure_storage_path_exists
+  after_save :ensure_storage_path_exists, if: :namespace_id_changed?

   acts_as_taggable
@@ -238,9 +232,6 @@ class Project < ActiveRecord::Base
     presence: true,
     inclusion: { in: ->(_object) { Gitlab.config.repositories.storages.keys } }

-  add_authentication_token_field :runners_token
-
-  before_save :ensure_runners_token
-
   mount_uploader :avatar, AvatarUploader
   has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
@@ -487,6 +478,10 @@ class Project < ActiveRecord::Base
     @repository ||= Repository.new(full_path, self, disk_path: disk_path)
   end

+  def reload_repository!
+    @repository = nil
+  end
+
   def container_registry_url
     if Gitlab.config.registry.enabled
       "#{Gitlab.config.registry.host_port}/#{full_path.downcase}"
@@ -1004,6 +999,19 @@ class Project < ActiveRecord::Base
     end
   end

+  def create_repository(force: false)
+    # Forked import is handled asynchronously
+    return if forked? && !force
+
+    if gitlab_shell.add_repository(repository_storage_path, disk_path)
+      repository.after_create
+
+      true
+    else
+      errors.add(:base, 'Failed to create repository via gitlab-shell')
+
+      false
+    end
+  end
+
   def hook_attrs(backward: true)
     attrs = {
       name: name,
@@ -1086,6 +1094,7 @@ class Project < ActiveRecord::Base
     !!repository.exists?
   end

+  # update visibility_level of forks
   def update_forks_visibility_level
     return unless visibility_level < visibility_level_was
@@ -1213,7 +1222,8 @@ class Project < ActiveRecord::Base
   end

   def pages_path
-    File.join(Settings.pages.path, disk_path)
+    # TODO: when we migrate Pages to work with new storage types, change here to use disk_path
+    File.join(Settings.pages.path, full_path)
   end

   def public_pages_path
@@ -1252,6 +1262,50 @@ class Project < ActiveRecord::Base
     end
   end

+  def rename_repo
+    new_full_path = build_full_path
+
+    Rails.logger.error "Attempting to rename #{full_path_was} -> #{new_full_path}"
+
+    if has_container_registry_tags?
+      Rails.logger.error "Project #{full_path_was} cannot be renamed because container registry tags are present!"
+
+      # we currently don't support renaming a repository if it contains images in the container registry
+      raise StandardError.new('Project cannot be renamed, because images are present in its container registry')
+    end
+
+    expire_caches_before_rename(full_path_was)
+
+    if storage.rename_repo
+      Gitlab::AppLogger.info "Project was renamed: #{full_path_was} -> #{new_full_path}"
+      rename_repo_notify!
+      after_rename_repo
+    else
+      Rails.logger.error "Repository could not be renamed: #{full_path_was} -> #{new_full_path}"
+
+      # if we cannot move the namespace directory we should roll back
+      # db changes in order to prevent the db and fs from getting out of sync
+      raise StandardError.new('repository cannot be renamed')
+    end
+  end
+
+  def rename_repo_notify!
+    send_move_instructions(full_path_was)
+    expires_full_path_cache
+
+    self.old_path_with_namespace = full_path_was
+    SystemHooksService.new.execute_hooks_for(self, :rename)
+
+    reload_repository!
+  end
+
+  def after_rename_repo
+    path_before_change = previous_changes['path'].first
+
+    Gitlab::UploadsTransfer.new.rename_project(path_before_change, self.path, namespace.full_path)
+    Gitlab::PagesTransfer.new.rename_project(path_before_change, self.path, namespace.full_path)
+  end
+
   def running_or_pending_build_count(force: false)
     Rails.cache.fetch(['projects', id, 'running_or_pending_build_count'], force: force) do
       builds.running_or_pending.count(:all)
@@ -1410,6 +1464,10 @@ class Project < ActiveRecord::Base
     end
   end

+  def full_path_was
+    File.join(namespace.full_path, previous_changes['path'].first)
+  end
+
   alias_method :name_with_namespace, :full_name
   alias_method :human_name, :full_name

   # @deprecated cannot remove yet because it has an index with its name in elasticsearch
@@ -1419,8 +1477,36 @@ class Project < ActiveRecord::Base
     Projects::ForksCountService.new(self).count
   end

+  def legacy_storage?
+    self.storage_version.nil?
+  end
+
   private

+  def storage
+    @storage ||=
+      if self.storage_version && self.storage_version >= 1
+        Storage::HashedProject.new(self)
+      else
+        Storage::LegacyProject.new(self)
+      end
+  end
+
+  def use_hashed_storage
+    if self.new_record? && current_application_settings.hashed_storage_enabled
+      self.storage_version = LATEST_STORAGE_VERSION
+    end
+  end
+
+  # set last_activity_at to the same as created_at
+  def set_last_activity_at
+    update_column(:last_activity_at, self.created_at)
+  end
+
+  def set_last_repository_updated_at
+    update_column(:last_repository_updated_at, self.created_at)
+  end
+
   def cross_namespace_reference?(from)
     case from
     when Project
...
module Storage
  class HashedProject
    attr_accessor :project

    delegate :gitlab_shell, :repository_storage_path, to: :project

    ROOT_PATH_PREFIX = '@hashed'.freeze

    def initialize(project)
      @project = project
    end

    # Base directory
    #
    # @return [String] directory where repository is stored
    def base_dir
      "#{ROOT_PATH_PREFIX}/#{disk_hash[0..1]}/#{disk_hash[2..3]}" if disk_hash
    end

    # Disk path is used to build repository and project's wiki path on disk
    #
    # @return [String] combination of base_dir and the repository's own name without `.git` or `.wiki.git` extensions
    def disk_path
      "#{base_dir}/#{disk_hash}" if disk_hash
    end

    def ensure_storage_path_exists
      gitlab_shell.add_namespace(repository_storage_path, base_dir)
    end

    def rename_repo
      true
    end

    private

    # Generates the hash for the project path and name on disk
    # If you need to refer to the repository on disk, use the `#disk_path`
    def disk_hash
      @disk_hash ||= Digest::SHA2.hexdigest(project.id.to_s) if project.id
    end
  end
end
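To make the layout concrete, here is a minimal standalone sketch of how `Storage::HashedProject` derives a disk path from a project ID via `Digest::SHA2`; the project ID is a made-up example. Under legacy storage the same repository lives at the project's `full_path`, which is why renames there must move directories, while a hashed path never changes once the ID is assigned (hence `rename_repo` simply returning `true`).

```ruby
require 'digest'

project_id = 42 # hypothetical project ID
disk_hash  = Digest::SHA2.hexdigest(project_id.to_s) # SHA-256 by default

base_dir  = "@hashed/#{disk_hash[0..1]}/#{disk_hash[2..3]}"
disk_path = "#{base_dir}/#{disk_hash}"

puts disk_path
# => @hashed/73/47/73475cb40a568e8da8a045ced110137e159f890ac4da883b6b17dc651b3a8049
# The repository itself is stored at this path plus a `.git` extension.
```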
module Storage
  class LegacyProject
    attr_accessor :project

    delegate :namespace, :gitlab_shell, :repository_storage_path, to: :project

    def initialize(project)
      @project = project
    end

    # Base directory
    #
    # @return [String] directory where repository is stored
    def base_dir
      namespace.full_path
    end

    # Disk path is used to build repository and project's wiki path on disk
    #
    # @return [String] combination of base_dir and the repository's own name without `.git` or `.wiki.git` extensions
    def disk_path
      project.full_path
    end

    def ensure_storage_path_exists
      return unless namespace

      gitlab_shell.add_namespace(repository_storage_path, base_dir)
    end

    def rename_repo
      new_full_path = project.build_full_path

      if gitlab_shell.mv_repository(repository_storage_path, project.full_path_was, new_full_path)
        # If the repository moved successfully we need to send update instructions to users.
        # However we cannot allow rollback since we moved the repository,
        # so we basically mute exceptions in the next actions.
        begin
          gitlab_shell.mv_repository(repository_storage_path, "#{project.full_path_was}.wiki", "#{new_full_path}.wiki")
          return true
        rescue => e
          Rails.logger.error "Exception renaming #{project.full_path_was} -> #{new_full_path}: #{e}"
          # Returning false does not roll back the after_* transaction but gives
          # us information that some of the tasks failed
          return false
        end
      end

      false
    end
  end
end
@@ -13,6 +13,8 @@ class GroupPolicy < BasePolicy
   condition(:master) { access_level >= GroupMember::MASTER }
   condition(:reporter) { access_level >= GroupMember::REPORTER }

+  condition(:nested_groups_supported, scope: :global) { Group.supports_nested_groups? }
+
   condition(:has_projects) do
     GroupProjectsFinder.new(group: @subject, current_user: @user).execute.any?
   end
@@ -42,7 +44,7 @@ class GroupPolicy < BasePolicy
     enable :change_visibility_level
   end

-  rule { owner & can_create_group }.enable :create_subgroup
+  rule { owner & can_create_group & nested_groups_supported }.enable :create_subgroup

   rule { public_group | logged_in_viewable }.enable :view_globally
...
@@ -176,9 +176,14 @@ module Ci
   end

   def error(message, save: false)
-    pipeline.errors.add(:base, message)
-    pipeline.drop if save
-    pipeline
+    pipeline.tap do
+      pipeline.errors.add(:base, message)
+
+      if save
+        pipeline.drop
+        update_merge_requests_head_pipeline
+      end
+    end
   end

   def pipeline_created_counter
...
@@ -13,9 +13,9 @@ module Groups
     return @group
   end

-  if @group.parent && !can?(current_user, :admin_group, @group.parent)
+  if @group.parent && !can?(current_user, :create_subgroup, @group.parent)
     @group.parent = nil
-    @group.errors.add(:parent_id, 'manage access required to create subgroup')
+    @group.errors.add(:parent_id, 'You don’t have permission to create a subgroup in this group.')
     return @group
   end
...
@@ -13,7 +13,7 @@ module Groups
   # Execute the destruction of the models immediately to ensure atomic cleanup.
   # Skip repository removal because we remove the directory with the namespace
   # that contains all these repositories
-  ::Projects::DestroyService.new(project, current_user, skip_repo: true).execute
+  ::Projects::DestroyService.new(project, current_user, skip_repo: project.legacy_storage?).execute
 end

 group.children.each do |group|
...
@@ -3,6 +3,8 @@ module MergeRequests
   def execute
     return error('Invalid issue iid') unless issue_iid.present? && issue.present?

+    params[:label_ids] = issue.label_ids if issue.label_ids.any?
+
     result = CreateBranchService.new(project, current_user).execute(branch_name, ref)
     return result if result[:status] == :error
@@ -43,7 +45,8 @@ module MergeRequests
   {
     source_project_id: project.id,
     source_branch: branch_name,
-    target_project_id: project.id
+    target_project_id: project.id,
+    milestone_id: issue.milestone_id
   }
 end
...
@@ -35,16 +35,18 @@ module Users
     Groups::DestroyService.new(group, current_user).execute
   end

+  namespace = user.namespace
+  namespace.prepare_for_destroy
+
   user.personal_projects.each do |project|
     # Skip repository removal because we remove the directory with the namespace
     # that contains all these repositories
-    ::Projects::DestroyService.new(project, current_user, skip_repo: true).execute
+    ::Projects::DestroyService.new(project, current_user, skip_repo: project.legacy_storage?).execute
   end

   MigrateToGhostUserService.new(user).execute unless options[:hard_delete]

   # Destroy the namespace after destroying the user since certain methods may depend on the namespace existing
-  namespace = user.namespace
   user_data = user.destroy
   namespace.really_destroy!
...
@@ -362,7 +362,9 @@
 %fieldset
   %legend Background Jobs
   %p
-    These settings require a restart to take effect.
+    These settings require a
+    = link_to 'restart', help_page_path('administration/restart_gitlab')
+    to take effect.
   .form-group
     .col-sm-offset-2.col-sm-10
       .checkbox
@@ -490,6 +492,16 @@
 %fieldset
   %legend Repository Storage
+  .form-group
+    .col-sm-offset-2.col-sm-10
+      .checkbox
+        = f.label :hashed_storage_enabled do
+          = f.check_box :hashed_storage_enabled
+          Create new projects using hashed storage paths
+        .help-block
+          Enable immutable, hash-based paths and repository names to store repositories on disk. This prevents
+          repositories from having to be moved or renamed when the Project URL changes and may improve disk I/O performance.
+          %em (EXPERIMENTAL)
   .form-group
     = f.label :repository_storages, 'Storage paths for new projects', class: 'control-label col-sm-2'
     .col-sm-10
@@ -499,6 +511,7 @@
     = succeed "." do
       = link_to "repository storages documentation", help_page_path("administration/repository_storages")

 %fieldset
   %legend Repository Checks
   .form-group
...
@@ -8,14 +8,14 @@
 - content_for :breadcrumbs_extra do
   = link_to params.merge(rss_url_options), class: 'btn has-tooltip append-right-10', title: 'Subscribe' do
     = icon('rss')
-  = render 'shared/new_project_item_select', path: 'issues/new', label: "New issue", with_feature_enabled: 'issues'
+  = render 'shared/new_project_item_select', path: 'issues/new', label: "New issue", with_feature_enabled: 'issues', type: :issues

 .top-area
   = render 'shared/issuable/nav', type: :issues
   .nav-controls{ class: ("visible-xs" if show_new_nav?) }
     = link_to params.merge(rss_url_options), class: 'btn has-tooltip', title: 'Subscribe' do
       = icon('rss')
-    = render 'shared/new_project_item_select', path: 'issues/new', label: "New issue", with_feature_enabled: 'issues'
+    = render 'shared/new_project_item_select', path: 'issues/new', label: "New issue", with_feature_enabled: 'issues', type: :issues

 = render 'shared/issuable/filter', type: :issues
 = render 'shared/issues'
@@ -4,12 +4,12 @@
 - if show_new_nav?
   - content_for :breadcrumbs_extra do
-    = render 'shared/new_project_item_select', path: 'merge_requests/new', label: "New merge request", with_feature_enabled: 'merge_requests'
+    = render 'shared/new_project_item_select', path: 'merge_requests/new', label: "New merge request", with_feature_enabled: 'merge_requests', type: :merge_requests

 .top-area
   = render 'shared/issuable/nav', type: :merge_requests
   .nav-controls{ class: ("visible-xs" if show_new_nav?) }
-    = render 'shared/new_project_item_select', path: 'merge_requests/new', label: "New merge request", with_feature_enabled: 'merge_requests'
+    = render 'shared/new_project_item_select', path: 'merge_requests/new', label: "New merge request", with_feature_enabled: 'merge_requests', type: :merge_requests

 = render 'shared/issuable/filter', type: :merge_requests
 = render 'shared/merge_requests'
@@ -4,13 +4,13 @@
 - if show_new_nav?
   - content_for :breadcrumbs_extra do
-    = render 'shared/new_project_item_select', path: 'milestones/new', label: 'New milestone', include_groups: true
+    = render 'shared/new_project_item_select', path: 'milestones/new', label: 'New milestone', include_groups: true, type: :milestones

 .top-area
   = render 'shared/milestones_filter', counts: @milestone_states
   .nav-controls{ class: ("visible-xs" if show_new_nav?) }
-    = render 'shared/new_project_item_select', path: 'milestones/new', label: 'New milestone', include_groups: true
+    = render 'shared/new_project_item_select', path: 'milestones/new', label: 'New milestone', include_groups: true, type: :milestones

 .milestones
   %ul.content-list
...
@@ -12,7 +12,7 @@
 - content_for :breadcrumbs_extra do
   = link_to params.merge(rss_url_options), class: 'btn btn-default append-right-10' do
     = icon('rss')
-  = render 'shared/new_project_item_select', path: 'issues/new', label: "New issue"
+  = render 'shared/new_project_item_select', path: 'issues/new', label: "New issue", type: :issues

 - if group_issues_exists
   .top-area
@@ -22,7 +22,7 @@
       = icon('rss')
       %span.icon-label
         Subscribe
-    = render 'shared/new_project_item_select', path: 'issues/new', label: "New issue"
+    = render 'shared/new_project_item_select', path: 'issues/new', label: "New issue", type: :issues

 = render 'shared/issuable/search_bar', type: :issues
...
@@ -2,7 +2,7 @@
 - if show_new_nav? && current_user
   - content_for :breadcrumbs_extra do
-    = render 'shared/new_project_item_select', path: 'merge_requests/new', label: "New merge request"
+    = render 'shared/new_project_item_select', path: 'merge_requests/new', label: "New merge request", type: :merge_requests

 - if @group_merge_requests.empty?
   = render 'shared/empty_states/merge_requests', project_select_button: true
@@ -11,7 +11,7 @@
 = render 'shared/issuable/nav', type: :merge_requests
 - if current_user
   .nav-controls{ class: ("visible-xs" if show_new_nav?) }
-    = render 'shared/new_project_item_select', path: 'merge_requests/new', label: "New merge request"
+    = render 'shared/new_project_item_select', path: 'merge_requests/new', label: "New merge request", type: :merge_requests

 = render 'shared/issuable/filter', type: :merge_requests
...
 - content_for :page_specific_javascripts do
   = page_specific_javascript_bundle_tag('group')

-- parent = GroupFinder.new(current_user).execute(id: params[:parent_id] || @group.parent_id)
+- parent = @group.parent
 - group_path = root_url
 - group_path << parent.full_path + '/' if parent
@@ -13,13 +13,12 @@
       %span>= root_url
       - if parent
         %strong= parent.full_path + '/'
+  = f.hidden_field :parent_id
   = f.text_field :path, placeholder: 'open-source', class: 'form-control',
     autofocus: local_assigns[:autofocus] || false, required: true,
     pattern: Gitlab::PathRegex::NAMESPACE_FORMAT_REGEX_JS,
     title: 'Please choose a group path with no special characters.',
     "data-bind-in" => "#{'create_chat_team' if Gitlab.config.mattermost.enabled}"
-  - if parent
-    = f.hidden_field :parent_id, value: parent.id

 - if @group.persisted?
   .alert.alert-warning.prepend-top-10
...
 - if any_projects?(@projects)
   .project-item-select-holder.btn-group.pull-right
-    %a.btn.btn-new.new-project-item-link{ href: '', data: { label: local_assigns[:label] } }
+    %a.btn.btn-new.new-project-item-link{ href: '', data: { label: local_assigns[:label], type: local_assigns[:type] } }
       = icon('spinner spin')
     = project_select_tag :project_path, class: "project-item-select", data: { include_groups: local_assigns[:include_groups], order_by: 'last_activity_at', relative_path: local_assigns[:path] }, with_feature_enabled: local_assigns[:with_feature_enabled]
     %button.btn.btn-new.new-project-item-select-button
...
@@ -15,7 +15,7 @@
   Issues can be bugs, tasks or ideas to be discussed.
   Also, issues are searchable and filterable.
 - if project_select_button
-  = render 'shared/new_project_item_select', path: 'issues/new', label: 'New issue'
+  = render 'shared/new_project_item_select', path: 'issues/new', label: 'New issue', type: :issues
 - else
   = link_to 'New issue', button_path, class: 'btn btn-new', title: 'New issue', id: 'new_issue_link'
 - else
...
@@ -14,7 +14,7 @@
 %p
   Interested parties can even contribute by pushing commits if they want to.
 - if project_select_button
-  = render 'shared/new_project_item_select', path: 'merge_requests/new', label: 'New merge request'
+  = render 'shared/new_project_item_select', path: 'merge_requests/new', label: 'New merge request', type: :merge_requests
 - else
   = link_to 'New merge request', button_path, class: 'btn btn-new', title: 'New merge request', id: 'new_merge_request_link'
 - else
...
@@ -4,18 +4,25 @@ class AuthorizedProjectsWorker
   # Schedules multiple jobs and waits for them to be completed.
   def self.bulk_perform_and_wait(args_list)
-    job_ids = bulk_perform_async(args_list)
-    Gitlab::JobWaiter.new(job_ids).wait
+    waiter = Gitlab::JobWaiter.new(args_list.size)
+
+    # Point all the bulk jobs at the same JobWaiter. Converts [[1], [2], [3]]
+    # into [[1, "key"], [2, "key"], [3, "key"]]
+    waiting_args_list = args_list.map { |args| args << waiter.key }
+    bulk_perform_async(waiting_args_list)
+
+    waiter.wait
   end

   def self.bulk_perform_async(args_list)
     Sidekiq::Client.push_bulk('class' => self, 'queue' => sidekiq_options['queue'], 'args' => args_list)
   end

-  def perform(user_id)
+  def perform(user_id, notify_key = nil)
     user = User.find_by(id: user_id)

     user&.refresh_authorized_projects
+  ensure
+    Gitlab::JobWaiter.notify(notify_key, jid) if notify_key
   end
 end
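For context, a minimal sketch of the key-based waiter pattern this change relies on: the waiter allocates a unique Redis list key, every job pushes its jid onto that list when it finishes (the `ensure` clause above), and `wait` blocks on the list until the expected number of jobs has reported in. This is an illustration of the mechanism, not the real `Gitlab::JobWaiter`; the Redis calls shown are the standard redis-rb API.

```ruby
require 'redis'
require 'securerandom'

# Illustrative stand-in for Gitlab::JobWaiter (not GitLab's implementation).
class JobWaiter
  attr_reader :key, :jobs_remaining

  def initialize(jobs_remaining)
    @key = "job_waiter:#{SecureRandom.uuid}" # unique list shared by all jobs
    @jobs_remaining = jobs_remaining
    @redis = Redis.new
  end

  # Called by each worker when it finishes.
  def self.notify(key, jid)
    Redis.new.lpush(key, jid)
  end

  # Blocks until all jobs have pushed to the list or a timeout elapses.
  def wait(timeout = 10)
    finished = []

    while @jobs_remaining > 0
      _list, jid = @redis.blpop(key, timeout: timeout)
      break unless jid # timed out waiting for the next job to report

      finished << jid
      @jobs_remaining -= 1
    end

    finished
  end
end
```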
class StageUpdateWorker
  include Sidekiq::Worker
  include PipelineQueue

  def perform(stage_id)
    Ci::Stage.find_by(id: stage_id).try do |stage|
      stage.update_status
    end
  end
end
---
title: Hashed Storage support for Repositories (EXPERIMENTAL)
merge_request: 13246
author:
---
title: Inherits milestone and labels when a merge request is created from an issue
merge_request: 13461
author: haseebeqx
type: added
---
title: Improves subgroup creation permissions
merge_request: 13418
author:
type: fixed
---
title: Fix merge request pipeline status when pipeline has errors
merge_request: 13664
author:
type: fixed
---
title: Only require Sidekiq throttling library when enabled, to reduce cache misses
merge_request:
author:
type: fixed
---
title: Remove CI API v1
merge_request:
author:
type: removed
 namespace :ci do
-  # CI API
-  Ci::API::API.logger Rails.logger
-  mount Ci::API::API => '/api'

   resource :lint, only: [:show, :create]

   root to: redirect('/')
...
class AddStatusToCiStages < ActiveRecord::Migration
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  def change
    add_column :ci_stages, :status, :integer
  end
end
class AddLockVersionToCiStages < ActiveRecord::Migration
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  def change
    add_column :ci_stages, :lock_version, :integer
  end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.

class AddStorageFieldsToProject < ActiveRecord::Migration
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  def up
    add_column :projects, :storage_version, :integer, limit: 2
  end

  def down
    remove_column :projects, :storage_version
  end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.

class AddHashedStorageToSettings < ActiveRecord::Migration
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  disable_ddl_transaction!

  def up
    add_column_with_default :application_settings, :hashed_storage_enabled, :boolean, default: false
  end

  def down
    remove_columns :application_settings, :hashed_storage_enabled
  end
end
class MigrateStagesStatuses < ActiveRecord::Migration
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  disable_ddl_transaction!

  BATCH_SIZE = 10000
  RANGE_SIZE = 1000
  MIGRATION = 'MigrateStageStatus'.freeze

  class Stage < ActiveRecord::Base
    self.table_name = 'ci_stages'
    include ::EachBatch
  end

  def up
    Stage.where(status: nil).each_batch(of: BATCH_SIZE) do |relation, index|
      relation.each_batch(of: RANGE_SIZE) do |batch|
        range = batch.pluck('MIN(id)', 'MAX(id)').first
        schedule = index * 5.minutes

        BackgroundMigrationWorker.perform_in(schedule, MIGRATION, range)
      end
    end
  end

  def down
    disable_statement_timeout

    update_column_in_batches(:ci_stages, :status, nil)
  end
end
@@ -128,6 +128,7 @@ ActiveRecord::Schema.define(version: 20170820100558) do
   t.integer "performance_bar_allowed_group_id"
   t.boolean "password_authentication_enabled"
   t.boolean "project_export_enabled", default: true, null: false
+  t.boolean "hashed_storage_enabled", default: false, null: false
 end

 create_table "audit_events", force: :cascade do |t|
@@ -379,6 +380,8 @@ ActiveRecord::Schema.define(version: 20170820100558) do
   t.datetime "created_at"
   t.datetime "updated_at"
   t.string "name"
+  t.integer "status"
+  t.integer "lock_version"
 end

 add_index "ci_stages", ["pipeline_id", "name"], name: "index_ci_stages_on_pipeline_id_and_name", using: :btree
@@ -1206,6 +1209,7 @@ ActiveRecord::Schema.define(version: 20170820100558) do
   t.datetime "last_repository_updated_at"
   t.string "ci_config_path"
   t.text "delete_error"
+  t.integer "storage_version", limit: 2
 end

 add_index "projects", ["ci_id"], name: "index_projects_on_ci_id", using: :btree
...
@@ -55,15 +55,10 @@ following locations:
 - [Tags](tags.md)
 - [Todos](todos.md)
 - [Users](users.md)
-- [Validate CI configuration](ci/lint.md)
+- [Validate CI configuration](lint.md)
 - [V3 to V4](v3_to_v4.md)
 - [Version](version.md)

-The following documentation is for the [internal CI API](ci/README.md):
-
-- [Builds](ci/builds.md)
-- [Runners](ci/runners.md)
-
 ## Road to GraphQL

 Going forward, we will start on moving to
...
# GitLab CI API
## Purpose
The main purpose of GitLab CI API is to provide the necessary data and context
for GitLab CI Runners.
All relevant information about the consumer API can be found in a
[separate document](../../api/README.md).
## API Prefix
The current CI API prefix is `/ci/api/v1`.
You need to prepend this prefix to all examples in this documentation, like:
```bash
GET /ci/api/v1/builds/:id/artifacts
```
## Resources
- [Builds](builds.md)
- [Runners](runners.md)
# Builds API
API used by runners to receive and update builds.
>**Note:**
This API is intended to be used only by Runners as their own
communication channel. For the consumer API see the
[Jobs API](../jobs.md).
## Authentication

This API uses two types of authentication:

1. Unique Runner's token, which is the token assigned to the Runner after it
   has been registered.

2. Using the build authorization token.
   This is the project's CI token that can be found under the **Builds** section of
   a project's settings. The build authorization token can be passed as a
   parameter or as the value of the `BUILD-TOKEN` header.

These two methods of authentication are interchangeable.
## Builds
### Runs oldest pending build by runner
```
POST /ci/api/v1/builds/register
```
| Attribute | Type | Required | Description |
|-----------|---------|----------|---------------------|
| `token` | string | yes | Unique runner token |
```
curl --request POST "https://gitlab.example.com/ci/api/v1/builds/register" --form "token=t0k3n"
```
**Responses:**
| Status | Data |Description |
|--------|------|---------------------------------------------------------------------------|
| `201` | yes | When a build is scheduled for a runner |
| `204` | no | When no builds are scheduled for a runner (for GitLab Runner >= `v1.3.0`) |
| `403` | no | When invalid token is used or no token is sent |
| `404` | no | When no builds are scheduled for a runner (for GitLab Runner < `v1.3.0`) **or** when the runner is set to `paused` in GitLab runner's configuration page |
### Update details of an existing build
```
PUT /ci/api/v1/builds/:id
```
| Attribute | Type | Required | Description |
|-----------|---------|----------|----------------------|
| `id` | integer | yes | The ID of a build |
| `token` | string | yes | Unique runner token |
| `state` | string | no | The state of a build |
| `trace` | string | no | The trace of a build |
```
curl --request PUT "https://gitlab.example.com/ci/api/v1/builds/1234" --form "token=t0k3n" --form "state=running" --form "trace=Running git clone...\n"
```
### Incremental build trace update
Using this method you need to send the trace content as the request body. You also need to provide the `Content-Range` header
with the byte range of the sent trace part. Note that you need to send parts in the proper order, so the beginning of a part
must start just after the end of the previous part. If you provide the wrong part, then the GitLab CI API will return a `416
Range Not Satisfiable` response with a `Range: 0-X` header, where `X` is the current trace length.

For example, if you receive `Range: 0-11` in the response, then your next part must contain a `Content-Range: 11-...`
header and a trace part covered by this range.

For a valid update the API will return a `202` response with:

* `Build-Status: {status}` header containing the current status of the build,
* `Range: 0-{length}` header with the current trace length.
```
PATCH /ci/api/v1/builds/:id/trace.txt
```
Parameters:
| Attribute | Type | Required | Description |
|-----------|---------|----------|----------------------|
| `id` | integer | yes | The ID of a build |
Headers:
| Attribute | Type | Required | Description |
|-----------------|---------|----------|-----------------------------------|
| `BUILD-TOKEN` | string | yes | The build authorization token |
| `Content-Range` | string | yes | Bytes range of trace that is sent |
```
curl --request PATCH "https://gitlab.example.com/ci/api/v1/builds/1234/trace.txt" --header "BUILD-TOKEN=build_t0k3n" --header "Content-Range=0-21" --data "Running git clone...\n"
```
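The same incremental update can be scripted outside curl; below is a minimal Ruby sketch using only the standard library. The build ID, token, and offset mirror the example values above, and in real use the next request's `Content-Range` must start where this one ended.

```ruby
require 'net/http'
require 'uri'

uri    = URI('https://gitlab.example.com/ci/api/v1/builds/1234/trace.txt')
chunk  = "Running git clone...\n"
offset = 0 # byte position where this part of the trace begins

request = Net::HTTP::Patch.new(uri)
request['BUILD-TOKEN']   = 'build_t0k3n'
request['Content-Range'] = "#{offset}-#{offset + chunk.bytesize}"
request.body = chunk

response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |http|
  http.request(request)
end

response.code            # => "202" when the part was accepted
response['Build-Status'] # current status of the build
response['Range']        # "0-{length}" of the trace received so far
```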
### Upload artifacts to build
```
POST /ci/api/v1/builds/:id/artifacts
```
| Attribute | Type | Required | Description |
|-----------|---------|----------|-------------------------------|
| `id` | integer | yes | The ID of a build |
| `token` | string | yes | The build authorization token |
| `file` | mixed | yes | Artifacts file |
```
curl --request POST "https://gitlab.example.com/ci/api/v1/builds/1234/artifacts" --form "token=build_t0k3n" --form "file=@/path/to/file"
```
### Download the artifacts file from build
```
GET /ci/api/v1/builds/:id/artifacts
```
| Attribute | Type | Required | Description |
|-----------|---------|----------|-------------------------------|
| `id` | integer | yes | The ID of a build |
| `token` | string | yes | The build authorization token |
```
curl "https://gitlab.example.com/ci/api/v1/builds/1234/artifacts" --form "token=build_t0k3n"
```
### Remove the artifacts file from build
```
DELETE /ci/api/v1/builds/:id/artifacts
```
| Attribute | Type | Required | Description |
|-----------|---------|----------|-------------------------------|
| `id` | integer | yes | The ID of a build |
| `token` | string | yes | The build authorization token |
```
curl --request DELETE "https://gitlab.example.com/ci/api/v1/builds/1234/artifacts" --form "token=build_t0k3n"
```
# Register and Delete Runners API
API used by Runners to register and delete themselves.
>**Note:**
This API is intended to be used only by Runners as their own
communication channel. For the consumer API see the
[new Runners API](../runners.md).
## Authentication
This API uses two types of authentication:
1. The unique Runner's token, which is the token assigned to the Runner after it
has been registered. This token can be found on the Runner's edit page (go to
**Project > Runners**, then select one of the Runners listed under **Runners activated for
this project**).
2. The Runner's registration token, which can be found in a project's settings
and in the **Admin > Runners** settings area.
There are two types of registration tokens you can pass: the shared Runner
registration token or a project-specific registration token.
## Register a new runner
Used to make GitLab CI aware of available runners.
```sh
POST /ci/api/v1/runners/register
```
| Attribute | Type | Required | Description |
| --------- | ------- | --------- | ----------- |
| `token` | string | yes | Runner's registration token |
Example request:
```sh
curl --request POST "https://gitlab.example.com/ci/api/v1/runners/register" --form "token=t0k3n"
```
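The runners API source in this commit also accepts optional `description`, `tag_list`, `run_untagged`, and `locked` fields; a sketch with illustrative values:
```sh
curl --request POST "https://gitlab.example.com/ci/api/v1/runners/register" --form "token=t0k3n" --form "description=My runner" --form "tag_list[]=docker" --form "tag_list[]=linux" --form "run_untagged=true"
```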
## Delete a Runner
Used to remove a Runner.
```sh
DELETE /ci/api/v1/runners/delete
```
| Attribute | Type | Required | Description |
| --------- | ------- | --------- | ----------- |
| `token` | string | yes | Unique Runner's token |
Example request:
```sh
curl --request DELETE "https://gitlab.example.com/ci/api/v1/runners/delete" --form "token=t0k3n"
```
...@@ -5,7 +5,7 @@ ...@@ -5,7 +5,7 @@
Checks if your .gitlab-ci.yml file is valid. Checks if your .gitlab-ci.yml file is valid.
``` ```
POST ci/lint POST /lint
``` ```
| Attribute | Type | Required | Description | | Attribute | Type | Required | Description |
...@@ -49,3 +49,4 @@ Example responses: ...@@ -49,3 +49,4 @@ Example responses:
``` ```
[ce-5953]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/5953 [ce-5953]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/5953
This document was moved to a [new location](../../api/ci/README.md).
This document was moved to a [new location](../../api/ci/builds.md).
This document was moved to a [new location](../../api/ci/runners.md).
...@@ -37,7 +37,6 @@ This page gathers all the resources for the topic **Authentication** within GitL ...@@ -37,7 +37,6 @@ This page gathers all the resources for the topic **Authentication** within GitL
- [Private Tokens](../../api/README.md#private-tokens) - [Private Tokens](../../api/README.md#private-tokens)
- [Impersonation tokens](../../api/README.md#impersonation-tokens) - [Impersonation tokens](../../api/README.md#impersonation-tokens)
- [GitLab as an OAuth2 provider](../../api/oauth2.md#gitlab-as-an-oauth2-provider) - [GitLab as an OAuth2 provider](../../api/oauth2.md#gitlab-as-an-oauth2-provider)
- [GitLab Runner API - Authentication](../../api/ci/runners.md#authentication)
## Third-party resources ## Third-party resources
......
...@@ -55,6 +55,12 @@ By doing so: ...@@ -55,6 +55,12 @@ By doing so:
- John mentions everyone from his team with `@john-team` - John mentions everyone from his team with `@john-team`
- John mentions only his marketing team with `@john-team/marketing` - John mentions only his marketing team with `@john-team/marketing`
## Issues and merge requests within a group
Issues and merge requests are part of projects. For a given group, view all the
[issues](../project/issues/index.md#issues-per-group) and [merge requests](../project/merge_requests/index.md#merge-requests-per-group) across all the projects in that group,
together in a single list view.
## Create a new group ## Create a new group
> **Notes:** > **Notes:**
......
...@@ -126,6 +126,10 @@ are a tool for working faster and more effectively with your team, ...@@ -126,6 +126,10 @@ are a tool for working faster and more effectively with your team,
by listing all user or group mentions, as well as issues and merge by listing all user or group mentions, as well as issues and merge
requests you're assigned to. requests you're assigned to.
## Search
[Search and filter](search/index.md) through groups, projects, issues, merge requests, files, code, and more.
## Snippets ## Snippets
[Snippets](snippets.md) are code blocks that you want to store in GitLab, from which [Snippets](snippets.md) are code blocks that you want to store in GitLab, from which
......
...@@ -7,7 +7,7 @@ of solving a problem. ...@@ -7,7 +7,7 @@ of solving a problem.
It allows you, your team, and your collaborators to share It allows you, your team, and your collaborators to share
and discuss proposals before and while implementing them. and discuss proposals before and while implementing them.
Issues and the GitLab Issue Tracker are available in all GitLab Issues and the GitLab Issue Tracker are available in all
[GitLab Products](https://about.gitlab.com/products/) as [GitLab Products](https://about.gitlab.com/products/) as
part of the [GitLab Workflow](https://about.gitlab.com/2016/10/25/gitlab-workflow-an-overview/). part of the [GitLab Workflow](https://about.gitlab.com/2016/10/25/gitlab-workflow-an-overview/).
...@@ -48,11 +48,27 @@ for feature proposals and another one for bug reports. ...@@ -48,11 +48,27 @@ for feature proposals and another one for bug reports.
## Issue Tracker ## Issue Tracker
The issue tracker is the collection of opened and closed issues created in a project. The Issue Tracker is the collection of opened and closed issues created in a project.
It is available for all projects, from the moment the project is created.
![Issue tracker](img/issue_tracker.png) Find the issue tracker by navigating to your **Project's homepage** > **Issues**.
Find the issue tracker by navigating to your **Project's Dashboard** > **Issues**. ### Issues per project
When you access your project's issues, GitLab will present them in a list,
and you can use the tabs available to quickly filter by open and closed issues.
![Project issues list view](img/project_issues_list_view.png)
You can also [search and filter](../../search/index.md#issues-and-merge-requests-per-project) the results more deeply with GitLab's search capabilities.
### Issues per group
View all the issues in a group (that is, all the issues across all projects in that
group) by navigating to **Group > Issues**. This view also has the open and closed
issue tabs.
![Group Issues list view](img/group_issues_list_view.png)
## GitLab Issues Functionalities ## GitLab Issues Functionalities
...@@ -120,6 +136,12 @@ to find out more about this feature. ...@@ -120,6 +136,12 @@ to find out more about this feature.
With [GitLab Enterprise Edition Starter](https://about.gitlab.com/gitlab-ee/), you can also With [GitLab Enterprise Edition Starter](https://about.gitlab.com/gitlab-ee/), you can also
create various boards per project with [Multiple Issue Boards](https://docs.gitlab.com/ee/user/project/issue_board.html#multiple-issue-boards). create various boards per project with [Multiple Issue Boards](https://docs.gitlab.com/ee/user/project/issue_board.html#multiple-issue-boards).
### External Issue Tracker
As an alternative to GitLab's built-in Issue Tracker, you can use an [external
tracker](../../../integration/external-issue-tracker.md) such as Jira, Redmine,
or Bugzilla.
### Issue's API ### Issue's API
Read through the [API documentation](../../../api/issues.md). Read through the [API documentation](../../../api/issues.md).
...@@ -56,6 +56,23 @@ B. Consider you're a web developer writing a webpage for your company's: ...@@ -56,6 +56,23 @@ B. Consider you're a web developer writing a webpage for your company's:
1. Once approved, your merge request is [squashed and merged](https://docs.gitlab.com/ee/user/project/merge_requests/squash_and_merge.html), and [deployed to staging with GitLab Pages](https://about.gitlab.com/2016/08/26/ci-deployment-and-environments/) (Squash and Merge is available in GitLab Enterprise Edition Starter) 1. Once approved, your merge request is [squashed and merged](https://docs.gitlab.com/ee/user/project/merge_requests/squash_and_merge.html), and [deployed to staging with GitLab Pages](https://about.gitlab.com/2016/08/26/ci-deployment-and-environments/) (Squash and Merge is available in GitLab Enterprise Edition Starter)
1. Your production team [cherry picks](#cherry-pick-changes) the merge commit into production 1. Your production team [cherry picks](#cherry-pick-changes) the merge commit into production
## Merge requests per project
View all the merge requests within a project by navigating to **Project > Merge Requests**.
When you access your project's merge requests, GitLab will present them in a list,
and you can use the tabs available to quickly filter by open and closed. You can also [search and filter the results](../../search/index.md#issues-and-merge-requests-per-project).
![Project merge requests list view](img/project_merge_requests_list_view.png)
## Merge requests per group
View all the merge requests in a group (that is, all the merge requests across all projects in that
group) by navigating to **Group > Merge Requests**. This view also has the open, merged, and closed
merge request tabs, from which you can [search and filter the results](../../search/index.md#issues-and-merge-requests-per-group).
![Group merge requests list view](img/group_merge_requests_list_view.png)
## Authorization for merge requests ## Authorization for merge requests
There are two main ways to have a merge request flow with GitLab: There are two main ways to have a merge request flow with GitLab:
...@@ -141,7 +158,6 @@ all your changes will be available to preview by anyone with the Review Apps lin ...@@ -141,7 +158,6 @@ all your changes will be available to preview by anyone with the Review Apps lin
[Read more about Review Apps.](../../../ci/review_apps/index.md) [Read more about Review Apps.](../../../ci/review_apps/index.md)
## Tips ## Tips
Here are some tips that will help you be more efficient with merge requests in Here are some tips that will help you be more efficient with merge requests in
...@@ -230,4 +246,4 @@ git checkout origin/merge-requests/1 ...@@ -230,4 +246,4 @@ git checkout origin/merge-requests/1
``` ```
[protected branches]: ../protected_branches.md [protected branches]: ../protected_branches.md
[ee]: https://about.gitlab.com/gitlab-ee/ "GitLab Enterprise Edition" [ee]: https://about.gitlab.com/gitlab-ee/ "GitLab Enterprise Edition"
\ No newline at end of file
...@@ -27,7 +27,7 @@ on the search field on the top-right of your screen: ...@@ -27,7 +27,7 @@ on the search field on the top-right of your screen:
![shortcut to your issues and mrs](img/issues_mrs_shortcut.png) ![shortcut to your issues and mrs](img/issues_mrs_shortcut.png)
## Issues and merge requests per project ### Issues and merge requests per project
If you want to search for issues present in a specific project, navigate to If you want to search for issues present in a specific project, navigate to
a project's **Issues** tab, and click on the field **Search or filter results...**. It will a project's **Issues** tab, and click on the field **Search or filter results...**. It will
...@@ -40,7 +40,7 @@ The same process is valid for merge requests. Navigate to your project's **Merge ...@@ -40,7 +40,7 @@ The same process is valid for merge requests. Navigate to your project's **Merge
and click **Search or filter results...**. Merge requests can be filtered by author, assignee, and click **Search or filter results...**. Merge requests can be filtered by author, assignee,
milestone, and label. milestone, and label.
## Issues and merge requests per group ### Issues and merge requests per group
Similar to **Issues and merge requests per project**, you can also search for issues Similar to **Issues and merge requests per project**, you can also search for issues
within a group. Navigate to a group's **Issues** tab and query search results in within a group. Navigate to a group's **Issues** tab and query search results in
...@@ -48,6 +48,10 @@ the same way as you do for projects. ...@@ -48,6 +48,10 @@ the same way as you do for projects.
![filter issues in a group](img/group_issues_filter.png) ![filter issues in a group](img/group_issues_filter.png)
The same process is valid for merge requests. Navigate to your group's **Merge Requests** tab.
The search and filter UI currently uses dropdowns. In a future release, the same
dynamic UI as above will be carried over here.
## Search history ## Search history
You can view recent searches by clicking on the little arrow-clock icon, which is to the left of the search input. Click the search entry to run that search again. This feature is available for issues and merge requests. Searches are stored locally in your browser. You can view recent searches by clicking on the little arrow-clock icon, which is to the left of the search input. Click the search entry to run that search again. This feature is available for issues and merge requests. Searches are stored locally in your browser.
......
...@@ -75,7 +75,7 @@ module Backup ...@@ -75,7 +75,7 @@ module Backup
path_to_project_repo = path_to_repo(project) path_to_project_repo = path_to_repo(project)
path_to_project_bundle = path_to_bundle(project) path_to_project_bundle = path_to_bundle(project)
project.ensure_storage_path_exist project.ensure_storage_path_exists
cmd = if File.exist?(path_to_project_bundle) cmd = if File.exist?(path_to_project_bundle)
%W(#{Gitlab.config.git.bin_path} clone --bare #{path_to_project_bundle} #{path_to_project_repo}) %W(#{Gitlab.config.git.bin_path} clone --bare #{path_to_project_bundle} #{path_to_project_repo})
......
module Ci
module API
class API < Grape::API
include ::API::APIGuard
version 'v1', using: :path
rescue_from ActiveRecord::RecordNotFound do
rack_response({ 'message' => '404 Not found' }.to_json, 404)
end
# Retain 405 error rather than a 500 error for Grape 0.15.0+.
# https://github.com/ruby-grape/grape/blob/a3a28f5b5dfbb2797442e006dbffd750b27f2a76/UPGRADING.md#changes-to-method-not-allowed-routes
rescue_from Grape::Exceptions::MethodNotAllowed do |e|
error! e.message, e.status, e.headers
end
rescue_from Grape::Exceptions::Base do |e|
error! e.message, e.status, e.headers
end
rescue_from :all do |exception|
handle_api_exception(exception)
end
content_type :txt, 'text/plain'
content_type :json, 'application/json'
format :json
helpers ::SentryHelper
helpers ::Ci::API::Helpers
helpers ::API::Helpers
helpers Gitlab::CurrentSettings
mount ::Ci::API::Builds
mount ::Ci::API::Runners
mount ::Ci::API::Triggers
end
end
end
module Ci
module API
# Builds API
class Builds < Grape::API
resource :builds do
# Runs the oldest pending build for a runner - Runners only
#
# Parameters:
#   token (required) - The unique token of a runner
#
# Example Request:
# POST /builds/register
post "register" do
authenticate_runner!
required_attributes! [:token]
not_found! unless current_runner.active?
update_runner_info
if current_runner.is_runner_queue_value_latest?(params[:last_update])
header 'X-GitLab-Last-Update', params[:last_update]
Gitlab::Metrics.add_event(:build_not_found_cached)
return build_not_found!
end
new_update = current_runner.ensure_runner_queue_value
result = Ci::RegisterJobService.new(current_runner).execute
if result.valid?
if result.build
Gitlab::Metrics.add_event(:build_found,
project: result.build.project.full_path)
present result.build, with: Entities::BuildDetails
else
Gitlab::Metrics.add_event(:build_not_found)
header 'X-GitLab-Last-Update', new_update
build_not_found!
end
else
# We received build that is invalid due to concurrency conflict
Gitlab::Metrics.add_event(:build_invalid)
conflict!
end
end
# Update an existing build - Runners only
#
# Parameters:
# id (required) - The ID of a build
# state (optional) - The state of a build
# trace (optional) - The trace of a build
# Example Request:
# PUT /builds/:id
put ":id" do
authenticate_runner!
build = Ci::Build.where(runner_id: current_runner.id).running.find(params[:id])
validate_build!(build)
update_runner_info
build.trace.set(params[:trace]) if params[:trace]
Gitlab::Metrics.add_event(:update_build,
project: build.project.full_path)
case params[:state].to_s
when 'success'
build.success
when 'failed'
build.drop
end
end
# Send incremental log update - Runners only
#
# Parameters:
# id (required) - The ID of a build
# Body:
# content of logs to append
# Headers:
# Content-Range (required) - range of content that was sent
# BUILD-TOKEN (required) - The build authorization token
# Example Request:
# PATCH /builds/:id/trace.txt
patch ":id/trace.txt" do
build = authenticate_build!
error!('400 Missing header Content-Range', 400) unless request.headers.key?('Content-Range')
content_range = request.headers['Content-Range']
content_range = content_range.split('-')
stream_size = build.trace.append(request.body.read, content_range[0].to_i)
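# A negative result from Trace#append signals an offset mismatch; its
# absolute value is the current trace length, which is reported back to
# the runner via the Range header below.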
if stream_size < 0
return error!('416 Range Not Satisfiable', 416, { 'Range' => "0-#{-stream_size}" })
end
status 202
header 'Build-Status', build.status
header 'Range', "0-#{stream_size}"
end
# Authorize artifacts uploading for build - Runners only
#
# Parameters:
# id (required) - The ID of a build
# token (required) - The build authorization token
# filesize (optional) - the size of uploaded file
# Example Request:
# POST /builds/:id/artifacts/authorize
post ":id/artifacts/authorize" do
require_gitlab_workhorse!
Gitlab::Workhorse.verify_api_request!(headers)
not_allowed! unless Gitlab.config.artifacts.enabled
build = authenticate_build!
forbidden!('build is not running') unless build.running?
if params[:filesize]
file_size = params[:filesize].to_i
file_to_large! unless file_size < max_artifacts_size
end
status 200
content_type Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
Gitlab::Workhorse.artifact_upload_ok
end
# Upload artifacts to build - Runners only
#
# Parameters:
# id (required) - The ID of a build
# token (required) - The build authorization token
# file (required) - Artifacts file
# expire_in (optional) - Specify when artifacts should expire (ex. 7d)
# Parameters (accelerated by GitLab Workhorse):
# file.path - path to locally stored body (generated by Workhorse)
# file.name - real filename as sent in Content-Disposition
# file.type - real content type as sent in Content-Type
# metadata.path - path to locally stored body (generated by Workhorse)
# metadata.name - filename (generated by Workhorse)
# Headers:
# BUILD-TOKEN (required) - The build authorization token, the same as token
# Body:
# The file content
#
# Example Request:
# POST /builds/:id/artifacts
post ":id/artifacts" do
require_gitlab_workhorse!
not_allowed! unless Gitlab.config.artifacts.enabled
build = authenticate_build!
forbidden!('Build is not running!') unless build.running?
artifacts_upload_path = ArtifactUploader.artifacts_upload_path
artifacts = uploaded_file(:file, artifacts_upload_path)
metadata = uploaded_file(:metadata, artifacts_upload_path)
bad_request!('Missing artifacts file!') unless artifacts
file_to_large! unless artifacts.size < max_artifacts_size
build.artifacts_file = artifacts
build.artifacts_metadata = metadata
build.artifacts_expire_in =
params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings
.default_artifacts_expire_in
if build.save
present(build, with: Entities::BuildDetails)
else
render_validation_error!(build)
end
end
# Download the artifacts file from build - Runners only
#
# Parameters:
# id (required) - The ID of a build
# token (required) - The build authorization token
# Headers:
# BUILD-TOKEN (required) - The build authorization token, the same as token
# Example Request:
# GET /builds/:id/artifacts
get ":id/artifacts" do
build = authenticate_build!
artifacts_file = build.artifacts_file
unless artifacts_file.exists?
not_found!
end
unless artifacts_file.file_storage?
return redirect_to build.artifacts_file.url
end
present_file!(artifacts_file.path, artifacts_file.filename)
end
# Remove the artifacts file from build - Runners only
#
# Parameters:
# id (required) - The ID of a build
# token (required) - The build authorization token
# Headers:
# BUILD-TOKEN (required) - The build authorization token, the same as token
# Example Request:
# DELETE /builds/:id/artifacts
delete ":id/artifacts" do
build = authenticate_build!
status(200)
build.erase_artifacts!
end
end
end
end
end
module Ci
module API
module Entities
class Commit < Grape::Entity
expose :id, :sha, :project_id, :created_at
expose :status, :finished_at, :duration
expose :git_commit_message, :git_author_name, :git_author_email
end
class CommitWithBuilds < Commit
expose :builds
end
class ArtifactFile < Grape::Entity
expose :filename, :size
end
class BuildOptions < Grape::Entity
expose :image
expose :services
expose :artifacts
expose :cache
expose :dependencies
expose :after_script
end
class Build < Grape::Entity
expose :id, :ref, :tag, :sha, :status
expose :name, :token, :stage
expose :project_id
expose :project_name
expose :artifacts_file, using: ArtifactFile, if: ->(build, _) { build.artifacts? }
end
class BuildCredentials < Grape::Entity
expose :type, :url, :username, :password
end
class BuildDetails < Build
expose :commands
expose :repo_url
expose :before_sha
expose :allow_git_fetch
expose :token
expose :artifacts_expire_at, if: ->(build, _) { build.artifacts? }
expose :options do |model|
# This part ensures that the output of the old API stays the same after adding
# support for extended Docker configuration options used by the new API
#
# I'm leaving this here, not in the model, because it should be removed at the
# same time as the old API (planned for August 2017).
model.options.dup.tap do |options|
options[:image] = options[:image][:name] if options[:image].is_a?(Hash)
options[:services]&.map! do |service|
if service.is_a?(Hash)
service[:name]
else
service
end
end
end
end
expose :timeout do |model|
model.timeout
end
expose :variables
expose :depends_on_builds, using: Build
expose :credentials, using: BuildCredentials
end
class Runner < Grape::Entity
expose :id, :token
end
class RunnerProject < Grape::Entity
expose :id, :project_id, :runner_id
end
class WebHook < Grape::Entity
expose :id, :project_id, :url
end
class TriggerRequest < Grape::Entity
expose :id, :variables
expose :pipeline, using: Commit, as: :commit
end
end
end
end
module Ci
module API
module Helpers
BUILD_TOKEN_HEADER = "HTTP_BUILD_TOKEN".freeze
BUILD_TOKEN_PARAM = :token
UPDATE_RUNNER_EVERY = 10 * 60
def authenticate_runners!
forbidden! unless runner_registration_token_valid?
end
def authenticate_runner!
forbidden! unless current_runner
end
def authenticate_build!
build = Ci::Build.find_by_id(params[:id])
validate_build!(build) do
forbidden! unless build_token_valid?(build)
end
build
end
def validate_build!(build)
not_found! unless build
yield if block_given?
project = build.project
forbidden!('Project has been deleted!') if project.nil? || project.pending_delete?
forbidden!('Build has been erased!') if build.erased?
end
def runner_registration_token_valid?
ActiveSupport::SecurityUtils.variable_size_secure_compare(
params[:token],
current_application_settings.runners_registration_token)
end
def build_token_valid?(build)
token = (params[BUILD_TOKEN_PARAM] || env[BUILD_TOKEN_HEADER]).to_s
# We also need to check `runners_token` to maintain compatibility with old versions of the Runner
token && (build.valid_token?(token) || build.project.valid_runners_token?(token))
end
def update_runner_info
return unless update_runner?
current_runner.contacted_at = Time.now
current_runner.assign_attributes(get_runner_version_from_params)
current_runner.save if current_runner.changed?
end
def update_runner?
# Use a random threshold to spread out DB updates.
# With UPDATE_RUNNER_EVERY = 10 minutes, this yields contact-time updates
# distributed between [10m, 20m].
#
contacted_at_max_age = UPDATE_RUNNER_EVERY + Random.rand(UPDATE_RUNNER_EVERY)
current_runner.contacted_at.nil? ||
(Time.now - current_runner.contacted_at) >= contacted_at_max_age
end
def build_not_found!
if headers['User-Agent'].to_s =~ /gitlab-ci-multi-runner \d+\.\d+\.\d+(~beta\.\d+\.g[0-9a-f]+)? /
no_content!
else
not_found!
end
end
def current_runner
@runner ||= Runner.find_by_token(params[:token].to_s)
end
def get_runner_version_from_params
return unless params["info"].present?
attributes_for_keys(%w(name version revision platform architecture), params["info"])
end
def max_artifacts_size
current_application_settings.max_artifacts_size.megabytes.to_i
end
end
end
end
module Ci
module API
class Runners < Grape::API
resource :runners do
desc 'Delete a runner'
params do
requires :token, type: String, desc: 'The unique token of the runner'
end
delete "delete" do
authenticate_runner!
status(200)
Ci::Runner.find_by_token(params[:token]).destroy
end
desc 'Register a new runner' do
success Entities::Runner
end
params do
requires :token, type: String, desc: 'The unique token of the runner'
optional :description, type: String, desc: 'The description of the runner'
optional :tag_list, type: Array[String], desc: 'A list of tags the runner should run for'
optional :run_untagged, type: Boolean, desc: 'Flag if the runner should execute untagged jobs'
optional :locked, type: Boolean, desc: 'Lock this runner for this specific project'
end
post "register" do
runner_params = declared(params, include_missing: false).except(:token)
runner =
if runner_registration_token_valid?
# Create shared runner. Requires admin access
Ci::Runner.create(runner_params.merge(is_shared: true))
elsif project = Project.find_by(runners_token: params[:token])
# Create a specific runner for project.
project.runners.create(runner_params)
end
return forbidden! unless runner
if runner.id
runner.update(get_runner_version_from_params)
present runner, with: Entities::Runner
else
not_found!
end
end
end
end
end
end
module Ci
module API
class Triggers < Grape::API
resource :projects do
desc 'Trigger a GitLab CI project build' do
success Entities::TriggerRequest
end
params do
requires :id, type: Integer, desc: 'The ID of a CI project'
requires :ref, type: String, desc: "The name of project's branch or tag"
requires :token, type: String, desc: 'The unique token of the trigger'
optional :variables, type: Hash, desc: 'Optional build variables'
end
post ":id/refs/:ref/trigger" do
project = Project.find_by(ci_id: params[:id])
trigger = Ci::Trigger.find_by_token(params[:token])
not_found! unless project && trigger
unauthorized! unless trigger.project == project
# Validate variables
variables = params[:variables].to_h
unless variables.all? { |key, value| key.is_a?(String) && value.is_a?(String) }
render_api_error!('variables needs to be a map of key-valued strings', 400)
end
# create request and trigger builds
result = Ci::CreateTriggerRequestService.execute(project, trigger, params[:ref], variables)
pipeline = result.pipeline
if pipeline.persisted?
present result.trigger_request, with: Entities::TriggerRequest
else
render_validation_error!(pipeline)
end
end
end
end
end
end
module Gitlab
module BackgroundMigration
class MigrateStageStatus
STATUSES = { created: 0, pending: 1, running: 2, success: 3,
failed: 4, canceled: 5, skipped: 6, manual: 7 }.freeze
class Build < ActiveRecord::Base
self.table_name = 'ci_builds'
scope :latest, -> { where(retried: [false, nil]) }
scope :created, -> { where(status: 'created') }
scope :running, -> { where(status: 'running') }
scope :pending, -> { where(status: 'pending') }
scope :success, -> { where(status: 'success') }
scope :failed, -> { where(status: 'failed') }
scope :canceled, -> { where(status: 'canceled') }
scope :skipped, -> { where(status: 'skipped') }
scope :manual, -> { where(status: 'manual') }
scope :failed_but_allowed, -> do
where(allow_failure: true, status: [:failed, :canceled])
end
scope :exclude_ignored, -> do
where("allow_failure = ? OR status IN (?)",
false, %w[created pending running success skipped])
end
def self.status_sql
scope_relevant = latest.exclude_ignored
scope_warnings = latest.failed_but_allowed
builds = scope_relevant.select('count(*)').to_sql
created = scope_relevant.created.select('count(*)').to_sql
success = scope_relevant.success.select('count(*)').to_sql
manual = scope_relevant.manual.select('count(*)').to_sql
pending = scope_relevant.pending.select('count(*)').to_sql
running = scope_relevant.running.select('count(*)').to_sql
skipped = scope_relevant.skipped.select('count(*)').to_sql
canceled = scope_relevant.canceled.select('count(*)').to_sql
warnings = scope_warnings.select('count(*) > 0').to_sql
<<-SQL.strip_heredoc
(CASE
WHEN (#{builds}) = (#{skipped}) AND (#{warnings}) THEN #{STATUSES[:success]}
WHEN (#{builds}) = (#{skipped}) THEN #{STATUSES[:skipped]}
WHEN (#{builds}) = (#{success}) THEN #{STATUSES[:success]}
WHEN (#{builds}) = (#{created}) THEN #{STATUSES[:created]}
WHEN (#{builds}) = (#{success}) + (#{skipped}) THEN #{STATUSES[:success]}
WHEN (#{builds}) = (#{success}) + (#{skipped}) + (#{canceled}) THEN #{STATUSES[:canceled]}
WHEN (#{builds}) = (#{created}) + (#{skipped}) + (#{pending}) THEN #{STATUSES[:pending]}
WHEN (#{running}) + (#{pending}) > 0 THEN #{STATUSES[:running]}
WHEN (#{manual}) > 0 THEN #{STATUSES[:manual]}
WHEN (#{created}) > 0 THEN #{STATUSES[:running]}
ELSE #{STATUSES[:failed]}
END)
SQL
end
end
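# Backfills ci_stages.status in bulk using the CASE expression above,
# correlating builds to stages by pipeline ID and stage name.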
def perform(start_id, stop_id)
status_sql = Build
.where('ci_builds.commit_id = ci_stages.pipeline_id')
.where('ci_builds.stage = ci_stages.name')
.status_sql
sql = <<-SQL
UPDATE ci_stages SET status = (#{status_sql})
WHERE ci_stages.status IS NULL
AND ci_stages.id BETWEEN #{start_id.to_i} AND #{stop_id.to_i}
SQL
ActiveRecord::Base.connection.execute(sql)
end
end
end
end
...@@ -64,7 +64,6 @@ module Gitlab ...@@ -64,7 +64,6 @@ module Gitlab
end end
delegate :empty?, delegate :empty?,
:bare?,
to: :rugged to: :rugged
delegate :exists?, to: :gitaly_repository_client delegate :exists?, to: :gitaly_repository_client
...@@ -126,6 +125,8 @@ module Gitlab ...@@ -126,6 +125,8 @@ module Gitlab
# This is to work around a bug in libgit2 that causes in-memory refs to # This is to work around a bug in libgit2 that causes in-memory refs to
# be stale/invalid when packed-refs is changed. # be stale/invalid when packed-refs is changed.
# See https://gitlab.com/gitlab-org/gitlab-ce/issues/15392#note_14538333 # See https://gitlab.com/gitlab-org/gitlab-ce/issues/15392#note_14538333
#
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/474
def find_branch(name, force_reload = false) def find_branch(name, force_reload = false)
reload_rugged if force_reload reload_rugged if force_reload
...@@ -231,10 +232,6 @@ module Gitlab ...@@ -231,10 +232,6 @@ module Gitlab
branch_names + tag_names branch_names + tag_names
end end
def has_commits?
!empty?
end
# Discovers the default branch based on the repository's available branches # Discovers the default branch based on the repository's available branches
# #
# - If no branches are present, returns nil # - If no branches are present, returns nil
...@@ -574,6 +571,8 @@ module Gitlab ...@@ -574,6 +571,8 @@ module Gitlab
end end
# Delete the specified branch from the repository # Delete the specified branch from the repository
#
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/476
def delete_branch(branch_name) def delete_branch(branch_name)
rugged.branches.delete(branch_name) rugged.branches.delete(branch_name)
end end
...@@ -583,6 +582,8 @@ module Gitlab ...@@ -583,6 +582,8 @@ module Gitlab
# Examples: # Examples:
# create_branch("feature") # create_branch("feature")
# create_branch("other-feature", "master") # create_branch("other-feature", "master")
#
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/476
def create_branch(ref, start_point = "HEAD") def create_branch(ref, start_point = "HEAD")
rugged_ref = rugged.branches.create(ref, start_point) rugged_ref = rugged.branches.create(ref, start_point)
target_commit = Gitlab::Git::Commit.find(self, rugged_ref.target) target_commit = Gitlab::Git::Commit.find(self, rugged_ref.target)
...@@ -592,38 +593,26 @@ module Gitlab ...@@ -592,38 +593,26 @@ module Gitlab
raise InvalidRef.new("Invalid reference #{start_point}") raise InvalidRef.new("Invalid reference #{start_point}")
end end
# Return an array of this repository's remote names
def remote_names
rugged.remotes.each_name.to_a
end
# Delete the specified remote from this repository. # Delete the specified remote from this repository.
def remote_delete(remote_name) def remote_delete(remote_name)
rugged.remotes.delete(remote_name) rugged.remotes.delete(remote_name)
nil
end end
# Add a new remote to this repository. Returns a Rugged::Remote object # Add a new remote to this repository.
def remote_add(remote_name, url) def remote_add(remote_name, url)
rugged.remotes.create(remote_name, url) rugged.remotes.create(remote_name, url)
nil
end end
# Update the specified remote using the values in the +options+ hash # Update the specified remote using the values in the +options+ hash
# #
# Example # Example
# repo.update_remote("origin", url: "path/to/repo") # repo.update_remote("origin", url: "path/to/repo")
def remote_update(remote_name, options = {}) def remote_update(remote_name, url:)
# TODO: Implement other remote options # TODO: Implement other remote options
rugged.remotes.set_url(remote_name, options[:url]) if options[:url] rugged.remotes.set_url(remote_name, url)
end nil
# Fetch the specified remote
def fetch(remote_name)
rugged.remotes[remote_name].fetch
end
# Push +*refspecs+ to the remote identified by +remote_name+.
def push(remote_name, *refspecs)
rugged.remotes[remote_name].push(refspecs)
end end
AUTOCRLF_VALUES = { AUTOCRLF_VALUES = {
......
...@@ -80,8 +80,8 @@ module Gitlab ...@@ -80,8 +80,8 @@ module Gitlab
def tree_entries(repository, revision, path) def tree_entries(repository, revision, path)
request = Gitaly::GetTreeEntriesRequest.new( request = Gitaly::GetTreeEntriesRequest.new(
repository: @gitaly_repo, repository: @gitaly_repo,
revision: revision, revision: GitalyClient.encode(revision),
path: path.presence || '.' path: path.present? ? GitalyClient.encode(path) : '.'
) )
response = GitalyClient.call(@repository.storage, :commit_service, :get_tree_entries, request) response = GitalyClient.call(@repository.storage, :commit_service, :get_tree_entries, request)
......
...@@ -98,6 +98,7 @@ excluded_attributes: ...@@ -98,6 +98,7 @@ excluded_attributes:
- :last_activity_at - :last_activity_at
- :last_repository_updated_at - :last_repository_updated_at
- :last_repository_check_at - :last_repository_check_at
- :storage_version
snippets: snippets:
- :expired_at - :expired_at
merge_request_diff: merge_request_diff:
......
module Gitlab module Gitlab
# JobWaiter can be used to wait for a number of Sidekiq jobs to complete. # JobWaiter can be used to wait for a number of Sidekiq jobs to complete.
#
# Its use requires the cooperation of the sidekiq jobs themselves. Set up the
# waiter, then start the jobs, passing them its `key`. Their `perform` methods
# should look like:
#
# def perform(args, notify_key)
# # do work
# ensure
# ::Gitlab::JobWaiter.notify(notify_key, jid)
# end
#
# The JobWaiter blocks popping items from a Redis array. All the sidekiq jobs
# push to that array when done. Once the waiter has popped `count` items, it
# knows all the jobs are done.
class JobWaiter class JobWaiter
# The sleep interval between checking keys, in seconds. def self.notify(key, jid)
INTERVAL = 0.1 Gitlab::Redis::SharedState.with { |redis| redis.lpush(key, jid) }
end
attr_reader :key, :jobs_remaining, :finished
# jobs - The job IDs to wait for. # jobs_remaining - the number of jobs left to wait for
def initialize(jobs) def initialize(jobs_remaining)
@jobs = jobs @key = "gitlab:job_waiter:#{SecureRandom.uuid}"
@jobs_remaining = jobs_remaining
@finished = []
end end
# Waits for all the jobs to be completed. # Waits for all the jobs to be completed.
...@@ -15,13 +34,33 @@ module Gitlab ...@@ -15,13 +34,33 @@ module Gitlab
# ensures we don't indefinitely block a caller in case a job takes # ensures we don't indefinitely block a caller in case a job takes
# long to process, or is never processed. # long to process, or is never processed.
def wait(timeout = 10) def wait(timeout = 10)
start = Time.current deadline = Time.now.utc + timeout
Gitlab::Redis::SharedState.with do |redis|
# Fallback key expiry: allow a long grace period to reduce the chance of
# a job pushing to an expired key and recreating it
redis.expire(key, [timeout * 2, 10.minutes.to_i].max)
while jobs_remaining > 0
# Redis will not take fractional seconds. Prefer waiting too long over
# not waiting long enough
seconds_left = (deadline - Time.now.utc).ceil
while (Time.current - start) <= timeout # Redis interprets 0 as "wait forever", so skip the final `blpop` call
break if SidekiqStatus.all_completed?(@jobs) break if seconds_left <= 0
sleep(INTERVAL) # to not overload Redis too much. list, jid = redis.blpop(key, timeout: seconds_left)
break unless list && jid # timed out
@finished << jid
@jobs_remaining -= 1
end
# All jobs have finished, so expire the key immediately
redis.expire(key, 0) if jobs_remaining == 0
end end
finished
end end
end end
end end
...@@ -3,6 +3,8 @@ module Gitlab ...@@ -3,6 +3,8 @@ module Gitlab
class << self class << self
def execute! def execute!
if Gitlab::CurrentSettings.sidekiq_throttling_enabled? if Gitlab::CurrentSettings.sidekiq_throttling_enabled?
require 'sidekiq-limit_fetch'
Gitlab::CurrentSettings.current_application_settings.sidekiq_throttling_queues.each do |queue| Gitlab::CurrentSettings.current_application_settings.sidekiq_throttling_queues.each do |queue|
Sidekiq::Queue[queue].limit = queue_limit Sidekiq::Queue[queue].limit = queue_limit
end end
......
...@@ -11,6 +11,12 @@ namespace :gitlab do ...@@ -11,6 +11,12 @@ namespace :gitlab do
# #
desc "GitLab | Import bare repositories from repositories -> storages into GitLab project instance" desc "GitLab | Import bare repositories from repositories -> storages into GitLab project instance"
task repos: :environment do task repos: :environment do
if Project.current_application_settings.hashed_storage_enabled
puts 'Cannot import repositories when Hashed Storage is enabled'.color(:red)
exit 1
end
Gitlab.config.repositories.storages.each_value do |repository_storage| Gitlab.config.repositories.storages.each_value do |repository_storage|
git_base_path = repository_storage['path'] git_base_path = repository_storage['path']
repos_to_import = Dir.glob(git_base_path + '/**/*.git') repos_to_import = Dir.glob(git_base_path + '/**/*.git')
......
...@@ -15,4 +15,12 @@ FactoryGirl.define do ...@@ -15,4 +15,12 @@ FactoryGirl.define do
warnings: warnings) warnings: warnings)
end end
end end
factory :ci_stage_entity, class: Ci::Stage do
project factory: :project
pipeline factory: :ci_empty_pipeline
name 'test'
status 'pending'
end
end end
...@@ -81,6 +81,10 @@ FactoryGirl.define do ...@@ -81,6 +81,10 @@ FactoryGirl.define do
archived true archived true
end end
trait :hashed do
storage_version Project::LATEST_STORAGE_VERSION
end
trait :access_requestable do trait :access_requestable do
request_access_enabled true request_access_enabled true
end end
......
...@@ -41,6 +41,8 @@ describe "User Feed" do ...@@ -41,6 +41,8 @@ describe "User Feed" do
target_project: project, target_project: project,
description: "Here is the fix: ![an image](image.png)") description: "Here is the fix: ![an image](image.png)")
end end
let(:push_event) { create(:push_event, project: project, author: user) }
let!(:push_event_payload) { create(:push_event_payload, event: push_event) }
before do before do
project.team << [user, :master] project.team << [user, :master]
...@@ -70,6 +72,10 @@ describe "User Feed" do ...@@ -70,6 +72,10 @@ describe "User Feed" do
it 'has XHTML summaries in merge request descriptions' do it 'has XHTML summaries in merge request descriptions' do
expect(body).to match /Here is the fix: <a[^>]*><img[^>]*\/><\/a>/ expect(body).to match /Here is the fix: <a[^>]*><img[^>]*\/><\/a>/
end end
it 'has push event commit ID' do
expect(body).to have_content(Commit.truncate_sha(push_event.commit_id))
end
end end
end end
......
...@@ -104,18 +104,15 @@ feature 'Group' do ...@@ -104,18 +104,15 @@ feature 'Group' do
end end
context 'as group owner' do context 'as group owner' do
let(:user) { create(:user) } it 'creates a nested group' do
user = create(:user)
before do
group.add_owner(user) group.add_owner(user)
sign_out(:user) sign_out(:user)
sign_in(user) sign_in(user)
visit subgroups_group_path(group) visit subgroups_group_path(group)
click_link 'New Subgroup' click_link 'New Subgroup'
end
it 'creates a nested group' do
fill_in 'Group path', with: 'bar' fill_in 'Group path', with: 'bar'
click_button 'Create group' click_button 'Create group'
...@@ -123,6 +120,16 @@ feature 'Group' do ...@@ -123,6 +120,16 @@ feature 'Group' do
expect(page).to have_content("Group 'bar' was successfully created.") expect(page).to have_content("Group 'bar' was successfully created.")
end end
end end
context 'when nested group feature is disabled' do
it 'renders 404' do
allow(Group).to receive(:supports_nested_groups?).and_return(false)
visit subgroups_group_path(group)
expect(page.status_code).to eq(404)
end
end
end end
it 'checks permissions to avoid exposing groups by parent_id' do it 'checks permissions to avoid exposing groups by parent_id' do
......
...@@ -62,6 +62,12 @@ describe EventsHelper do ...@@ -62,6 +62,12 @@ describe EventsHelper do
expect(helper.event_note(input)).to eq(expected) expect(helper.event_note(input)).to eq(expected)
end end
it 'preserves data-src for lazy images' do
input = "![ImageTest](/uploads/test.png)"
image_url = "data-src=\"/uploads/test.png\""
expect(helper.event_note(input)).to match(image_url)
end
context 'labels formatting' do context 'labels formatting' do
let(:input) { 'this should be ~label_1' } let(:input) { 'this should be ~label_1' }
......
.project-item-select-holder .project-item-select-holder
%input.project-item-select{ data: { group_id: '12345' , relative_path: 'issues/new' } } %input.project-item-select{ data: { group_id: '12345' , relative_path: 'issues/new' } }
%a.new-project-item-link{ data: { label: 'New issue' }, href: ''} %a.new-project-item-link{ data: { label: 'New issue', type: 'issues' }, href: ''}
%i.fa.fa-spinner.spin %i.fa.fa-spinner.spin
%a.new-project-item-select-button %a.new-project-item-select-button
%i.fa.fa-caret-down %i.fa.fa-caret-down
...@@ -101,5 +101,40 @@ describe('Project Select Combo Button', function () { ...@@ -101,5 +101,40 @@ describe('Project Select Combo Button', function () {
window.localStorage.clear(); window.localStorage.clear();
}); });
}); });
describe('deriveTextVariants', function () {
beforeEach(function () {
this.mockExecutionContext = {
resourceType: '',
resourceLabel: '',
};
this.comboButton = new ProjectSelectComboButton(this.projectSelectInput);
this.method = this.comboButton.deriveTextVariants.bind(this.mockExecutionContext);
});
it('correctly derives text variants for merge requests', function () {
this.mockExecutionContext.resourceType = 'merge_requests';
this.mockExecutionContext.resourceLabel = 'New merge request';
const returnedVariants = this.method();
expect(returnedVariants.localStorageItemType).toBe('new-merge-request');
expect(returnedVariants.defaultTextPrefix).toBe('New merge request');
expect(returnedVariants.presetTextSuffix).toBe('merge request');
});
it('correctly derives text variants for issues', function () {
this.mockExecutionContext.resourceType = 'issues';
this.mockExecutionContext.resourceLabel = 'New issue';
const returnedVariants = this.method();
expect(returnedVariants.localStorageItemType).toBe('new-issue');
expect(returnedVariants.defaultTextPrefix).toBe('New issue');
expect(returnedVariants.presetTextSuffix).toBe('issue');
});
});
}); });
require 'spec_helper'
describe Gitlab::BackgroundMigration::MigrateStageStatus, :migration, schema: 20170711145320 do
let(:projects) { table(:projects) }
let(:pipelines) { table(:ci_pipelines) }
let(:stages) { table(:ci_stages) }
let(:jobs) { table(:ci_builds) }
STATUSES = { created: 0, pending: 1, running: 2, success: 3,
failed: 4, canceled: 5, skipped: 6, manual: 7 }.freeze
before do
projects.create!(id: 1, name: 'gitlab1', path: 'gitlab1')
pipelines.create!(id: 1, project_id: 1, ref: 'master', sha: 'adf43c3a')
stages.create!(id: 1, pipeline_id: 1, project_id: 1, name: 'test', status: nil)
stages.create!(id: 2, pipeline_id: 1, project_id: 1, name: 'deploy', status: nil)
end
context 'when stage status is known' do
before do
create_job(project: 1, pipeline: 1, stage: 'test', status: 'success')
create_job(project: 1, pipeline: 1, stage: 'test', status: 'running')
create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'failed')
end
it 'sets a correct stage status' do
described_class.new.perform(1, 2)
expect(stages.first.status).to eq STATUSES[:running]
expect(stages.second.status).to eq STATUSES[:failed]
end
end
context 'when stage status is not known' do
it 'sets a skipped stage status' do
described_class.new.perform(1, 2)
expect(stages.first.status).to eq STATUSES[:skipped]
expect(stages.second.status).to eq STATUSES[:skipped]
end
end
context 'when stage status includes status of a retried job' do
before do
create_job(project: 1, pipeline: 1, stage: 'test', status: 'canceled')
create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'failed', retried: true)
create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'success')
end
it 'sets a correct stage status' do
described_class.new.perform(1, 2)
expect(stages.first.status).to eq STATUSES[:canceled]
expect(stages.second.status).to eq STATUSES[:success]
end
end
context 'when some job in the stage is blocked / manual' do
before do
create_job(project: 1, pipeline: 1, stage: 'test', status: 'failed')
create_job(project: 1, pipeline: 1, stage: 'test', status: 'manual')
create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'success', when: 'manual')
end
it 'sets a correct stage status' do
described_class.new.perform(1, 2)
expect(stages.first.status).to eq STATUSES[:manual]
expect(stages.second.status).to eq STATUSES[:success]
end
end
def create_job(project:, pipeline:, stage:, status:, **opts)
stages = { test: 1, build: 2, deploy: 3 }
jobs.create!(project_id: project, commit_id: pipeline,
stage_idx: stages[stage.to_sym], stage: stage,
status: status, **opts)
end
end
...@@ -235,18 +235,10 @@ describe Gitlab::Git::Repository, seed_helper: true do ...@@ -235,18 +235,10 @@ describe Gitlab::Git::Repository, seed_helper: true do
it { is_expected.to be < 2 } it { is_expected.to be < 2 }
end end
describe '#has_commits?' do
it { expect(repository.has_commits?).to be_truthy }
end
describe '#empty?' do describe '#empty?' do
it { expect(repository.empty?).to be_falsey } it { expect(repository.empty?).to be_falsey }
end end
describe '#bare?' do
it { expect(repository.bare?).to be_truthy }
end
describe '#ref_names' do describe '#ref_names' do
let(:ref_names) { repository.ref_names } let(:ref_names) { repository.ref_names }
subject { ref_names } subject { ref_names }
...@@ -441,15 +433,6 @@ describe Gitlab::Git::Repository, seed_helper: true do ...@@ -441,15 +433,6 @@ describe Gitlab::Git::Repository, seed_helper: true do
end end
end end
describe "#remote_names" do
let(:remotes) { repository.remote_names }
it "should have one entry: 'origin'" do
expect(remotes.size).to eq(1)
expect(remotes.first).to eq("origin")
end
end
describe "#refs_hash" do describe "#refs_hash" do
let(:refs) { repository.refs_hash } let(:refs) { repository.refs_hash }
......
...@@ -111,6 +111,20 @@ describe Gitlab::GitalyClient::CommitService do ...@@ -111,6 +111,20 @@ describe Gitlab::GitalyClient::CommitService do
client.tree_entries(repository, revision, path) client.tree_entries(repository, revision, path)
end end
context 'with UTF-8 params strings' do
let(:revision) { "branch\u011F" }
let(:path) { "foo/\u011F.txt" }
it 'handles string encodings correctly' do
expect_any_instance_of(Gitaly::CommitService::Stub)
.to receive(:get_tree_entries)
.with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
.and_return([])
client.tree_entries(repository, revision, path)
end
end
end end
describe '#find_commit' do describe '#find_commit' do
......
...@@ -227,6 +227,8 @@ Ci::Pipeline: ...@@ -227,6 +227,8 @@ Ci::Pipeline:
Ci::Stage: Ci::Stage:
- id - id
- name - name
- status
- lock_version
- project_id - project_id
- pipeline_id - pipeline_id
- created_at - created_at
......
require 'spec_helper' require 'spec_helper'
describe Gitlab::JobWaiter do describe Gitlab::JobWaiter do
describe '#wait' do describe '.notify' do
let(:waiter) { described_class.new(%w(a)) } it 'pushes the jid to the named queue' do
it 'returns when all jobs have been completed' do key = 'gitlab:job_waiter:foo'
expect(Gitlab::SidekiqStatus).to receive(:all_completed?).with(%w(a)) jid = 1
.and_return(true)
expect(waiter).not_to receive(:sleep) redis = double('redis')
expect(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis)
expect(redis).to receive(:lpush).with(key, jid)
waiter.wait described_class.notify(key, jid)
end end
end
describe '#wait' do
let(:waiter) { described_class.new(2) }
it 'sleeps between checking the job statuses' do it 'returns when all jobs have been completed' do
expect(Gitlab::SidekiqStatus).to receive(:all_completed?) described_class.notify(waiter.key, 'a')
.with(%w(a)) described_class.notify(waiter.key, 'b')
.and_return(false, true)
expect(waiter).to receive(:sleep).with(described_class::INTERVAL) result = nil
expect { Timeout.timeout(1) { result = waiter.wait(2) } }.not_to raise_error
waiter.wait expect(result).to contain_exactly('a', 'b')
end end
it 'returns when timing out' do it 'times out if not all jobs complete' do
expect(waiter).not_to receive(:sleep) described_class.notify(waiter.key, 'a')
waiter.wait(0)
result = nil
expect { Timeout.timeout(2) { result = waiter.wait(1) } }.not_to raise_error
expect(result).to contain_exactly('a')
end end
end end
end end
require 'spec_helper' require 'spec_helper'
describe Gitlab::SidekiqThrottler do describe Gitlab::SidekiqThrottler do
before do
Sidekiq.options[:concurrency] = 35
stub_application_setting(
sidekiq_throttling_enabled: true,
sidekiq_throttling_factor: 0.1,
sidekiq_throttling_queues: %w[build project_cache]
)
end
describe '#execute!' do describe '#execute!' do
it 'sets limits on the selected queues' do context 'when job throttling is enabled' do
described_class.execute! before do
Sidekiq.options[:concurrency] = 35
stub_application_setting(
sidekiq_throttling_enabled: true,
sidekiq_throttling_factor: 0.1,
sidekiq_throttling_queues: %w[build project_cache]
)
end
it 'requires sidekiq-limit_fetch' do
expect(described_class).to receive(:require).with('sidekiq-limit_fetch').and_call_original
described_class.execute!
end
it 'sets limits on the selected queues' do
described_class.execute!
expect(Sidekiq::Queue['build'].limit).to eq 4
expect(Sidekiq::Queue['project_cache'].limit).to eq 4
end
it 'does not set limits on other queues' do
described_class.execute!
expect(Sidekiq::Queue['build'].limit).to eq 4 expect(Sidekiq::Queue['merge'].limit).to be_nil
expect(Sidekiq::Queue['project_cache'].limit).to eq 4 end
end end
it 'does not set limits on other queues' do context 'when job throttling is disabled' do
described_class.execute! it 'does not require sidekiq-limit_fetch' do
expect(described_class).not_to receive(:require).with('sidekiq-limit_fetch')
expect(Sidekiq::Queue['merge'].limit).to be_nil described_class.execute!
end
end end
end end
end end
...@@ -4,7 +4,7 @@ require Rails.root.join('db', 'post_migrate', '20170502101023_cleanup_namespacel ...@@ -4,7 +4,7 @@ require Rails.root.join('db', 'post_migrate', '20170502101023_cleanup_namespacel
describe CleanupNamespacelessPendingDeleteProjects do describe CleanupNamespacelessPendingDeleteProjects do
before do before do
# Stub after_save callbacks that will fail when Project has no namespace # Stub after_save callbacks that will fail when Project has no namespace
allow_any_instance_of(Project).to receive(:ensure_storage_path_exist).and_return(nil) allow_any_instance_of(Project).to receive(:ensure_storage_path_exists).and_return(nil)
allow_any_instance_of(Project).to receive(:update_project_statistics).and_return(nil) allow_any_instance_of(Project).to receive(:update_project_statistics).and_return(nil)
end end
......
...@@ -2,19 +2,6 @@ require 'spec_helper' ...@@ -2,19 +2,6 @@ require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170628080858_migrate_stage_id_reference_in_background') require Rails.root.join('db', 'post_migrate', '20170628080858_migrate_stage_id_reference_in_background')
describe MigrateStageIdReferenceInBackground, :migration, :sidekiq do describe MigrateStageIdReferenceInBackground, :migration, :sidekiq do
matcher :be_scheduled_migration do |delay, *expected|
match do |migration|
BackgroundMigrationWorker.jobs.any? do |job|
job['args'] == [migration, expected] &&
job['at'].to_i == (delay.to_i + Time.now.to_i)
end
end
failure_message do |migration|
"Migration `#{migration}` with args `#{expected.inspect}` not scheduled!"
end
end
let(:jobs) { table(:ci_builds) } let(:jobs) { table(:ci_builds) }
let(:stages) { table(:ci_stages) } let(:stages) { table(:ci_stages) }
let(:pipelines) { table(:ci_pipelines) } let(:pipelines) { table(:ci_pipelines) }
......
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170711145558_migrate_stages_statuses.rb')
describe MigrateStagesStatuses, :migration do
let(:jobs) { table(:ci_builds) }
let(:stages) { table(:ci_stages) }
let(:pipelines) { table(:ci_pipelines) }
let(:projects) { table(:projects) }
STATUSES = { created: 0, pending: 1, running: 2, success: 3,
failed: 4, canceled: 5, skipped: 6, manual: 7 }.freeze
before do
stub_const("#{described_class.name}::BATCH_SIZE", 2)
stub_const("#{described_class.name}::RANGE_SIZE", 2)
projects.create!(id: 1, name: 'gitlab1', path: 'gitlab1')
projects.create!(id: 2, name: 'gitlab2', path: 'gitlab2')
pipelines.create!(id: 1, project_id: 1, ref: 'master', sha: 'adf43c3a')
pipelines.create!(id: 2, project_id: 2, ref: 'feature', sha: '21a3deb')
create_job(project: 1, pipeline: 1, stage: 'test', status: 'success')
create_job(project: 1, pipeline: 1, stage: 'test', status: 'running')
create_job(project: 1, pipeline: 1, stage: 'build', status: 'success')
create_job(project: 1, pipeline: 1, stage: 'build', status: 'failed')
create_job(project: 2, pipeline: 2, stage: 'test', status: 'success')
create_job(project: 2, pipeline: 2, stage: 'test', status: 'success')
create_job(project: 2, pipeline: 2, stage: 'test', status: 'failed', retried: true)
stages.create!(id: 1, pipeline_id: 1, project_id: 1, name: 'test', status: nil)
stages.create!(id: 2, pipeline_id: 1, project_id: 1, name: 'build', status: nil)
stages.create!(id: 3, pipeline_id: 2, project_id: 2, name: 'test', status: nil)
end
it 'correctly migrates stages statuses' do
Sidekiq::Testing.inline! do
expect(stages.where(status: nil).count).to eq 3
migrate!
expect(stages.where(status: nil)).to be_empty
expect(stages.all.order('id ASC').pluck(:status))
.to eq [STATUSES[:running], STATUSES[:failed], STATUSES[:success]]
end
end
it 'correctly schedules background migrations' do
Sidekiq::Testing.fake! do
Timecop.freeze do
migrate!
expect(described_class::MIGRATION).to be_scheduled_migration(5.minutes, 1, 2)
expect(described_class::MIGRATION).to be_scheduled_migration(10.minutes, 3, 3)
expect(BackgroundMigrationWorker.jobs.size).to eq 2
end
end
end
def create_job(project:, pipeline:, stage:, status:, **opts)
stages = { test: 1, build: 2, deploy: 3 }
jobs.create!(project_id: project, commit_id: pipeline,
stage_idx: stages[stage.to_sym], stage: stage,
status: status, **opts)
end
end
require 'spec_helper'
describe Ci::Stage, :models do
let(:stage) { create(:ci_stage_entity) }
describe 'associations' do
before do
create(:ci_build, stage_id: stage.id)
create(:commit_status, stage_id: stage.id)
end
describe '#statuses' do
it 'returns all commit statuses' do
expect(stage.statuses.count).to be 2
end
end
describe '#builds' do
it 'returns only builds' do
expect(stage.builds).to be_one
end
end
end
describe '#status' do
context 'when stage is pending' do
let(:stage) { create(:ci_stage_entity, status: 'pending') }
it 'has a correct status value' do
expect(stage.status).to eq 'pending'
end
end
context 'when stage is success' do
let(:stage) { create(:ci_stage_entity, status: 'success') }
it 'has a correct status value' do
expect(stage.status).to eq 'success'
end
end
end
describe 'update_status' do
context 'when stage objects needs to be updated' do
before do
create(:ci_build, :success, stage_id: stage.id)
create(:ci_build, :running, stage_id: stage.id)
end
it 'updates stage status correctly' do
expect { stage.update_status }
.to change { stage.reload.status }
.to 'running'
end
end
context 'when stage is skipped' do
it 'updates status to skipped' do
expect { stage.update_status }
.to change { stage.reload.status }
.to 'skipped'
end
end
context 'when stage object is locked' do
before do
create(:ci_build, :failed, stage_id: stage.id)
end
it 'retries a lock to update a stage status' do
stage.lock_version = 100
stage.update_status
expect(stage.reload).to be_failed
end
end
end
end
...@@ -7,10 +7,10 @@ describe CommitStatus do
    create(:ci_pipeline, project: project, sha: project.commit.id)
  end

- let(:commit_status) { create_status }
+ let(:commit_status) { create_status(stage: 'test') }

- def create_status(args = {})
-   create(:commit_status, args.merge(pipeline: pipeline))
+ def create_status(**opts)
+   create(:commit_status, pipeline: pipeline, **opts)
  end

  it { is_expected.to belong_to(:pipeline) }
...
...@@ -1251,60 +1251,6 @@ describe Project do
    end
  end
describe '#rename_repo' do
let(:project) { create(:project, :repository) }
let(:gitlab_shell) { Gitlab::Shell.new }
before do
# Project#gitlab_shell returns a new instance of Gitlab::Shell on every
# call. This makes testing a bit easier.
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
allow(project).to receive(:previous_changes).and_return('path' => ['foo'])
end
it 'renames a repository' do
stub_container_registry_config(enabled: false)
expect(gitlab_shell).to receive(:mv_repository)
.ordered
.with(project.repository_storage_path, "#{project.namespace.full_path}/foo", "#{project.full_path}")
.and_return(true)
expect(gitlab_shell).to receive(:mv_repository)
.ordered
.with(project.repository_storage_path, "#{project.namespace.full_path}/foo.wiki", "#{project.full_path}.wiki")
.and_return(true)
expect_any_instance_of(SystemHooksService)
.to receive(:execute_hooks_for)
.with(project, :rename)
expect_any_instance_of(Gitlab::UploadsTransfer)
.to receive(:rename_project)
.with('foo', project.path, project.namespace.full_path)
expect(project).to receive(:expire_caches_before_rename)
expect(project).to receive(:expires_full_path_cache)
project.rename_repo
end
context 'container registry with images' do
let(:container_repository) { create(:container_repository) }
before do
stub_container_registry_config(enabled: true)
stub_container_registry_tags(repository: :any, tags: ['tag'])
project.container_repositories << container_repository
end
subject { project.rename_repo }
it { expect { subject }.to raise_error(StandardError) }
end
end
describe '#expire_caches_before_rename' do
  let(:project) { create(:project, :repository) }
  let(:repo) { double(:repo, exists?: true) }
...@@ -2367,4 +2313,181 @@ describe Project do
    expect(project.forks_count).to eq(1)
    end
  end
context 'legacy storage' do
let(:project) { create(:project, :repository) }
let(:gitlab_shell) { Gitlab::Shell.new }
before do
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
end
describe '#base_dir' do
it 'returns base_dir based on namespace only' do
expect(project.base_dir).to eq(project.namespace.full_path)
end
end
describe '#disk_path' do
it 'returns disk_path based on namespace and project path' do
expect(project.disk_path).to eq("#{project.namespace.full_path}/#{project.path}")
end
end
describe '#ensure_storage_path_exists' do
it 'delegates to gitlab_shell to ensure namespace is created' do
expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage_path, project.base_dir)
project.ensure_storage_path_exists
end
end
describe '#legacy_storage?' do
it 'returns true when storage_version is nil' do
project = build(:project)
expect(project.legacy_storage?).to be_truthy
end
end
describe '#rename_repo' do
before do
# Project#gitlab_shell returns a new instance of Gitlab::Shell on every
# call. This makes testing a bit easier.
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
allow(project).to receive(:previous_changes).and_return('path' => ['foo'])
end
it 'renames a repository' do
stub_container_registry_config(enabled: false)
expect(gitlab_shell).to receive(:mv_repository)
.ordered
.with(project.repository_storage_path, "#{project.namespace.full_path}/foo", "#{project.full_path}")
.and_return(true)
expect(gitlab_shell).to receive(:mv_repository)
.ordered
.with(project.repository_storage_path, "#{project.namespace.full_path}/foo.wiki", "#{project.full_path}.wiki")
.and_return(true)
expect_any_instance_of(SystemHooksService)
.to receive(:execute_hooks_for)
.with(project, :rename)
expect_any_instance_of(Gitlab::UploadsTransfer)
.to receive(:rename_project)
.with('foo', project.path, project.namespace.full_path)
expect(project).to receive(:expire_caches_before_rename)
expect(project).to receive(:expires_full_path_cache)
project.rename_repo
end
context 'container registry with images' do
let(:container_repository) { create(:container_repository) }
before do
stub_container_registry_config(enabled: true)
stub_container_registry_tags(repository: :any, tags: ['tag'])
project.container_repositories << container_repository
end
subject { project.rename_repo }
it { expect { subject }.to raise_error(StandardError) }
end
end
describe '#pages_path' do
it 'returns a path where pages are stored' do
expect(project.pages_path).to eq(File.join(Settings.pages.path, project.namespace.full_path, project.path))
end
end
end
context 'hashed storage' do
let(:project) { create(:project, :repository) }
let(:gitlab_shell) { Gitlab::Shell.new }
let(:hash) { '6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b' }
before do
stub_application_setting(hashed_storage_enabled: true)
allow(Digest::SHA2).to receive(:hexdigest) { hash }
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
end
describe '#base_dir' do
it 'returns base_dir based on hash of project id' do
expect(project.base_dir).to eq('@hashed/6b/86')
end
end
describe '#disk_path' do
it 'returns disk_path based on hash of project id' do
hashed_path = '@hashed/6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b'
expect(project.disk_path).to eq(hashed_path)
end
end
describe '#ensure_storage_path_exists' do
it 'delegates to gitlab_shell to ensure namespace is created' do
expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage_path, '@hashed/6b/86')
project.ensure_storage_path_exists
end
end
describe '#rename_repo' do
before do
# Project#gitlab_shell returns a new instance of Gitlab::Shell on every
# call. This makes testing a bit easier.
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
allow(project).to receive(:previous_changes).and_return('path' => ['foo'])
end
it 'renames a repository' do
stub_container_registry_config(enabled: false)
expect(gitlab_shell).not_to receive(:mv_repository)
expect_any_instance_of(SystemHooksService)
.to receive(:execute_hooks_for)
.with(project, :rename)
expect_any_instance_of(Gitlab::UploadsTransfer)
.to receive(:rename_project)
.with('foo', project.path, project.namespace.full_path)
expect(project).to receive(:expire_caches_before_rename)
expect(project).to receive(:expires_full_path_cache)
project.rename_repo
end
context 'container registry with images' do
let(:container_repository) { create(:container_repository) }
before do
stub_container_registry_config(enabled: true)
stub_container_registry_tags(repository: :any, tags: ['tag'])
project.container_repositories << container_repository
end
subject { project.rename_repo }
it { expect { subject }.to raise_error(StandardError) }
end
end
describe '#pages_path' do
it 'returns a path where pages are stored' do
expect(project.pages_path).to eq(File.join(Settings.pages.path, project.namespace.full_path, project.path))
end
end
end
end
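# For reference, the hashed storage values asserted above can be derived by
# hand. Illustrative snippet only; these locals are not the model's API:
hash = Digest::SHA2.hexdigest(project.id.to_s) # "6b86b2..." for id 1

base_dir  = "@hashed/#{hash[0..1]}/#{hash[2..3]}" # "@hashed/6b/86"
disk_path = "#{base_dir}/#{hash}"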
...@@ -123,6 +123,36 @@ describe GroupPolicy do
    end
  end
describe 'when nested group support feature is disabled' do
before do
allow(Group).to receive(:supports_nested_groups?).and_return(false)
end
context 'admin' do
let(:current_user) { admin }
it 'allows every owner permission except creating subgroups' do
create_subgroup_permission = [:create_subgroup]
updated_owner_permissions = owner_permissions - create_subgroup_permission
expect_disallowed(*create_subgroup_permission)
expect_allowed(*updated_owner_permissions)
end
end
context 'owner' do
let(:current_user) { owner }
it 'allows every owner permission except creating subgroups' do
create_subgroup_permission = [:create_subgroup]
updated_owner_permissions = owner_permissions - create_subgroup_permission
expect_disallowed(*create_subgroup_permission)
expect_allowed(*updated_owner_permissions)
end
end
end
describe 'private nested groups use the highest access level from the group and inherited permissions', :nested_groups do
  let(:nested_group) { create(:group, :private, parent: group) }
...
require 'spec_helper'
describe Ci::API::Builds do
let(:runner) { FactoryGirl.create(:ci_runner, tag_list: %w(mysql ruby)) }
let(:project) { FactoryGirl.create(:project, shared_runners_enabled: false) }
let(:last_update) { nil }
describe "Builds API for runners" do
let(:pipeline) { create(:ci_pipeline_without_jobs, project: project, ref: 'master') }
before do
project.runners << runner
end
describe "POST /builds/register" do
let!(:build) { create(:ci_build, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
let(:user_agent) { 'gitlab-ci-multi-runner 1.5.2 (1-5-stable; go1.6.3; linux/amd64)' }
let!(:last_update) { }
let!(:new_update) { }
before do
stub_container_registry_config(enabled: false)
end
shared_examples 'no builds available' do
context 'when runner sends version in User-Agent' do
context 'for stable version' do
it 'gives 204 and sets X-GitLab-Last-Update' do
expect(response).to have_http_status(204)
expect(response.header).to have_key('X-GitLab-Last-Update')
end
end
context 'when last_update is up-to-date' do
let(:last_update) { runner.ensure_runner_queue_value }
it 'gives 204 and sets the same X-GitLab-Last-Update' do
expect(response).to have_http_status(204)
expect(response.header['X-GitLab-Last-Update'])
.to eq(last_update)
end
end
context 'when last_update is outdated' do
let(:last_update) { runner.ensure_runner_queue_value }
let(:new_update) { runner.tick_runner_queue }
it 'gives 204 and sets a new X-GitLab-Last-Update' do
expect(response).to have_http_status(204)
expect(response.header['X-GitLab-Last-Update'])
.to eq(new_update)
end
end
context 'for beta version' do
let(:user_agent) { 'gitlab-ci-multi-runner 1.6.0~beta.167.g2b2bacc (1-5-stable; go1.6.3; linux/amd64)' }
it { expect(response).to have_http_status(204) }
end
end
context "when runner doesn't send version in User-Agent" do
let(:user_agent) { 'Go-http-client/1.1' }
it { expect(response).to have_http_status(404) }
end
context "when runner doesn't have a User-Agent" do
let(:user_agent) { nil }
it { expect(response).to have_http_status(404) }
end
end
context 'when an old image syntax is used' do
before do
build.update!(options: { image: 'codeclimate' })
end
it 'starts a build' do
register_builds info: { platform: :darwin }
expect(response).to have_http_status(201)
expect(json_response["options"]).to eq({ "image" => "codeclimate" })
end
end
context 'when a new image syntax is used' do
before do
build.update!(options: { image: { name: 'codeclimate' } })
end
it 'starts a build' do
register_builds info: { platform: :darwin }
expect(response).to have_http_status(201)
expect(json_response["options"]).to eq({ "image" => "codeclimate" })
end
end
context 'when an old service syntax is used' do
before do
build.update!(options: { services: ['mysql'] })
end
it 'starts a build' do
register_builds info: { platform: :darwin }
expect(response).to have_http_status(201)
expect(json_response["options"]).to eq({ "services" => ["mysql"] })
end
end
context 'when a new service syntax is used' do
before do
build.update!(options: { services: [name: 'mysql'] })
end
it 'starts a build' do
register_builds info: { platform: :darwin }
expect(response).to have_http_status(201)
expect(json_response["options"]).to eq({ "services" => ["mysql"] })
end
end
context 'when no image or service is defined' do
before do
build.update!(options: {})
end
it 'starts a build' do
register_builds info: { platform: :darwin }
expect(response).to have_http_status(201)
expect(json_response["options"]).to be_empty
end
end
context 'when there is a pending build' do
it 'starts a build' do
register_builds info: { platform: :darwin }
expect(response).to have_http_status(201)
expect(response.headers).not_to have_key('X-GitLab-Last-Update')
expect(json_response['sha']).to eq(build.sha)
expect(runner.reload.platform).to eq("darwin")
expect(json_response["options"]).to eq({ "image" => "ruby:2.1", "services" => ["postgres"] })
expect(json_response["variables"]).to include(
{ "key" => "CI_JOB_NAME", "value" => "spinach", "public" => true },
{ "key" => "CI_JOB_STAGE", "value" => "test", "public" => true },
{ "key" => "DB_NAME", "value" => "postgres", "public" => true }
)
end
it 'updates runner info' do
expect { register_builds }.to change { runner.reload.contacted_at }
end
context 'when concurrently updating build' do
before do
expect_any_instance_of(Ci::Build).to receive(:run!)
.and_raise(ActiveRecord::StaleObjectError.new(nil, nil))
end
it 'returns a conflict' do
register_builds info: { platform: :darwin }
expect(response).to have_http_status(409)
expect(response.headers).not_to have_key('X-GitLab-Last-Update')
end
end
context 'registry credentials' do
let(:registry_credentials) do
{ 'type' => 'registry',
'url' => 'registry.example.com:5005',
'username' => 'gitlab-ci-token',
'password' => build.token }
end
context 'when registry is enabled' do
before do
stub_container_registry_config(enabled: true, host_port: 'registry.example.com:5005')
end
it 'sends registry credentials key' do
register_builds info: { platform: :darwin }
expect(json_response).to have_key('credentials')
expect(json_response['credentials']).to include(registry_credentials)
end
end
context 'when registry is disabled' do
before do
stub_container_registry_config(enabled: false, host_port: 'registry.example.com:5005')
end
it 'does not send registry credentials' do
register_builds info: { platform: :darwin }
expect(json_response).to have_key('credentials')
expect(json_response['credentials']).not_to include(registry_credentials)
end
end
end
context 'when docker configuration options are used' do
let!(:build) { create(:ci_build, :extended_options, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
it 'starts a build' do
register_builds info: { platform: :darwin }
expect(response).to have_http_status(201)
expect(json_response['options']['image']).to eq('ruby:2.1')
expect(json_response['options']['services']).to eq(['postgres', 'docker:dind'])
end
end
end
context 'when builds are finished' do
before do
build.success
register_builds
end
it_behaves_like 'no builds available'
end
context 'for other project with builds' do
before do
build.success
create(:ci_build, :pending)
register_builds
end
it_behaves_like 'no builds available'
end
context 'for shared runner' do
let!(:runner) { create(:ci_runner, :shared, token: "SharedRunner") }
before do
register_builds(runner.token)
end
it_behaves_like 'no builds available'
end
context 'for triggered build' do
before do
trigger = create(:ci_trigger, project: project)
create(:ci_trigger_request_with_variables, pipeline: pipeline, builds: [build], trigger: trigger)
project.variables << Ci::Variable.new(key: "SECRET_KEY", value: "secret_value")
end
it "returns variables for triggers" do
register_builds info: { platform: :darwin }
expect(response).to have_http_status(201)
expect(json_response["variables"]).to include(
{ "key" => "CI_JOB_NAME", "value" => "spinach", "public" => true },
{ "key" => "CI_JOB_STAGE", "value" => "test", "public" => true },
{ "key" => "CI_PIPELINE_TRIGGERED", "value" => "true", "public" => true },
{ "key" => "DB_NAME", "value" => "postgres", "public" => true },
{ "key" => "SECRET_KEY", "value" => "secret_value", "public" => false },
{ "key" => "TRIGGER_KEY_1", "value" => "TRIGGER_VALUE_1", "public" => false }
)
end
end
context 'with multiple builds' do
before do
build.success
end
let!(:test_build) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) }
it "returns dependent builds" do
register_builds info: { platform: :darwin }
expect(response).to have_http_status(201)
expect(json_response["id"]).to eq(test_build.id)
expect(json_response["depends_on_builds"].count).to eq(1)
expect(json_response["depends_on_builds"][0]).to include('id' => build.id, 'name' => 'spinach')
end
end
%w(name version revision platform architecture).each do |param|
context "updates runner #{param}" do
let(:value) { "#{param}_value" }
subject { runner.read_attribute(param.to_sym) }
it do
register_builds info: { param => value }
expect(response).to have_http_status(201)
runner.reload
is_expected.to eq(value)
end
end
end
context 'when build has no tags' do
before do
build.update(tags: [])
end
context 'when runner is allowed to pick untagged builds' do
before do
runner.update_column(:run_untagged, true)
end
it 'picks build' do
register_builds
expect(response).to have_http_status 201
end
end
context 'when runner is not allowed to pick untagged builds' do
before do
runner.update_column(:run_untagged, false)
register_builds
end
it_behaves_like 'no builds available'
end
end
context 'when runner is paused' do
let(:runner) { create(:ci_runner, :inactive, token: 'InactiveRunner') }
it 'responds with 404' do
register_builds
expect(response).to have_http_status 404
end
it 'does not update runner info' do
expect { register_builds }
.not_to change { runner.reload.contacted_at }
end
end
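# Simulates a runner asking for work: POSTs to /builds/register with the
# given token and the configured last_update value, sending the User-Agent
# header defined by the surrounding examples.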
def register_builds(token = runner.token, **params)
new_params = params.merge(token: token, last_update: last_update)
post ci_api("/builds/register"), new_params, { 'User-Agent' => user_agent }
end
end
describe "PUT /builds/:id" do
let(:build) { create(:ci_build, :pending, :trace, pipeline: pipeline, runner_id: runner.id) }
before do
build.run!
put ci_api("/builds/#{build.id}"), token: runner.token
end
it "updates a running build" do
expect(response).to have_http_status(200)
end
it 'does not override trace information when no trace is given' do
expect(build.reload.trace.raw).to eq 'BUILD TRACE'
end
context 'job has been erased' do
let(:build) { create(:ci_build, runner_id: runner.id, erased_at: Time.now) }
it 'responds with forbidden' do
expect(response.status).to eq 403
end
end
end
describe 'PATCH /builds/:id/trace.txt' do
let(:build) do
attributes = { runner_id: runner.id, pipeline: pipeline }
create(:ci_build, :running, :trace, attributes)
end
let(:headers) { { Ci::API::Helpers::BUILD_TOKEN_HEADER => build.token, 'Content-Type' => 'text/plain' } }
let(:headers_with_range) { headers.merge({ 'Content-Range' => '11-20' }) }
let(:update_interval) { 10.seconds.to_i }
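# Exercises the incremental trace update protocol: each PATCH carries a
# Content-Range of "offset-limit" computed from the current trace size, and
# travels update_interval into the future so that assertions on
# build.updated_at are meaningful.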
def patch_the_trace(content = ' appended', request_headers = nil)
unless request_headers
build.trace.read do |stream|
offset = stream.size
limit = offset + content.length - 1
request_headers = headers.merge({ 'Content-Range' => "#{offset}-#{limit}" })
end
end
Timecop.travel(build.updated_at + update_interval) do
patch ci_api("/builds/#{build.id}/trace.txt"), content, request_headers
build.reload
end
end
def initial_patch_the_trace
patch_the_trace(' appended', headers_with_range)
end
def force_patch_the_trace
2.times { patch_the_trace('') }
end
before do
initial_patch_the_trace
end
context 'when request is valid' do
it 'gets correct response' do
expect(response.status).to eq 202
expect(build.reload.trace.raw).to eq 'BUILD TRACE appended'
expect(response.header).to have_key 'Range'
expect(response.header).to have_key 'Build-Status'
end
context 'when build has been updated recently' do
it { expect { patch_the_trace }.not_to change { build.updated_at } }
it 'changes the build trace' do
patch_the_trace
expect(build.reload.trace.raw).to eq 'BUILD TRACE appended appended'
end
context 'when Runner makes a force-patch' do
it { expect { force_patch_the_trace }.not_to change { build.updated_at } }
it "doesn't change the build.trace" do
force_patch_the_trace
expect(build.reload.trace.raw).to eq 'BUILD TRACE appended'
end
end
end
context 'when build was not updated recently' do
let(:update_interval) { 15.minutes.to_i }
it { expect { patch_the_trace }.to change { build.updated_at } }
it 'changes the build.trace' do
patch_the_trace
expect(build.reload.trace.raw).to eq 'BUILD TRACE appended appended'
end
context 'when Runner makes a force-patch' do
it { expect { force_patch_the_trace }.to change { build.updated_at } }
it "doesn't change the build.trace" do
force_patch_the_trace
expect(build.reload.trace.raw).to eq 'BUILD TRACE appended'
end
end
end
context 'when project for the build has been deleted' do
let(:build) do
attributes = { runner_id: runner.id, pipeline: pipeline }
create(:ci_build, :running, :trace, attributes) do |build|
build.project.update(pending_delete: true)
end
end
it 'responds with forbidden' do
expect(response.status).to eq(403)
end
end
end
context 'when Runner makes a force-patch' do
before do
force_patch_the_trace
end
it 'gets correct response' do
expect(response.status).to eq 202
expect(build.reload.trace.raw).to eq 'BUILD TRACE appended'
expect(response.header).to have_key 'Range'
expect(response.header).to have_key 'Build-Status'
end
end
context 'when content-range start is too big' do
let(:headers_with_range) { headers.merge({ 'Content-Range' => '15-20' }) }
it 'gets 416 error response with range headers' do
expect(response.status).to eq 416
expect(response.header).to have_key 'Range'
expect(response.header['Range']).to eq '0-11'
end
end
context 'when content-range start is too small' do
let(:headers_with_range) { headers.merge({ 'Content-Range' => '8-20' }) }
it 'gets 416 error response with range headers' do
expect(response.status).to eq 416
expect(response.header).to have_key 'Range'
expect(response.header['Range']).to eq '0-11'
end
end
context 'when Content-Range header is missing' do
let(:headers_with_range) { headers }
it { expect(response.status).to eq 400 }
end
context 'when build has been erased' do
let(:build) { create(:ci_build, runner_id: runner.id, erased_at: Time.now) }
it { expect(response.status).to eq 403 }
end
end
context "Artifacts" do
let(:file_upload) { fixture_file_upload(Rails.root + 'spec/fixtures/banana_sample.gif', 'image/gif') }
let(:file_upload2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/gif') }
let(:build) { create(:ci_build, :pending, pipeline: pipeline, runner_id: runner.id) }
let(:authorize_url) { ci_api("/builds/#{build.id}/artifacts/authorize") }
let(:post_url) { ci_api("/builds/#{build.id}/artifacts") }
let(:delete_url) { ci_api("/builds/#{build.id}/artifacts") }
let(:get_url) { ci_api("/builds/#{build.id}/artifacts") }
let(:jwt_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
let(:headers) { { "GitLab-Workhorse" => "1.0", Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => jwt_token } }
let(:token) { build.token }
let(:headers_with_token) { headers.merge(Ci::API::Helpers::BUILD_TOKEN_HEADER => token) }
before do
build.run!
end
describe "POST /builds/:id/artifacts/authorize" do
context "authorizes posting artifact to running build" do
it "using token as parameter" do
post authorize_url, { token: build.token }, headers
expect(response).to have_http_status(200)
expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response["TempPath"]).not_to be_nil
end
it "using token as header" do
post authorize_url, {}, headers_with_token
expect(response).to have_http_status(200)
expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response["TempPath"]).not_to be_nil
end
it "using runners token" do
post authorize_url, { token: build.project.runners_token }, headers
expect(response).to have_http_status(200)
expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response["TempPath"]).not_to be_nil
end
it "rejects requests that did not go through gitlab-workhorse" do
headers.delete(Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER)
post authorize_url, { token: build.token }, headers
expect(response).to have_http_status(500)
end
end
context "fails to post an artifact that is too large" do
it "using token as parameter" do
stub_application_setting(max_artifacts_size: 0)
post authorize_url, { token: build.token, filesize: 100 }, headers
expect(response).to have_http_status(413)
end
it "using token as header" do
stub_application_setting(max_artifacts_size: 0)
post authorize_url, { filesize: 100 }, headers_with_token
expect(response).to have_http_status(413)
end
end
context 'authorization token is invalid' do
before do
post authorize_url, { token: 'invalid', filesize: 100 }
end
it 'responds with forbidden' do
expect(response).to have_http_status(403)
end
end
end
describe "POST /builds/:id/artifacts" do
context "disable sanitizer" do
before do
# By configuring this path we allow temp files to be passed in from any path
allow(ArtifactUploader).to receive(:artifacts_upload_path).and_return('/')
end
describe 'build has been erased' do
let(:build) { create(:ci_build, erased_at: Time.now) }
before do
upload_artifacts(file_upload, headers_with_token)
end
it 'responds with forbidden' do
expect(response.status).to eq 403
end
end
describe 'uploading artifacts for a running build' do
shared_examples 'successful artifacts upload' do
it 'updates successfully' do
response_filename =
json_response['artifacts_file']['filename']
expect(response).to have_http_status(201)
expect(response_filename).to eq(file_upload.original_filename)
end
end
context 'uses regular file post' do
before do
upload_artifacts(file_upload, headers_with_token, false)
end
it_behaves_like 'successful artifacts upload'
end
context 'uses accelerated file post' do
before do
upload_artifacts(file_upload, headers_with_token, true)
end
it_behaves_like 'successful artifacts upload'
end
context 'updates artifact' do
before do
upload_artifacts(file_upload2, headers_with_token)
upload_artifacts(file_upload, headers_with_token)
end
it_behaves_like 'successful artifacts upload'
end
context 'when using runners token' do
let(:token) { build.project.runners_token }
before do
upload_artifacts(file_upload, headers_with_token)
end
it_behaves_like 'successful artifacts upload'
end
end
context 'posts artifacts file and metadata file' do
let!(:artifacts) { file_upload }
let!(:metadata) { file_upload2 }
let(:stored_artifacts_file) { build.reload.artifacts_file.file }
let(:stored_metadata_file) { build.reload.artifacts_metadata.file }
let(:stored_artifacts_size) { build.reload.artifacts_size }
before do
post(post_url, post_data, headers_with_token)
end
context 'posts data accelerated by workhorse is correct' do
let(:post_data) do
{ 'file.path' => artifacts.path,
'file.name' => artifacts.original_filename,
'metadata.path' => metadata.path,
'metadata.name' => metadata.original_filename }
end
it 'stores artifacts and artifacts metadata' do
expect(response).to have_http_status(201)
expect(stored_artifacts_file.original_filename).to eq(artifacts.original_filename)
expect(stored_metadata_file.original_filename).to eq(metadata.original_filename)
expect(stored_artifacts_size).to eq(71759)
end
end
context 'no artifacts file in post data' do
let(:post_data) do
{ 'metadata' => metadata }
end
it 'is expected to respond with bad request' do
expect(response).to have_http_status(400)
end
it 'does not store metadata' do
expect(stored_metadata_file).to be_nil
end
end
end
context 'with an expire date' do
let!(:artifacts) { file_upload }
let(:default_artifacts_expire_in) {}
let(:post_data) do
{ 'file.path' => artifacts.path,
'file.name' => artifacts.original_filename,
'expire_in' => expire_in }
end
before do
stub_application_setting(
default_artifacts_expire_in: default_artifacts_expire_in)
post(post_url, post_data, headers_with_token)
end
context 'with an expire_in given' do
let(:expire_in) { '7 days' }
it 'updates when specified' do
build.reload
expect(response).to have_http_status(201)
expect(json_response['artifacts_expire_at']).not_to be_empty
expect(build.artifacts_expire_at)
.to be_within(5.minutes).of(7.days.from_now)
end
end
context 'with no expire_in given' do
let(:expire_in) { nil }
it 'ignores if not specified' do
build.reload
expect(response).to have_http_status(201)
expect(json_response['artifacts_expire_at']).to be_nil
expect(build.artifacts_expire_at).to be_nil
end
context 'with application default' do
context 'default to 5 days' do
let(:default_artifacts_expire_in) { '5 days' }
it 'sets to application default' do
build.reload
expect(response).to have_http_status(201)
expect(json_response['artifacts_expire_at'])
.not_to be_empty
expect(build.artifacts_expire_at)
.to be_within(5.minutes).of(5.days.from_now)
end
end
context 'default to 0' do
let(:default_artifacts_expire_in) { '0' }
it 'does not set expire_in' do
build.reload
expect(response).to have_http_status(201)
expect(json_response['artifacts_expire_at']).to be_nil
expect(build.artifacts_expire_at).to be_nil
end
end
end
end
end
context "artifacts file is too large" do
it "fails to post an artifact that is too large" do
stub_application_setting(max_artifacts_size: 0)
upload_artifacts(file_upload, headers_with_token)
expect(response).to have_http_status(413)
end
end
context "artifacts post request does not contain file" do
it "fails to post artifacts without file" do
post post_url, {}, headers_with_token
expect(response).to have_http_status(400)
end
end
context 'GitLab Workhorse is not configured' do
it "fails to post artifacts without GitLab-Workhorse" do
post post_url, { token: build.token }, {}
expect(response).to have_http_status(403)
end
end
end
context "artifacts are being stored outside of tmp path" do
before do
# By configuring this path we only allow files to be passed in from @tmpdir,
# while all temporary files are stored in the system tmp directory
@tmpdir = Dir.mktmpdir
allow(ArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir)
end
after do
FileUtils.remove_entry @tmpdir
end
it "fails to post artifacts stored outside of the tmp path" do
upload_artifacts(file_upload, headers_with_token)
expect(response).to have_http_status(400)
end
end
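# Uploads artifacts either as a regular multipart file post or in the
# workhorse-accelerated form, where only file.path and file.name are posted
# because gitlab-workhorse has already written the file to disk.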
def upload_artifacts(file, headers = {}, accelerated = true)
if accelerated
post post_url, {
'file.path' => file.path,
'file.name' => file.original_filename
}, headers
else
post post_url, { file: file }, headers
end
end
end
describe 'DELETE /builds/:id/artifacts' do
let(:build) { create(:ci_build, :artifacts) }
before do
delete delete_url, token: build.token
end
shared_examples 'having removable artifacts' do
it 'removes build artifacts' do
build.reload
expect(response).to have_http_status(200)
expect(build.artifacts_file.exists?).to be_falsy
expect(build.artifacts_metadata.exists?).to be_falsy
expect(build.artifacts_size).to be_nil
end
end
context 'when using build token' do
before do
delete delete_url, token: build.token
end
it_behaves_like 'having removable artifacts'
end
context 'when using runners token' do
before do
delete delete_url, token: build.project.runners_token
end
it_behaves_like 'having removable artifacts'
end
end
describe 'GET /builds/:id/artifacts' do
before do
get get_url, token: token
end
context 'build has artifacts' do
let(:build) { create(:ci_build, :artifacts) }
let(:download_headers) do
{ 'Content-Transfer-Encoding' => 'binary',
'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
end
shared_examples 'having downloadable artifacts' do
it 'downloads artifacts' do
expect(response).to have_http_status(200)
expect(response.headers).to include download_headers
end
end
context 'when using build token' do
let(:token) { build.token }
it_behaves_like 'having downloadable artifacts'
end
context 'when using runners token' do
let(:token) { build.project.runners_token }
it_behaves_like 'having downloadable artifacts'
end
end
context 'build does not have artifacts' do
let(:token) { build.token }
it 'responds with not found' do
expect(response).to have_http_status(404)
end
end
end
end
end
end
require 'spec_helper'
describe Ci::API::Runners do
include StubGitlabCalls
let(:registration_token) { 'abcdefg123456' }
before do
stub_gitlab_calls
stub_application_setting(runners_registration_token: registration_token)
end
describe "POST /runners/register" do
context 'when runner token is provided' do
before do
post ci_api("/runners/register"), token: registration_token
end
it 'creates runner with default values' do
expect(response).to have_http_status 201
expect(Ci::Runner.first.run_untagged).to be true
expect(Ci::Runner.first.token).not_to eq(registration_token)
end
end
context 'when runner description is provided' do
before do
post ci_api("/runners/register"), token: registration_token,
description: "server.hostname"
end
it 'creates runner' do
expect(response).to have_http_status 201
expect(Ci::Runner.first.description).to eq("server.hostname")
end
end
context 'when runner tags are provided' do
before do
post ci_api("/runners/register"), token: registration_token,
tag_list: "tag1, tag2"
end
it 'creates runner' do
expect(response).to have_http_status 201
expect(Ci::Runner.first.tag_list.sort).to eq(%w(tag1 tag2))
end
end
context 'when option for running untagged jobs is provided' do
context 'when tags are provided' do
it 'creates runner' do
post ci_api("/runners/register"), token: registration_token,
run_untagged: false,
tag_list: ['tag']
expect(response).to have_http_status 201
expect(Ci::Runner.first.run_untagged).to be false
end
end
context 'when tags are not provided' do
it 'does not create runner' do
post ci_api("/runners/register"), token: registration_token,
run_untagged: false
expect(response).to have_http_status 404
end
end
end
context 'when project token is provided' do
let(:project) { FactoryGirl.create(:project) }
before do
post ci_api("/runners/register"), token: project.runners_token
end
it 'creates runner' do
expect(response).to have_http_status 201
expect(project.runners.size).to eq(1)
expect(Ci::Runner.first.token).not_to eq(registration_token)
expect(Ci::Runner.first.token).not_to eq(project.runners_token)
end
end
context 'when token is invalid' do
it 'returns 403 error' do
post ci_api("/runners/register"), token: 'invalid'
expect(response).to have_http_status 403
end
end
context 'when no token provided' do
it 'returns 400 error' do
post ci_api("/runners/register")
expect(response).to have_http_status 400
end
end
%w(name version revision platform architecture).each do |param|
context "creates runner with #{param} saved" do
let(:value) { "#{param}_value" }
subject { Ci::Runner.first.read_attribute(param.to_sym) }
it do
post ci_api("/runners/register"), token: registration_token, info: { param => value }
expect(response).to have_http_status 201
is_expected.to eq(value)
end
end
end
end
describe "DELETE /runners/delete" do
it 'returns 200' do
runner = FactoryGirl.create(:ci_runner)
delete ci_api("/runners/delete"), token: runner.token
expect(response).to have_http_status 200
expect(Ci::Runner.count).to eq(0)
end
end
end
require 'spec_helper'
describe Ci::API::Triggers do
describe 'POST /projects/:project_id/refs/:ref/trigger' do
let!(:trigger_token) { 'secure token' }
let!(:project) { create(:project, :repository, ci_id: 10) }
let!(:project2) { create(:project, ci_id: 11) }
let!(:trigger) do
create(:ci_trigger,
project: project,
token: trigger_token,
owner: create(:user))
end
let(:options) do
{
token: trigger_token
}
end
before do
stub_ci_pipeline_to_return_yaml_file
project.add_developer(trigger.owner)
end
context 'Handles errors' do
it 'returns bad request if token is missing' do
post ci_api("/projects/#{project.ci_id}/refs/master/trigger")
expect(response).to have_http_status(400)
end
it 'returns not found if project is not found' do
post ci_api('/projects/0/refs/master/trigger'), options
expect(response).to have_http_status(404)
end
it 'returns unauthorized if token is for different project' do
post ci_api("/projects/#{project2.ci_id}/refs/master/trigger"), options
expect(response).to have_http_status(401)
end
end
context 'Have a commit' do
let(:pipeline) { project.pipelines.last }
it 'creates builds' do
post ci_api("/projects/#{project.ci_id}/refs/master/trigger"), options
expect(response).to have_http_status(201)
pipeline.builds.reload
expect(pipeline.builds.pending.size).to eq(2)
expect(pipeline.builds.size).to eq(5)
end
it 'returns bad request with no builds created if there\'s no commit for that ref' do
post ci_api("/projects/#{project.ci_id}/refs/other-branch/trigger"), options
expect(response).to have_http_status(400)
expect(json_response['message']['base'])
.to contain_exactly('Reference not found')
end
context 'Validates variables' do
let(:variables) do
{ 'TRIGGER_KEY' => 'TRIGGER_VALUE' }
end
it 'validates variables to be a hash' do
post ci_api("/projects/#{project.ci_id}/refs/master/trigger"), options.merge(variables: 'value')
expect(response).to have_http_status(400)
expect(json_response['error']).to eq('variables is invalid')
end
it 'validates that variables need to be a map of key-valued strings' do
post ci_api("/projects/#{project.ci_id}/refs/master/trigger"), options.merge(variables: { key: %w(1 2) })
expect(response).to have_http_status(400)
expect(json_response['message']).to eq('variables needs to be a map of key-valued strings')
end
it 'creates trigger request with variables' do
post ci_api("/projects/#{project.ci_id}/refs/master/trigger"), options.merge(variables: variables)
expect(response).to have_http_status(201)
pipeline.builds.reload
expect(pipeline.builds.first.trigger_request.variables).to eq(variables)
end
end
end
end
end
...@@ -55,10 +55,15 @@ describe Ci::CreatePipelineService do
    context 'when merge requests already exist for this source branch' do
      it 'updates head pipeline of each merge request' do
-       merge_request_1 = create(:merge_request, source_branch: 'master', target_branch: "branch_1", source_project: project)
-       merge_request_2 = create(:merge_request, source_branch: 'master', target_branch: "branch_2", source_project: project)
+       merge_request_1 = create(:merge_request, source_branch: 'master',
+                                                target_branch: "branch_1",
+                                                source_project: project)
+       merge_request_2 = create(:merge_request, source_branch: 'master',
+                                                target_branch: "branch_2",
+                                                source_project: project)

-       head_pipeline = pipeline
+       head_pipeline = execute_service

        expect(merge_request_1.reload.head_pipeline).to eq(head_pipeline)
        expect(merge_request_2.reload.head_pipeline).to eq(head_pipeline)
...@@ -66,9 +71,11 @@
    context 'when there is no pipeline for source branch' do
      it "does not update merge request head pipeline" do
-       merge_request = create(:merge_request, source_branch: 'feature', target_branch: "branch_1", source_project: project)
+       merge_request = create(:merge_request, source_branch: 'feature',
+                                              target_branch: "branch_1",
+                                              source_project: project)

-       head_pipeline = pipeline
+       head_pipeline = execute_service

        expect(merge_request.reload.head_pipeline).not_to eq(head_pipeline)
      end
...@@ -76,13 +83,19 @@
    context 'when merge request target project is different from source project' do
      let!(:target_project) { create(:project, :repository) }
-     let!(:forked_project_link) { create(:forked_project_link, forked_to_project: project, forked_from_project: target_project) }
+     let!(:forked_project_link) do
+       create(:forked_project_link, forked_to_project: project,
+                                    forked_from_project: target_project)
+     end

      it 'updates head pipeline for merge request' do
-       merge_request =
-         create(:merge_request, source_branch: 'master', target_branch: "branch_1", source_project: project, target_project: target_project)
+       merge_request = create(:merge_request, source_branch: 'master',
+                                              target_branch: "branch_1",
+                                              source_project: project,
+                                              target_project: target_project)

-       head_pipeline = pipeline
+       head_pipeline = execute_service

        expect(merge_request.reload.head_pipeline).to eq(head_pipeline)
      end
...@@ -90,15 +103,36 @@
    context 'when the pipeline is not the latest for the branch' do
      it 'does not update merge request head pipeline' do
-       merge_request = create(:merge_request, source_branch: 'master', target_branch: "branch_1", source_project: project)
+       merge_request = create(:merge_request, source_branch: 'master',
+                                              target_branch: "branch_1",
+                                              source_project: project)

-       allow_any_instance_of(Ci::Pipeline).to receive(:latest?).and_return(false)
+       allow_any_instance_of(Ci::Pipeline)
+         .to receive(:latest?).and_return(false)

-       pipeline
+       execute_service

        expect(merge_request.reload.head_pipeline).to be_nil
      end
    end

+   context 'when pipeline has errors' do
+     before do
+       stub_ci_pipeline_yaml_file('some invalid syntax')
+     end
+
+     it 'updates merge request head pipeline reference' do
+       merge_request = create(:merge_request, source_branch: 'master',
+                                              target_branch: 'feature',
+                                              source_project: project)
+
+       head_pipeline = execute_service
+
+       expect(head_pipeline).to be_persisted
+       expect(head_pipeline.yaml_errors).to be_present
+       expect(merge_request.reload.head_pipeline).to eq head_pipeline
+     end
+   end
  end

  context 'auto-cancel enabled' do
...
...@@ -32,12 +32,24 @@ describe Groups::CreateService, '#execute' do
    end

    it { is_expected.to be_persisted }

+   context 'when nested groups feature is disabled' do
+     it 'does not save group and returns an error' do
+       allow(Group).to receive(:supports_nested_groups?).and_return(false)
+
+       is_expected.not_to be_persisted
+       expect(subject.errors[:parent_id]).to include('You don’t have permission to create a subgroup in this group.')
+       expect(subject.parent_id).to be_nil
+     end
+   end
  end

  context 'as guest' do
    it 'does not save group and returns an error' do
+     allow(Group).to receive(:supports_nested_groups?).and_return(true)
+
      is_expected.not_to be_persisted
-     expect(subject.errors[:parent_id].first).to eq('manage access required to create subgroup')
+     expect(subject.errors[:parent_id].first).to eq('You don’t have permission to create a subgroup in this group.')
      expect(subject.parent_id).to be_nil
    end
  end
...
...@@ -8,8 +8,8 @@ describe Groups::DestroyService do
  let!(:nested_group) { create(:group, parent: group) }
  let!(:project) { create(:project, namespace: group) }
  let!(:notification_setting) { create(:notification_setting, source: group) }
- let!(:gitlab_shell) { Gitlab::Shell.new }
- let!(:remove_path) { group.path + "+#{group.id}+deleted" }
+ let(:gitlab_shell) { Gitlab::Shell.new }
+ let(:remove_path) { group.path + "+#{group.id}+deleted" }

  before do
    group.add_user(user, Gitlab::Access::OWNER)
...@@ -134,4 +134,26 @@
    it_behaves_like 'group destruction', false
  end
describe 'repository removal' do
before do
destroy_group(group, user, false)
end
context 'legacy storage' do
let!(:project) { create(:project, :empty_repo, namespace: group) }
it 'removes repository' do
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey
end
end
context 'hashed storage' do
let!(:project) { create(:project, :hashed, :empty_repo, namespace: group) }
it 'removes repository' do
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey
end
end
end
end
...@@ -2,8 +2,10 @@ require 'spec_helper'
describe MergeRequests::CreateFromIssueService do
  let(:project) { create(:project, :repository) }
  let(:user) { create(:user) }
- let(:issue) { create(:issue, project: project) }
+ let(:label_ids) { create_pair(:label, project: project).map(&:id) }
+ let(:milestone_id) { create(:milestone, project: project).id }
+ let(:issue) { create(:issue, project: project, milestone_id: milestone_id) }

  subject(:service) { described_class.new(project, user, issue_iid: issue.iid) }
...@@ -25,6 +27,20 @@
      described_class.new(project, user, issue_iid: -1).execute
    end

+   it "inherits labels" do
+     issue.assign_attributes(label_ids: label_ids)
+
+     result = service.execute
+
+     expect(result[:merge_request].label_ids).to eq(label_ids)
+   end
+
+   it "inherits milestones" do
+     result = service.execute
+
+     expect(result[:merge_request].milestone_id).to eq(milestone_id)
+   end

    it 'delegates the branch creation to CreateBranchService' do
      expect_any_instance_of(CreateBranchService).to receive(:execute).once.and_call_original
...
...@@ -4,9 +4,10 @@ describe Users::DestroyService do
  describe "Deletes a user and all their personal projects" do
    let!(:user) { create(:user) }
    let!(:admin) { create(:admin) }
-   let!(:namespace) { create(:namespace, owner: user) }
+   let!(:namespace) { user.namespace }
    let!(:project) { create(:project, namespace: namespace) }
    let(:service) { described_class.new(admin) }
+   let(:gitlab_shell) { Gitlab::Shell.new }

    context 'no options are given' do
      it 'deletes the user' do
...@@ -14,7 +15,7 @@
        expect { user_data['email'].to eq(user.email) }
        expect { User.find(user.id) }.to raise_error(ActiveRecord::RecordNotFound)
-       expect { Namespace.with_deleted.find(user.namespace.id) }.to raise_error(ActiveRecord::RecordNotFound)
+       expect { Namespace.with_deleted.find(namespace.id) }.to raise_error(ActiveRecord::RecordNotFound)
      end

      it 'will delete the project' do
...@@ -165,5 +166,27 @@
        expect(Issue.exists?(issue.id)).to be_falsy
      end
    end
describe "user's personal repository removal" do
before do
Sidekiq::Testing.inline! { service.execute(user) }
end
context 'legacy storage' do
let!(:project) { create(:project, :empty_repo, namespace: user.namespace) }
it 'removes repository' do
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey
end
end
context 'hashed storage' do
let!(:project) { create(:project, :empty_repo, :hashed, namespace: user.namespace) }
it 'removes repository' do
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey
end
end
end
  end
end
RSpec::Matchers.define :be_scheduled_migration do |delay, *expected|
match do |migration|
BackgroundMigrationWorker.jobs.any? do |job|
job['args'] == [migration, expected] &&
job['at'].to_i == (delay.to_i + Time.now.to_i)
end
end
failure_message do |migration|
"Migration `#{migration}` with args `#{expected.inspect}` " \
'not scheduled in expected time!'
end
end
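# Usage, mirroring the stages statuses migration spec earlier in this
# changeset; Sidekiq runs in fake mode and time is frozen because the
# matcher compares job['at'] against Time.now:
Sidekiq::Testing.fake! do
  Timecop.freeze do
    migrate!

    expect(described_class::MIGRATION).to be_scheduled_migration(5.minutes, 1, 2)
    expect(described_class::MIGRATION).to be_scheduled_migration(10.minutes, 3, 3)
  end
end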
...@@ -29,21 +29,27 @@ describe AuthorizedProjectsWorker do
  end

  describe '#perform' do
-   subject { described_class.new }
+   let(:user) { create(:user) }

-   it "refreshes user's authorized projects" do
-     user = create(:user)
+   subject(:job) { described_class.new }

+   it "refreshes user's authorized projects" do
      expect_any_instance_of(User).to receive(:refresh_authorized_projects)

-     subject.perform(user.id)
+     job.perform(user.id)
    end

+   it 'notifies the JobWaiter when done if the key is provided' do
+     expect(Gitlab::JobWaiter).to receive(:notify).with('notify-key', job.jid)
+
+     job.perform(user.id, 'notify-key')
+   end

    context "when the user is not found" do
      it "does nothing" do
        expect_any_instance_of(User).not_to receive(:refresh_authorized_projects)

-       subject.perform(-1)
+       job.perform(-1)
      end
    end
  end
...
...@@ -5,7 +5,7 @@ describe NamespacelessProjectDestroyWorker do
  before do
    # Stub after_save callbacks that will fail when Project has no namespace
-   allow_any_instance_of(Project).to receive(:ensure_storage_path_exist).and_return(nil)
+   allow_any_instance_of(Project).to receive(:ensure_storage_path_exists).and_return(nil)
    allow_any_instance_of(Project).to receive(:update_project_statistics).and_return(nil)
  end
...
require 'spec_helper'
describe StageUpdateWorker do
describe '#perform' do
context 'when stage exists' do
let(:stage) { create(:ci_stage_entity) }
it 'updates stage status' do
expect_any_instance_of(Ci::Stage).to receive(:update_status)
described_class.new.perform(stage.id)
end
end
context 'when stage does not exist' do
it 'does not raise exception' do
expect { described_class.new.perform(123) }
.not_to raise_error
end
end
end
end
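# For reference, a worker satisfying the examples above could be as small as
# this sketch (illustrative, not the exact source): look the stage up and
# delegate to Ci::Stage#update_status, tolerating records that are gone.
class StageUpdateWorker
  include Sidekiq::Worker

  def perform(stage_id)
    Ci::Stage.find_by(id: stage_id).try(:update_status)
  end
end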