Commit d700c6f0 authored by Simon Knox

Merge branch 'master' of gitlab.com:gitlab-org/gitlab-ee into issue_928_group_boards

parents fb77b058 fd0b838b
......@@ -6,6 +6,7 @@
},
"extends": "airbnb-base",
"globals": {
"__webpack_public_path__": true,
"_": false,
"gl": false,
"gon": false,
......
Please view this file on the master branch, on stable branches it's out of date.
## 9.5.1 (2017-08-23)
- [FIXED] Fix url for object store artifacts.
- [CHANGED] Ensure all database queries are routed through the database load balancer when load balancing is enabled. !2707
## 9.5.0 (2017-08-22)
- [FIXED] Fix Copy to Clipboard for SSH Public Key on Pull Repository settings. !2692
......
......@@ -2,6 +2,14 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.
## 9.5.1 (2017-08-23)
- [FIXED] Fix merge request pipeline status when pipeline has errors. !13664
- [FIXED] Commit rows would occasionally render with the wrong language.
- [FIXED] Fix caching of future broadcast messages.
- [FIXED] Only require Sidekiq throttling library when enabled, to reduce cache misses.
- Raise Housekeeping timeout to 24 hours. !13719
## 9.5.0 (2017-08-22)
- [FIXED] Fix timeouts when creating projects in groups with many members. !13508
......
9.5.0-pre
9.6.0-pre
/* eslint-disable no-underscore-dangle, camelcase */
/* global __webpack_public_path__ */
import monacoContext from 'monaco-editor/dev/vs/loader';
monacoContext.require.config({
paths: {
vs: `${__webpack_public_path__}monaco-editor/vs`,
vs: `${__webpack_public_path__}monaco-editor/vs`, // eslint-disable-line camelcase
},
});
// eslint-disable-next-line no-underscore-dangle
window.__monaco_context__ = monacoContext;
export default monacoContext.require;
......@@ -5,5 +5,5 @@
*/
if (gon && gon.webpack_public_path) {
__webpack_public_path__ = gon.webpack_public_path; // eslint-disable-line
__webpack_public_path__ = gon.webpack_public_path; // eslint-disable-line camelcase
}
......@@ -13,6 +13,9 @@
img {
/*max-width: 100%;*/
margin: 0 0 8px;
}
img.lazy {
min-width: 200px;
min-height: 100px;
background-color: $gray-lightest;
......
class Admin::LogsController < Admin::ApplicationController
def show
@loggers = [
Gitlab::AppLogger,
Gitlab::GitLogger,
Gitlab::EnvironmentLogger,
Gitlab::SidekiqLogger,
Gitlab::RepositoryCheckLogger
]
end
end
......@@ -26,6 +26,13 @@ class GroupsController < Groups::ApplicationController
def new
@group = Group.new
if params[:parent_id].present?
parent = Group.find_by(id: params[:parent_id])
if can?(current_user, :create_subgroup, parent)
@group.parent = parent
end
end
end
def create
......
......@@ -4,7 +4,6 @@ class Projects::ServicesController < Projects::ApplicationController
# Authorize
before_action :authorize_admin_project!
before_action :service, only: [:edit, :update, :test]
before_action :update_service, only: [:update, :test]
respond_to :html
......@@ -14,6 +13,8 @@ class Projects::ServicesController < Projects::ApplicationController
end
def update
@service.attributes = service_params[:service]
if @service.save(context: :manual_change)
redirect_to(project_settings_integrations_path(@project), notice: success_message)
else
......@@ -24,7 +25,7 @@ class Projects::ServicesController < Projects::ApplicationController
def test
message = {}
if @service.can_test?
if @service.can_test? && @service.update_attributes(service_params[:service])
data = @service.test_data(project, current_user)
outcome = @service.test(data)
......@@ -50,10 +51,6 @@ class Projects::ServicesController < Projects::ApplicationController
end
end
def update_service
@service.assign_attributes(service_params[:service])
end
def service
@service ||= @project.find_or_initialize_service(params[:id])
end
......
......@@ -118,6 +118,7 @@ module ApplicationSettingsHelper
:email_author_in_body,
:enabled_git_access_protocol,
:gravatar_enabled,
:hashed_storage_enabled,
:help_page_hide_commercial_content,
:help_page_support_url,
:help_page_text,
......
......@@ -12,11 +12,18 @@ module AvatarsHelper
avatar_size = options[:size] || 16
user_name = options[:user].try(:name) || options[:user_name]
avatar_url = options[:url] || avatar_icon(options[:user] || options[:user_email], avatar_size)
data_attributes = { container: 'body' }
has_tooltip = options[:has_tooltip].nil? ? true : options[:has_tooltip]
data_attributes = {}
css_class = %W[avatar s#{avatar_size}].push(*options[:css_class])
if has_tooltip
css_class.push('has-tooltip')
data_attributes = { container: 'body' }
end
image_tag(
avatar_url,
class: %W[avatar has-tooltip s#{avatar_size}].push(*options[:css_class]),
class: css_class,
alt: "#{user_name}'s avatar",
title: user_name,
data: data_attributes,
......
......@@ -181,6 +181,7 @@ module EventsHelper
end
def event_commit_title(message)
message ||= ''
(message.split("\n").first || "").truncate(70)
rescue
"--broken encoding"
......
......@@ -19,11 +19,21 @@ class BroadcastMessage < ActiveRecord::Base
after_commit :flush_redis_cache
def self.current
Rails.cache.fetch(CACHE_KEY) do
where('ends_at > :now AND starts_at <= :now', now: Time.zone.now)
.reorder(id: :asc)
.to_a
end
messages = Rails.cache.fetch(CACHE_KEY) { current_and_future_messages.to_a }
return messages if messages.empty?
now_or_future = messages.select(&:now_or_future?)
# If there are cached entries but none are to be displayed we'll purge the
# cache so we don't keep running this code all the time.
Rails.cache.delete(CACHE_KEY) if now_or_future.empty?
now_or_future.select(&:now?)
end
def self.current_and_future_messages
where('ends_at > :now', now: Time.zone.now).reorder(id: :asc)
end
def active?
......@@ -38,6 +48,18 @@ class BroadcastMessage < ActiveRecord::Base
ends_at < Time.zone.now
end
def now?
(starts_at..ends_at).cover?(Time.zone.now)
end
def future?
starts_at > Time.zone.now
end
def now_or_future?
now? || future?
end
def flush_redis_cache
Rails.cache.delete(CACHE_KEY)
end
......
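A minimal console-style sketch of the caching behaviour introduced above (the message attributes are illustrative): a future-dated message stays in the cached list so the query is not re-run on every request, while the cache is purged once no current or future messages remain.

```ruby
# Illustrative only; assumes a Rails console on this branch.
BroadcastMessage.create!(message: 'Maintenance window',
                         starts_at: 1.day.from_now,
                         ends_at: 2.days.from_now)

BroadcastMessage.current # => [] -- nothing is displayable yet, but the future
                         #    message is kept in the cache, so the database is
                         #    not queried again on every call.

# Once every cached message lies in the past, now_or_future is empty and the
# CACHE_KEY entry is deleted so the list is rebuilt on the next call.
```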
module Storage
module LegacyProject
extend ActiveSupport::Concern
def disk_path
full_path
end
def ensure_storage_path_exist
gitlab_shell.add_namespace(repository_storage_path, namespace.full_path)
end
def rename_repo
path_was = previous_changes['path'].first
old_path_with_namespace = File.join(namespace.full_path, path_was)
new_path_with_namespace = File.join(namespace.full_path, path)
Rails.logger.error "Attempting to rename #{old_path_with_namespace} -> #{new_path_with_namespace}"
if has_container_registry_tags?
Rails.logger.error "Project #{old_path_with_namespace} cannot be renamed because container registry tags are present!"
# we currently don't support renaming a repository if it contains images in the container registry
raise StandardError.new('Project cannot be renamed, because images are present in its container registry')
end
expire_caches_before_rename(old_path_with_namespace)
if gitlab_shell.mv_repository(repository_storage_path, old_path_with_namespace, new_path_with_namespace)
# If the repository moved successfully we need to send update instructions to users.
# However, we cannot allow a rollback since we already moved the repository,
# so we basically mute exceptions in the next actions
begin
gitlab_shell.mv_repository(repository_storage_path, "#{old_path_with_namespace}.wiki", "#{new_path_with_namespace}.wiki")
send_move_instructions(old_path_with_namespace)
expires_full_path_cache
@old_path_with_namespace = old_path_with_namespace
SystemHooksService.new.execute_hooks_for(self, :rename)
@repository = nil
rescue => e
Rails.logger.error "Exception renaming #{old_path_with_namespace} -> #{new_path_with_namespace}: #{e}"
# Returning false does not roll back the after_* transaction, but it tells
# us that some of the tasks failed
false
end
else
Rails.logger.error "Repository could not be renamed: #{old_path_with_namespace} -> #{new_path_with_namespace}"
# if we cannot move the namespace directory we should roll back
# the DB changes in order to prevent the DB and filesystem from getting out of sync
raise StandardError.new('repository cannot be renamed')
end
Gitlab::AppLogger.info "Project was renamed: #{old_path_with_namespace} -> #{new_path_with_namespace}"
Gitlab::UploadsTransfer.new.rename_project(path_was, path, namespace.full_path)
Gitlab::PagesTransfer.new.rename_project(path_was, path, namespace.full_path)
end
def create_repository(force: false)
# Forked import is handled asynchronously
return if forked? && !force
if gitlab_shell.add_repository(repository_storage_path, path_with_namespace)
repository.after_create
true
else
errors.add(:base, 'Failed to create repository via gitlab-shell')
false
end
end
end
end
......@@ -412,7 +412,7 @@ class Event < ActiveRecord::Base
def body?
if push?
push_with_commits? || rm_ref?
push_with_commits?
elsif note?
true
else
......
......@@ -4,7 +4,16 @@ class Geo::ProjectRegistry < Geo::BaseRegistry
validates :project, presence: true
scope :dirty, -> { where(arel_table[:resync_repository].eq(true).or(arel_table[:resync_wiki].eq(true))) }
scope :failed, -> { where.not(last_repository_synced_at: nil).where(last_repository_successful_sync_at: nil) }
def self.failed
repository_sync_failed = arel_table[:last_repository_synced_at].not_eq(nil)
.and(arel_table[:last_repository_successful_sync_at].eq(nil))
wiki_sync_failed = arel_table[:last_wiki_synced_at].not_eq(nil)
.and(arel_table[:last_wiki_successful_sync_at].eq(nil))
where(repository_sync_failed.or(wiki_sync_failed))
end
def self.synced
where.not(last_repository_synced_at: nil, last_repository_successful_sync_at: nil)
......
......@@ -17,7 +17,6 @@ class Project < ActiveRecord::Base
include ProjectFeaturesCompatibility
include SelectForProjectAuthorization
include Routable
include Storage::LegacyProject
# EE specific modules
prepend EE::Project
......@@ -28,6 +27,7 @@ class Project < ActiveRecord::Base
NUMBER_OF_PERMITTED_BOARDS = 1
UNKNOWN_IMPORT_URL = 'http://unknown.git'.freeze
LATEST_STORAGE_VERSION = 1
cache_markdown_field :description, pipeline: :description
......@@ -35,6 +35,8 @@ class Project < ActiveRecord::Base
:merge_requests_enabled?, :issues_enabled?, to: :project_feature,
allow_nil: true
delegate :base_dir, :disk_path, :ensure_storage_path_exists, to: :storage
default_value_for :archived, false
default_value_for :visibility_level, gitlab_config_features.visibility_level
default_value_for :container_registry_enabled, gitlab_config_features.container_registry
......@@ -47,32 +49,24 @@ class Project < ActiveRecord::Base
default_value_for :snippets_enabled, gitlab_config_features.snippets
default_value_for :only_allow_merge_if_all_discussions_are_resolved, false
after_create :ensure_storage_path_exist
after_create :create_project_feature, unless: :project_feature
after_save :update_project_statistics, if: :namespace_id_changed?
add_authentication_token_field :runners_token
before_save :ensure_runners_token
# set last_activity_at to the same as created_at
after_save :update_project_statistics, if: :namespace_id_changed?
after_create :create_project_feature, unless: :project_feature
after_create :set_last_activity_at
def set_last_activity_at
update_column(:last_activity_at, self.created_at)
end
after_create :set_last_repository_updated_at
def set_last_repository_updated_at
update_column(:last_repository_updated_at, self.created_at)
end
after_update :update_forks_visibility_level
before_destroy :remove_private_deploy_keys
after_destroy -> { run_after_commit { remove_pages } }
# update visibility_level of forks
after_update :update_forks_visibility_level
after_validation :check_pending_delete
# Legacy Storage specific hooks
after_save :ensure_storage_path_exist, if: :namespace_id_changed?
# Storage specific hooks
after_initialize :use_hashed_storage
after_create :ensure_storage_path_exists
after_save :ensure_storage_path_exists, if: :namespace_id_changed?
acts_as_taggable
......@@ -242,9 +236,6 @@ class Project < ActiveRecord::Base
presence: true,
inclusion: { in: ->(_object) { Gitlab.config.repositories.storages.keys } }
add_authentication_token_field :runners_token
before_save :ensure_runners_token
mount_uploader :avatar, AvatarUploader
has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
......@@ -486,6 +477,10 @@ class Project < ActiveRecord::Base
@repository ||= Repository.new(full_path, self, disk_path: disk_path)
end
def reload_repository!
@repository = nil
end
def container_registry_url
if Gitlab.config.registry.enabled
"#{Gitlab.config.registry.host_port}/#{full_path.downcase}"
......@@ -1004,6 +999,19 @@ class Project < ActiveRecord::Base
end
end
def create_repository(force: false)
# Forked import is handled asynchronously
return if forked? && !force
if gitlab_shell.add_repository(repository_storage_path, disk_path)
repository.after_create
true
else
errors.add(:base, 'Failed to create repository via gitlab-shell')
false
end
end
def hook_attrs(backward: true)
attrs = {
name: name,
......@@ -1086,6 +1094,7 @@ class Project < ActiveRecord::Base
!!repository.exists?
end
# update visibility_level of forks
def update_forks_visibility_level
return unless visibility_level < visibility_level_was
......@@ -1213,7 +1222,8 @@ class Project < ActiveRecord::Base
end
def pages_path
File.join(Settings.pages.path, disk_path)
# TODO: when we migrate Pages to work with new storage types, change this to use disk_path
File.join(Settings.pages.path, full_path)
end
def public_pages_path
......@@ -1252,6 +1262,50 @@ class Project < ActiveRecord::Base
end
end
def rename_repo
new_full_path = build_full_path
Rails.logger.error "Attempting to rename #{full_path_was} -> #{new_full_path}"
if has_container_registry_tags?
Rails.logger.error "Project #{full_path_was} cannot be renamed because container registry tags are present!"
# we currently don't support renaming a repository if it contains images in the container registry
raise StandardError.new('Project cannot be renamed, because images are present in its container registry')
end
expire_caches_before_rename(full_path_was)
if storage.rename_repo
Gitlab::AppLogger.info "Project was renamed: #{full_path_was} -> #{new_full_path}"
rename_repo_notify!
after_rename_repo
else
Rails.logger.error "Repository could not be renamed: #{full_path_was} -> #{new_full_path}"
# if we cannot move the namespace directory we should roll back
# the DB changes in order to prevent the DB and filesystem from getting out of sync
raise StandardError.new('repository cannot be renamed')
end
end
def rename_repo_notify!
send_move_instructions(full_path_was)
expires_full_path_cache
self.old_path_with_namespace = full_path_was
SystemHooksService.new.execute_hooks_for(self, :rename)
reload_repository!
end
def after_rename_repo
path_before_change = previous_changes['path'].first
Gitlab::UploadsTransfer.new.rename_project(path_before_change, self.path, namespace.full_path)
Gitlab::PagesTransfer.new.rename_project(path_before_change, self.path, namespace.full_path)
end
def running_or_pending_build_count(force: false)
Rails.cache.fetch(['projects', id, 'running_or_pending_build_count'], force: force) do
builds.running_or_pending.count(:all)
......@@ -1418,6 +1472,10 @@ class Project < ActiveRecord::Base
feature_available?(:issue_board_milestone, user)
end
def full_path_was
File.join(namespace.full_path, previous_changes['path'].first)
end
alias_method :name_with_namespace, :full_name
alias_method :human_name, :full_name
# @deprecated cannot remove yet because it has an index with its name in elasticsearch
......@@ -1427,8 +1485,36 @@ class Project < ActiveRecord::Base
Projects::ForksCountService.new(self).count
end
def legacy_storage?
self.storage_version.nil?
end
private
def storage
@storage ||=
if self.storage_version && self.storage_version >= 1
Storage::HashedProject.new(self)
else
Storage::LegacyProject.new(self)
end
end
def use_hashed_storage
if self.new_record? && current_application_settings.hashed_storage_enabled
self.storage_version = LATEST_STORAGE_VERSION
end
end
# set last_activity_at to the same as created_at
def set_last_activity_at
update_column(:last_activity_at, self.created_at)
end
def set_last_repository_updated_at
update_column(:last_repository_updated_at, self.created_at)
end
def cross_namespace_reference?(from)
case from
when Project
......
......@@ -26,6 +26,8 @@ class KubernetesService < DeploymentService
validates :token
end
before_validation :enforce_namespace_to_lower_case
validates :namespace,
allow_blank: true,
length: 1..63,
......@@ -209,4 +211,8 @@ class KubernetesService < DeploymentService
max_session_time: current_application_settings.terminal_max_session_time
}
end
def enforce_namespace_to_lower_case
self.namespace = self.namespace&.downcase
end
end
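A small sketch of the new `before_validation` callback (the lookup and the input value are hypothetical): the namespace is downcased before the lowercase-only format validation runs.

```ruby
# Hypothetical input; enforce_namespace_to_lower_case normalises it on validation.
service = project.find_or_initialize_service('kubernetes')
service.namespace = 'Staging-Env'
service.valid?      # before_validation downcases the namespace first
service.namespace   # => "staging-env"
```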
module Storage
class HashedProject
attr_accessor :project
delegate :gitlab_shell, :repository_storage_path, to: :project
ROOT_PATH_PREFIX = '@hashed'.freeze
def initialize(project)
@project = project
end
# Base directory
#
# @return [String] directory where repository is stored
def base_dir
"#{ROOT_PATH_PREFIX}/#{disk_hash[0..1]}/#{disk_hash[2..3]}" if disk_hash
end
# Disk path is used to build repository and project's wiki path on disk
#
# @return [String] combination of base_dir and the repository's own name, without the `.git` or `.wiki.git` extension
def disk_path
"#{base_dir}/#{disk_hash}" if disk_hash
end
def ensure_storage_path_exists
gitlab_shell.add_namespace(repository_storage_path, base_dir)
end
def rename_repo
true
end
private
# Generates the hash for the project path and name on disk
# If you need to refer to the repository on disk, use the `#disk_path`
def disk_hash
@disk_hash ||= Digest::SHA2.hexdigest(project.id.to_s) if project.id
end
end
end
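To make the hashed layout concrete, here is a sketch that mirrors `disk_hash`, `base_dir` and `disk_path` above (the project ID is illustrative):

```ruby
require 'digest'

project_id = 93                                      # hypothetical project ID
disk_hash  = Digest::SHA2.hexdigest(project_id.to_s) # SHA-256 of the ID as a string

base_dir  = "@hashed/#{disk_hash[0..1]}/#{disk_hash[2..3]}"
disk_path = "#{base_dir}/#{disk_hash}"
# The repository lives at "#{disk_path}.git", independent of the project's URL,
# whereas legacy storage uses "group/project.git" and needs a move on every rename.
```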
module Storage
class LegacyProject
attr_accessor :project
delegate :namespace, :gitlab_shell, :repository_storage_path, to: :project
def initialize(project)
@project = project
end
# Base directory
#
# @return [String] directory where repository is stored
def base_dir
namespace.full_path
end
# Disk path is used to build repository and project's wiki path on disk
#
# @return [String] combination of base_dir and the repository's own name, without the `.git` or `.wiki.git` extension
def disk_path
project.full_path
end
def ensure_storage_path_exists
return unless namespace
gitlab_shell.add_namespace(repository_storage_path, base_dir)
end
def rename_repo
new_full_path = project.build_full_path
if gitlab_shell.mv_repository(repository_storage_path, project.full_path_was, new_full_path)
# If the repository moved successfully we need to send update instructions to users.
# However, we cannot allow a rollback since we already moved the repository,
# so we basically mute exceptions in the next actions
begin
gitlab_shell.mv_repository(repository_storage_path, "#{project.full_path_was}.wiki", "#{new_full_path}.wiki")
return true
rescue => e
Rails.logger.error "Exception renaming #{project.full_path_was} -> #{new_full_path}: #{e}"
# Returning false does not roll back the after_* transaction, but it tells
# us that some of the tasks failed
return false
end
end
false
end
end
end
......@@ -15,6 +15,8 @@ class GroupPolicy < BasePolicy
condition(:master) { access_level >= GroupMember::MASTER }
condition(:reporter) { access_level >= GroupMember::REPORTER }
condition(:nested_groups_supported, scope: :global) { Group.supports_nested_groups? }
condition(:has_projects) do
GroupProjectsFinder.new(group: @subject, current_user: @user).execute.any?
end
......@@ -44,7 +46,7 @@ class GroupPolicy < BasePolicy
enable :change_visibility_level
end
rule { owner & can_create_group }.enable :create_subgroup
rule { owner & can_create_group & nested_groups_supported }.enable :create_subgroup
rule { public_group | logged_in_viewable }.enable :view_globally
......
......@@ -16,7 +16,7 @@ module Geo
project.expire_caches_before_rename(old_path_with_namespace)
# Make sure the target directory exists (used when transferring repositories)
project.ensure_storage_path_exist
project.ensure_storage_path_exists
if gitlab_shell.mv_repository(project.repository_storage_path,
old_path_with_namespace, new_path_with_namespace)
......
......@@ -17,9 +17,9 @@ module Groups
limit = params.delete(:repository_size_limit)
@group.repository_size_limit = Gitlab::Utils.try_megabytes_to_bytes(limit) if limit
if @group.parent && !can?(current_user, :admin_group, @group.parent)
if @group.parent && !can?(current_user, :create_subgroup, @group.parent)
@group.parent = nil
@group.errors.add(:parent_id, 'manage access required to create subgroup')
@group.errors.add(:parent_id, 'You don’t have permission to create a subgroup in this group.')
return @group
end
......
......@@ -13,7 +13,7 @@ module Groups
# Execute the destruction of the models immediately to ensure atomic cleanup.
# Skip repository removal because we remove the directory with the namespace
# that contains all these repositories
::Projects::DestroyService.new(project, current_user, skip_repo: true).execute
::Projects::DestroyService.new(project, current_user, skip_repo: project.legacy_storage?).execute
end
group.children.each do |group|
......
......@@ -9,7 +9,8 @@ module Projects
class HousekeepingService < BaseService
include Gitlab::CurrentSettings
LEASE_TIMEOUT = 3600
# Timeout set to 24h
LEASE_TIMEOUT = 86400
class LeaseTaken < StandardError
def to_s
......
......@@ -35,10 +35,13 @@ module Users
Groups::DestroyService.new(group, current_user).execute
end
namespace = user.namespace
namespace.prepare_for_destroy
user.personal_projects.each do |project|
# Skip repository removal because we remove the directory with the namespace
# that contains all these repositories
::Projects::DestroyService.new(project, current_user, skip_repo: true).execute
::Projects::DestroyService.new(project, current_user, skip_repo: project.legacy_storage?).execute
end
Project.includes(group: :owners).where(mirror_user: user).find_each do |project|
......@@ -50,7 +53,6 @@ module Users
MigrateToGhostUserService.new(user).execute unless options[:hard_delete]
# Destroy the namespace after destroying the user since certain methods may depend on the namespace existing
namespace = user.namespace
user_data = user.destroy
namespace.really_destroy!
......
......@@ -527,6 +527,16 @@
%fieldset
%legend Repository Storage
.form-group
.col-sm-offset-2.col-sm-10
.checkbox
= f.label :hashed_storage_enabled do
= f.check_box :hashed_storage_enabled
Create new projects using hashed storage paths
.help-block
Enable immutable, hash-based paths and repository names to store repositories on disk. This prevents
repositories from having to be moved or renamed when the Project URL changes and may improve disk I/O performance.
%em (EXPERIMENTAL)
.form-group
= f.label :repository_storages, 'Storage paths for new projects', class: 'control-label col-sm-2'
.col-sm-10
......@@ -536,6 +546,7 @@
= succeed "." do
= link_to "repository storages documentation", help_page_path("administration/repository_storages")
%fieldset
%legend Repository Checks
.form-group
......
- @no_container = true
- page_title "Logs"
- loggers = [Gitlab::GitLogger, Gitlab::AppLogger,
Gitlab::EnvironmentLogger, Gitlab::SidekiqLogger,
Gitlab::RepositoryCheckLogger]
= render 'admin/monitoring/head'
%div{ class: container_class }
%ul.nav-links.log-tabs
- loggers.each do |klass|
%li{ class: active_when(klass == Gitlab::GitLogger) }>
= link_to klass::file_name, "##{klass::file_name_noext}",
'data-toggle' => 'tab'
- @loggers.each do |klass|
%li{ class: active_when(klass == @loggers.first) }>
= link_to klass.file_name, "##{klass.file_name_noext}", data: { toggle: 'tab' }
.row-content-block
To prevent performance issues, admin logs output the last 2000 lines
.tab-content
- loggers.each do |klass|
.tab-pane{ class: active_when(klass == Gitlab::GitLogger), id: klass::file_name_noext }
- @loggers.each do |klass|
.tab-pane{ class: active_when(klass == @loggers.first), id: klass.file_name_noext }
.file-holder#README
.js-file-title.file-title
%i.fa.fa-file
= klass::file_name
= klass.file_name
.pull-right
= link_to '#', class: 'log-bottom' do
%i.fa.fa-arrow-down
......
......@@ -5,9 +5,10 @@
%i
at
= event.created_at.to_s(:short)
%blockquote= markdown(escape_once(event.commit_title), pipeline: :atom, project: event.project, author: event.author)
- if event.commits_count > 1
%p
%i
\... and
= pluralize(event.commits_count - 1, "more commit")
- unless event.rm_ref?
%blockquote= markdown(escape_once(event.commit_title), pipeline: :atom, project: event.project, author: event.author)
- if event.commits_count > 1
%p
%i
\... and
= pluralize(event.commits_count - 1, "more commit")
......@@ -41,7 +41,3 @@
%li.commits-stat
= link_to create_mr_path(project.default_branch, event.ref_name, project) do
Create Merge Request
- elsif event.rm_ref?
.event-body
%ul.well-list.event_commits
= render "events/commit", project: project, event: event
......@@ -25,7 +25,7 @@
= hidden_field_tag :namespace_id, value: current_user.namespace_id
.form-group.col-xs-12.col-sm-6.project-path
= label_tag :path, 'Project name', class: 'label-light'
= text_field_tag :path, nil, placeholder: "my-awesome-project", class: "js-path-name form-control", tabindex: 2, autofocus: true, required: true
= text_field_tag :path, @path, placeholder: "my-awesome-project", class: "js-path-name form-control", tabindex: 2, autofocus: true, required: true
.row
.form-group.col-md-12
......@@ -33,7 +33,6 @@
.row
.form-group.col-sm-12
= hidden_field_tag :namespace_id, @namespace.id
= hidden_field_tag :path, @path
= label_tag :file, 'GitLab project export', class: 'label-light'
.form-group
= file_field_tag :file, class: ''
......
......@@ -223,8 +223,6 @@
.sub-section.rename-respository
%h4.warning-title
Rename repository
%p
Export this project with all its related data in order to move your project to a new GitLab instance. Once the export is finished, you can import the file from the "New Project" page.
= render 'projects/errors'
= form_for([@project.namespace.becomes(Namespace), @project]) do |f|
.form-group.project_name_holder
......
- content_for :page_specific_javascripts do
= page_specific_javascript_bundle_tag('group')
- parent = GroupFinder.new(current_user).execute(id: params[:parent_id] || @group.parent_id)
- parent = @group.parent
- group_path = root_url
- group_path << parent.full_path + '/' if parent
......@@ -13,13 +13,12 @@
%span>= root_url
- if parent
%strong= parent.full_path + '/'
= f.hidden_field :parent_id
= f.text_field :path, placeholder: 'open-source', class: 'form-control',
autofocus: local_assigns[:autofocus] || false, required: true,
pattern: Gitlab::PathRegex::NAMESPACE_FORMAT_REGEX_JS,
title: 'Please choose a group path with no special characters.',
"data-bind-in" => "#{'create_chat_team' if Gitlab.config.mattermost.enabled}"
- if parent
= f.hidden_field :parent_id, value: parent.id
- if @group.persisted?
.alert.alert-warning.prepend-top-10
......
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 14 14"><g fill-rule="evenodd"><path fill-rule="nonzero" d="m0 7c0-3.866 3.142-7 7-7 3.866 0 7 3.142 7 7 0 3.866-3.142 7-7 7-3.866 0-7-3.142-7-7m1 0c0 3.309 2.69 6 6 6 3.309 0 6-2.69 6-6 0-3.309-2.69-6-6-6-3.309 0-6 2.69-6 6"/><path d="m7 6h-2.702c-.154 0-.298.132-.298.295v1.41c0 .164.133.295.298.295h2.702v1.694c0 .18.095.209.213.09l2.539-2.568c.115-.116.118-.312 0-.432l-2.539-2.568c-.115-.116-.213-.079-.213.09v1.694"/></g></svg>
<svg height="16" viewBox="0 0 16 16" width="16" xmlns="http://www.w3.org/2000/svg"><path d="m9 6h-7a2 2 0 1 0 0 4h7v2.586a1 1 0 0 0 1.707.707l4.586-4.586a1 1 0 0 0 0-1.414l-4.586-4.586a1 1 0 0 0 -1.707.707z" fill-rule="evenodd"/></svg>
<svg width="1792" height="1792" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg"><path d="M1472 930v318q0 119-84.5 203.5t-203.5 84.5h-832q-119 0-203.5-84.5t-84.5-203.5v-832q0-119 84.5-203.5t203.5-84.5h832q63 0 117 25 15 7 18 23 3 17-9 29l-49 49q-10 10-23 10-3 0-9-2-23-6-45-6h-832q-66 0-113 47t-47 113v832q0 66 47 113t113 47h832q66 0 113-47t47-113v-254q0-13 9-22l64-64q10-10 23-10 6 0 12 3 20 8 20 29zm231-489l-814 814q-24 24-57 24t-57-24l-430-430q-24-24-24-57t24-57l110-110q24-24 57-24t57 24l263 263 647-647q24-24 57-24t57 24l110 110q24 24 24 57t-24 57z"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 16 16"><path d="M7.536 8.657l2.828-2.829a1 1 0 0 1 1.414 1.415l-3.535 3.535a.997.997 0 0 1-1.415 0l-2.12-2.121A1 1 0 0 1 6.12 7.243l1.415 1.414zM3 0h10a3 3 0 0 1 3 3v10a3 3 0 0 1-3 3H3a3 3 0 0 1-3-3V3a3 3 0 0 1 3-3zm0 2a1 1 0 0 0-1 1v10a1 1 0 0 0 1 1h10a1 1 0 0 0 1-1V3a1 1 0 0 0-1-1H3z"/></svg>
<svg width="1792" height="1792" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg"><path d="M1024 544v448q0 14-9 23t-23 9h-320q-14 0-23-9t-9-23v-64q0-14 9-23t23-9h224v-352q0-14 9-23t23-9h64q14 0 23 9t9 23zm416 352q0-148-73-273t-198-198-273-73-273 73-198 198-73 273 73 273 198 198 273 73 273-73 198-198 73-273zm224 0q0 209-103 385.5t-279.5 279.5-385.5 103-385.5-103-279.5-279.5-103-385.5 103-385.5 279.5-279.5 385.5-103 385.5 103 279.5 279.5 103 385.5z"/></svg>
<svg height="16" viewBox="0 0 16 16" width="16" xmlns="http://www.w3.org/2000/svg"><path d="M9 7h1c.552 0 1 .448 1 1s-.448 1-1 1H8c-.276 0-.526-.112-.707-.293S7 8.277 7 8V5c0-.552.448-1 1-1s1 .448 1 1zm-1 9c-4.418 0-8-3.582-8-8s3.582-8 8-8 8 3.582 8 8-3.582 8-8 8zm0-2c3.314 0 6-2.686 6-6s-2.686-6-6-6-6 2.686-6 6 2.686 6 6 6z"/></svg>
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="14" height="14" viewBox="0 0 14 14">
<path d="M13 12.75v-8.5q0-0.102-0.074-0.176t-0.176-0.074h-8.5q-0.102 0-0.176 0.074t-0.074 0.176v8.5q0 0.102 0.074 0.176t0.176 0.074h8.5q0.102 0 0.176-0.074t0.074-0.176zM14 4.25v8.5q0 0.516-0.367 0.883t-0.883 0.367h-8.5q-0.516 0-0.883-0.367t-0.367-0.883v-8.5q0-0.516 0.367-0.883t0.883-0.367h8.5q0.516 0 0.883 0.367t0.367 0.883zM11 1.25v1.25h-1v-1.25q0-0.102-0.074-0.176t-0.176-0.074h-8.5q-0.102 0-0.176 0.074t-0.074 0.176v8.5q0 0.102 0.074 0.176t0.176 0.074h1.25v1h-1.25q-0.516 0-0.883-0.367t-0.367-0.883v-8.5q0-0.516 0.367-0.883t0.883-0.367h8.5q0.516 0 0.883 0.367t0.367 0.883z"></path>
</svg>
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 16 16"><path d="M10.874 2H12a3 3 0 0 1 3 3v8a3 3 0 0 1-3 3h-2c-.918 0-1.74-.413-2.29-1.063a3.987 3.987 0 0 0 1.988-.984A1 1 0 0 0 10 14h2a1 1 0 0 0 1-1V5a1 1 0 0 0-1-1h-1V3c0-.345-.044-.68-.126-1zM4 0h3a3 3 0 0 1 3 3v8a3 3 0 0 1-3 3H4a3 3 0 0 1-3-3V3a3 3 0 0 1 3-3zm0 2a1 1 0 0 0-1 1v8a1 1 0 0 0 1 1h3a1 1 0 0 0 1-1V3a1 1 0 0 0-1-1H4z"/></svg>
<svg width="1792" height="1792" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg"><path d="M672 1472q0-40-28-68t-68-28-68 28-28 68 28 68 68 28 68-28 28-68zm0-1152q0-40-28-68t-68-28-68 28-28 68 28 68 68 28 68-28 28-68zm640 128q0-40-28-68t-68-28-68 28-28 68 28 68 68 28 68-28 28-68zm96 0q0 52-26 96.5t-70 69.5q-2 287-226 414-68 38-203 81-128 40-169.5 71t-41.5 100v26q44 25 70 69.5t26 96.5q0 80-56 136t-136 56-136-56-56-136q0-52 26-96.5t70-69.5v-820q-44-25-70-69.5t-26-96.5q0-80 56-136t136-56 136 56 56 136q0 52-26 96.5t-70 69.5v497q54-26 154-57 55-17 87.5-29.5t70.5-31 59-39.5 40.5-51 28-69.5 8.5-91.5q-44-25-70-69.5t-26-96.5q0-80 56-136t136-56 136 56 56 136z"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 16 16"><path d="M6 11.978v.29a2 2 0 1 1-2 0V3.732a2 2 0 1 1 2 0v3.849c.592-.491 1.31-.854 2.15-1.081 1.308-.353 1.875-.882 1.893-1.743a2 2 0 1 1 2.002-.051C12.053 6.54 10.857 7.84 8.67 8.43 7.056 8.867 6.195 9.98 6 11.978zM5 3a1 1 0 1 0 0-2 1 1 0 0 0 0 2zm6 1a1 1 0 1 0 0-2 1 1 0 0 0 0 2zM5 15a1 1 0 1 0 0-2 1 1 0 0 0 0 2z"/></svg>
<svg width="1792" height="1792" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg"><path d="M896 384q-204 0-381.5 69.5t-282 187.5-104.5 255q0 112 71.5 213.5t201.5 175.5l87 50-27 96q-24 91-70 172 152-63 275-171l43-38 57 6q69 8 130 8 204 0 381.5-69.5t282-187.5 104.5-255-104.5-255-282-187.5-381.5-69.5zm896 512q0 174-120 321.5t-326 233-450 85.5q-70 0-145-8-198 175-460 242-49 14-114 22h-5q-15 0-27-10.5t-16-27.5v-1q-3-4-.5-12t2-10 4.5-9.5l6-9 7-8.5 8-9q7-8 31-34.5t34.5-38 31-39.5 32.5-51 27-59 26-76q-157-89-247.5-220t-90.5-281q0-174 120-321.5t326-233 450-85.5 450 85.5 326 233 120 321.5z"/></svg>
<svg height="16" viewBox="0 0 16 16" width="16" xmlns="http://www.w3.org/2000/svg"><path d="m1.707 15.707c-.63.63-1.707.184-1.707-.707v-12a3 3 0 0 1 3-3h10a3 3 0 0 1 3 3v6a3 3 0 0 1 -3 3h-7.586zm.293-3.121 2.293-2.293a1 1 0 0 1 .707-.293h8a1 1 0 0 0 1-1v-6a1 1 0 0 0 -1-1h-10a1 1 0 0 0 -1 1z"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 18" enable-background="new 0 0 36 18"><path d="m34 7h-7.2c-.9-4-4.5-7-8.8-7s-7.9 3-8.8 7h-7.2c-1.1 0-2 .9-2 2 0 1.1.9 2 2 2h7.2c.9 4 4.5 7 8.8 7s7.9-3 8.8-7h7.2c1.1 0 2-.9 2-2 0-1.1-.9-2-2-2m-16 7c-2.8 0-5-2.2-5-5s2.2-5 5-5 5 2.2 5 5-2.2 5-5 5"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 16 16"><path d="M8 10a2 2 0 1 0 0-4 2 2 0 0 0 0 4zm3.876-1.008a4.002 4.002 0 0 1-7.752 0A1.01 1.01 0 0 1 4 9H1a1 1 0 1 1 0-2h3c.042 0 .083.003.124.008a4.002 4.002 0 0 1 7.752 0A1.01 1.01 0 0 1 12 7h3a1 1 0 0 1 0 2h-3a1.01 1.01 0 0 1-.124-.008z"/></svg>
<svg width="1792" height="1792" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg"><path d="M888 1184l116-116-152-152-116 116v56h96v96h56zm440-720q-16-16-33 1l-350 350q-17 17-1 33t33-1l350-350q17-17 1-33zm80 594v190q0 119-84.5 203.5t-203.5 84.5h-832q-119 0-203.5-84.5t-84.5-203.5v-832q0-119 84.5-203.5t203.5-84.5h832q63 0 117 25 15 7 18 23 3 17-9 29l-49 49q-14 14-32 8-23-6-45-6h-832q-66 0-113 47t-47 113v832q0 66 47 113t113 47h832q66 0 113-47t47-113v-126q0-13 9-22l64-64q15-15 35-7t20 29zm-96-738l288 288-672 672h-288v-288zm444 132l-92 92-288-288 92-92q28-28 68-28t68 28l152 152q28 28 28 68t-28 68z"/></svg>
<svg height="16" viewBox="0 0 16 16" width="16" xmlns="http://www.w3.org/2000/svg"><path d="m13.436 1.413 1.415 1.414a1 1 0 0 1 0 1.414l-10.316 10.315a1 1 0 0 1 -.703.293l-2.407.008-.008-2.421a1 1 0 0 1 .293-.71l10.312-10.314a1 1 0 0 1 1.414 0zm-9.608 12.436 10.315-10.315-1.413-1.414-10.313 10.312.005 1.422zm7.486-10.313 1.414 1.414-7.778 7.778-1.407.007-.007-1.421zm1.414-1.415 1.414 1.415-.707.707-1.414-1.415z"/></svg>
<svg width="1792" height="1792" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg"><path d="M1664 960q-152-236-381-353 61 104 61 225 0 185-131.5 316.5t-316.5 131.5-316.5-131.5-131.5-316.5q0-121 61-225-229 117-381 353 133 205 333.5 326.5t434.5 121.5 434.5-121.5 333.5-326.5zm-720-384q0-20-14-34t-34-14q-125 0-214.5 89.5t-89.5 214.5q0 20 14 34t34 14 34-14 14-34q0-86 61-147t147-61q20 0 34-14t14-34zm848 384q0 34-20 69-140 230-376.5 368.5t-499.5 138.5-499.5-139-376.5-368q-20-35-20-69t20-69q140-229 376.5-368t499.5-139 499.5 139 376.5 368q20 35 20 69z"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 16 16"><path d="M8 14C4.816 14 2.253 12.284.393 8.981a2 2 0 0 1 0-1.962C2.253 3.716 4.816 2 8 2s5.747 1.716 7.607 5.019a2 2 0 0 1 0 1.962C13.747 12.284 11.184 14 8 14zm0-2c2.41 0 4.338-1.29 5.864-4C12.338 5.29 10.411 4 8 4 5.59 4 3.662 5.29 2.136 8 3.662 10.71 5.589 12 8 12zm0-1a3 3 0 1 1 0-6 3 3 0 0 1 0 6zm1-3a1 1 0 1 0 0-2 1 1 0 0 0 0 2z"/></svg>
<svg width="1792" height="1792" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg"><path d="M555 1335l78-141q-87-63-136-159t-49-203q0-121 61-225-229 117-381 353 167 258 427 375zm389-759q0-20-14-34t-34-14q-125 0-214.5 89.5t-89.5 214.5q0 20 14 34t34 14 34-14 14-34q0-86 61-147t147-61q20 0 34-14t14-34zm363-191q0 7-1 9-105 188-315 566t-316 567l-49 89q-10 16-28 16-12 0-134-70-16-10-16-28 0-12 44-87-143-65-263.5-173t-208.5-245q-20-31-20-69t20-69q153-235 380-371t496-136q89 0 180 17l54-97q10-16 28-16 5 0 18 6t31 15.5 33 18.5 31.5 18.5 19.5 11.5q16 10 16 27zm37 447q0 139-79 253.5t-209 164.5l280-502q8 45 8 84zm448 128q0 35-20 69-39 64-109 145-150 172-347.5 267t-419.5 95l74-132q212-18 392.5-137t301.5-307q-115-179-282-294l63-112q95 64 182.5 153t144.5 184q20 34 20 69z"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 16 16"><path d="M13.618 2.62L1.62 14.619a1 1 0 0 1-.985-1.668l1.525-1.526C1.516 10.742.926 9.927.393 8.981a2 2 0 0 1 0-1.962C2.253 3.716 4.816 2 8 2c1.074 0 2.076.195 3.006.58l.944-.944a1 1 0 0 1 1.668.985zM8.068 11a3 3 0 0 0 2.931-2.932l-2.931 2.931zm-3.02-2.462a3 3 0 0 1 3.49-3.49l.884-.884A6.044 6.044 0 0 0 8 4C5.59 4 3.662 5.29 2.136 8c.445.79.924 1.46 1.439 2.011l1.473-1.473zm.421 5.06l1.658-1.658c.283.04.575.06.873.06 2.41 0 4.338-1.29 5.864-4a11.023 11.023 0 0 0-1.133-1.664l1.418-1.418a12.799 12.799 0 0 1 1.458 2.1 2 2 0 0 1 0 1.963C13.747 12.284 11.184 14 8 14a7.883 7.883 0 0 1-2.53-.402z"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16"><path d="m2 3c.552 0 1-.448 1-1 0-.552-.448-1-1-1-.552 0-1 .448-1 1 0 .552.448 1 1 1m.761.85c.154 2.556 1.987 4.692 4.45 5.255.328-.655 1.01-1.105 1.789-1.105 1.105 0 2 .895 2 2 0 1.105-.895 2-2 2-.89 0-1.645-.582-1.904-1.386-1.916-.376-3.548-1.5-4.596-3.044v4.493c.863.222 1.5 1.01 1.5 1.937 0 1.105-.895 2-2 2-1.105 0-2-.895-2-2 0-.74.402-1.387 1-1.732v-8.535c-.598-.346-1-.992-1-1.732 0-1.105.895-2 2-2 1.105 0 2 .895 2 2 0 .835-.512 1.551-1.239 1.85m6.239 7.15c.552 0 1-.448 1-1 0-.552-.448-1-1-1-.552 0-1 .448-1 1 0 .552.448 1 1 1m-7 4c.552 0 1-.448 1-1 0-.552-.448-1-1-1-.552 0-1 .448-1 1 0 .552.448 1 1 1" transform="translate(3)"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 16 16"><path d="M11 12.268V5a1 1 0 0 0-1-1v1a.5.5 0 0 1-.8.4l-2.667-2a.5.5 0 0 1 0-.8L9.2.6a.5.5 0 0 1 .8.4v1a3 3 0 0 1 3 3v7.268a2 2 0 1 1-2 0zm-6 0a2 2 0 1 1-2 0V4.732a2 2 0 1 1 2 0v7.536zM4 4a1 1 0 1 0 0-2 1 1 0 0 0 0 2zm0 11a1 1 0 1 0 0-2 1 1 0 0 0 0 2zm8 0a1 1 0 1 0 0-2 1 1 0 0 0 0 2z"/></svg>
<svg width="1792" height="1792" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg"><path d="M491 1536l91-91-235-235-91 91v107h128v128h107zm523-928q0-22-22-22-10 0-17 7l-542 542q-7 7-7 17 0 22 22 22 10 0 17-7l542-542q7-7 7-17zm-54-192l416 416-832 832h-416v-416zm683 96q0 53-37 90l-166 166-416-416 166-165q36-38 90-38 53 0 91 38l235 234q37 39 37 91z"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 16 16"><path d="M13.02 1.293l1.414 1.414a1 1 0 0 1 0 1.414L4.119 14.436a1 1 0 0 1-.704.293l-2.407.008L1 12.316a1 1 0 0 1 .293-.71L11.605 1.292a1 1 0 0 1 1.414 0zm-1.416 1.415l-.707.707L12.31 4.83l.707-.707-1.414-1.415zM3.411 13.73l1.123-1.122H3.12v-1.415L2 12.312l.005 1.422 1.406-.005z"/></svg>
<svg width="1792" height="1792" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg"><path d="M666 481q-60 92-137 273-22-45-37-72.5t-40.5-63.5-51-56.5-63-35-81.5-14.5h-224q-14 0-23-9t-9-23v-192q0-14 9-23t23-9h224q250 0 410 225zm1126 799q0 14-9 23l-320 320q-9 9-23 9-13 0-22.5-9.5t-9.5-22.5v-192q-32 0-85 .5t-81 1-73-1-71-5-64-10.5-63-18.5-58-28.5-59-40-55-53.5-56-69.5q59-93 136-273 22 45 37 72.5t40.5 63.5 51 56.5 63 35 81.5 14.5h256v-192q0-14 9-23t23-9q12 0 24 10l319 319q9 9 9 23zm0-896q0 14-9 23l-320 320q-9 9-23 9-13 0-22.5-9.5t-9.5-22.5v-192h-256q-48 0-87 15t-69 45-51 61.5-45 77.5q-32 62-78 171-29 66-49.5 111t-54 105-64 100-74 83-90 68.5-106.5 42-128 16.5h-224q-14 0-23-9t-9-23v-192q0-14 9-23t23-9h224q48 0 87-15t69-45 51-61.5 45-77.5q32-62 78-171 29-66 49.5-111t54-105 64-100 74-83 90-68.5 106.5-42 128-16.5h256v-192q0-14 9-23t23-9q12 0 24 10l319 319q9 9 9 23z"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 16 16"><path d="M1.707 15.707C1.077 16.337 0 15.891 0 15V3a3 3 0 0 1 3-3h10a3 3 0 0 1 3 3v6a3 3 0 0 1-3 3H5.414l-3.707 3.707zM2 12.586l2.293-2.293A1 1 0 0 1 5 10h8a1 1 0 0 0 1-1V3a1 1 0 0 0-1-1H3a1 1 0 0 0-1 1v9.586zM5 7a1 1 0 1 1 0-2 1 1 0 0 1 0 2zm3 0a1 1 0 1 1 0-2 1 1 0 0 1 0 2zm3 0a1 1 0 1 1 0-2 1 1 0 0 1 0 2z"/></svg>
<svg width="14" height="14" viewBox="0 0 14 14" xmlns="http://www.w3.org/2000/svg"><path d="M0 7c0-3.866 3.142-7 7-7 3.866 0 7 3.142 7 7 0 3.866-3.142 7-7 7-3.866 0-7-3.142-7-7z"/><path d="M1 7c0 3.309 2.69 6 6 6 3.309 0 6-2.69 6-6 0-3.309-2.69-6-6-6-3.309 0-6 2.69-6 6z" fill="#FFF"/><rect x="3.36" y="6.16" width="7.28" height="1.68" rx=".84"/></svg>
<svg height="16" viewBox="0 0 16 16" width="16" xmlns="http://www.w3.org/2000/svg"><path d="M7.536 8.657l2.828-2.83c.39-.39 1.024-.39 1.414 0 .39.392.39 1.025 0 1.416l-3.535 3.535c-.196.195-.452.293-.707.293-.256 0-.512-.097-.708-.292l-2.12-2.12c-.39-.392-.39-1.025 0-1.415s1.023-.39 1.413 0zM8 16c-4.418 0-8-3.582-8-8s3.582-8 8-8 8 3.582 8 8-3.582 8-8 8zm0-2c3.314 0 6-2.686 6-6s-2.686-6-6-6-6 2.686-6 6 2.686 6 6 6z"/></svg>
<svg width="1792" height="1792" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg"><path d="M384 448q0-53-37.5-90.5t-90.5-37.5-90.5 37.5-37.5 90.5 37.5 90.5 90.5 37.5 90.5-37.5 37.5-90.5zm1067 576q0 53-37 90l-491 492q-39 37-91 37-53 0-90-37l-715-716q-38-37-64.5-101t-26.5-117v-416q0-52 38-90t90-38h416q53 0 117 26.5t102 64.5l715 714q37 39 37 91zm384 0q0 53-37 90l-491 492q-39 37-91 37-36 0-59-14t-53-45l470-470q37-37 37-90 0-52-37-91l-715-714q-38-38-102-64.5t-117-26.5h224q53 0 117 26.5t102 64.5l715 714q37 39 37 91z"/></svg>
<svg viewBox="0 0 16 16" xmlns="http://www.w3.org/2000/svg"><path d="m7.222.222c1.657 0 3 1.343 3 3v8.327c0 .631-.298 1.226-.805 1.603l-3 2.237c-.709.529-1.682.529-2.391 0l-3-2.237c-.506-.377-.805-.972-.805-1.603v-8.327c0-1.657 1.343-3 3-3h4m-5 3v8.08c0 .158.075.306.201.401l2.5 1.864c.177.132.42.132.598 0l2.5-1.864c.127-.094.201-.243.201-.401v-8.08c0-.552-.448-1-1-1h-4c-.552 0-1 .448-1 1m2.778 7.778c-.552 0-1-.448-1-1s .448-1 1-1 1 .448 1 1-.448 1-1 1" transform="matrix(-.70711 .70711 -.70711 -.70711 17.05 9.767)"/></svg>
<svg width="1792" height="1792" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg"><path d="M1600 1405q0 120-73 189.5t-194 69.5h-874q-121 0-194-69.5t-73-189.5q0-53 3.5-103.5t14-109 26.5-108.5 43-97.5 62-81 85.5-53.5 111.5-20q9 0 42 21.5t74.5 48 108 48 133.5 21.5 133.5-21.5 108-48 74.5-48 42-21.5q61 0 111.5 20t85.5 53.5 62 81 43 97.5 26.5 108.5 14 109 3.5 103.5zm-320-893q0 159-112.5 271.5t-271.5 112.5-271.5-112.5-112.5-271.5 112.5-271.5 271.5-112.5 271.5 112.5 112.5 271.5z"/></svg>
<svg height="16" viewBox="0 0 16 16" width="16" xmlns="http://www.w3.org/2000/svg"><path d="M8 7C6.343 7 5 5.657 5 4s1.343-3 3-3 3 1.343 3 3-1.343 3-3 3zm0 8c-6.888 0-6.976-.78-6.976-2.52S2.144 8 8 8s6.976 2.692 6.976 4.48S14.888 15 8 15z" fill-rule="evenodd"/></svg>
......@@ -4,7 +4,7 @@
%li.filter-dropdown-item{ class: ('js-current-user' if user == current_user) }
%button.btn.btn-link.dropdown-user{ type: :button }
.avatar-container.s40
= user_avatar_without_link(user: user, lazy: avatar[:lazy], url: avatar[:url], size: 40).gsub('/images/{{avatar_url}}','{{avatar_url}}').html_safe
= user_avatar_without_link(user: user, lazy: avatar[:lazy], url: avatar[:url], size: 40, has_tooltip: false).gsub('/images/{{avatar_url}}','{{avatar_url}}').html_safe
.dropdown-user-details
%span
= user.name
......
......@@ -4,18 +4,25 @@ class AuthorizedProjectsWorker
# Schedules multiple jobs and waits for them to be completed.
def self.bulk_perform_and_wait(args_list)
job_ids = bulk_perform_async(args_list)
waiter = Gitlab::JobWaiter.new(args_list.size)
Gitlab::JobWaiter.new(job_ids).wait
# Point all the bulk jobs at the same JobWaiter. Converts [[1], [2], [3]]
# into [[1, "key"], [2, "key"], [3, "key"]]
waiting_args_list = args_list.map { |args| args << waiter.key }
bulk_perform_async(waiting_args_list)
waiter.wait
end
def self.bulk_perform_async(args_list)
Sidekiq::Client.push_bulk('class' => self, 'queue' => sidekiq_options['queue'], 'args' => args_list)
end
def perform(user_id)
def perform(user_id, notify_key = nil)
user = User.find_by(id: user_id)
user&.refresh_authorized_projects
ensure
Gitlab::JobWaiter.notify(notify_key, jid) if notify_key
end
end
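A minimal caller-side sketch of the reworked API (the user IDs are illustrative): the caller passes one argument array per job, and `bulk_perform_and_wait` appends the shared waiter key before scheduling.

```ruby
# Illustrative user IDs; each inner array becomes one Sidekiq job's arguments.
AuthorizedProjectsWorker.bulk_perform_and_wait([[1], [2], [3]])
# Internally the args become [[1, "gitlab:job_waiter:<uuid>"], ...] and the call
# returns once every job has pushed its jid onto that key, or the wait times out.
```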
......@@ -5,7 +5,7 @@ class GeoRepositoryCreateWorker
def perform(id)
project = Project.find(id)
project.ensure_storage_path_exist
project.ensure_storage_path_exists
project.create_repository unless project.repository_exists? || project.import?
end
end
......@@ -18,7 +18,8 @@ class NamespacelessProjectDestroyWorker
rescue ActiveRecord::RecordNotFound
return
end
return unless project.namespace_id.nil? # Reject doing anything for projects that *do* have a namespace
return if project.namespace # Reject doing anything for projects that *do* have a namespace
project.team.truncate
......
---
title: >
Ensure all database queries are routed through the database load balancer when
load balancing is enabled
merge_request: 2707
author:
type: changed
---
title: Only require Sidekiq throttling library when enabled, to reduce cache misses
title: Geo - Count projects where wiki sync failed in node status page
merge_request:
author:
type: fixed
---
title: "Raise Housekeeping timeout to 24 hours"
merge_request: 13719
---
title: Hashed Storage support for Repositories (EXPERIMENTAL)
merge_request: 13246
author:
---
title: Improves subgroup creation permissions
merge_request: 13418
author:
type: fixed
---
title: Fix url for object store artifacts
title: Fix caching of future broadcast messages
merge_request:
author:
type: fixed
---
title: Fix merge request pipeline status when pipeline has errors
merge_request: 13664
author:
type: fixed
---
title: Commit rows would occasionally render with the wrong language
title: Fix Import/Export issue to do with fork merge requests
merge_request:
author:
type: fixed
---
title: Fix display of push events for removed refs
merge_request:
author:
type: fixed
---
title: Testing of some integrations were broken due to missing ServiceHook record.
merge_request:
author:
type: fixed
---
title: Migration to remove pending delete projects with non-existing namespace
merge_request: 13598
author:
type: other
......@@ -10,10 +10,8 @@ end
#
module Gitlab
module StrongParameterScalars
GITLAB_PERMITTED_SCALAR_TYPES = [::UploadedFile].freeze
def permitted_scalar?(value)
super || GITLAB_PERMITTED_SCALAR_TYPES.any? { |type| value.is_a?(type) }
super || value.is_a?(::UploadedFile)
end
end
end
......
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddStorageFieldsToProject < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
add_column :projects, :storage_version, :integer, limit: 2
end
def down
remove_column :projects, :storage_version
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddHashedStorageToSettings < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_column_with_default :application_settings, :hashed_storage_enabled, :boolean, default: false
end
def down
remove_columns :application_settings, :hashed_storage_enabled
end
end
# Follow-up to CleanupNamespacelessPendingDeleteProjects: it cleans up
# all projects with `pending_delete = true` for which the
# namespace no longer exists.
class CleanupNonexistingNamespacePendingDeleteProjects < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
class Project < ActiveRecord::Base
self.table_name = 'projects'
include ::EachBatch
end
class Namespace < ActiveRecord::Base
self.table_name = 'namespaces'
end
def up
find_projects.each_batch do |batch|
args = batch.pluck(:id).map { |id| [id] }
NamespacelessProjectDestroyWorker.bulk_perform_async(args)
end
end
def down
# NOOP
end
private
def find_projects
projects = Project.arel_table
namespaces = Namespace.arel_table
namespace_query = namespaces.project(1)
.where(namespaces[:id].eq(projects[:namespace_id]))
.exists.not
# SELECT "projects"."id"
# FROM "projects"
# WHERE "projects"."pending_delete" = 't'
# AND (NOT (EXISTS
# (SELECT 1
# FROM "namespaces"
# WHERE "namespaces"."id" = "projects"."namespace_id")))
Project.where(projects[:pending_delete].eq(true))
.where(namespace_query)
.select(:id)
end
end
......@@ -151,6 +151,7 @@ ActiveRecord::Schema.define(version: 20170820100558) do
t.boolean "password_authentication_enabled"
t.boolean "allow_group_owners_to_manage_ldap", default: true, null: false
t.boolean "project_export_enabled", default: true, null: false
t.boolean "hashed_storage_enabled", default: false, null: false
end
create_table "approvals", force: :cascade do |t|
......@@ -1489,6 +1490,7 @@ ActiveRecord::Schema.define(version: 20170820100558) do
t.string "ci_config_path"
t.boolean "disable_overriding_approvers_per_merge_request"
t.text "delete_error"
t.integer "storage_version", limit: 2
end
add_index "projects", ["ci_id"], name: "index_projects_on_ci_id", using: :btree
......
......@@ -428,6 +428,13 @@ ingress:
## Installing GitLab using the Helm Chart
> You may see a temporary error message `SchedulerPredicates failed due to PersistentVolumeClaim is not bound` while storage provisions. Once the storage provisions, the pods will automatically restart. This may take a couple of minutes depending on your cloud provider. If the error persists, please review the [prerequisites](#prerequisites) to ensure you have enough RAM, CPU, and storage.
Ensure the GitLab repo has been added and re-initialize Helm:
```bash
helm repo add gitlab https://charts.gitlab.io
helm init
```
Once you [have configured](#configuration) GitLab in your `values.yml` file,
run the following:
......
......@@ -126,14 +126,23 @@ Let's Encrypt limits a single TLD to five certificate requests within a single w
## Installing GitLab using the Helm Chart
> You may see a temporary error message `SchedulerPredicates failed due to PersistentVolumeClaim is not bound` while storage provisions. Once the storage provisions, the pods will automatically restart. This may take a couple of minutes depending on your cloud provider. If the error persists, please review the [prerequisites](#prerequisites) to ensure you have enough RAM, CPU, and storage.
Once you have reviewed the [configuration settings](#configuring-and-installing-gitlab) and [added the Helm repository](index.md#add-the-gitlab-helm-repository), you can install the chart. We recommend saving your configuration options in a `values.yaml` file for easier upgrades in the future.
Ensure the GitLab repo has been added and re-initialize Helm:
```bash
helm repo add gitlab https://charts.gitlab.io
helm init
```
Once you have reviewed the [configuration settings](#configuring-and-installing-gitlab), you can install the chart. We recommend saving your configuration options in a `values.yaml` file for easier upgrades in the future.
For example:
```bash
helm install --name gitlab -f values.yaml gitlab/gitlab-omnibus
```
or passing them on the command line:
```bash
helm install --name gitlab --set baseDomain=gitlab.io,baseIP=1.1.1.1,gitlab=ee,gitlabEELicense=$LICENSE,legoEmail=email@gitlab.com gitlab/gitlab-omnibus
```
......
......@@ -190,6 +190,13 @@ certsSecretName: <SECRET NAME>
## Installing GitLab Runner using the Helm Chart
Ensure the GitLab repo has been added and re-initialize Helm:
```bash
helm repo add gitlab https://charts.gitlab.io
helm init
```
Once you [have configured](#configuration) GitLab Runner in your `values.yml` file,
run the following:
......
......@@ -35,12 +35,14 @@ helm init
## Using the GitLab Helm Charts
GitLab makes available three Helm Charts: an easy to use bundled chart, and a specific chart for GitLab itself and the Runner.
GitLab makes available three Helm Charts.
- [gitlab-omnibus](gitlab_omnibus.md): The easiest way to get started. Includes everything needed to run GitLab, including: a Runner, Container Registry, automatic SSL, and an Ingress.
- [gitlab-omnibus](gitlab_omnibus.md): **Recommended** and the easiest way to get started. Includes everything needed to run GitLab, including: a [Runner](https://docs.gitlab.com/runner/), [Container Registry](https://docs.gitlab.com/ee/user/project/container_registry.html#gitlab-container-registry), [automatic SSL](https://github.com/kubernetes/charts/tree/master/stable/kube-lego), and an [Ingress](https://github.com/kubernetes/ingress/tree/master/controllers/nginx).
- [gitlab](gitlab_chart.md): Just the GitLab service, with optional Postgres and Redis.
- [gitlab-runner](gitlab_runner_chart.md): GitLab Runner, to process CI jobs.
We are also working on a new set of [cloud native Charts](https://gitlab.com/charts/helm.gitlab.io) which will eventually replace these.
[chart]: https://github.com/kubernetes/charts
[helm-quick]: https://github.com/kubernetes/helm/blob/master/docs/quickstart.md
[helm]: https://github.com/kubernetes/helm/blob/master/README.md
......@@ -195,6 +195,14 @@ all your changes will be available to preview by anyone with the Review Apps lin
[Read more about Review Apps.](../../../ci/review_apps/index.md)
## Merge request diff file navigation
The diff view has a persistent dropdown for file navigation. As you scroll through
diffs with a large number of files and/or many changes in those files, you can
easily jump to any changed file through the dropdown navigation.
![Merge request diff file navigation](img/merge_request_diff_file_navigation.png)
## Ignore whitespace changes in Merge Request diff view
If you click the **Hide whitespace changes** button, you can see the diff
......
......@@ -62,6 +62,12 @@ You can view recent searches by clicking on the little arrow-clock icon, which i
Individual filters can be removed by clicking on the filter's (x) button or backspacing. The entire search filter can be cleared by clicking on the search box's (x) button.
## Filtering with multiple filters of the same type
Some filters can be added multiple times. These include but are not limited to assignees and labels. When you filter with multiple filters of the same type, AND logic is applied. For example, if you were filtering `assignee:@sam assignee:@sarah`, your results will only include entries that are assigned to both Sam and Sarah.
![multiple assignees filtering](img/multiple_assignees.png)
### Shortcut
You'll also find a shortcut on the search field on the top-right of the project's dashboard to
......
......@@ -75,7 +75,7 @@ module Backup
path_to_project_repo = path_to_repo(project)
path_to_project_bundle = path_to_bundle(project)
project.ensure_storage_path_exist
project.ensure_storage_path_exists
cmd = if File.exist?(path_to_project_bundle)
%W(#{Gitlab.config.git.bin_path} clone --bare #{path_to_project_bundle} #{path_to_project_repo})
......
......@@ -102,6 +102,7 @@ excluded_attributes:
- :mirror_last_successful_update_at
- :mirror_user_id
- :mirror_trigger_builds
- :storage_version
snippets:
- :expired_at
merge_request_diff:
......@@ -137,5 +138,7 @@ methods:
- :utf8_diff
merge_requests:
- :diff_head_sha
- :source_branch_sha
- :target_branch_sha
project:
- :description_html
......@@ -30,7 +30,7 @@ module Gitlab
end
def branch_exists?(branch_name)
@project.repository.branch_exists?(branch_name)
@project.repository.raw.branch_exists?(branch_name)
end
def fork_merge_request?
......
module Gitlab
# JobWaiter can be used to wait for a number of Sidekiq jobs to complete.
#
# Its use requires the cooperation of the sidekiq jobs themselves. Set up the
# waiter, then start the jobs, passing them its `key`. Their `perform` methods
# should look like:
#
# def perform(args, notify_key)
# # do work
# ensure
# ::Gitlab::JobWaiter.notify(notify_key, jid)
# end
#
# The JobWaiter blocks popping items from a Redis array. All the sidekiq jobs
# push to that array when done. Once the waiter has popped `count` items, it
# knows all the jobs are done.
class JobWaiter
# The sleep interval between checking keys, in seconds.
INTERVAL = 0.1
def self.notify(key, jid)
Gitlab::Redis::SharedState.with { |redis| redis.lpush(key, jid) }
end
attr_reader :key, :jobs_remaining, :finished
# jobs - The job IDs to wait for.
def initialize(jobs)
@jobs = jobs
# jobs_remaining - the number of jobs left to wait for
def initialize(jobs_remaining)
@key = "gitlab:job_waiter:#{SecureRandom.uuid}"
@jobs_remaining = jobs_remaining
@finished = []
end
# Waits for all the jobs to be completed.
......@@ -15,13 +34,33 @@ module Gitlab
# ensures we don't indefinitely block a caller in case a job takes
# long to process, or is never processed.
def wait(timeout = 10)
start = Time.current
deadline = Time.now.utc + timeout
Gitlab::Redis::SharedState.with do |redis|
# Fallback key expiry: allow a long grace period to reduce the chance of
# a job pushing to an expired key and recreating it
redis.expire(key, [timeout * 2, 10.minutes.to_i].max)
while jobs_remaining > 0
# Redis will not take fractional seconds. Prefer waiting too long over
# not waiting long enough
seconds_left = (deadline - Time.now.utc).ceil
while (Time.current - start) <= timeout
break if SidekiqStatus.all_completed?(@jobs)
# Redis interprets 0 as "wait forever", so skip the final `blpop` call
break if seconds_left <= 0
sleep(INTERVAL) # to not overload Redis too much.
list, jid = redis.blpop(key, timeout: seconds_left)
break unless list && jid # timed out
@finished << jid
@jobs_remaining -= 1
end
# All jobs have finished, so expire the key immediately
redis.expire(key, 0) if jobs_remaining == 0
end
finished
end
end
end
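# A minimal caller-side sketch of the handshake described in the JobWaiter
# class comment above. `SomeWorker` and `args_list` are illustrative names and
# not part of this change; the worker is assumed to implement the
# `perform(args, notify_key)` signature shown in that comment.
waiter = Gitlab::JobWaiter.new(args_list.size)
args_list.each { |args| SomeWorker.perform_async(args, waiter.key) }
finished_jids = waiter.wait(30) # block for at most ~30 seconds
# Jobs that did not notify within the timeout are simply absent from the result.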
......@@ -14,13 +14,9 @@ module Gitlab
def self.read_latest
path = Rails.root.join("log", file_name)
self.build unless File.exist?(path)
tail_output, _ = Gitlab::Popen.popen(%W(tail -n 2000 #{path}))
tail_output.split("\n")
end
def self.read_latest_for(filename)
path = Rails.root.join("log", filename)
return [] unless File.readable?(path)
tail_output, _ = Gitlab::Popen.popen(%W(tail -n 2000 #{path}))
tail_output.split("\n")
end
......
......@@ -54,7 +54,8 @@ module Gitlab
end
def kubernetes_namespace_regex_message
"can contain only letters, digits or '-', and cannot start or end with '-'"
"can contain only lowercase letters, digits, and '-'. " \
"Must start with a letter, and cannot end with '-'"
end
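# Illustration only: a hypothetical helper whose pattern matches the message
# above — not necessarily the exact regex this module ships. It requires a
# leading lowercase letter, allows lowercase letters, digits and '-', and
# rejects a trailing '-'.
def kubernetes_namespace_example_regex
  /\A[a-z]([a-z0-9-]*[a-z0-9])?\z/
end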
def environment_slug_regex
......
......@@ -11,6 +11,12 @@ namespace :gitlab do
#
desc "GitLab | Import bare repositories from repositories -> storages into GitLab project instance"
task repos: :environment do
if Project.current_application_settings.hashed_storage_enabled
puts 'Cannot import repositories when Hashed Storage is enabled'.color(:red)
exit 1
end
Gitlab.config.repositories.storages.each_value do |repository_storage|
git_base_path = repository_storage['path']
repos_to_import = Dir.glob(git_base_path + '/**/*.git')
......
......@@ -10,9 +10,6 @@ describe Projects::ServicesController do
before do
sign_in(user)
project.team << [user, :master]
controller.instance_variable_set(:@project, project)
controller.instance_variable_set(:@service, service)
end
describe '#test' do
......@@ -20,7 +17,7 @@ describe Projects::ServicesController do
it 'renders 404' do
allow_any_instance_of(Service).to receive(:can_test?).and_return(false)
put :test, namespace_id: project.namespace.id, project_id: project.id, id: service.id
put :test, namespace_id: project.namespace, project_id: project, id: service.to_param
expect(response).to have_http_status(404)
end
......@@ -36,7 +33,7 @@ describe Projects::ServicesController do
it 'returns success' do
allow_any_instance_of(MicrosoftTeams::Notifier).to receive(:ping).and_return(true)
put :test, namespace_id: project.namespace.id, project_id: project.id, id: service.id
put :test, namespace_id: project.namespace, project_id: project, id: service.to_param
expect(response.status).to eq(200)
end
......@@ -45,7 +42,7 @@ describe Projects::ServicesController do
it 'returns success' do
expect(HipChat::Client).to receive(:new).with('hipchat_token_p', anything).and_return(hipchat_client)
put :test, namespace_id: project.namespace.id, project_id: project.id, id: service.id, service: service_params
put :test, namespace_id: project.namespace, project_id: project, id: service.to_param, service: service_params
expect(response.status).to eq(200)
end
......@@ -54,17 +51,42 @@ describe Projects::ServicesController do
it 'returns success' do
expect(HipChat::Client).to receive(:new).with('hipchat_token_p', anything).and_return(hipchat_client)
put :test, namespace_id: project.namespace.id, project_id: project.id, id: service.id, service: service_params
put :test, namespace_id: project.namespace, project_id: project, id: service.to_param, service: service_params
expect(response.status).to eq(200)
end
context 'when service is configured for the first time' do
before do
allow_any_instance_of(ServiceHook).to receive(:execute).and_return(true)
end
it 'persist the object' do
do_put
expect(BuildkiteService.first).to be_present
end
it 'creates the ServiceHook object' do
do_put
expect(BuildkiteService.first.service_hook).to be_present
end
def do_put
put :test, namespace_id: project.namespace,
project_id: project,
id: 'buildkite',
service: { 'active' => '1', 'push_events' => '1', token: 'token', 'project_url' => 'http://test.com' }
end
end
end
context 'failure' do
it 'returns success status code and the error message' do
expect(HipChat::Client).to receive(:new).with('hipchat_token_p', anything).and_raise('Bad test')
put :test, namespace_id: project.namespace.id, project_id: project.id, id: service.id, service: service_params
put :test, namespace_id: project.namespace, project_id: project, id: service.to_param, service: service_params
expect(response.status).to eq(200)
expect(JSON.parse(response.body))
......@@ -77,7 +99,7 @@ describe Projects::ServicesController do
context 'when param `active` is set to true' do
it 'activates the service and redirects to integrations paths' do
put :update,
namespace_id: project.namespace.id, project_id: project.id, id: service.id, service: { active: true }
namespace_id: project.namespace, project_id: project, id: service.to_param, service: { active: true }
expect(response).to redirect_to(project_settings_integrations_path(project))
expect(flash[:notice]).to eq 'HipChat activated.'
......@@ -87,7 +109,7 @@ describe Projects::ServicesController do
context 'when param `active` is set to false' do
it 'does not activate the service but saves the settings' do
put :update,
namespace_id: project.namespace.id, project_id: project.id, id: service.id, service: { active: false }
namespace_id: project.namespace, project_id: project, id: service.to_param, service: { active: false }
expect(flash[:notice]).to eq 'HipChat settings saved, but not activated.'
end
......
......@@ -40,5 +40,23 @@ FactoryGirl.define do
resync_repository true
resync_wiki true
end
trait :repository_sync_failed do
last_repository_synced_at { 5.days.ago }
last_repository_successful_sync_at nil
last_wiki_synced_at { 5.days.ago }
last_wiki_successful_sync_at { 5.days.ago }
resync_repository true
resync_wiki false
end
trait :wiki_sync_failed do
last_repository_synced_at { 5.days.ago }
last_repository_successful_sync_at { 5.days.ago }
last_wiki_synced_at { 5.days.ago }
last_wiki_successful_sync_at nil
resync_repository false
resync_wiki true
end
end
end
......@@ -95,6 +95,10 @@ FactoryGirl.define do
archived true
end
trait :hashed do
storage_version Project::LATEST_STORAGE_VERSION
end
trait :access_requestable do
request_access_enabled true
end
......
......@@ -74,7 +74,7 @@ feature 'Top Plus Menu', :js do
expect(page).to have_content('Title')
end
scenario 'Click on New subgroup shows new group page' do
scenario 'Click on New subgroup shows new group page', :nested_groups do
visit group_path(group)
click_topmenuitem("New subgroup")
......
......@@ -104,18 +104,15 @@ feature 'Group' do
end
context 'as group owner' do
let(:user) { create(:user) }
it 'creates a nested group' do
user = create(:user)
before do
group.add_owner(user)
sign_out(:user)
sign_in(user)
visit subgroups_group_path(group)
click_link 'New Subgroup'
end
it 'creates a nested group' do
fill_in 'Group path', with: 'bar'
click_button 'Create group'
......@@ -123,6 +120,16 @@ feature 'Group' do
expect(page).to have_content("Group 'bar' was successfully created.")
end
end
context 'when nested group feature is disabled' do
it 'renders 404' do
allow(Group).to receive(:supports_nested_groups?).and_return(false)
visit subgroups_group_path(group)
expect(page.status_code).to eq(404)
end
end
end
it 'checks permissions to avoid exposing groups by parent_id' do
......
......@@ -3,11 +3,13 @@ require 'spec_helper'
feature 'Import/Export - project import integration test', js: true do
include Select2Helper
let(:user) { create(:user) }
let(:file) { File.join(Rails.root, 'spec', 'features', 'projects', 'import_export', 'test_project_export.tar.gz') }
let(:export_path) { "#{Dir.tmpdir}/import_file_spec" }
background do
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
gitlab_sign_in(user)
end
after do
......@@ -18,57 +20,67 @@ feature 'Import/Export - project import integration test', js: true do
let(:user) { create(:admin) }
let!(:namespace) { create(:namespace, name: "asd", owner: user) }
before do
gitlab_sign_in(user)
end
context 'prefilled the path' do
scenario 'user imports an exported project successfully' do
visit new_project_path
scenario 'user imports an exported project successfully' do
visit new_project_path
select2(namespace.id, from: '#project_namespace_id')
fill_in :project_path, with: 'test-project-path', visible: true
click_link 'GitLab export'
select2(namespace.id, from: '#project_namespace_id')
fill_in :project_path, with: 'test-project-path', visible: true
click_link 'GitLab export'
expect(page).to have_content('Import an exported GitLab project')
expect(URI.parse(current_url).query).to eq("namespace_id=#{namespace.id}&path=test-project-path")
expect(Gitlab::ImportExport).to receive(:import_upload_path).with(filename: /\A\h{32}_test-project-path\z/).and_call_original
expect(page).to have_content('Import an exported GitLab project')
expect(URI.parse(current_url).query).to eq("namespace_id=#{namespace.id}&path=test-project-path")
expect(Gitlab::ImportExport).to receive(:import_upload_path).with(filename: /\A\h{32}_test-project-path\z/).and_call_original
attach_file('file', file)
attach_file('file', file)
expect { click_on 'Import project' }.to change { Project.count }.by(1)
expect { click_on 'Import project' }.to change { Project.count }.from(0).to(1)
project = Project.last
expect(project).not_to be_nil
expect(project.issues).not_to be_empty
expect(project.merge_requests).not_to be_empty
expect(project_hook_exists?(project)).to be true
expect(wiki_exists?(project)).to be true
expect(project.import_status).to eq('finished')
project = Project.last
expect(project).not_to be_nil
expect(project.issues).not_to be_empty
expect(project.merge_requests).not_to be_empty
expect(project_hook_exists?(project)).to be true
expect(wiki_exists?(project)).to be true
expect(project.import_status).to eq('finished')
end
end
scenario 'invalid project' do
project = create(:project, namespace: namespace)
context 'path is not prefilled' do
scenario 'user imports an exported project successfully' do
visit new_project_path
click_link 'GitLab export'
visit new_project_path
fill_in :path, with: 'test-project-path', visible: true
attach_file('file', file)
select2(namespace.id, from: '#project_namespace_id')
fill_in :project_path, with: project.name, visible: true
click_link 'GitLab export'
attach_file('file', file)
click_on 'Import project'
expect { click_on 'Import project' }.to change { Project.count }.by(1)
page.within('.flash-container') do
expect(page).to have_content('Project could not be imported')
project = Project.last
expect(project).not_to be_nil
expect(page).to have_content("Project 'test-project-path' is being imported")
end
end
end
context 'when limited to the default user namespace' do
let(:user) { create(:user) }
before do
gitlab_sign_in(user)
scenario 'invalid project' do
namespace = create(:namespace, name: "asd", owner: user)
project = create(:project, namespace: namespace)
visit new_project_path
select2(namespace.id, from: '#project_namespace_id')
fill_in :project_path, with: project.name, visible: true
click_link 'GitLab export'
attach_file('file', file)
click_on 'Import project'
page.within('.flash-container') do
expect(page).to have_content('Project could not be imported')
end
end
context 'when limited to the default user namespace' do
scenario 'passes correct namespace ID in the URL' do
visit new_project_path
......
......@@ -28,7 +28,7 @@ describe AvatarsHelper do
it 'displays user avatar' do
is_expected.to eq image_tag(
LazyImageTagHelper.placeholder_image,
class: 'avatar has-tooltip s16 lazy',
class: 'avatar s16 has-tooltip lazy',
alt: "#{user.name}'s avatar",
title: user.name,
data: { container: 'body', src: avatar_icon(user, 16) }
......@@ -41,7 +41,7 @@ describe AvatarsHelper do
it 'uses provided css_class' do
is_expected.to eq image_tag(
LazyImageTagHelper.placeholder_image,
class: "avatar has-tooltip s16 #{options[:css_class]} lazy",
class: "avatar s16 #{options[:css_class]} has-tooltip lazy",
alt: "#{user.name}'s avatar",
title: user.name,
data: { container: 'body', src: avatar_icon(user, 16) }
......@@ -55,7 +55,7 @@ describe AvatarsHelper do
it 'uses provided size' do
is_expected.to eq image_tag(
LazyImageTagHelper.placeholder_image,
class: "avatar has-tooltip s#{options[:size]} lazy",
class: "avatar s#{options[:size]} has-tooltip lazy",
alt: "#{user.name}'s avatar",
title: user.name,
data: { container: 'body', src: avatar_icon(user, options[:size]) }
......@@ -69,7 +69,7 @@ describe AvatarsHelper do
it 'uses provided url' do
is_expected.to eq image_tag(
LazyImageTagHelper.placeholder_image,
class: 'avatar has-tooltip s16 lazy',
class: 'avatar s16 has-tooltip lazy',
alt: "#{user.name}'s avatar",
title: user.name,
data: { container: 'body', src: options[:url] }
......@@ -77,6 +77,36 @@ describe AvatarsHelper do
end
end
context 'with has_tooltip parameter' do
context 'with has_tooltip set to true' do
let(:options) { { user: user, has_tooltip: true } }
it 'adds has-tooltip' do
is_expected.to eq image_tag(
LazyImageTagHelper.placeholder_image,
class: 'avatar s16 has-tooltip lazy',
alt: "#{user.name}'s avatar",
title: user.name,
data: { container: 'body', src: avatar_icon(user, 16) }
)
end
end
context 'with has_tooltip set to false' do
let(:options) { { user: user, has_tooltip: false } }
it 'does not add has-tooltip or data container' do
is_expected.to eq image_tag(
LazyImageTagHelper.placeholder_image,
class: 'avatar s16 lazy',
alt: "#{user.name}'s avatar",
title: user.name,
data: { src: avatar_icon(user, 16) }
)
end
end
end
context 'with user_name parameter' do
let(:options) { { user_name: 'Tinky Winky', user_email: 'no@f.un' } }
......@@ -86,7 +116,7 @@ describe AvatarsHelper do
it 'prefers user parameter' do
is_expected.to eq image_tag(
LazyImageTagHelper.placeholder_image,
class: 'avatar has-tooltip s16 lazy',
class: 'avatar s16 has-tooltip lazy',
alt: "#{user.name}'s avatar",
title: user.name,
data: { container: 'body', src: avatar_icon(user, 16) }
......@@ -97,7 +127,7 @@ describe AvatarsHelper do
it 'uses user_name and user_email parameter if user is not present' do
is_expected.to eq image_tag(
LazyImageTagHelper.placeholder_image,
class: 'avatar has-tooltip s16 lazy',
class: 'avatar s16 has-tooltip lazy',
alt: "#{options[:user_name]}'s avatar",
title: options[:user_name],
data: { container: 'body', src: avatar_icon(options[:user_email], 16) }
......
......@@ -106,5 +106,9 @@ describe EventsHelper do
it "handles empty strings" do
expect(helper.event_commit_title("")).to eq("")
end
it 'handles nil values' do
expect(helper.event_commit_title(nil)).to eq('')
end
end
end
/* global __webpack_public_path__ */
import monacoContext from 'monaco-editor/dev/vs/loader';
import monacoLoader from '~/repo/monaco_loader';
describe('MonacoLoader', () => {
it('calls require.config and exports require', () => {
spyOn(monacoContext.require, 'config');
const monacoLoader = require('~/repo/monaco_loader'); // eslint-disable-line global-require
expect(monacoContext.require.config).toHaveBeenCalledWith({
expect(monacoContext.require.getConfig()).toEqual(jasmine.objectContaining({
paths: {
vs: `${__webpack_public_path__}monaco-editor/vs`, // eslint-disable-line camelcase
},
});
expect(monacoLoader.default).toBe(monacoContext.require);
}));
expect(monacoLoader).toBe(monacoContext.require);
});
});
require 'spec_helper'
describe Gitlab::Git::Storage::ForkedStorageCheck, skip_database_cleaner: true do
describe Gitlab::Git::Storage::ForkedStorageCheck, broken_storage: true, skip_database_cleaner: true do
let(:existing_path) do
existing_path = TestEnv.repos_path
FileUtils.mkdir_p(existing_path)
......
......@@ -2522,7 +2522,7 @@
"id": 27,
"target_branch": "feature",
"source_branch": "feature_conflict",
"source_project_id": 5,
"source_project_id": 999,
"author_id": 1,
"assignee_id": null,
"title": "MR1",
......@@ -2536,6 +2536,9 @@
"position": 0,
"updated_by_id": null,
"merge_error": null,
"diff_head_sha": "HEAD",
"source_branch_sha": "ABCD",
"target_branch_sha": "DCBA",
"merge_params": {
"force_remove_source_branch": null
},
......
......@@ -10,6 +10,13 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
@shared = Gitlab::ImportExport::Shared.new(relative_path: "", project_path: 'path')
allow(@shared).to receive(:export_path).and_return('spec/lib/gitlab/import_export/')
@project = create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project')
allow(@project.repository).to receive(:fetch_ref).and_return(true)
allow(@project.repository.raw).to receive(:rugged_branch_exists?).and_return(false)
expect_any_instance_of(Gitlab::Git::Repository).to receive(:create_branch).with('feature', 'DCBA')
allow_any_instance_of(Gitlab::Git::Repository).to receive(:create_branch)
project_tree_restorer = described_class.new(user: @user, shared: @shared, project: @project)
@restored_project_json = project_tree_restorer.restore
end
......
......@@ -11,6 +11,8 @@ describe Gitlab::ImportExport::ProjectTreeSaver do
before do
project.team << [user, :master]
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
allow_any_instance_of(MergeRequest).to receive(:source_branch_sha).and_return('ABCD')
allow_any_instance_of(MergeRequest).to receive(:target_branch_sha).and_return('DCBA')
end
after do
......@@ -43,6 +45,14 @@ describe Gitlab::ImportExport::ProjectTreeSaver do
expect(saved_project_json['merge_requests'].first['milestone']).not_to be_empty
end
it 'has merge request\'s source branch SHA' do
expect(saved_project_json['merge_requests'].first['source_branch_sha']).to eq('ABCD')
end
it 'has merge request\'s target branch SHA' do
expect(saved_project_json['merge_requests'].first['target_branch_sha']).to eq('DCBA')
end
it 'has events' do
expect(saved_project_json['merge_requests'].first['milestone']['events']).not_to be_empty
end
......
require 'spec_helper'
describe Gitlab::JobWaiter do
describe '#wait' do
let(:waiter) { described_class.new(%w(a)) }
it 'returns when all jobs have been completed' do
expect(Gitlab::SidekiqStatus).to receive(:all_completed?).with(%w(a))
.and_return(true)
describe '.notify' do
it 'pushes the jid to the named queue' do
key = 'gitlab:job_waiter:foo'
jid = 1
expect(waiter).not_to receive(:sleep)
redis = double('redis')
expect(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis)
expect(redis).to receive(:lpush).with(key, jid)
waiter.wait
described_class.notify(key, jid)
end
end
describe '#wait' do
let(:waiter) { described_class.new(2) }
it 'sleeps between checking the job statuses' do
expect(Gitlab::SidekiqStatus).to receive(:all_completed?)
.with(%w(a))
.and_return(false, true)
it 'returns when all jobs have been completed' do
described_class.notify(waiter.key, 'a')
described_class.notify(waiter.key, 'b')
expect(waiter).to receive(:sleep).with(described_class::INTERVAL)
result = nil
expect { Timeout.timeout(1) { result = waiter.wait(2) } }.not_to raise_error
waiter.wait
expect(result).to contain_exactly('a', 'b')
end
it 'returns when timing out' do
expect(waiter).not_to receive(:sleep)
waiter.wait(0)
it 'times out if not all jobs complete' do
described_class.notify(waiter.key, 'a')
result = nil
expect { Timeout.timeout(2) { result = waiter.wait(1) } }.not_to raise_error
expect(result).to contain_exactly('a')
end
end
end
......@@ -4,7 +4,7 @@ require Rails.root.join('db', 'post_migrate', '20170502101023_cleanup_namespacel
describe CleanupNamespacelessPendingDeleteProjects do
before do
# Stub after_save callbacks that will fail when Project has no namespace
allow_any_instance_of(Project).to receive(:ensure_storage_path_exist).and_return(nil)
allow_any_instance_of(Project).to receive(:ensure_storage_path_exists).and_return(nil)
allow_any_instance_of(Project).to receive(:update_project_statistics).and_return(nil)
end
......
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170816102555_cleanup_nonexisting_namespace_pending_delete_projects.rb')
describe CleanupNonexistingNamespacePendingDeleteProjects do
before do
# Stub after_save callbacks that will fail when Project has invalid namespace
allow_any_instance_of(Project).to receive(:ensure_storage_path_exist).and_return(nil)
allow_any_instance_of(Project).to receive(:update_project_statistics).and_return(nil)
end
describe '#up' do
set(:some_project) { create(:project) }
it 'only cleans up when namespace does not exist' do
create(:project, pending_delete: true)
project = build(:project, pending_delete: true, namespace: nil, namespace_id: Namespace.maximum(:id).to_i.succ)
project.save(validate: false)
expect(NamespacelessProjectDestroyWorker).to receive(:bulk_perform_async).with([[project.id]])
described_class.new.up
end
it 'does nothing when no pending delete projects without namespace found' do
create(:project, pending_delete: true, namespace: create(:namespace))
expect(NamespacelessProjectDestroyWorker).not_to receive(:bulk_perform_async)
described_class.new.up
end
end
end
......@@ -53,6 +53,29 @@ describe BroadcastMessage do
2.times { described_class.current }
end
it 'includes messages that need to be displayed in the future' do
create(:broadcast_message)
future = create(
:broadcast_message,
starts_at: Time.now + 10.minutes,
ends_at: Time.now + 20.minutes
)
expect(described_class.current.length).to eq(1)
Timecop.travel(future.starts_at) do
expect(described_class.current.length).to eq(2)
end
end
it 'does not clear the cache if only a future message should be displayed' do
create(:broadcast_message, :future)
expect(Rails.cache).not_to receive(:delete)
expect(described_class.current.length).to eq(0)
end
end
describe '#active?' do
......
......@@ -304,6 +304,50 @@ describe Event do
end
end
describe '#body?' do
let(:push_event) do
event = build(:push_event)
allow(event).to receive(:push?).and_return(true)
event
end
it 'returns true for a push event with commits' do
allow(push_event).to receive(:push_with_commits?).and_return(true)
expect(push_event).to be_body
end
it 'returns false for a push event without a valid commit range' do
allow(push_event).to receive(:push_with_commits?).and_return(false)
expect(push_event).not_to be_body
end
it 'returns true for a Note event' do
event = build(:event)
allow(event).to receive(:note?).and_return(true)
expect(event).to be_body
end
it 'returns true if the target responds to #title' do
event = build(:event)
allow(event).to receive(:target).and_return(double(:target, title: 'foo'))
expect(event).to be_body
end
it 'returns false for a regular event without a target' do
event = build(:event)
expect(event).not_to be_body
end
end
def create_push_event(project, user)
event = create(:push_event, project: project, author: user)
......
......@@ -9,26 +9,26 @@ describe Geo::ProjectRegistry do
it { is_expected.to validate_presence_of(:project) }
end
describe '.synced' do
let(:project) { create(:project) }
let(:synced_at) { Time.now }
it 'does not return dirty projects' do
describe '.failed' do
it 'returns projects where last attempt to sync failed' do
project = create(:project)
create(:geo_project_registry, :synced, project: project)
create(:geo_project_registry, :synced, :dirty, project: project)
repository_sync_failed = create(:geo_project_registry, :repository_sync_failed, project: project)
wiki_sync_failed = create(:geo_project_registry, :wiki_sync_failed, project: project)
expect(described_class.synced).to be_empty
end
it 'does not return projects where last attempt to sync failed' do
create(:geo_project_registry, :sync_failed, project: project)
expect(described_class.synced).to be_empty
expect(described_class.failed).to match_array([repository_sync_failed, wiki_sync_failed])
end
end
describe '.synced' do
it 'returns synced projects' do
registry = create(:geo_project_registry, :synced, project: project)
project = create(:project)
create(:geo_project_registry, :synced, :dirty, project: project)
create(:geo_project_registry, :sync_failed, project: project)
synced_project = create(:geo_project_registry, :synced, project: project)
expect(described_class.synced).to match_array([registry])
expect(described_class.synced).to match_array([synced_project])
end
end
......
......@@ -38,7 +38,8 @@ describe KubernetesService, :use_clean_rails_memory_store_caching do
'a' * 63 => true,
'a' * 64 => false,
'a.b' => false,
'a*b' => false
'a*b' => false,
'FOO' => true
}.each do |namespace, validity|
it "validates #{namespace} as #{validity ? 'valid' : 'invalid'}" do
subject.namespace = namespace
......
......@@ -1432,60 +1432,6 @@ describe Project do
end
end
describe '#rename_repo' do
let(:project) { create(:project, :repository) }
let(:gitlab_shell) { Gitlab::Shell.new }
before do
# Project#gitlab_shell returns a new instance of Gitlab::Shell on every
# call. This makes testing a bit easier.
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
allow(project).to receive(:previous_changes).and_return('path' => ['foo'])
end
it 'renames a repository' do
stub_container_registry_config(enabled: false)
expect(gitlab_shell).to receive(:mv_repository)
.ordered
.with(project.repository_storage_path, "#{project.namespace.full_path}/foo", "#{project.full_path}")
.and_return(true)
expect(gitlab_shell).to receive(:mv_repository)
.ordered
.with(project.repository_storage_path, "#{project.namespace.full_path}/foo.wiki", "#{project.full_path}.wiki")
.and_return(true)
expect_any_instance_of(SystemHooksService)
.to receive(:execute_hooks_for)
.with(project, :rename)
expect_any_instance_of(Gitlab::UploadsTransfer)
.to receive(:rename_project)
.with('foo', project.path, project.namespace.full_path)
expect(project).to receive(:expire_caches_before_rename)
expect(project).to receive(:expires_full_path_cache)
project.rename_repo
end
context 'container registry with images' do
let(:container_repository) { create(:container_repository) }
before do
stub_container_registry_config(enabled: true)
stub_container_registry_tags(repository: :any, tags: ['tag'])
project.container_repositories << container_repository
end
subject { project.rename_repo }
it { expect {subject}.to raise_error(StandardError) }
end
end
describe '#expire_caches_before_rename' do
let(:project) { create(:project, :repository) }
let(:repo) { double(:repo, exists?: true) }
......@@ -2846,4 +2792,181 @@ describe Project do
expect(project.forks_count).to eq(1)
end
end
context 'legacy storage' do
let(:project) { create(:project, :repository) }
let(:gitlab_shell) { Gitlab::Shell.new }
before do
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
end
describe '#base_dir' do
it 'returns base_dir based on namespace only' do
expect(project.base_dir).to eq(project.namespace.full_path)
end
end
describe '#disk_path' do
it 'returns disk_path based on namespace and project path' do
expect(project.disk_path).to eq("#{project.namespace.full_path}/#{project.path}")
end
end
describe '#ensure_storage_path_exists' do
it 'delegates to gitlab_shell to ensure namespace is created' do
expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage_path, project.base_dir)
project.ensure_storage_path_exists
end
end
describe '#legacy_storage?' do
it 'returns true when storage_version is nil' do
project = build(:project)
expect(project.legacy_storage?).to be_truthy
end
end
describe '#rename_repo' do
before do
# Project#gitlab_shell returns a new instance of Gitlab::Shell on every
# call. This makes testing a bit easier.
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
allow(project).to receive(:previous_changes).and_return('path' => ['foo'])
end
it 'renames a repository' do
stub_container_registry_config(enabled: false)
expect(gitlab_shell).to receive(:mv_repository)
.ordered
.with(project.repository_storage_path, "#{project.namespace.full_path}/foo", "#{project.full_path}")
.and_return(true)
expect(gitlab_shell).to receive(:mv_repository)
.ordered
.with(project.repository_storage_path, "#{project.namespace.full_path}/foo.wiki", "#{project.full_path}.wiki")
.and_return(true)
expect_any_instance_of(SystemHooksService)
.to receive(:execute_hooks_for)
.with(project, :rename)
expect_any_instance_of(Gitlab::UploadsTransfer)
.to receive(:rename_project)
.with('foo', project.path, project.namespace.full_path)
expect(project).to receive(:expire_caches_before_rename)
expect(project).to receive(:expires_full_path_cache)
project.rename_repo
end
context 'container registry with images' do
let(:container_repository) { create(:container_repository) }
before do
stub_container_registry_config(enabled: true)
stub_container_registry_tags(repository: :any, tags: ['tag'])
project.container_repositories << container_repository
end
subject { project.rename_repo }
it { expect { subject }.to raise_error(StandardError) }
end
end
describe '#pages_path' do
it 'returns a path where pages are stored' do
expect(project.pages_path).to eq(File.join(Settings.pages.path, project.namespace.full_path, project.path))
end
end
end
context 'hashed storage' do
let(:project) { create(:project, :repository) }
let(:gitlab_shell) { Gitlab::Shell.new }
let(:hash) { '6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b' }
before do
stub_application_setting(hashed_storage_enabled: true)
allow(Digest::SHA2).to receive(:hexdigest) { hash }
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
end
describe '#base_dir' do
it 'returns base_dir based on hash of project id' do
expect(project.base_dir).to eq('@hashed/6b/86')
end
end
describe '#disk_path' do
it 'returns disk_path based on hash of project id' do
hashed_path = '@hashed/6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b'
expect(project.disk_path).to eq(hashed_path)
end
end
describe '#ensure_storage_path_exists' do
it 'delegates to gitlab_shell to ensure namespace is created' do
expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage_path, '@hashed/6b/86')
project.ensure_storage_path_exists
end
end
describe '#rename_repo' do
before do
# Project#gitlab_shell returns a new instance of Gitlab::Shell on every
# call. This makes testing a bit easier.
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
allow(project).to receive(:previous_changes).and_return('path' => ['foo'])
end
it 'renames a repository' do
stub_container_registry_config(enabled: false)
expect(gitlab_shell).not_to receive(:mv_repository)
expect_any_instance_of(SystemHooksService)
.to receive(:execute_hooks_for)
.with(project, :rename)
expect_any_instance_of(Gitlab::UploadsTransfer)
.to receive(:rename_project)
.with('foo', project.path, project.namespace.full_path)
expect(project).to receive(:expire_caches_before_rename)
expect(project).to receive(:expires_full_path_cache)
project.rename_repo
end
context 'container registry with images' do
let(:container_repository) { create(:container_repository) }
before do
stub_container_registry_config(enabled: true)
stub_container_registry_tags(repository: :any, tags: ['tag'])
project.container_repositories << container_repository
end
subject { project.rename_repo }
it { expect { subject }.to raise_error(StandardError) }
end
end
describe '#pages_path' do
it 'returns a path where pages are stored' do
expect(project.pages_path).to eq(File.join(Settings.pages.path, project.namespace.full_path, project.path))
end
end
end
end
......@@ -106,6 +106,8 @@ describe GroupPolicy do
let(:current_user) { owner }
it do
allow(Group).to receive(:supports_nested_groups?).and_return(true)
expect_allowed(:read_group)
expect_allowed(*reporter_permissions)
expect_allowed(*master_permissions)
......@@ -117,6 +119,8 @@ describe GroupPolicy do
let(:current_user) { admin }
it do
allow(Group).to receive(:supports_nested_groups?).and_return(true)
expect_allowed(:read_group)
expect_allowed(*reporter_permissions)
expect_allowed(*master_permissions)
......@@ -124,6 +128,36 @@ describe GroupPolicy do
end
end
describe 'when nested group support feature is disabled' do
before do
allow(Group).to receive(:supports_nested_groups?).and_return(false)
end
context 'admin' do
let(:current_user) { admin }
it 'allows every owner permission except creating subgroups' do
create_subgroup_permission = [:create_subgroup]
updated_owner_permissions = owner_permissions - create_subgroup_permission
expect_disallowed(*create_subgroup_permission)
expect_allowed(*updated_owner_permissions)
end
end
context 'owner' do
let(:current_user) { owner }
it 'allows every owner permission except creating subgroups' do
create_subgroup_permission = [:create_subgroup]
updated_owner_permissions = owner_permissions - create_subgroup_permission
expect_disallowed(*create_subgroup_permission)
expect_allowed(*updated_owner_permissions)
end
end
end
describe 'private nested group use the highest access level from the group and inherited permissions', :nested_groups do
let(:nested_group) { create(:group, :private, parent: group) }
......@@ -200,6 +234,8 @@ describe GroupPolicy do
let(:current_user) { owner }
it do
allow(Group).to receive(:supports_nested_groups?).and_return(true)
expect_allowed(:read_group)
expect_allowed(*reporter_permissions)
expect_allowed(*master_permissions)
......
......@@ -22,7 +22,7 @@ describe Groups::CreateService, '#execute' do
end
end
describe 'creating subgroup' do
describe 'creating subgroup', :nested_groups do
let!(:group) { create(:group) }
let!(:service) { described_class.new(user, group_params.merge(parent_id: group.id)) }
......@@ -32,12 +32,24 @@ describe Groups::CreateService, '#execute' do
end
it { is_expected.to be_persisted }
context 'when nested groups feature is disabled' do
it 'does not save group and returns an error' do
allow(Group).to receive(:supports_nested_groups?).and_return(false)
is_expected.not_to be_persisted
expect(subject.errors[:parent_id]).to include('You don’t have permission to create a subgroup in this group.')
expect(subject.parent_id).to be_nil
end
end
end
context 'as guest' do
it 'does not save group and returns an error' do
allow(Group).to receive(:supports_nested_groups?).and_return(true)
is_expected.not_to be_persisted
expect(subject.errors[:parent_id].first).to eq('manage access required to create subgroup')
expect(subject.errors[:parent_id].first).to eq('You don’t have permission to create a subgroup in this group.')
expect(subject.parent_id).to be_nil
end
end
......
......@@ -8,8 +8,8 @@ describe Groups::DestroyService do
let!(:nested_group) { create(:group, parent: group) }
let!(:project) { create(:project, namespace: group) }
let!(:notification_setting) { create(:notification_setting, source: group)}
let!(:gitlab_shell) { Gitlab::Shell.new }
let!(:remove_path) { group.path + "+#{group.id}+deleted" }
let(:gitlab_shell) { Gitlab::Shell.new }
let(:remove_path) { group.path + "+#{group.id}+deleted" }
before do
group.add_user(user, Gitlab::Access::OWNER)
......@@ -144,4 +144,26 @@ describe Groups::DestroyService do
it_behaves_like 'group destruction', false
end
describe 'repository removal' do
before do
destroy_group(group, user, false)
end
context 'legacy storage' do
let!(:project) { create(:project, :empty_repo, namespace: group) }
it 'removes repository' do
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey
end
end
context 'hashed storage' do
let!(:project) { create(:project, :hashed, :empty_repo, namespace: group) }
it 'removes repository' do
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey
end
end
end
end
......@@ -4,9 +4,10 @@ describe Users::DestroyService do
describe "Deletes a user and all their personal projects" do
let!(:user) { create(:user) }
let!(:admin) { create(:admin) }
let!(:namespace) { create(:namespace, owner: user) }
let!(:namespace) { user.namespace }
let!(:project) { create(:project, namespace: namespace) }
let(:service) { described_class.new(admin) }
let(:gitlab_shell) { Gitlab::Shell.new }
context 'no options are given' do
it 'deletes the user' do
......@@ -14,7 +15,7 @@ describe Users::DestroyService do
expect { user_data['email'].to eq(user.email) }
expect { User.find(user.id) }.to raise_error(ActiveRecord::RecordNotFound)
expect { Namespace.with_deleted.find(user.namespace.id) }.to raise_error(ActiveRecord::RecordNotFound)
expect { Namespace.with_deleted.find(namespace.id) }.to raise_error(ActiveRecord::RecordNotFound)
end
it 'will delete the project' do
......@@ -183,5 +184,27 @@ describe Users::DestroyService do
expect(project.reload.mirror_user).to eq group_owner
end
end
describe "user personal's repository removal" do
before do
Sidekiq::Testing.inline! { service.execute(user) }
end
context 'legacy storage' do
let!(:project) { create(:project, :empty_repo, namespace: user.namespace) }
it 'removes repository' do
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey
end
end
context 'hashed storage' do
let!(:project) { create(:project, :empty_repo, :hashed, namespace: user.namespace) }
it 'removes repository' do
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey
end
end
end
end
end
......@@ -119,6 +119,18 @@ RSpec.configure do |config|
reset_delivered_emails!
end
# Stub the `ForkedStorageCheck.storage_available?` method unless
# `:broken_storage` metadata is defined
#
# This check can be slow and is unnecessary in a test environment where we
# know the storage is available, because we create it at runtime
config.before(:example) do |example|
unless example.metadata[:broken_storage]
allow(Gitlab::Git::Storage::ForkedStorageCheck)
.to receive(:storage_available?).and_return(true)
end
end
config.around(:each, :use_clean_rails_memory_store_caching) do |example|
caching_store = Rails.cache
Rails.cache = ActiveSupport::Cache::MemoryStore.new
......
......@@ -29,21 +29,27 @@ describe AuthorizedProjectsWorker do
end
describe '#perform' do
subject { described_class.new }
let(:user) { create(:user) }
it "refreshes user's authorized projects" do
user = create(:user)
subject(:job) { described_class.new }
it "refreshes user's authorized projects" do
expect_any_instance_of(User).to receive(:refresh_authorized_projects)
subject.perform(user.id)
job.perform(user.id)
end
it 'notifies the JobWaiter when done if the key is provided' do
expect(Gitlab::JobWaiter).to receive(:notify).with('notify-key', job.jid)
job.perform(user.id, 'notify-key')
end
context "when the user is not found" do
it "does nothing" do
expect_any_instance_of(User).not_to receive(:refresh_authorized_projects)
subject.perform(-1)
job.perform(-1)
end
end
end
......
......@@ -5,7 +5,7 @@ describe NamespacelessProjectDestroyWorker do
before do
# Stub after_save callbacks that will fail when Project has no namespace
allow_any_instance_of(Project).to receive(:ensure_storage_path_exist).and_return(nil)
allow_any_instance_of(Project).to receive(:ensure_storage_path_exists).and_return(nil)
allow_any_instance_of(Project).to receive(:update_project_statistics).and_return(nil)
end
......@@ -75,5 +75,19 @@ describe NamespacelessProjectDestroyWorker do
end
end
end
context 'project has non-existing namespace' do
let!(:project) do
project = build(:project, namespace_id: Namespace.maximum(:id).to_i.succ)
project.save(validate: false)
project
end
it 'deletes the project' do
subject.perform(project.id)
expect(Project.unscoped.all).not_to include(project)
end
end
end
end