Commit 68f4b26e authored by GitLab Bot

Merge remote-tracking branch 'upstream/master' into ce-to-ee-2018-08-14

# Conflicts:
#	app/services/notification_recipient_service.rb
#	doc/user/project/issue_board.md
#	locale/gitlab.pot

[ci skip]
parents 3ff95c3c e610b41e
<script>
import TimeagoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';

export default {
  components: {
    TimeagoTooltip,
  },
  props: {
    erasedByUser: {
      type: Boolean,
      required: true,
    },
    username: {
      type: String,
      required: false,
      default: null,
    },
    linkToUser: {
      type: String,
      required: false,
      default: null,
    },
    erasedAt: {
      type: String,
      required: true,
    },
  },
};
</script>

<template>
  <div class="prepend-top-default js-build-erased">
    <div class="erased alert alert-warning">
      <template v-if="erasedByUser">
        {{ s__("Job|Job has been erased by") }}
        <a :href="linkToUser">
          {{ username }}
        </a>
      </template>
      <template v-else>
        {{ s__("Job|Job has been erased") }}
      </template>

      <timeago-tooltip
        :time="erasedAt"
      />
    </div>
  </div>
</template>
<script>
export default {
  name: 'JobLog',
  props: {
    trace: {
      type: String,
      required: true,
    },
    isReceivingBuildTrace: {
      type: Boolean,
      required: true,
    },
  },
};
</script>

<template>
  <pre class="build-trace">
    <code
      class="bash"
      v-html="trace"
    >
    </code>

    <div
      v-if="isReceivingBuildTrace"
      class="js-log-animation build-loader-animation"
    >
      <div class="dot"></div>
      <div class="dot"></div>
      <div class="dot"></div>
    </div>
  </pre>
</template>
# frozen_string_literal: true

module Ci
  class BuildRunnerPresenter < SimpleDelegator
    def artifacts
...

# frozen_string_literal: true

class ProjectMirrorSerializer < BaseSerializer
  entity ProjectMirrorEntity
end

# frozen_string_literal: true

class TestCaseEntity < Grape::Entity
  expose :status
  expose :name
...

# frozen_string_literal: true

class TestReportsComparerEntity < Grape::Entity
  expose :total_status, as: :status
...

# frozen_string_literal: true

class TestReportsComparerSerializer < BaseSerializer
  entity TestReportsComparerEntity
end

# frozen_string_literal: true

class TestSuiteComparerEntity < Grape::Entity
  expose :name
  expose :total_status, as: :status
...
@@ -220,8 +220,11 @@ module NotificationRecipientService
  end

  class Default < Base
+<<<<<<< HEAD
    prepend ::EE::NotificationRecipientBuilders::Default
+=======
+>>>>>>> upstream/master
    MENTION_TYPE_ACTIONS = [:new_issue, :new_merge_request].freeze

    attr_reader :target
...
# frozen_string_literal: true

module Projects
  class DetectRepositoryLanguagesService < BaseService
    attr_reader :detected_repository_languages, :programming_languages
...

# frozen_string_literal: true

module Todos
  module Destroy
    class BaseService
...

# frozen_string_literal: true

module Todos
  module Destroy
    class ConfidentialIssueService < ::Todos::Destroy::BaseService
...

# frozen_string_literal: true

module Todos
  module Destroy
    class EntityLeaveService < ::Todos::Destroy::BaseService
...

# frozen_string_literal: true

module Todos
  module Destroy
    class GroupPrivateService < ::Todos::Destroy::BaseService
...

# frozen_string_literal: true

module Todos
  module Destroy
    class PrivateFeaturesService < ::Todos::Destroy::BaseService
...

# frozen_string_literal: true

module Todos
  module Destroy
    class ProjectPrivateService < ::Todos::Destroy::BaseService
...
@@ -17,6 +17,6 @@
      %th Primary Action
      %th
    = render @spam_logs
-   = paginate @spam_logs
+   = paginate @spam_logs, theme: 'gitlab'
  - else
    %h4 There are no Spam Logs
# frozen_string_literal: true

class DetectRepositoryLanguagesWorker
  include ApplicationWorker
  include ExceptionBacktrace
...

# frozen_string_literal: true

module TodosDestroyer
  class ConfidentialIssueWorker
    include ApplicationWorker
...

# frozen_string_literal: true

module TodosDestroyer
  class EntityLeaveWorker
    include ApplicationWorker
...

# frozen_string_literal: true

module TodosDestroyer
  class GroupPrivateWorker
    include ApplicationWorker
...

# frozen_string_literal: true

module TodosDestroyer
  class PrivateFeaturesWorker
    include ApplicationWorker
...

# frozen_string_literal: true

module TodosDestroyer
  class ProjectPrivateWorker
    include ApplicationWorker
...
---
title: disable_statement_timeout no longer leaks to other migrations
merge_request: 20503
author:
type: fixed
---
title: Add gitlab theme to spam logs pagination
merge_request: 21145
author:
type: fixed
---
title: Creates vue component for erased block on job view
merge_request:
author:
type: other
---
title: Creates vue component for job log trace
merge_request:
author:
type: other
---
title: 'Auto-DevOps.gitlab-ci.yml: Update glibc package signing key URL'
merge_request: 21182
author: sgerrand
type: fixed
---
title: Enable frozen string in vestigial app files
merge_request:
author: gfyoung
type: performance
---
title: Remove storage path dependency of gitaly install task
merge_request: 21101
author:
type: changed
@@ -106,14 +106,14 @@ class ProjectForeignKeysWithCascadingDeletes < ActiveRecord::Migration
        # Disables statement timeouts for the current connection. This is
        # necessary as removing of orphaned data might otherwise exceed the
        # statement timeout.
-       disable_statement_timeout
+       disable_statement_timeout do
          remove_orphans(*queue.pop) until queue.empty?
          steal_from_queues(queues - [queue])
        end
      end
    end
+   end

    threads.each(&:join)
  end
...
@@ -25,8 +25,9 @@ class AddLowerPathIndexToRedirectRoutes < ActiveRecord::Migration
    # trivial to write a query that checks for an index. BUT there is a
    # convenient `IF EXISTS` parameter for `DROP INDEX`.
    if supports_drop_index_concurrently?
-     disable_statement_timeout
+     disable_statement_timeout do
        execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME};"
+     end
    else
      execute "DROP INDEX IF EXISTS #{INDEX_NAME};"
    end
...
@@ -8,8 +8,7 @@ class AddIndexOnNamespacesLowerName < ActiveRecord::Migration
  def up
    return unless Gitlab::Database.postgresql?

-   disable_statement_timeout
+   disable_statement_timeout do
      if Gitlab::Database.version.to_f >= 9.5
        # Allow us to hot-patch the index manually ahead of the migration
        execute "CREATE INDEX CONCURRENTLY IF NOT EXISTS #{INDEX_NAME} ON namespaces (lower(name));"
@@ -17,16 +16,17 @@ class AddIndexOnNamespacesLowerName < ActiveRecord::Migration
        execute "CREATE INDEX CONCURRENTLY #{INDEX_NAME} ON namespaces (lower(name));"
      end
    end
+ end

  def down
    return unless Gitlab::Database.postgresql?

-   disable_statement_timeout
+   disable_statement_timeout do
      if Gitlab::Database.version.to_f >= 9.2
        execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME};"
      else
        execute "DROP INDEX IF EXISTS #{INDEX_NAME};"
      end
    end
+ end
end
@@ -18,8 +18,7 @@ class ReworkRedirectRoutesIndexes < ActiveRecord::Migration
  OLD_INDEX_NAME_PATH_LOWER = "index_on_redirect_routes_lower_path"

  def up
-   disable_statement_timeout
+   disable_statement_timeout do
      # this is a plain btree on a single boolean column. It'll never be
      # selective enough to be valuable. This class is called by
      # setup_postgresql.rake so it needs to be able to handle this
@@ -30,7 +29,7 @@ class ReworkRedirectRoutesIndexes < ActiveRecord::Migration
      # If we're on MySQL then the existing index on path is ok. But on
      # Postgres we need to clean things up:
-     return unless Gitlab::Database.postgresql?
+     break unless Gitlab::Database.postgresql?

      if_not_exists = Gitlab::Database.version.to_f >= 9.5 ? "IF NOT EXISTS" : ""
@@ -50,13 +49,13 @@ class ReworkRedirectRoutesIndexes < ActiveRecord::Migration
      # column so the varchar_pattern_ops index is sufficient
      execute "DROP INDEX CONCURRENTLY IF EXISTS #{OLD_INDEX_NAME_PATH_LOWER};"
    end
+ end

  def down
-   disable_statement_timeout
+   disable_statement_timeout do
      add_concurrent_index(:redirect_routes, :permanent)

-     return unless Gitlab::Database.postgresql?
+     break unless Gitlab::Database.postgresql?

      execute("CREATE INDEX CONCURRENTLY #{OLD_INDEX_NAME_PATH_TPOPS} ON redirect_routes (path varchar_pattern_ops);")
      execute("CREATE INDEX CONCURRENTLY #{OLD_INDEX_NAME_PATH_LOWER} ON redirect_routes (LOWER(path));")
@@ -65,4 +64,5 @@ class ReworkRedirectRoutesIndexes < ActiveRecord::Migration
      execute("DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME_PERM};")
      execute("DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME_TEMP};")
    end
+ end
end
@@ -13,8 +13,7 @@ class CreateProjectCiCdSettings < ActiveRecord::Migration
      end
    end

-   disable_statement_timeout
+   disable_statement_timeout do
      # This particular INSERT will take between 10 and 20 seconds.
      execute 'INSERT INTO project_ci_cd_settings (project_id) SELECT id FROM projects'
@@ -24,6 +23,7 @@ class CreateProjectCiCdSettings < ActiveRecord::Migration
      add_foreign_key_with_retry
    end
+ end

  def down
    drop_table :project_ci_cd_settings
...
@@ -14,8 +14,7 @@ class CleanupBuildStageMigration < ActiveRecord::Migration
  end

  def up
-   disable_statement_timeout
+   disable_statement_timeout do
      ##
      # We steal from the background migrations queue to catch up with the
      # scheduled migrations set.
@@ -52,10 +51,13 @@ class CleanupBuildStageMigration < ActiveRecord::Migration
      #
      remove_concurrent_index_by_name(:ci_builds, TMP_INDEX)
    end
+ end

  def down
    if index_exists_by_name?(:ci_builds, TMP_INDEX)
+     disable_statement_timeout do
        remove_concurrent_index_by_name(:ci_builds, TMP_INDEX)
      end
    end
+ end
end
@@ -13,20 +13,20 @@ class ProjectNameLowerIndex < ActiveRecord::Migration
  def up
    return unless Gitlab::Database.postgresql?

-   disable_statement_timeout
+   disable_statement_timeout do
      execute "CREATE INDEX CONCURRENTLY #{INDEX_NAME} ON projects (LOWER(name))"
    end
+ end

  def down
    return unless Gitlab::Database.postgresql?

-   disable_statement_timeout
+   disable_statement_timeout do
      if supports_drop_index_concurrently?
        execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME}"
      else
        execute "DROP INDEX IF EXISTS #{INDEX_NAME}"
      end
    end
+ end
end
@@ -28,8 +28,7 @@ class RemoveOrphanedRoutes < ActiveRecord::Migration
    # which is pretty close to our 15 second statement timeout. To ensure a
    # smooth deployment procedure we disable the statement timeouts for this
    # migration, just in case.
-   disable_statement_timeout
+   disable_statement_timeout do
      # On GitLab.com there are around 4000 orphaned project routes, and around
      # 150 orphaned namespace routes.
      [
@@ -41,6 +40,7 @@ class RemoveOrphanedRoutes < ActiveRecord::Migration
        end
      end
    end
+ end

  def down
    # There is no way to restore orphaned routes, and this doesn't make any
...
@@ -29,20 +29,22 @@ class CompositePrimaryKeysMigration < ActiveRecord::Migration
  def up
    return unless Gitlab::Database.postgresql?

-   disable_statement_timeout
+   disable_statement_timeout do
      TABLES.each do |index|
        add_primary_key(index)
      end
    end
+ end

  def down
    return unless Gitlab::Database.postgresql?

-   disable_statement_timeout
+   disable_statement_timeout do
      TABLES.each do |index|
        remove_primary_key(index)
      end
    end
+ end

  private
...
@@ -8,10 +8,10 @@ class EnableAutoCancelPendingPipelinesForAll < ActiveRecord::Migration
  DOWNTIME = false

  def up
-   disable_statement_timeout
+   disable_statement_timeout do
      update_column_in_batches(:projects, :auto_cancel_pending_pipelines, 1)
    end
+ end

  def down
    # Nothing we can do!
...
@@ -7,14 +7,14 @@ class UpdateRetriedForCiBuild < ActiveRecord::Migration
  disable_ddl_transaction!

  def up
-   disable_statement_timeout
    if Gitlab::Database.mysql?
      up_mysql
    else
+     disable_statement_timeout do
        up_postgres
      end
    end
+   end

  def down
  end
...
@@ -7,11 +7,10 @@ class AddHeadPipelineForEachMergeRequest < ActiveRecord::Migration
  disable_ddl_transaction!

  def up
-   disable_statement_timeout
    pipelines = Arel::Table.new(:ci_pipelines)
    merge_requests = Arel::Table.new(:merge_requests)

+   disable_statement_timeout do
      head_id = pipelines
        .project(Arel::Nodes::NamedFunction.new('max', [pipelines[:id]]))
        .from(pipelines)
@@ -22,6 +21,7 @@ class AddHeadPipelineForEachMergeRequest < ActiveRecord::Migration
      update_column_in_batches(:merge_requests, :head_pipeline_id, sub_query)
    end
+ end

  def down
  end
...
@@ -87,16 +87,16 @@ class RenameAllReservedPathsAgain < ActiveRecord::Migration
  ].freeze

  def up
-   disable_statement_timeout
+   disable_statement_timeout do
      TOP_LEVEL_ROUTES.each { |route| rename_root_paths(route) }
      PROJECT_WILDCARD_ROUTES.each { |route| rename_wildcard_paths(route) }
      GROUP_ROUTES.each { |route| rename_child_paths(route) }
    end
+ end

  def down
-   disable_statement_timeout
+   disable_statement_timeout do
      revert_renames
    end
+ end
end
@@ -6,8 +6,7 @@ class MigratePipelineStages < ActiveRecord::Migration
  disable_ddl_transaction!

  def up
-   disable_statement_timeout
+   disable_statement_timeout do
      execute <<-SQL.strip_heredoc
        INSERT INTO ci_stages (project_id, pipeline_id, name)
        SELECT project_id, commit_id, stage FROM ci_builds
@@ -19,4 +18,5 @@ class MigratePipelineStages < ActiveRecord::Migration
        ORDER BY MAX(stage_idx)
      SQL
    end
+ end
end
@@ -7,22 +7,22 @@ class MigrateBuildStageReferenceAgain < ActiveRecord::Migration
  disable_ddl_transaction!

  def up
-   disable_statement_timeout
    stage_id = Arel.sql <<-SQL.strip_heredoc
      (SELECT id FROM ci_stages
        WHERE ci_stages.pipeline_id = ci_builds.commit_id
        AND ci_stages.name = ci_builds.stage)
    SQL

+   disable_statement_timeout do
      update_column_in_batches(:ci_builds, :stage_id, stage_id) do |table, query|
        query.where(table[:stage_id].eq(nil))
      end
    end
+ end

  def down
-   disable_statement_timeout
+   disable_statement_timeout do
      update_column_in_batches(:ci_builds, :stage_id, nil)
    end
+ end
end
@@ -26,9 +26,9 @@ class MigrateStagesStatuses < ActiveRecord::Migration
  end

  def down
-   disable_statement_timeout
+   disable_statement_timeout do
      # rubocop:disable Migration/UpdateLargeTable
      update_column_in_batches(:ci_stages, :status, nil)
    end
+ end
end
@@ -78,13 +78,13 @@ class RemoveSoftRemovedObjects < ActiveRecord::Migration
  MODELS = [Issue, MergeRequest, CiPipelineSchedule, CiTrigger].freeze

  def up
-   disable_statement_timeout
+   disable_statement_timeout do
      remove_personal_routes
      remove_personal_namespaces
      remove_group_namespaces
      remove_simple_soft_removed_rows
    end
+ end

  def down
    # The data removed by this migration can't be restored in an automated way.
...
@@ -38,8 +38,7 @@ class RemoveRedundantPipelineStages < ActiveRecord::Migration
  end

  def remove_redundant_pipeline_stages!
-   disable_statement_timeout
+   disable_statement_timeout do
      redundant_stages_ids = <<~SQL
        SELECT id FROM ci_stages WHERE (pipeline_id, name) IN (
          SELECT pipeline_id, name FROM ci_stages
@@ -63,4 +62,5 @@ class RemoveRedundantPipelineStages < ActiveRecord::Migration
      SQL
    end
  end
+ end
end
@@ -15,11 +15,11 @@ class RemovePermanentFromRedirectRoutes < ActiveRecord::Migration
    # ReworkRedirectRoutesIndexes:
    # https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/16211
    if Gitlab::Database.postgresql?
-     disable_statement_timeout
+     disable_statement_timeout do
        execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME_PERM};"
        execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME_TEMP};"
      end
+   end

    remove_column(:redirect_routes, :permanent)
  end
@@ -28,10 +28,10 @@ class RemovePermanentFromRedirectRoutes < ActiveRecord::Migration
    add_column(:redirect_routes, :permanent, :boolean)

    if Gitlab::Database.postgresql?
-     disable_statement_timeout
+     disable_statement_timeout do
        execute("CREATE INDEX CONCURRENTLY #{INDEX_NAME_PERM} ON redirect_routes (lower(path) varchar_pattern_ops) where (permanent);")
        execute("CREATE INDEX CONCURRENTLY #{INDEX_NAME_TEMP} ON redirect_routes (lower(path) varchar_pattern_ops) where (not permanent or permanent is null) ;")
      end
    end
+ end
end
@@ -20,12 +20,12 @@ class AddPathIndexToRedirectRoutes < ActiveRecord::Migration
  def up
    return unless Gitlab::Database.postgresql?

-   disable_statement_timeout
+   disable_statement_timeout do
      unless index_exists_by_name?(:redirect_routes, INDEX_NAME)
        execute("CREATE UNIQUE INDEX CONCURRENTLY #{INDEX_NAME} ON redirect_routes (lower(path) varchar_pattern_ops);")
      end
    end
+ end

  def down
    # Do nothing in the DOWN. Since the index above is originally created in the
...
@@ -17,8 +17,7 @@ class RescheduleBuildsStagesMigration < ActiveRecord::Migration
  end

  def up
-   disable_statement_timeout
+   disable_statement_timeout do
      Build.where('stage_id IS NULL').tap do |relation|
        queue_background_migration_jobs_by_range_at_intervals(relation,
                                                              MIGRATION,
@@ -26,6 +25,7 @@ class RescheduleBuildsStagesMigration < ActiveRecord::Migration
                                                              batch_size: BATCH_SIZE)
      end
    end
+ end

  def down
    # noop
...
@@ -13,8 +13,7 @@ class ScheduleStagesIndexMigration < ActiveRecord::Migration
  end

  def up
-   disable_statement_timeout
+   disable_statement_timeout do
      Stage.all.tap do |relation|
        queue_background_migration_jobs_by_range_at_intervals(relation,
                                                              MIGRATION,
@@ -22,6 +21,7 @@ class ScheduleStagesIndexMigration < ActiveRecord::Migration
                                                              batch_size: BATCH_SIZE)
      end
    end
+ end

  def down
    # noop
...
@@ -12,8 +12,7 @@ class CleanupStagesPositionMigration < ActiveRecord::Migration
  end

  def up
-   disable_statement_timeout
+   disable_statement_timeout do
      Gitlab::BackgroundMigration.steal('MigrateStageIndex')

      unless index_exists_by_name?(:ci_stages, TMP_INDEX_NAME)
@@ -34,10 +33,13 @@ class CleanupStagesPositionMigration < ActiveRecord::Migration
      remove_concurrent_index_by_name(:ci_stages, TMP_INDEX_NAME)
    end
+ end

  def down
    if index_exists_by_name?(:ci_stages, TMP_INDEX_NAME)
+     disable_statement_timeout do
        remove_concurrent_index_by_name(:ci_stages, TMP_INDEX_NAME)
      end
    end
+ end
end
@@ -255,7 +255,7 @@ Example response:
Get a list of visible events for a particular project.

```
-GET /:project_id/events
+GET /projects/:project_id/events
```

Parameters:
...
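For reference, a minimal sketch of calling the corrected endpoint from Ruby; the host, project ID, and token below are illustrative placeholders, not values from this commit:

```ruby
# Hypothetical request against the corrected project events endpoint.
require 'net/http'
require 'json'

uri = URI('https://gitlab.example.com/api/v4/projects/42/events')
request = Net::HTTP::Get.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN') # placeholder token

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) do |http|
  http.request(request)
end

events = JSON.parse(response.body)
puts "Fetched #{events.length} events"
```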
@@ -494,11 +494,11 @@ Make GitLab start on boot:
### Install Gitaly

    # Fetch Gitaly source with Git and compile with Go
-   sudo -u git -H bundle exec rake "gitlab:gitaly:install[/home/git/gitaly]" RAILS_ENV=production
+   sudo -u git -H bundle exec rake "gitlab:gitaly:install[/home/git/gitaly,/home/git/repositories]" RAILS_ENV=production

You can specify a different Git repository by providing it as an extra parameter:

-   sudo -u git -H bundle exec rake "gitlab:gitaly:install[/home/git/gitaly,https://example.com/gitaly.git]" RAILS_ENV=production
+   sudo -u git -H bundle exec rake "gitlab:gitaly:install[/home/git/gitaly,/home/git/repositories,https://example.com/gitaly.git]" RAILS_ENV=production

Next, make sure gitaly configured:
...
@@ -122,7 +122,10 @@ Issue Board, that is, create or delete lists and drag issues from one list to another
- **List** - A column on the issue board that displays issues matching certain attributes. In addition to the default lists of 'Open' and 'Closed' issue, each additional list will show issues matching your chosen label or assignee. On the top of that list you can see the number of issues that belong to it.
  - **Label list**: a list based on a label. It shows all opened issues with that label.
  - **Assignee list**: a list which includes all issues assigned to a user.
+<<<<<<< HEAD
  - **Milestone list**: a list which includes all issues with that milestone.
+=======
+>>>>>>> upstream/master
  - **Open** (default): shows all open issues that do not belong to one of the other lists. Always appears as the leftmost list.
  - **Closed** (default): shows all closed issues. Always appears as the rightmost list.
- **Card** - A box in the list that represents an individual issue. The information you can see on a card consists of the issue number, the issue title, the assignee, and the labels associated with the issue. You can drag cards from one list to another to change their label or assignee from that of the source list to that of the destination list.
@@ -380,6 +383,7 @@ As on another list types, click on the trash icon to remove it.
When dragging issues between lists, different behavior occurs depending on the source list and the target list.

+<<<<<<< HEAD
| | To Open | To Closed | To label `B` list | To assignee `Bob` list | To milestone `2.0` list |
| --- | --- | --- | --- | --- | --- |
| From Open | - | Issue closed | `B` added | `Bob` assigned | `2.0` added |
@@ -387,6 +391,14 @@ When dragging issues between lists, different behavior occurs depending on the source list and the target list.
| From label `A` list | `A` removed | Issue closed | `A` removed<br/>`B` added | `Bob` assigned | `2.0` added |
| From assignee `Alice` list | `Alice` unassigned | Issue closed | `B` added | `Alice` unassigned<br/>`Bob` assigned | `2.0` added |
| From milestone `1.0` list | `1.0` removed | Issue closed | `B` added | `Bob` assigned<br/> | `1.0` removed<br/>`2.0` added |
+=======
+| | To Open | To Closed | To label `B` list | To assignee `Bob` list |
+| --- | --- | --- | --- | --- |
+| From Open | - | Issue closed | `B` added | `Bob` assigned |
+| From Closed | Issue reopened | - | Issue reopened<br/>`B` added | Issue reopened<br/>`Bob` assigned |
+| From label `A` list | `A` removed | Issue closed | `A` removed<br/>`B` added | `Bob` assigned |
+| From assignee `Alice` list | `Alice` unassigned | Issue closed | `B` added | `Alice` unassigned<br/>`Bob` assigned |
+>>>>>>> upstream/master

## Features per tier
...
@@ -58,7 +58,6 @@ module Gitlab
        if Database.postgresql?
          options = options.merge({ algorithm: :concurrently })
-         disable_statement_timeout
        end

        if index_exists?(table_name, column_name, options)
@@ -66,8 +65,10 @@ module Gitlab
          return
        end

+       disable_statement_timeout do
          add_index(table_name, column_name, options)
        end
+     end

      # Removes an existed index, concurrently when supported
      #
@@ -87,7 +88,6 @@ module Gitlab
        if supports_drop_index_concurrently?
          options = options.merge({ algorithm: :concurrently })
-         disable_statement_timeout
        end

        unless index_exists?(table_name, column_name, options)
@@ -95,8 +95,10 @@ module Gitlab
          return
        end

+       disable_statement_timeout do
          remove_index(table_name, options.merge({ column: column_name }))
        end
+     end

      # Removes an existing index, concurrently when supported
      #
@@ -116,7 +118,6 @@ module Gitlab
        if supports_drop_index_concurrently?
          options = options.merge({ algorithm: :concurrently })
-         disable_statement_timeout
        end

        unless index_exists_by_name?(table_name, index_name)
@@ -124,8 +125,10 @@ module Gitlab
          return
        end

+       disable_statement_timeout do
          remove_index(table_name, options.merge({ name: index_name }))
        end
+     end

      # Only available on Postgresql >= 9.2
      def supports_drop_index_concurrently?
@@ -171,8 +174,6 @@ module Gitlab
          on_delete = 'SET NULL' if on_delete == :nullify
        end

-       disable_statement_timeout
        key_name = concurrent_foreign_key_name(source, column)

        unless foreign_key_exists?(source, target, column: column)
@@ -199,8 +200,10 @@ module Gitlab
        # while running.
        #
        # Note this is a no-op in case the constraint is VALID already
+       disable_statement_timeout do
          execute("ALTER TABLE #{source} VALIDATE CONSTRAINT #{key_name};")
        end
+     end

      def foreign_key_exists?(source, target = nil, column: nil)
        foreign_keys(source).any? do |key|
@@ -224,8 +227,48 @@ module Gitlab
      # Long-running migrations may take more than the timeout allowed by
      # the database. Disable the session's statement timeout to ensure
      # migrations don't get killed prematurely. (PostgreSQL only)
+     #
+     # There are two possible ways to disable the statement timeout:
+     #
+     # - Per transaction (this is the preferred and default mode)
+     # - Per connection (requires a cleanup after the execution)
+     #
+     # When using a per connection disable statement, code must be inside
+     # a block so we can automatically execute `RESET ALL` after block finishes
+     # otherwise the statement will still be disabled until connection is dropped
+     # or `RESET ALL` is executed
      def disable_statement_timeout
-       execute('SET statement_timeout TO 0') if Database.postgresql?
+       # bypass disabled_statement logic when not using postgres, but still execute block when one is given
+       unless Database.postgresql?
+         if block_given?
+           yield
+         end
+         return
+       end
+
+       if block_given?
+         begin
+           execute('SET statement_timeout TO 0')
+           yield
+         ensure
+           execute('RESET ALL')
+         end
+       else
+         unless transaction_open?
+           raise <<~ERROR
+             Cannot call disable_statement_timeout() without a transaction open or outside of a transaction block.
+             If you don't want to use a transaction wrap your code in a block call:
+             disable_statement_timeout { # code that requires disabled statement here }
+             This will make sure statement_timeout is disabled before and reset after the block execution is finished.
+           ERROR
+         end
+
+         execute('SET LOCAL statement_timeout TO 0')
+       end
      end

      def true_value
@@ -367,8 +410,7 @@ module Gitlab
              'in the body of your migration class'
        end

-       disable_statement_timeout
+       disable_statement_timeout do
          transaction do
            if limit
              add_column(table, column, type, default: nil, limit: limit)
@@ -393,6 +435,7 @@ module Gitlab
            raise error
          end
        end
+     end

      # Renames a column without requiring downtime.
      #
...
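As a hedged illustration of the block form introduced above, a hypothetical migration (the class, table, and index names are invented for the example) could wrap its concurrent statements the way the converted migrations earlier in this diff do:

```ruby
# Hypothetical migration sketch; mirrors the pattern used by the migrations
# converted in this commit. Class, table, and index names are placeholders.
class AddExampleLowerNameIndex < ActiveRecord::Migration
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false
  INDEX_NAME = 'index_examples_on_lower_name'.freeze

  disable_ddl_transaction!

  def up
    return unless Gitlab::Database.postgresql?

    # The timeout is disabled only for the duration of the block;
    # `RESET ALL` runs automatically once the block finishes.
    disable_statement_timeout do
      execute "CREATE INDEX CONCURRENTLY #{INDEX_NAME} ON examples (LOWER(name))"
    end
  end

  def down
    return unless Gitlab::Database.postgresql?

    disable_statement_timeout do
      execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME}"
    end
  end
end
```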
@@ -366,18 +366,9 @@ module Gitlab
      end
    end

-   # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/1233
    def new_commits(newrev)
-     gitaly_migrate(:new_commits) do |is_enabled|
-       if is_enabled
+     wrapped_gitaly_errors do
        gitaly_ref_client.list_new_commits(newrev)
-       else
-         refs = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
-           rev_list(including: newrev, excluding: :all).split("\n").map(&:strip)
-         end
-         Gitlab::Git::Commit.batch_by_oid(self, refs)
-       end
      end
    end
...
+require 'toml-rb'

module Gitlab
  module SetupHelper
    class << self
@@ -9,7 +11,7 @@ module Gitlab
      # because it uses a Unix socket.
      # For development and testing purposes, an extra storage is added to gitaly,
      # which is not known to Rails, but must be explicitly stubbed.
-     def gitaly_configuration_toml(gitaly_dir, gitaly_ruby: true)
+     def gitaly_configuration_toml(gitaly_dir, storage_paths, gitaly_ruby: true)
        storages = []
        address = nil
@@ -24,10 +26,7 @@ module Gitlab
          address = val['gitaly_address']
        end

-       # https://gitlab.com/gitlab-org/gitaly/issues/1238
-       Gitlab::GitalyClient::StorageSettings.allow_disk_access do
-         storages << { name: key, path: val.legacy_disk_path }
-       end
+       storages << { name: key, path: storage_paths[key] }
      end

      if Rails.env.test?
@@ -44,12 +43,12 @@ module Gitlab
      end

      # rubocop:disable Rails/Output
-     def create_gitaly_configuration(dir, force: false)
+     def create_gitaly_configuration(dir, storage_paths, force: false)
        config_path = File.join(dir, 'config.toml')
        FileUtils.rm_f(config_path) if force

        File.open(config_path, File::WRONLY | File::CREAT | File::EXCL) do |f|
-         f.puts gitaly_configuration_toml(dir)
+         f.puts gitaly_configuration_toml(dir, storage_paths)
        end
      rescue Errno::EEXIST
        puts "Skipping config.toml generation:"
...
namespace :gitlab do
  namespace :gitaly do
    desc "GitLab | Install or upgrade gitaly"
-   task :install, [:dir, :repo] => :gitlab_environment do |t, args|
+   task :install, [:dir, :storage_path, :repo] => :gitlab_environment do |t, args|
-     require 'toml-rb'
      warn_user_is_not_gitlab

-     unless args.dir.present?
-       abort %(Please specify the directory where you want to install gitaly:\n rake "gitlab:gitaly:install[/home/git/gitaly]")
+     unless args.dir.present? && args.storage_path.present?
+       abort %(Please specify the directory where you want to install gitaly and the path for the default storage
+         Usage: rake "gitlab:gitaly:install[/installation/dir,/storage/path]")
      end

      args.with_defaults(repo: 'https://gitlab.com/gitlab-org/gitaly.git')
@@ -27,7 +26,8 @@ namespace :gitlab do
        "BUNDLE_PATH=#{Bundler.bundle_path}")
      end

-     Gitlab::SetupHelper.create_gitaly_configuration(args.dir)
+     storage_paths = { 'default' => args.storage_path }
+     Gitlab::SetupHelper.create_gitaly_configuration(args.dir, storage_paths)
      Dir.chdir(args.dir) do
        # In CI we run scripts/gitaly-test-build instead of this command
        unless ENV['CI'].present?
@@ -35,17 +35,5 @@ namespace :gitlab do
        end
      end
    end
-   desc "GitLab | Print storage configuration in TOML format"
-   task storage_config: :environment do
-     require 'toml-rb'
-     puts "# Gitaly storage configuration generated from #{Gitlab.config.source} on #{Time.current.to_s(:long)}"
-     puts "# This is in TOML format suitable for use in Gitaly's config.toml file."
-     # Exclude gitaly-ruby configuration because that depends on the gitaly
-     # installation directory.
-     puts Gitlab::SetupHelper.gitaly_configuration_toml('', gitaly_ruby: false)
-   end
  end
end
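A hypothetical programmatic invocation of the updated install task, mirroring the rake spec further below; both paths are placeholders and a loaded Rails environment is assumed:

```ruby
# Sketch only: invoke the two-argument task from Ruby instead of the shell.
require 'rake'

Rails.application.load_tasks

Rake::Task['gitlab:gitaly:install'].invoke(
  '/home/git/gitaly',       # installation directory (args.dir)
  '/home/git/repositories'  # default storage path (args.storage_path)
)
```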
@@ -4067,7 +4067,14 @@ msgstr ""
msgid "Jobs"
msgstr ""

+<<<<<<< HEAD
msgid "Job|This job is stuck, because the project doesn't have any runners online assigned to it."
+=======
+msgid "Job|Job has been erased"
+msgstr ""
+msgid "Job|Job has been erased by"
+>>>>>>> upstream/master
msgstr ""

msgid "Jul"
...
@@ -50,7 +50,7 @@ module QA
          Page::Project::Pipeline::Show.perform do |pipeline|
            expect(pipeline).to have_build('build', status: :success, wait: 600)
            expect(pipeline).to have_build('test', status: :success, wait: 600)
-           expect(pipeline).to have_build('production', status: :success, wait: 600)
+           expect(pipeline).to have_build('production', status: :success, wait: 1200)
          end
        end
      end
...
import Vue from 'vue';
import { getTimeago } from '~/lib/utils/datetime_utility';
import component from '~/jobs/components/erased_block.vue';
import mountComponent from '../helpers/vue_mount_component_helper';

describe('Erased block', () => {
  const Component = Vue.extend(component);
  let vm;

  const erasedAt = '2016-11-07T11:11:16.525Z';
  const timeago = getTimeago();
  const formatedDate = timeago.format(erasedAt);

  afterEach(() => {
    vm.$destroy();
  });

  describe('with job erased by user', () => {
    beforeEach(() => {
      vm = mountComponent(Component, {
        erasedByUser: true,
        username: 'root',
        linkToUser: 'gitlab.com/root',
        erasedAt,
      });
    });

    it('renders username and link', () => {
      expect(vm.$el.querySelector('a').getAttribute('href')).toEqual('gitlab.com/root');
      expect(vm.$el.textContent).toContain('Job has been erased by');
      expect(vm.$el.textContent).toContain('root');
    });

    it('renders erasedAt', () => {
      expect(vm.$el.textContent).toContain(formatedDate);
    });
  });

  describe('with erased job', () => {
    beforeEach(() => {
      vm = mountComponent(Component, {
        erasedByUser: false,
        erasedAt,
      });
    });

    it('renders username and link', () => {
      expect(vm.$el.textContent).toContain('Job has been erased');
    });

    it('renders erasedAt', () => {
      expect(vm.$el.textContent).toContain(formatedDate);
    });
  });
});
import Vue from 'vue';
import component from '~/jobs/components/job_log.vue';
import mountComponent from '../helpers/vue_mount_component_helper';

describe('Job Log', () => {
  const Component = Vue.extend(component);
  let vm;

  const trace = 'Running with gitlab-runner 11.1.0 (081978aa)<br> on docker-auto-scale-com d5ae8d25<br>Using Docker executor with image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.4.4-golang-1.9-git-2.18-chrome-67.0-node-8.x-yarn-1.2-postgresql-9.6-graphicsmagick-1.3.29 ...<br>';

  afterEach(() => {
    vm.$destroy();
  });

  it('renders provided trace', () => {
    vm = mountComponent(Component, {
      trace,
      isReceivingBuildTrace: true,
    });

    expect(vm.$el.querySelector('code').textContent).toContain('Running with gitlab-runner 11.1.0 (081978aa)');
  });

  describe('while receiving trace', () => {
    it('renders animation', () => {
      vm = mountComponent(Component, {
        trace,
        isReceivingBuildTrace: true,
      });

      expect(vm.$el.querySelector('.js-log-animation')).not.toBeNull();
    });
  });

  describe('when build trace has finished', () => {
    it('does not render animation', () => {
      vm = mountComponent(Component, {
        trace,
        isReceivingBuildTrace: false,
      });

      expect(vm.$el.querySelector('.js-log-animation')).toBeNull();
    });
  });
});
@@ -48,10 +48,10 @@ describe Gitlab::Database::MigrationHelpers do
      allow(model).to receive(:transaction_open?).and_return(false)
    end

-   context 'using PostgreSQL' do
+   context 'using PostgreSQL', :postgresql do
      before do
        allow(Gitlab::Database).to receive(:postgresql?).and_return(true)
-       allow(model).to receive(:disable_statement_timeout)
+       allow(model).to receive(:disable_statement_timeout).and_call_original
      end

      it 'creates the index concurrently' do
@@ -114,12 +114,12 @@ describe Gitlab::Database::MigrationHelpers do
    before do
      allow(model).to receive(:transaction_open?).and_return(false)
      allow(model).to receive(:index_exists?).and_return(true)
+     allow(model).to receive(:disable_statement_timeout).and_call_original
    end

    context 'using PostgreSQL' do
      before do
        allow(model).to receive(:supports_drop_index_concurrently?).and_return(true)
-       allow(model).to receive(:disable_statement_timeout)
      end

      describe 'by column name' do
@@ -162,7 +162,7 @@ describe Gitlab::Database::MigrationHelpers do
    context 'using MySQL' do
      it 'removes an index' do
-       expect(Gitlab::Database).to receive(:postgresql?).and_return(false)
+       expect(Gitlab::Database).to receive(:postgresql?).and_return(false).twice
        expect(model).to receive(:remove_index)
          .with(:users, { column: :foo })
@@ -224,21 +224,26 @@ describe Gitlab::Database::MigrationHelpers do
    context 'using PostgreSQL' do
      before do
+       allow(Gitlab::Database).to receive(:postgresql?).and_return(true)
        allow(Gitlab::Database).to receive(:mysql?).and_return(false)
      end

      it 'creates a concurrent foreign key and validates it' do
-       expect(model).to receive(:disable_statement_timeout)
+       expect(model).to receive(:disable_statement_timeout).and_call_original
+       expect(model).to receive(:execute).with(/statement_timeout/)
        expect(model).to receive(:execute).ordered.with(/NOT VALID/)
        expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
+       expect(model).to receive(:execute).with(/RESET ALL/)

        model.add_concurrent_foreign_key(:projects, :users, column: :user_id)
      end

      it 'appends a valid ON DELETE statement' do
-       expect(model).to receive(:disable_statement_timeout)
+       expect(model).to receive(:disable_statement_timeout).and_call_original
+       expect(model).to receive(:execute).with(/statement_timeout/)
        expect(model).to receive(:execute).with(/ON DELETE SET NULL/)
        expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
+       expect(model).to receive(:execute).with(/RESET ALL/)

        model.add_concurrent_foreign_key(:projects, :users,
                                         column: :user_id,
@@ -291,13 +296,68 @@ describe Gitlab::Database::MigrationHelpers do
  describe '#disable_statement_timeout' do
    context 'using PostgreSQL' do
-     it 'disables statement timeouts' do
+     it 'disables statement timeouts to current transaction only' do
        expect(Gitlab::Database).to receive(:postgresql?).and_return(true)
-       expect(model).to receive(:execute).with('SET statement_timeout TO 0')
+       expect(model).to receive(:execute).with('SET LOCAL statement_timeout TO 0')

        model.disable_statement_timeout
      end

+     # this spec runs without an enclosing transaction (:delete truncation method for db_cleaner)
+     context 'with real environment', :postgresql, :delete do
+       before do
+         model.execute("SET statement_timeout TO '20000'")
+       end
+
+       after do
+         model.execute('RESET ALL')
+       end
+
+       it 'defines statement to 0 only for current transaction' do
+         expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('20s')
+
+         model.connection.transaction do
+           model.disable_statement_timeout
+           expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('0')
+         end
+
+         expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('20s')
+       end
+     end
+
+     context 'when passing a block' do
+       it 'disables statement timeouts on session level and executes the block' do
+         expect(Gitlab::Database).to receive(:postgresql?).and_return(true)
+         expect(model).to receive(:execute).with('SET statement_timeout TO 0')
+         expect(model).to receive(:execute).with('RESET ALL')
+
+         expect { |block| model.disable_statement_timeout(&block) }.to yield_control
+       end
+
+       # this spec runs without an enclosing transaction (:delete truncation method for db_cleaner)
+       context 'with real environment', :postgresql, :delete do
+         before do
+           model.execute("SET statement_timeout TO '20000'")
+         end
+
+         after do
+           model.execute('RESET ALL')
+         end
+
+         it 'defines statement to 0 for any code run inside the block' do
+           expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('20s')
+
+           model.disable_statement_timeout do
+             model.connection.transaction do
+               expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('0')
+             end
+
+             expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('0')
+           end
+         end
+       end
+     end
    end

    context 'using MySQL' do
@@ -308,6 +368,16 @@ describe Gitlab::Database::MigrationHelpers do
      model.disable_statement_timeout
    end

+   context 'when passing a block' do
+     it 'executes the block of code' do
+       expect(Gitlab::Database).to receive(:postgresql?).and_return(false)
+       expect(model).not_to receive(:execute)
+
+       expect { |block| model.disable_statement_timeout(&block) }.to yield_control
+     end
+   end
  end
end
...
@@ -296,7 +296,6 @@ describe Repository do
  end

  describe '#new_commits' do
-   shared_examples 'finding unreferenced commits' do
    set(:project) { create(:project, :repository) }
    let(:repository) { project.repository }
@@ -324,15 +323,6 @@ describe Repository do
      end
    end

-   context 'when Gitaly handles the request' do
-     it_behaves_like 'finding unreferenced commits'
-   end
-   context 'when Gitaly is disabled', :disable_gitaly do
-     it_behaves_like 'finding unreferenced commits'
-   end
- end

  describe '#commits_by' do
    set(:project) { create(:project, :repository) }
...
...@@ -67,6 +67,7 @@ module TestEnv ...@@ -67,6 +67,7 @@ module TestEnv
TMP_TEST_PATH = Rails.root.join('tmp', 'tests', '**') TMP_TEST_PATH = Rails.root.join('tmp', 'tests', '**')
REPOS_STORAGE = 'default'.freeze REPOS_STORAGE = 'default'.freeze
BROKEN_STORAGE = 'broken'.freeze
# Test environment # Test environment
# #
...@@ -157,10 +158,11 @@ module TestEnv ...@@ -157,10 +158,11 @@ module TestEnv
component_timed_setup('Gitaly', component_timed_setup('Gitaly',
install_dir: gitaly_dir, install_dir: gitaly_dir,
version: Gitlab::GitalyClient.expected_server_version, version: Gitlab::GitalyClient.expected_server_version,
task: "gitlab:gitaly:install[#{gitaly_dir}]") do task: "gitlab:gitaly:install[#{gitaly_dir},#{repos_path}]") do
# Always re-create config, in case it's outdated. This is fast anyway. # Re-create config, to specify the broken storage path
Gitlab::SetupHelper.create_gitaly_configuration(gitaly_dir, force: true) storage_paths = { 'default' => repos_path, 'broken' => broken_path }
Gitlab::SetupHelper.create_gitaly_configuration(gitaly_dir, storage_paths, force: true)
start_gitaly(gitaly_dir) start_gitaly(gitaly_dir)
end end
...@@ -256,6 +258,10 @@ module TestEnv ...@@ -256,6 +258,10 @@ module TestEnv
@repos_path ||= Gitlab.config.repositories.storages[REPOS_STORAGE].legacy_disk_path @repos_path ||= Gitlab.config.repositories.storages[REPOS_STORAGE].legacy_disk_path
end end
def broken_path
@broken_path ||= Gitlab.config.repositories.storages[BROKEN_STORAGE].legacy_disk_path
end
def backup_path def backup_path
Gitlab.config.backup.path Gitlab.config.backup.path
end end
......
...@@ -8,13 +8,23 @@ describe 'gitlab:gitaly namespace rake task' do ...@@ -8,13 +8,23 @@ describe 'gitlab:gitaly namespace rake task' do
describe 'install' do describe 'install' do
let(:repo) { 'https://gitlab.com/gitlab-org/gitaly.git' } let(:repo) { 'https://gitlab.com/gitlab-org/gitaly.git' }
let(:clone_path) { Rails.root.join('tmp/tests/gitaly').to_s } let(:clone_path) { Rails.root.join('tmp/tests/gitaly').to_s }
let(:storage_path) { Rails.root.join('tmp/tests/repositories').to_s }
let(:version) { File.read(Rails.root.join(Gitlab::GitalyClient::SERVER_VERSION_FILE)).chomp } let(:version) { File.read(Rails.root.join(Gitlab::GitalyClient::SERVER_VERSION_FILE)).chomp }
subject { run_rake_task('gitlab:gitaly:install', clone_path, storage_path) }
context 'no dir given' do context 'no dir given' do
it 'aborts and displays a help message' do it 'aborts and displays a help message' do
# avoid writing task output to spec progress # avoid writing task output to spec progress
allow($stderr).to receive :write allow($stderr).to receive :write
expect { run_rake_task('gitlab:gitaly:install') }.to raise_error /Please specify the directory where you want to install gitaly/ expect { run_rake_task('gitlab:gitaly:install') }.to raise_error /Please specify the directory where you want to install gitaly and the path for the default storage/
end
end
context 'no storage path given' do
it 'aborts and displays a help message' do
allow($stderr).to receive :write
expect { run_rake_task('gitlab:gitaly:install', clone_path) }.to raise_error /Please specify the directory where you want to install gitaly and the path for the default storage/
end end
end end
...@@ -23,7 +33,7 @@ describe 'gitlab:gitaly namespace rake task' do ...@@ -23,7 +33,7 @@ describe 'gitlab:gitaly namespace rake task' do
expect(main_object) expect(main_object)
.to receive(:checkout_or_clone_version).and_raise 'Git error' .to receive(:checkout_or_clone_version).and_raise 'Git error'
expect { run_rake_task('gitlab:gitaly:install', clone_path) }.to raise_error 'Git error' expect { subject }.to raise_error 'Git error'
end end
end end
...@@ -36,7 +46,7 @@ describe 'gitlab:gitaly namespace rake task' do ...@@ -36,7 +46,7 @@ describe 'gitlab:gitaly namespace rake task' do
expect(main_object) expect(main_object)
.to receive(:checkout_or_clone_version).with(version: version, repo: repo, target_dir: clone_path) .to receive(:checkout_or_clone_version).with(version: version, repo: repo, target_dir: clone_path)
run_rake_task('gitlab:gitaly:install', clone_path) subject
end end
end end
...@@ -59,7 +69,7 @@ describe 'gitlab:gitaly namespace rake task' do ...@@ -59,7 +69,7 @@ describe 'gitlab:gitaly namespace rake task' do
expect(Gitlab::Popen).to receive(:popen).with(%w[which gmake]).and_return(['/usr/bin/gmake', 0]) expect(Gitlab::Popen).to receive(:popen).with(%w[which gmake]).and_return(['/usr/bin/gmake', 0])
expect(main_object).to receive(:run_command!).with(command_preamble + %w[gmake]).and_return(true) expect(main_object).to receive(:run_command!).with(command_preamble + %w[gmake]).and_return(true)
run_rake_task('gitlab:gitaly:install', clone_path) subject
end end
end end
...@@ -72,7 +82,7 @@ describe 'gitlab:gitaly namespace rake task' do ...@@ -72,7 +82,7 @@ describe 'gitlab:gitaly namespace rake task' do
it 'calls make in the gitaly directory' do it 'calls make in the gitaly directory' do
expect(main_object).to receive(:run_command!).with(command_preamble + %w[make]).and_return(true) expect(main_object).to receive(:run_command!).with(command_preamble + %w[make]).and_return(true)
run_rake_task('gitlab:gitaly:install', clone_path) subject
end end
context 'when Rails.env is test' do context 'when Rails.env is test' do
...@@ -89,55 +99,10 @@ describe 'gitlab:gitaly namespace rake task' do ...@@ -89,55 +99,10 @@ describe 'gitlab:gitaly namespace rake task' do
it 'calls make in the gitaly directory with --no-deployment flag for bundle' do it 'calls make in the gitaly directory with --no-deployment flag for bundle' do
expect(main_object).to receive(:run_command!).with(command_preamble + command).and_return(true) expect(main_object).to receive(:run_command!).with(command_preamble + command).and_return(true)
run_rake_task('gitlab:gitaly:install', clone_path) subject
end
end end
end end
end end
end end
describe 'storage_config' do
it 'prints storage configuration in a TOML format' do
config = {
'default' => Gitlab::GitalyClient::StorageSettings.new(
'path' => '/path/to/default',
'gitaly_address' => 'unix:/path/to/my.socket'
),
'nfs_01' => Gitlab::GitalyClient::StorageSettings.new(
'path' => '/path/to/nfs_01',
'gitaly_address' => 'unix:/path/to/my.socket'
)
}
allow(Gitlab.config.repositories).to receive(:storages).and_return(config)
allow(Rails.env).to receive(:test?).and_return(false)
expected_output = ''
Timecop.freeze do
expected_output = <<~TOML
# Gitaly storage configuration generated from #{Gitlab.config.source} on #{Time.current.to_s(:long)}
# This is in TOML format suitable for use in Gitaly's config.toml file.
bin_dir = "tmp/tests/gitaly"
socket_path = "/path/to/my.socket"
[gitlab-shell]
dir = "#{Gitlab.config.gitlab_shell.path}"
[[storage]]
name = "default"
path = "/path/to/default"
[[storage]]
name = "nfs_01"
path = "/path/to/nfs_01"
TOML
end
expect { run_rake_task('gitlab:gitaly:storage_config')}
.to output(expected_output).to_stdout
parsed_output = TomlRB.parse(expected_output)
config.each do |name, params|
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
expect(parsed_output['storage']).to include({ 'name' => name, 'path' => params.legacy_disk_path })
end
end
end
end end
end end
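The install task now takes the default storage path as a second argument, which the examples above exercise through run_rake_task('gitlab:gitaly:install', clone_path, storage_path). Assuming standard Rake semantics, a direct invocation would look roughly like this (paths are illustrative only):

# Hypothetical direct invocation matching the new two-argument signature,
# assuming the application's Rake tasks are already loaded:
Rake::Task['gitlab:gitaly:install'].invoke(
  Rails.root.join('tmp/tests/gitaly').to_s,        # clone/build directory
  Rails.root.join('tmp/tests/repositories').to_s   # default storage path
)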
...@@ -641,7 +641,7 @@ rollout 100%: ...@@ -641,7 +641,7 @@ rollout 100%:
function install_dependencies() { function install_dependencies() {
apk add -U openssl curl tar gzip bash ca-certificates git apk add -U openssl curl tar gzip bash ca-certificates git
wget -q -O /etc/apk/keys/sgerrand.rsa.pub https://raw.githubusercontent.com/sgerrand/alpine-pkg-glibc/master/sgerrand.rsa.pub wget -q -O /etc/apk/keys/sgerrand.rsa.pub https://alpine-pkgs.sgerrand.com/sgerrand.rsa.pub
wget https://github.com/sgerrand/alpine-pkg-glibc/releases/download/2.23-r3/glibc-2.23-r3.apk wget https://github.com/sgerrand/alpine-pkg-glibc/releases/download/2.23-r3/glibc-2.23-r3.apk
apk add glibc-2.23-r3.apk apk add glibc-2.23-r3.apk
rm glibc-2.23-r3.apk rm glibc-2.23-r3.apk
......
# Full project: https://gitlab.com/pages/middleman # Full project: https://gitlab.com/pages/middleman
image: ruby:2.3 image: ruby:2.4
variables:
LANG: "C.UTF-8"
cache: cache:
paths: paths:
- vendor - vendor
test: before_script:
script:
- apt-get update -yqqq - apt-get update -yqqq
- apt-get install -y nodejs - apt-get install -y nodejs
- bundle install --path vendor - bundle install --path vendor
test:
script:
- bundle exec middleman build - bundle exec middleman build
except: except:
- master - master
pages: pages:
script: script:
- apt-get update -yqqq
- apt-get install -y nodejs
- bundle install --path vendor
- bundle exec middleman build - bundle exec middleman build
artifacts: artifacts:
paths: paths:
......