Commit 22ff009a authored by James Lopez

Merge branch 'master' of gitlab.com:gitlab-org/gitlab-ce into feature/project-import

parents 10f16095 81cb636e
...@@ -115,6 +115,11 @@ bundler:audit: ...@@ -115,6 +115,11 @@ bundler:audit:
script: script:
- "bundle exec bundle-audit check --update --ignore OSVDB-115941" - "bundle exec bundle-audit check --update --ignore OSVDB-115941"
db-migrate-reset:
stage: test
script:
- RAILS_ENV=test bundle exec rake db:migrate:reset
# Ruby 2.2 jobs # Ruby 2.2 jobs
spec:feature:ruby22: spec:feature:ruby22:
......
...@@ -65,7 +65,7 @@ linters: ...@@ -65,7 +65,7 @@ linters:
# Reports when you have an empty rule set. # Reports when you have an empty rule set.
EmptyRule: EmptyRule:
enabled: false enabled: true
# Reports when you have an @extend directive. # Reports when you have an @extend directive.
ExtendDirective: ExtendDirective:
......
Please view this file on the master branch, on stable branches it's out of date. Please view this file on the master branch, on stable branches it's out of date.
v 8.8.0 (unreleased) v 8.8.0 (unreleased)
- Remove future dates from contribution calendar graph.
v 8.7.0 (unreleased)
v 8.7.1 (unreleased)
- Fix .gitlab-ci.yml parsing issue when hidden job is a template without script definition. !3849
- Fix license detection to detect all license files, not only known licenses. !3878
- Use the `can?` helper instead of `current_user.can?`. !3882
- Prevent users from deleting Webhooks they do not own via the API
- Fix Error 500 due to stale cache when projects are renamed or transferred
v 8.7.0
- Gitlab::GitAccess and Gitlab::GitAccessWiki are now instrumented
- Fix vulnerability that made it possible to gain access to private labels and milestones
- The number of InfluxDB points stored per UDP packet can now be configured - The number of InfluxDB points stored per UDP packet can now be configured
- Fix error when cross-project label reference used with non-existent project - Fix error when cross-project label reference used with non-existent project
- Transactions for /internal/allowed now have an "action" tag set - Transactions for /internal/allowed now have an "action" tag set
...@@ -53,7 +63,7 @@ v 8.7.0 (unreleased) ...@@ -53,7 +63,7 @@ v 8.7.0 (unreleased)
- Add links to CI setup documentation from project settings and builds pages - Add links to CI setup documentation from project settings and builds pages
- Display project members page to all members - Display project members page to all members
- Handle nil descriptions in Slack issue messages (Stan Hu) - Handle nil descriptions in Slack issue messages (Stan Hu)
- Add automated repository integrity checks - Add automated repository integrity checks (OFF by default)
- API: Expose open_issues_count, closed_issues_count, open_merge_requests_count for labels (Robert Schilling) - API: Expose open_issues_count, closed_issues_count, open_merge_requests_count for labels (Robert Schilling)
- API: Ability to star and unstar a project (Robert Schilling) - API: Ability to star and unstar a project (Robert Schilling)
- Add default scope to projects to exclude projects pending deletion - Add default scope to projects to exclude projects pending deletion
...@@ -80,6 +90,7 @@ v 8.7.0 (unreleased) ...@@ -80,6 +90,7 @@ v 8.7.0 (unreleased)
- Remove "Congratulations!" tweet button on newly-created project. (Connor Shea) - Remove "Congratulations!" tweet button on newly-created project. (Connor Shea)
- Fix admin/projects when using visibility levels on search (PotHix) - Fix admin/projects when using visibility levels on search (PotHix)
- Build status notifications - Build status notifications
- Update email confirmation interface
- API: Expose user location (Robert Schilling) - API: Expose user location (Robert Schilling)
- API: Do not leak group existence via return code (Robert Schilling) - API: Do not leak group existence via return code (Robert Schilling)
- ClosingIssueExtractor regex now also works with colons. e.g. "Fixes: #1234" !3591 - ClosingIssueExtractor regex now also works with colons. e.g. "Fixes: #1234" !3591
...@@ -107,6 +118,7 @@ v 8.7.0 (unreleased) ...@@ -107,6 +118,7 @@ v 8.7.0 (unreleased)
- Updated print style for issues - Updated print style for issues
- Use GitHub Issue/PR number as iid to keep references - Use GitHub Issue/PR number as iid to keep references
- Import GitHub labels - Import GitHub labels
- Add option to filter by "Owned projects" on dashboard page
- Import GitHub milestones - Import GitHub milestones
- Fix emoji categories in the emoji picker - Fix emoji categories in the emoji picker
- Execute system web hooks on push to the project - Execute system web hooks on push to the project
......
...@@ -178,7 +178,7 @@ gem 'ruby-fogbugz', '~> 0.2.1' ...@@ -178,7 +178,7 @@ gem 'ruby-fogbugz', '~> 0.2.1'
gem 'd3_rails', '~> 3.5.0' gem 'd3_rails', '~> 3.5.0'
#cal-heatmap #cal-heatmap
gem 'cal-heatmap-rails', '~> 3.5.0' gem 'cal-heatmap-rails', '~> 3.6.0'
# underscore-rails # underscore-rails
gem "underscore-rails", "~> 1.8.0" gem "underscore-rails", "~> 1.8.0"
......
...@@ -103,7 +103,7 @@ GEM ...@@ -103,7 +103,7 @@ GEM
bundler (~> 1.2) bundler (~> 1.2)
thor (~> 0.18) thor (~> 0.18)
byebug (8.2.1) byebug (8.2.1)
cal-heatmap-rails (3.5.1) cal-heatmap-rails (3.6.0)
capybara (2.6.2) capybara (2.6.2)
addressable addressable
mime-types (>= 1.16) mime-types (>= 1.16)
...@@ -134,7 +134,7 @@ GEM ...@@ -134,7 +134,7 @@ GEM
execjs execjs
coffee-script-source (1.10.0) coffee-script-source (1.10.0)
colorize (0.7.7) colorize (0.7.7)
concurrent-ruby (1.0.0) concurrent-ruby (1.0.1)
connection_pool (2.2.0) connection_pool (2.2.0)
coveralls (0.8.13) coveralls (0.8.13)
json (~> 1.8) json (~> 1.8)
...@@ -629,7 +629,7 @@ GEM ...@@ -629,7 +629,7 @@ GEM
recaptcha (1.0.2) recaptcha (1.0.2)
json json
redcarpet (3.3.3) redcarpet (3.3.3)
redis (3.2.2) redis (3.3.0)
redis-actionpack (4.0.1) redis-actionpack (4.0.1)
actionpack (~> 4) actionpack (~> 4)
redis-rack (~> 1.5.0) redis-rack (~> 1.5.0)
...@@ -736,10 +736,9 @@ GEM ...@@ -736,10 +736,9 @@ GEM
rack rack
shoulda-matchers (2.8.0) shoulda-matchers (2.8.0)
activesupport (>= 3.0.0) activesupport (>= 3.0.0)
sidekiq (4.0.1) sidekiq (4.1.1)
concurrent-ruby (~> 1.0) concurrent-ruby (~> 1.0)
connection_pool (~> 2.2, >= 2.2.0) connection_pool (~> 2.2, >= 2.2.0)
json (~> 1.0)
redis (~> 3.2, >= 3.2.1) redis (~> 3.2, >= 3.2.1)
sidekiq-cron (0.4.0) sidekiq-cron (0.4.0)
redis-namespace (>= 1.5.2) redis-namespace (>= 1.5.2)
...@@ -905,7 +904,7 @@ DEPENDENCIES ...@@ -905,7 +904,7 @@ DEPENDENCIES
bullet bullet
bundler-audit bundler-audit
byebug byebug
cal-heatmap-rails (~> 3.5.0) cal-heatmap-rails (~> 3.6.0)
capybara (~> 2.6.2) capybara (~> 2.6.2)
capybara-screenshot (~> 1.0.0) capybara-screenshot (~> 1.0.0)
carrierwave (~> 0.10.0) carrierwave (~> 0.10.0)
......
class @CommitsList class @CommitsList
@timer = null @timer = null
@init: (ref, limit) -> @init: (limit) ->
$("body").on "click", ".day-commits-table li.commit", (event) -> $("body").on "click", ".day-commits-table li.commit", (event) ->
if event.target.nodeName != "A" if event.target.nodeName != "A"
location.href = $(this).attr("url") location.href = $(this).attr("url")
......
...@@ -221,6 +221,9 @@ class GitLabDropdown ...@@ -221,6 +221,9 @@ class GitLabDropdown
menu.toggleClass PAGE_TWO_CLASS menu.toggleClass PAGE_TWO_CLASS
# Focus first visible input on active page
@dropdown.find('[class^="dropdown-page-"]:visible :text:visible:first').focus()
parseData: (data) -> parseData: (data) ->
@renderedData = data @renderedData = data
...@@ -240,7 +243,8 @@ class GitLabDropdown ...@@ -240,7 +243,8 @@ class GitLabDropdown
shouldPropagate: (e) => shouldPropagate: (e) =>
if @options.multiSelect if @options.multiSelect
$target = $(e.target) $target = $(e.target)
if not $target.hasClass('dropdown-menu-close') and not $target.hasClass('dropdown-menu-close-icon')
if not $target.hasClass('dropdown-menu-close') and not $target.hasClass('dropdown-menu-close-icon') and not $target.data('is-link')
e.stopPropagation() e.stopPropagation()
return false return false
else else
...@@ -375,7 +379,6 @@ class GitLabDropdown ...@@ -375,7 +379,6 @@ class GitLabDropdown
selectedObject = @renderedData[selectedIndex] selectedObject = @renderedData[selectedIndex]
value = if @options.id then @options.id(selectedObject, el) else selectedObject.id value = if @options.id then @options.id(selectedObject, el) else selectedObject.id
field = @dropdown.parent().find("input[name='#{fieldName}'][value='#{value}']") field = @dropdown.parent().find("input[name='#{fieldName}'][value='#{value}']")
if el.hasClass(ACTIVE_CLASS) if el.hasClass(ACTIVE_CLASS)
el.removeClass(ACTIVE_CLASS) el.removeClass(ACTIVE_CLASS)
field.remove() field.remove()
......
...@@ -19,23 +19,19 @@ class @LabelsSelect ...@@ -19,23 +19,19 @@ class @LabelsSelect
$form = $dropdown.closest('form') $form = $dropdown.closest('form')
$sidebarCollapsedValue = $block.find('.sidebar-collapsed-icon span') $sidebarCollapsedValue = $block.find('.sidebar-collapsed-icon span')
$value = $block.find('.value') $value = $block.find('.value')
$loading = $block.find('.block-loading').fadeOut() $newLabelError = $('.js-label-error')
if newLabelField.length
$newLabelCreateButton = $('.js-new-label-btn')
$colorPreview = $('.js-dropdown-label-color-preview') $colorPreview = $('.js-dropdown-label-color-preview')
$newLabelError = $dropdown.parent().find('.js-label-error') $newLabelCreateButton = $('.js-new-label-btn')
$newLabelError.hide()
# Suggested colors in the dropdown to choose from pre-chosen colors $newLabelError.hide()
$('.suggest-colors-dropdown a').on 'click', (e) -> $loading = $block.find('.block-loading').fadeOut()
issueURLSplit = issueUpdateURL.split('/') if issueUpdateURL? issueURLSplit = issueUpdateURL.split('/') if issueUpdateURL?
if issueUpdateURL if issueUpdateURL
labelHTMLTemplate = _.template( labelHTMLTemplate = _.template(
'<% _.each(labels, function(label){ %> '<% _.each(labels, function(label){ %>
<a href="<%= ["",issueURLSplit[1], issueURLSplit[2],""].join("/") %>issues?label_name=<%= _.escape(label.title) %>"> <a href="<%= ["",issueURLSplit[1], issueURLSplit[2],""].join("/") %>issues?label_name=<%= _.escape(label.title) %>">
<span class="label has-tooltip color-label" title="<%= _.escape(label.description) %>" style="background-color: <%= label.color %>;"> <span class="label has-tooltip color-label" title="<%= _.escape(label.description) %>" style="background-color: <%= label.color %>; color: <%= label.text_color %>;">
<%= _.escape(label.title) %> <%= _.escape(label.title) %>
</span> </span>
</a> </a>
...@@ -43,7 +39,9 @@ class @LabelsSelect ...@@ -43,7 +39,9 @@ class @LabelsSelect
) )
labelNoneHTMLTemplate = _.template('<div class="light">None</div>') labelNoneHTMLTemplate = _.template('<div class="light">None</div>')
if newLabelField.length and $dropdown.hasClass 'js-extra-options' if newLabelField.length
# Suggested colors in the dropdown to choose from pre-chosen colors
$('.suggest-colors-dropdown a').on "click", (e) -> $('.suggest-colors-dropdown a').on "click", (e) ->
e.preventDefault() e.preventDefault()
e.stopPropagation() e.stopPropagation()
...@@ -82,14 +80,17 @@ class @LabelsSelect ...@@ -82,14 +80,17 @@ class @LabelsSelect
enableLabelCreateButton = -> enableLabelCreateButton = ->
if newLabelField.val() isnt '' and newColorField.val() isnt '' if newLabelField.val() isnt '' and newColorField.val() isnt ''
$newLabelError.hide() $newLabelError.hide()
$('.js-new-label-btn').disable() $newLabelCreateButton.enable()
else
$newLabelCreateButton.disable()
saveLabel = ->
# Create new label with API # Create new label with API
Api.newLabel projectId, { Api.newLabel projectId, {
name: newLabelField.val() name: newLabelField.val()
color: newColorField.val() color: newColorField.val()
}, (label) -> }, (label) ->
$('.js-new-label-btn').enable() $newLabelCreateButton.enable()
if label.message? if label.message?
$newLabelError $newLabelError
...@@ -98,10 +99,6 @@ class @LabelsSelect ...@@ -98,10 +99,6 @@ class @LabelsSelect
else else
$('.dropdown-menu-back', $dropdown.parent()).trigger 'click' $('.dropdown-menu-back', $dropdown.parent()).trigger 'click'
$newLabelCreateButton.enable()
else
$newLabelCreateButton.disable()
newLabelField.on 'keyup change', enableLabelCreateButton newLabelField.on 'keyup change', enableLabelCreateButton
newColorField.on 'keyup change', enableLabelCreateButton newColorField.on 'keyup change', enableLabelCreateButton
...@@ -112,24 +109,7 @@ class @LabelsSelect ...@@ -112,24 +109,7 @@ class @LabelsSelect
.on 'click', (e) -> .on 'click', (e) ->
e.preventDefault() e.preventDefault()
e.stopPropagation() e.stopPropagation()
saveLabel()
if newLabelField.val() isnt '' and newColorField.val() isnt ''
$newLabelError.hide()
$('.js-new-label-btn').disable()
# Create new label with API
Api.newLabel projectId, {
name: newLabelField.val()
color: newColorField.val()
}, (label) ->
$('.js-new-label-btn').enable()
if label.message?
$newLabelError
.text label.message
.show()
else
$('.dropdown-menu-back', $dropdown.parent()).trigger 'click'
saveLabelData = -> saveLabelData = ->
selected = $dropdown selected = $dropdown
...@@ -243,7 +223,7 @@ class @LabelsSelect ...@@ -243,7 +223,7 @@ class @LabelsSelect
fieldName: $dropdown.data('field-name') fieldName: $dropdown.data('field-name')
id: (label) -> id: (label) ->
if $dropdown.hasClass("js-filter-submit") and not label.isAny? if $dropdown.hasClass("js-filter-submit") and not label.isAny?
label.title _.escape label.title
else else
label.id label.id
......
...@@ -87,8 +87,8 @@ class @MergeRequestTabs ...@@ -87,8 +87,8 @@ class @MergeRequestTabs
if window.location.hash if window.location.hash
navBarHeight = $('.navbar-gitlab').outerHeight() navBarHeight = $('.navbar-gitlab').outerHeight()
$el = $("#{container} #{window.location.hash}") $el = $("#{container} #{window.location.hash}:not(.match)")
$.scrollTo("#{container} #{window.location.hash}", offset: -navBarHeight) if $el.length $.scrollTo("#{container} #{window.location.hash}:not(.match)", offset: -navBarHeight) if $el.length
# Activate a tab based on the current action # Activate a tab based on the current action
activateTab: (action) -> activateTab: (action) ->
...@@ -176,12 +176,12 @@ class @MergeRequestTabs ...@@ -176,12 +176,12 @@ class @MergeRequestTabs
if locationHash isnt '' if locationHash isnt ''
hashClassString = ".#{locationHash.replace('#', '')}" hashClassString = ".#{locationHash.replace('#', '')}"
$diffLine = $(locationHash) $diffLine = $("#{locationHash}:not(.match)", $('#diffs'))
if $diffLine.is ':not(tr)' if not $diffLine.is 'tr'
$diffLine = $("td#{locationHash}, td#{hashClassString}") $diffLine = $('#diffs').find("td#{locationHash}, td#{hashClassString}")
else else
$diffLine = $('td', $diffLine) $diffLine = $diffLine.find('td')
if $diffLine.length if $diffLine.length
$diffLine.addClass 'hll' $diffLine.addClass 'hll'
......
...@@ -102,7 +102,8 @@ class @Todos ...@@ -102,7 +102,8 @@ class @Todos
todoLink = $(this).data('url') todoLink = $(this).data('url')
return unless todoLink return unless todoLink
if e.metaKey # Allow Meta-Click or Mouse3-click to open in a new tab
if e.metaKey or e.which is 2
e.preventDefault() e.preventDefault()
window.open(todoLink,'_blank') window.open(todoLink,'_blank')
else else
......
...@@ -144,6 +144,10 @@ ...@@ -144,6 +144,10 @@
} }
} }
.btn-lg {
padding: 12px 20px;
}
.btn-transparent { .btn-transparent {
color: $btn-transparent-color; color: $btn-transparent-color;
background-color: transparent; background-color: transparent;
......
...@@ -54,6 +54,10 @@ ...@@ -54,6 +54,10 @@
fill: #254e77 !important; fill: #254e77 !important;
} }
.future {
visibility: hidden;
}
.domain-background { .domain-background {
fill: none; fill: none;
shape-rendering: crispedges; shape-rendering: crispedges;
......
...@@ -320,7 +320,7 @@ ...@@ -320,7 +320,7 @@
} }
} }
.dropdown-input-field { .dropdown-input-field, .default-dropdown-input {
width: 100%; width: 100%;
padding: 0 7px; padding: 0 7px;
color: $dropdown-input-color; color: $dropdown-input-color;
......
...@@ -38,12 +38,14 @@ ...@@ -38,12 +38,14 @@
.filename { .filename {
&.old { &.old {
display: inline-block;
span.idiff { span.idiff {
background-color: #f8cbcb; background-color: #f8cbcb;
} }
} }
&.new { &.new {
display: inline-block;
span.idiff { span.idiff {
background-color: #a6f3a6; background-color: #a6f3a6;
} }
...@@ -82,10 +84,6 @@ ...@@ -82,10 +84,6 @@
} }
} }
&.blob_file {
}
&.blob-no-preview { &.blob-no-preview {
background: #eee; background: #eee;
text-shadow: 0 1px 2px #fff; text-shadow: 0 1px 2px #fff;
...@@ -129,6 +127,11 @@ ...@@ -129,6 +127,11 @@
td.line-numbers { td.line-numbers {
float: none; float: none;
border-left: 1px solid #ddd; border-left: 1px solid #ddd;
i {
float: none;
margin-right: 0;
}
} }
td.lines { td.lines {
padding: 0; padding: 0;
......
...@@ -121,9 +121,6 @@ ...@@ -121,9 +121,6 @@
} }
} }
.select2-container-multi .select2-choices .select2-search-choice {
}
.select2-drop-active { .select2-drop-active {
margin-top: 6px; margin-top: 6px;
font-size: 14px; font-size: 14px;
......
...@@ -111,8 +111,6 @@ ...@@ -111,8 +111,6 @@
.vg { color: #f8f8f2 } /* Name.Variable.Global */ .vg { color: #f8f8f2 } /* Name.Variable.Global */
.vi { color: #f8f8f2 } /* Name.Variable.Instance */ .vi { color: #f8f8f2 } /* Name.Variable.Instance */
.il { color: #ae81ff } /* Literal.Number.Integer.Long */ .il { color: #ae81ff } /* Literal.Number.Integer.Long */
.gh { } /* Generic Heading & Diff Header */
.gu { color: #75715e; } /* Generic.Subheading & Diff Unified/Comment? */ .gu { color: #75715e; } /* Generic.Subheading & Diff Unified/Comment? */
.gd { color: #f92672; } /* Generic.Deleted & Diff Deleted */ .gd { color: #f92672; } /* Generic.Deleted & Diff Deleted */
.gi { color: #a6e22e; } /* Generic.Inserted & Diff Inserted */ .gi { color: #a6e22e; } /* Generic.Inserted & Diff Inserted */
......
.well-confirmation {
margin-bottom: 20px;
border-bottom: 1px solid #eee;
> h1 {
font-weight: 400;
}
.lead {
margin-bottom: 20px;
}
}
.confirmation-content {
a {
color: $md-link-color;
}
}
...@@ -18,9 +18,6 @@ ...@@ -18,9 +18,6 @@
} }
.graphs { .graphs {
.graph-author-commits-count {
}
.graph-author-email { .graph-author-email {
float: right; float: right;
color: #777; color: #777;
......
...@@ -249,6 +249,10 @@ ...@@ -249,6 +249,10 @@
background: $gray-dark; background: $gray-dark;
border: 1px solid $border-gray-dark; border: 1px solid $border-gray-dark;
} }
&.btn-primary {
@extend .btn-primary
}
} }
a:not(.issuable-pager) { a:not(.issuable-pager) {
......
...@@ -183,6 +183,9 @@ ul.notes { ...@@ -183,6 +183,9 @@ ul.notes {
} }
} }
.author_link {
color: $gl-gray;
}
} }
.note-headline-light, .note-headline-light,
......
...@@ -10,6 +10,8 @@ module FilterProjects ...@@ -10,6 +10,8 @@ module FilterProjects
def filter_projects(projects) def filter_projects(projects)
projects = projects.search(params[:filter_projects]) if params[:filter_projects].present? projects = projects.search(params[:filter_projects]) if params[:filter_projects].present?
projects = projects.non_archived if params[:archived].blank? projects = projects.non_archived if params[:archived].blank?
projects = projects.personal(current_user) if params[:personal].present? && current_user
projects projects
end end
end end
class ConfirmationsController < Devise::ConfirmationsController class ConfirmationsController < Devise::ConfirmationsController
def almost_there
flash[:notice] = nil
render layout: "devise_empty"
end
protected protected
def after_resending_confirmation_instructions_path_for(resource)
users_almost_there_path
end
def after_confirmation_path_for(resource_name, resource) def after_confirmation_path_for(resource_name, resource)
if signed_in?(resource_name) if signed_in?(resource_name)
after_sign_in_path_for(resource) after_sign_in_path_for(resource)
......
...@@ -83,8 +83,7 @@ class Projects::ApplicationController < ApplicationController ...@@ -83,8 +83,7 @@ class Projects::ApplicationController < ApplicationController
end end
def apply_diff_view_cookie! def apply_diff_view_cookie!
view = params[:view] || cookies[:diff_view] cookies.permanent[:diff_view] = params.delete(:view) if params[:view].present?
cookies.permanent[:diff_view] = params[:view] = view if view
end end
def builds_enabled def builds_enabled
......
...@@ -101,7 +101,6 @@ class Projects::IssuesController < Projects::ApplicationController ...@@ -101,7 +101,6 @@ class Projects::IssuesController < Projects::ApplicationController
end end
respond_to do |format| respond_to do |format|
format.js
format.html do format.html do
if @issue.valid? if @issue.valid?
redirect_to issue_path(@issue) redirect_to issue_path(@issue)
...@@ -110,7 +109,7 @@ class Projects::IssuesController < Projects::ApplicationController ...@@ -110,7 +109,7 @@ class Projects::IssuesController < Projects::ApplicationController
end end
end end
format.json do format.json do
render json: @issue.to_json(include: [:milestone, :labels, assignee: { methods: :avatar_url }]) render json: @issue.to_json(include: { milestone: {}, assignee: { methods: :avatar_url }, labels: { methods: :text_color } })
end end
end end
end end
......
...@@ -149,13 +149,12 @@ class Projects::MergeRequestsController < Projects::ApplicationController ...@@ -149,13 +149,12 @@ class Projects::MergeRequestsController < Projects::ApplicationController
if @merge_request.valid? if @merge_request.valid?
respond_to do |format| respond_to do |format|
format.js
format.html do format.html do
redirect_to([@merge_request.target_project.namespace.becomes(Namespace), redirect_to([@merge_request.target_project.namespace.becomes(Namespace),
@merge_request.target_project, @merge_request]) @merge_request.target_project, @merge_request])
end end
format.json do format.json do
render json: @merge_request.to_json(include: [:milestone, :labels, assignee: { methods: :avatar_url }]) render json: @merge_request.to_json(include: { milestone: {}, assignee: { methods: :avatar_url }, labels: { methods: :text_color } })
end end
end end
else else
......
...@@ -31,11 +31,11 @@ class RegistrationsController < Devise::RegistrationsController ...@@ -31,11 +31,11 @@ class RegistrationsController < Devise::RegistrationsController
end end
def after_sign_up_path_for(_resource) def after_sign_up_path_for(_resource)
new_user_session_path users_almost_there_path
end end
def after_inactive_sign_up_path_for(_resource) def after_inactive_sign_up_path_for(_resource)
new_user_session_path users_almost_there_path
end end
private private
......
...@@ -278,9 +278,7 @@ class IssuableFinder ...@@ -278,9 +278,7 @@ class IssuableFinder
end end
end end
# When filtering by multiple labels we may end up duplicating issues (if one item
# has multiple labels). This ensures we only return unique issues.
items.distinct
end end
def by_due_date(items) def by_due_date(items)
......
...@@ -9,7 +9,13 @@ module DiffHelper ...@@ -9,7 +9,13 @@ module DiffHelper
end end
def diff_view def diff_view
params[:view] == 'parallel' ? 'parallel' : 'inline' diff_views = %w(inline parallel)
if diff_views.include?(cookies[:diff_view])
cookies[:diff_view]
else
diff_views.first
end
end end
def diff_hard_limit_enabled? def diff_hard_limit_enabled?
......
...@@ -123,6 +123,18 @@ module ProjectsHelper ...@@ -123,6 +123,18 @@ module ProjectsHelper
end end
end end
def license_short_name(project)
no_license_key = project.repository.license_key.nil? ||
# Back-compat if cache contains 'no-license', can be removed in a few weeks
project.repository.license_key == 'no-license'
return 'LICENSE' if no_license_key
license = Licensee::License.new(project.repository.license_key)
license.nickname || license.name
end
private private
def get_project_nav_tabs(project, current_user) def get_project_nav_tabs(project, current_user)
...@@ -320,14 +332,6 @@ module ProjectsHelper ...@@ -320,14 +332,6 @@ module ProjectsHelper
@ref || @repository.try(:root_ref) @ref || @repository.try(:root_ref)
end end
def license_short_name(project)
license = Licensee::License.new(project.repository.license_key)
license.nickname || license.name
end
private
def filename_path(project, filename) def filename_path(project, filename)
if project && blob = project.repository.send(filename) if project && blob = project.repository.send(filename)
namespace_project_blob_path( namespace_project_blob_path(
......
...@@ -820,13 +820,11 @@ class Project < ActiveRecord::Base ...@@ -820,13 +820,11 @@ class Project < ActiveRecord::Base
wiki = Repository.new("#{old_path}.wiki", self) wiki = Repository.new("#{old_path}.wiki", self)
if repo.exists? if repo.exists?
repo.expire_cache repo.before_delete
repo.expire_emptiness_caches
end end
if wiki.exists? if wiki.exists?
wiki.expire_cache wiki.before_delete
wiki.expire_emptiness_caches
end end
end end
......
...@@ -466,8 +466,8 @@ class Repository ...@@ -466,8 +466,8 @@ class Repository
return nil if !exists? || empty? return nil if !exists? || empty?
cache.fetch(:license_blob) do cache.fetch(:license_blob) do
if licensee_project.license tree(:head).blobs.find do |file|
blob_at_branch(root_ref, licensee_project.matched_file.filename) file.name =~ /\A(licen[sc]e|copying)(\..+|\z)/i
end end
end end
end end
...@@ -476,7 +476,7 @@ class Repository ...@@ -476,7 +476,7 @@ class Repository
return nil if !exists? || empty? return nil if !exists? || empty?
cache.fetch(:license_key) do cache.fetch(:license_key) do
licensee_project.license.try(:key) || 'no-license' Licensee.license(path).try(:key)
end end
end end
...@@ -959,8 +959,4 @@ class Repository ...@@ -959,8 +959,4 @@ class Repository
def cache def cache
@cache ||= RepositoryCache.new(path_with_namespace) @cache ||= RepositoryCache.new(path_with_namespace)
end end
def licensee_project
@licensee_project ||= Licensee.project(path)
end
end end
...@@ -37,8 +37,9 @@ class IssuableBaseService < BaseService ...@@ -37,8 +37,9 @@ class IssuableBaseService < BaseService
end end
def filter_params(issuable_ability_name = :issue) def filter_params(issuable_ability_name = :issue)
params[:assignee_id] = "" if params[:assignee_id] == IssuableFinder::NONE filter_assignee
params[:milestone_id] = "" if params[:milestone_id] == IssuableFinder::NONE filter_milestone
filter_labels
ability = :"admin_#{issuable_ability_name}" ability = :"admin_#{issuable_ability_name}"
...@@ -49,6 +50,29 @@ class IssuableBaseService < BaseService ...@@ -49,6 +50,29 @@ class IssuableBaseService < BaseService
end end
end end
def filter_assignee
if params[:assignee_id] == IssuableFinder::NONE
params[:assignee_id] = ''
end
end
def filter_milestone
milestone_id = params[:milestone_id]
return unless milestone_id
if milestone_id == IssuableFinder::NONE ||
project.milestones.find_by(id: milestone_id).nil?
params[:milestone_id] = ''
end
end
def filter_labels
return if params[:label_ids].to_a.empty?
params[:label_ids] =
project.labels.where(id: params[:label_ids]).pluck(:id)
end
def update(issuable) def update(issuable)
change_state(issuable) change_state(issuable)
filter_params filter_params
......
...@@ -34,6 +34,8 @@ module Projects ...@@ -34,6 +34,8 @@ module Projects
raise TransferError.new("Project with same path in target namespace already exists") raise TransferError.new("Project with same path in target namespace already exists")
end end
project.expire_caches_before_rename(old_path)
# Apply new namespace id and visibility level # Apply new namespace id and visibility level
project.namespace = new_namespace project.namespace = new_namespace
project.visibility_level = new_namespace.visibility_level unless project.visibility_level_allowed_by_group? project.visibility_level = new_namespace.visibility_level unless project.visibility_level_allowed_by_group?
......
.well-confirmation.text-center
%h1.prepend-top-0
Almost there...
%p.lead
Please check your email to confirm your account
%p.confirmation-content.text-center
No confirmation email received? Please check your spam folder or
.append-bottom-20.prepend-top-20.text-center
%a.btn.btn-lg.btn-success{ href: new_user_confirmation_path }
Request new confirmation email
!!! 5
%html{ lang: "en"}
= render "layouts/head"
%body.ui_charcoal.login-page.application.navless
= render "layouts/header/empty"
= render "layouts/broadcast"
.container.navless-container
.content
= render "layouts/flash"
= yield
%hr
.container
.footer-links
= link_to "Explore", explore_root_path
= link_to "Help", help_path
= link_to "About GitLab", "https://about.gitlab.com/"
- if @lines.present? - if @lines.present?
- if @form.unfold? && @form.since != 1 && !@form.bottom? - if @form.unfold? && @form.since != 1 && !@form.bottom?
%tr.line_holder{ id: @form.since } %tr.line_holder
= render "projects/diffs/match_line", { line: @match_line, = render "projects/diffs/match_line", { line: @match_line,
line_old: @form.since, line_new: @form.since, bottom: false, new_file: false } line_old: @form.since, line_new: @form.since, bottom: false, new_file: false }
- @lines.each_with_index do |line, index| - @lines.each_with_index do |line, index|
- line_new = index + @form.since - line_new = index + @form.since
- line_old = line_new - @form.offset - line_old = line_new - @form.offset
%tr.line_holder %tr.line_holder{ id: line_old }
%td.old_line.diff-line-num{ data: { linenumber: line_old } } %td.old_line.diff-line-num{ data: { linenumber: line_old } }
= link_to raw(line_old), "#" = link_to raw(line_old), "##{line_old}"
%td.new_line.diff-line-num{ data: { linenumber: line_old } } %td.new_line.diff-line-num{ data: { linenumber: line_old } }
= link_to raw(line_new) , "#" = link_to raw(line_new) , "##{line_old}"
%td.line_content.noteable_line==#{' ' * @form.indent}#{line} %td.line_content.noteable_line==#{' ' * @form.indent}#{line}
- if @form.unfold? && @form.bottom? && @form.to < @blob.loc - if @form.unfold? && @form.bottom? && @form.to < @blob.loc
......
...@@ -39,4 +39,4 @@ ...@@ -39,4 +39,4 @@
= spinner = spinner
:javascript :javascript
CommitsList.init("#{@ref}", #{@limit}); CommitsList.init(#{@limit});
...@@ -3,7 +3,7 @@ ...@@ -3,7 +3,7 @@
- page_card_attributes @merge_request.card_attributes - page_card_attributes @merge_request.card_attributes
- header_title project_title(@project, "Merge Requests", namespace_project_merge_requests_path(@project.namespace, @project)) - header_title project_title(@project, "Merge Requests", namespace_project_merge_requests_path(@project.namespace, @project))
- if params[:view] == 'parallel' - if diff_view == 'parallel'
- fluid_layout true - fluid_layout true
.merge-request{'data-url' => merge_request_path(@merge_request)} .merge-request{'data-url' => merge_request_path(@merge_request)}
......
- page_title "#{@merge_request.title} (#{merge_request.to_reference})", "Merge Requests" - page_title "#{@merge_request.title} (#{@merge_request.to_reference})", "Merge Requests"
= render "header_title" = render "header_title"
.merge-request .merge-request
......
...@@ -8,7 +8,7 @@ ...@@ -8,7 +8,7 @@
group, members with group, members with
%strong #{group_links.human_access} %strong #{group_links.human_access}
role (#{shared_group_users_count}) role (#{shared_group_users_count})
- if current_user.can?(:admin_group, shared_group) - if can?(current_user, :admin_group, shared_group)
.panel-head-actions .panel-head-actions
= link_to group_group_members_path(shared_group), class: 'btn btn-sm' do = link_to group_group_members_path(shared_group), class: 'btn btn-sm' do
%i.fa.fa-pencil-square-o %i.fa.fa-pencil-square-o
......
...@@ -8,39 +8,7 @@ ...@@ -8,39 +8,7 @@
= h(multi_label_name(params[:label_name], "Label")) = h(multi_label_name(params[:label_name], "Label"))
= icon('chevron-down') = icon('chevron-down')
.dropdown-menu.dropdown-select.dropdown-menu-paging.dropdown-menu-labels.dropdown-menu-selectable .dropdown-menu.dropdown-select.dropdown-menu-paging.dropdown-menu-labels.dropdown-menu-selectable
.dropdown-page-one = render partial: "shared/issuable/label_page_default", locals: { title: "Filter by label" }
= dropdown_title("Filter by label")
= dropdown_filter("Search labels")
= dropdown_content
- if @project
= dropdown_footer do
%ul.dropdown-footer-list
- if can? current_user, :admin_label, @project
%li
%a.dropdown-toggle-page{href: "#"}
Create new
%li
= link_to namespace_project_labels_path(@project.namespace, @project) do
- if can? current_user, :admin_label, @project
Manage labels
- else
View labels
- if can? current_user, :admin_label, @project and @project - if can? current_user, :admin_label, @project and @project
.dropdown-page-two.dropdown-new-label = render partial: "shared/issuable/label_page_create"
= dropdown_title("Create new label", back: true)
= dropdown_content do
.dropdown-labels-error.js-label-error
%input#new_label_name.dropdown-input-field{type: "text", placeholder: "Name new label"}
.suggest-colors.suggest-colors-dropdown
- suggested_colors.each do |color|
= link_to '#', style: "background-color: #{color}", data: { color: color } do
&nbsp
.dropdown-label-color-input
.dropdown-label-color-preview.js-dropdown-label-color-preview
%input#new_label_color.dropdown-input-field{ type: "text" }
.clearfix
%button.btn.btn-primary.pull-left.js-new-label-btn{type: "button"}
Create
%button.btn.btn-default.pull-right.js-cancel-label-btn{type: "button"}
Cancel
= dropdown_loading = dropdown_loading
.dropdown-page-two.dropdown-new-label
= dropdown_title("Create new label", back: true)
= dropdown_content do
.dropdown-labels-error.js-label-error
%input#new_label_name.default-dropdown-input{ type: "text", placeholder: "Name new label" }
.suggest-colors.suggest-colors-dropdown
- suggested_colors.each do |color|
= link_to '#', style: "background-color: #{color}", data: { color: color } do
&nbsp
.dropdown-label-color-input
.dropdown-label-color-preview.js-dropdown-label-color-preview
%input#new_label_color.default-dropdown-input{ type: "text" }
.clearfix
%button.btn.btn-primary.pull-left.js-new-label-btn{ type: "button" }
Create
%button.btn.btn-default.pull-right.js-cancel-label-btn{ type: "button" }
Cancel
- title = local_assigns.fetch(:title, 'Assign labels')
- filter_placeholder = local_assigns.fetch(:filter_placeholder, 'Search labels')
.dropdown-page-one
= dropdown_title(title)
= dropdown_filter(filter_placeholder)
= dropdown_content
- if @project
= dropdown_footer do
%ul.dropdown-footer-list
- if can? current_user, :admin_label, @project
%li
%a.dropdown-toggle-page{href: "#"}
Create new
%li
= link_to namespace_project_labels_path(@project.namespace, @project), :"data-is-link" => true do
- if can? current_user, :admin_label, @project
Manage labels
- else
View labels
= dropdown_loading
\ No newline at end of file
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
%a.btn.btn-default.issuable-pager.disabled{href: '#'} %a.btn.btn-default.issuable-pager.disabled{href: '#'}
Next Next
= form_for [@project.namespace.becomes(Namespace), @project, issuable], remote: true, html: {class: 'issuable-context-form inline-update js-issuable-update'} do |f| = form_for [@project.namespace.becomes(Namespace), @project, issuable], remote: true, format: :json, html: {class: 'issuable-context-form inline-update js-issuable-update'} do |f|
.block.assignee .block.assignee
.sidebar-collapsed-icon.sidebar-collapsed-user{data: {toggle: "tooltip", placement: "left", container: "body"}, title: (issuable.assignee.to_reference if issuable.assignee)} .sidebar-collapsed-icon.sidebar-collapsed-user{data: {toggle: "tooltip", placement: "left", container: "body"}, title: (issuable.assignee.to_reference if issuable.assignee)}
- if issuable.assignee - if issuable.assignee
...@@ -129,24 +129,9 @@ ...@@ -129,24 +129,9 @@
Label Label
= icon('chevron-down') = icon('chevron-down')
.dropdown-menu.dropdown-select.dropdown-menu-paging.dropdown-menu-labels.dropdown-menu-selectable .dropdown-menu.dropdown-select.dropdown-menu-paging.dropdown-menu-labels.dropdown-menu-selectable
.dropdown-page-one = render partial: "shared/issuable/label_page_default"
= dropdown_title("Assign labels") - if can? current_user, :admin_label, @project and @project
= dropdown_filter("Search labels") = render partial: "shared/issuable/label_page_create"
= dropdown_content
- if @project
= dropdown_footer do
%ul.dropdown-footer-list
- if can? current_user, :admin_label, @project
%li
%a.dropdown-toggle-page{href: "#"}
Create new
%li
= link_to namespace_project_labels_path(@project.namespace, @project) do
- if can? current_user, :admin_label, @project
Manage labels
- else
View labels
= dropdown_loading
= render "shared/issuable/participants", participants: issuable.participants(current_user) = render "shared/issuable/participants", participants: issuable.participants(current_user)
- if current_user - if current_user
......
- @sort ||= sort_value_recently_updated - @sort ||= sort_value_recently_updated
- personal = params[:personal]
- archived = params[:archived] - archived = params[:archived]
.dropdown.inline .dropdown.inline
%button.dropdown-toggle.btn{type: 'button', 'data-toggle' => 'dropdown'} %button.dropdown-toggle.btn{type: 'button', 'data-toggle' => 'dropdown'}
...@@ -10,7 +11,7 @@ ...@@ -10,7 +11,7 @@
Sort by Sort by
- projects_sort_options_hash.each do |value, title| - projects_sort_options_hash.each do |value, title|
%li %li
= link_to filter_projects_path(sort: value, archived: archived), class: ("is-active" if @sort == value) do = link_to filter_projects_path(sort: value, archived: archived, personal: personal), class: ("is-active" if @sort == value) do
= title = title
%li.divider %li.divider
...@@ -20,3 +21,11 @@ ...@@ -20,3 +21,11 @@
%li %li
= link_to filter_projects_path(sort: @sort, archived: true), class: ("is-active" if params[:archived].present?) do = link_to filter_projects_path(sort: @sort, archived: true), class: ("is-active" if params[:archived].present?) do
Show archived projects Show archived projects
- if current_user
%li.divider
%li
= link_to filter_projects_path(sort: @sort, personal: nil), class: ("is-active" unless personal) do
Owned by anyone
%li
= link_to filter_projects_path(sort: @sort, personal: true), class: ("is-active" if personal) do
Owned by me
#!/usr/bin/env ruby #!/usr/bin/env ruby
begin
load File.expand_path('../spring', __FILE__)
rescue LoadError => e
raise unless e.message.include?('spring')
end
APP_PATH = File.expand_path('../../config/application', __FILE__) APP_PATH = File.expand_path('../../config/application', __FILE__)
require_relative '../config/boot' require_relative '../config/boot'
require 'rails/commands' require 'rails/commands'
#!/usr/bin/env ruby #!/usr/bin/env ruby
begin
load File.expand_path('../spring', __FILE__)
rescue LoadError => e
raise unless e.message.include?('spring')
end
require_relative '../config/boot' require_relative '../config/boot'
require 'rake' require 'rake'
Rake.application.run Rake.application.run
#!/usr/bin/env ruby #!/usr/bin/env ruby
begin begin
load File.expand_path("../spring", __FILE__) load File.expand_path('../spring', __FILE__)
rescue LoadError rescue LoadError => e
raise unless e.message.include?('spring')
end end
require 'bundler/setup' require 'bundler/setup'
load Gem.bin_path('rspec-core', 'rspec') load Gem.bin_path('rspec-core', 'rspec')
#!/usr/bin/env ruby #!/usr/bin/env ruby
begin begin
load File.expand_path("../spring", __FILE__) load File.expand_path('../spring', __FILE__)
rescue LoadError rescue LoadError => e
raise unless e.message.include?('spring')
end end
require 'bundler/setup' require 'bundler/setup'
load Gem.bin_path('spinach', 'spinach') load Gem.bin_path('spinach', 'spinach')
...@@ -4,12 +4,12 @@ ...@@ -4,12 +4,12 @@
# It gets overwritten when you run the `spring binstub` command. # It gets overwritten when you run the `spring binstub` command.
unless defined?(Spring) unless defined?(Spring)
require "rubygems" require 'rubygems'
require "bundler" require 'bundler'
if match = Bundler.default_lockfile.read.match(/^GEM$.*?^ (?: )*spring \((.*?)\)$.*?^$/m) if (match = Bundler.default_lockfile.read.match(/^GEM$.*?^ (?: )*spring \((.*?)\)$.*?^$/m))
Gem.paths = { "GEM_PATH" => [Bundler.bundle_path.to_s, *Gem.path].uniq } Gem.paths = { 'GEM_PATH' => [Bundler.bundle_path.to_s, *Gem.path].uniq.join(Gem.path_separator) }
gem "spring", match[1] gem 'spring', match[1]
require "spring/binstub" require 'spring/binstub'
end end
end end
#!/usr/bin/env ruby
begin
load File.expand_path('../spring', __FILE__)
rescue LoadError => e
raise unless e.message.include?('spring')
end
require 'bundler/setup'
load Gem.bin_path('teaspoon', 'teaspoon')
...@@ -107,6 +107,10 @@ if Gitlab::Metrics.enabled? ...@@ -107,6 +107,10 @@ if Gitlab::Metrics.enabled?
config.instrument_methods(const) config.instrument_methods(const)
config.instrument_instance_methods(const) config.instrument_instance_methods(const)
end end
# Instrument the classes used for checking if somebody has push access.
config.instrument_instance_methods(Gitlab::GitAccess)
config.instrument_instance_methods(Gitlab::GitAccessWiki)
end end
GC::Profiler.enable GC::Profiler.enable
......
...@@ -424,6 +424,7 @@ Rails.application.routes.draw do ...@@ -424,6 +424,7 @@ Rails.application.routes.draw do
devise_scope :user do devise_scope :user do
get '/users/auth/:provider/omniauth_error' => 'omniauth_callbacks#omniauth_error', as: :omniauth_error get '/users/auth/:provider/omniauth_error' => 'omniauth_callbacks#omniauth_error', as: :omniauth_error
get '/users/almost_there' => 'confirmations#almost_there'
end end
root to: "root#index" root to: "root#index"
......
class DisableRepositoryChecks < ActiveRecord::Migration
def up
change_column_default :application_settings, :repository_checks_enabled, false
execute 'UPDATE application_settings SET repository_checks_enabled = false'
end
def down
change_column_default :application_settings, :repository_checks_enabled, true
execute 'UPDATE application_settings SET repository_checks_enabled = true'
end
end
...@@ -11,7 +11,7 @@ ...@@ -11,7 +11,7 @@
# #
# It's strongly recommended that you check this file into your version control system. # It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20160419120017) do ActiveRecord::Schema.define(version: 20160421130527) do
# These are extensions that must be enabled in order to support this database # These are extensions that must be enabled in order to support this database
enable_extension "plpgsql" enable_extension "plpgsql"
...@@ -77,7 +77,7 @@ ActiveRecord::Schema.define(version: 20160419120017) do ...@@ -77,7 +77,7 @@ ActiveRecord::Schema.define(version: 20160419120017) do
t.string "akismet_api_key" t.string "akismet_api_key"
t.boolean "email_author_in_body", default: false t.boolean "email_author_in_body", default: false
t.integer "default_group_visibility" t.integer "default_group_visibility"
t.boolean "repository_checks_enabled", default: true t.boolean "repository_checks_enabled", default: false
t.integer "metrics_packet_size", default: 1 t.integer "metrics_packet_size", default: 1
t.text "shared_runners_text" t.text "shared_runners_text"
end end
......
...@@ -41,6 +41,8 @@ ...@@ -41,6 +41,8 @@
- [Git LFS configuration](workflow/lfs/lfs_administration.md) - [Git LFS configuration](workflow/lfs/lfs_administration.md)
- [Housekeeping](administration/housekeeping.md) Keep your Git repository tidy and fast. - [Housekeeping](administration/housekeeping.md) Keep your Git repository tidy and fast.
- [GitLab Performance Monitoring](monitoring/performance/introduction.md) Configure GitLab and InfluxDB for measuring performance metrics - [GitLab Performance Monitoring](monitoring/performance/introduction.md) Configure GitLab and InfluxDB for measuring performance metrics
- [Sidekiq Troubleshooting](administration/troubleshooting/sidekiq.md) Debug when Sidekiq appears hung and is not processing jobs
- [High Availability](administration/high_availability/README.md) Configure multiple servers for scaling or high availability
## Contributor documentation ## Contributor documentation
......
# High Availability
GitLab supports several different types of clustering and high-availability.
The solution you choose will be based on the level of scalability and
availability you require. The easiest solutions are scalable, but not necessarily
highly available.
## Architecture
### Active/Passive
For pure high-availability/failover with no scaling you can use an
active/passive configuration. This utilizes DRBD (Distributed Replicated
Block Device) to keep all data in sync. DRBD requires a low latency link to
remain in sync. It is not advisable to attempt to run DRBD between data centers
or in different cloud availability zones.
Components/Servers Required:
- 2 servers/virtual machines (one active/one passive)
### Active/Active
This architecture scales easily because all application servers handle
user requests simultaneously. The database, Redis, and GitLab application are
all deployed on separate servers. The configuration is **only** highly-available
if the database, Redis and storage are also configured as such.
**Steps to configure active/active:**
1. [Configure the database](database.md)
1. [Configure Redis](redis.md)
1. [Configure NFS](nfs.md)
1. [Configure the GitLab application servers](gitlab.md)
1. [Configure the load balancers](load_balancer.md)
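
In Omnibus terms, the split described above comes down to which bundled components are enabled in each node's `/etc/gitlab/gitlab.rb`. The sketch below is only a rough illustration using the `enable` flags and example hosts that appear in the pages linked above; the authoritative settings live in those pages.

```ruby
# Dedicated database node: run only the bundled PostgreSQL.
postgresql['enable'] = true
redis['enable']      = false
unicorn['enable']    = false
sidekiq['enable']    = false
nginx['enable']      = false

# GitLab application node: disable the shared data stores and point
# Rails at the external database and Redis hosts instead.
# postgresql['enable']       = false
# redis['enable']            = false
# gitlab_rails['db_host']    = '10.1.0.5'
# gitlab_rails['redis_host'] = '10.1.0.6'
```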
# Configuring a Database for GitLab HA
You can choose to install and manage a database server (PostgreSQL/MySQL)
yourself, or you can use GitLab Omnibus packages to help. GitLab recommends
PostgreSQL. This is the database that will be installed if you use the
Omnibus package to manage your database.
## Configure your own database server
If you're hosting GitLab on a cloud provider, you can optionally use a
managed service for PostgreSQL. For example, AWS offers a managed Relational
Database Service (RDS) that runs PostgreSQL.
If you use a cloud-managed service, or provide your own PostgreSQL:
1. Set up a `gitlab` username with a password of your choice. The `gitlab` user
needs privileges to create the `gitlabhq_production` database.
1. Configure the GitLab application servers with the appropriate details.
This step is covered in [Configuring GitLab for HA](gitlab.md)
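
Either way, the GitLab application servers end up pointing at that database through `/etc/gitlab/gitlab.rb`. A minimal sketch follows; the hostname, username, and password are placeholders, and the full application-server walkthrough for these keys is in [Configuring GitLab for HA](gitlab.md).

```ruby
# Connection details for an externally managed PostgreSQL instance.
gitlab_rails['db_adapter']  = 'postgresql'
gitlab_rails['db_encoding'] = 'unicode'
gitlab_rails['db_host']     = 'postgres.example.com' # or your RDS endpoint
gitlab_rails['db_username'] = 'gitlab'
gitlab_rails['db_password'] = 'DB password'
```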
## Configure using Omnibus
1. Download/install GitLab Omnibus using **steps 1 and 2** from
[GitLab downloads](https://about.gitlab.com/downloads). Do not complete other
steps on the download page.
1. Create/edit `/etc/gitlab/gitlab.rb` and use the following configuration.
Be sure to change the `external_url` to match your eventual GitLab front-end
URL.
```ruby
external_url 'https://gitlab.example.com'
# Disable all components except PostgreSQL
postgresql['enable'] = true
bootstrap['enable'] = false
nginx['enable'] = false
unicorn['enable'] = false
sidekiq['enable'] = false
redis['enable'] = false
gitlab_workhorse['enable'] = false
mailroom['enable'] = false
# PostgreSQL configuration
postgresql['sql_password'] = 'DB password'
postgresql['md5_auth_cidr_addresses'] = ['0.0.0.0/0']
postgresql['listen_address'] = '0.0.0.0'
```
1. Run `sudo gitlab-ctl reconfigure` to install and configure PostgreSQL.
> **Note**: This `reconfigure` step will result in some errors.
That's OK - don't be alarmed.
1. Open a database prompt:
```
su - gitlab-psql
/bin/bash
psql -h /var/opt/gitlab/postgresql -d template1
# Output:
psql (9.2.15)
Type "help" for help.
template1=#
```
1. Run the following command at the database prompt and you will be asked to
enter the new password for the PostgreSQL superuser.
```
\password
# Output:
Enter new password:
Enter it again:
```
1. Similarly, set the password for the `gitlab` database user. Use the same
password that you specified in the `/etc/gitlab/gitlab.rb` file for
`postgresql['sql_password']`.
```
\password gitlab
# Output:
Enter new password:
Enter it again:
```
1. Enable the `pg_trgm` extension:
```
CREATE EXTENSION pg_trgm;
# Output:
CREATE EXTENSION
```
1. Exit the database prompt by typing `\q` and Enter.
1. Exit the `gitlab-psql` user by running `exit` twice.
1. Run `sudo gitlab-ctl reconfigure` a final time.
1. Run `touch /etc/gitlab/skip-auto-migrations` to prevent database migrations
from running on upgrade. Only the primary GitLab application server should
handle migrations.
---
Read more on high-availability configuration:
1. [Configure Redis](redis.md)
1. [Configure NFS](nfs.md)
1. [Configure the GitLab application servers](gitlab.md)
1. [Configure the load balancers](load_balancer.md)
# Configuring GitLab for HA
Assuming you have already configured a database, Redis, and NFS, you can
configure the GitLab application server(s) now. Complete the steps below
for each GitLab application server in your environment.
> **Note:** There is some additional configuration near the bottom for
secondary GitLab application servers. It's important to read and understand
these additional steps before proceeding with GitLab installation.
1. If necessary, install the NFS client utility packages using the following
commands:
```
# Ubuntu/Debian
apt-get install nfs-common
# CentOS/Red Hat
yum install nfs-utils nfs-utils-lib
```
1. Specify the necessary NFS shares. Mounts are specified in
`/etc/fstab`. The exact contents of `/etc/fstab` will depend on how you chose
to configure your NFS server. See [NFS documentation](nfs.md) for the various
options. Here is an example snippet to add to `/etc/fstab`:
```
10.1.0.1:/var/opt/gitlab/.ssh /var/opt/gitlab/.ssh nfs defaults,soft,rsize=1048576,wsize=1048576,noatime,nobootwait,lookupcache=positive 0 2
10.1.0.1:/var/opt/gitlab/gitlab-rails/uploads /var/opt/gitlab/gitlab-rails/uploads nfs defaults,soft,rsize=1048576,wsize=1048576,noatime,nobootwait,lookupcache=positive 0 2
10.1.0.1:/var/opt/gitlab/gitlab-rails/shared /var/opt/gitlab/gitlab-rails/shared nfs defaults,soft,rsize=1048576,wsize=1048576,noatime,nobootwait,lookupcache=positive 0 2
10.1.0.1:/var/opt/gitlab/gitlab-ci/builds /var/opt/gitlab/gitlab-ci/builds nfs defaults,soft,rsize=1048576,wsize=1048576,noatime,nobootwait,lookupcache=positive 0 2
10.1.1.1:/var/opt/gitlab/git-data /var/opt/gitlab/git-data nfs defaults,soft,rsize=1048576,wsize=1048576,noatime,nobootwait,lookupcache=positive 0 2
```
1. Create the shared directories. These may be different depending on your NFS
mount locations.
```
mkdir -p /var/opt/gitlab/.ssh /var/opt/gitlab/gitlab-rails/uploads /var/opt/gitlab/gitlab-rails/shared /var/opt/gitlab/gitlab-ci/builds /var/opt/gitlab/git-data
```
1. Download/install GitLab Omnibus using **steps 1 and 2** from
[GitLab downloads](https://about.gitlab.com/downloads). Do not complete other
steps on the download page.
1. Create/edit `/etc/gitlab/gitlab.rb` and use the following configuration.
Be sure to change the `external_url` to match your eventual GitLab front-end
URL. Depending on your NFS configuration, you may need to change some GitLab
data locations. See [NFS documentation](nfs.md) for `/etc/gitlab/gitlab.rb`
configuration values for various scenarios. The example below assumes you've
added NFS mounts in the default data locations.
```ruby
external_url 'https://gitlab.example.com'
# Prevent GitLab from starting if NFS data mounts are not available
high_availability['mountpoint'] = '/var/opt/gitlab/git-data'
# Disable components that will not be on the GitLab application server
postgresql['enable'] = false
redis['enable'] = false
# PostgreSQL connection details
gitlab_rails['db_adapter'] = 'postgresql'
gitlab_rails['db_encoding'] = 'unicode'
gitlab_rails['db_host'] = '10.1.0.5' # IP/hostname of database server
gitlab_rails['db_password'] = 'DB password'
# Redis connection details
gitlab_rails['redis_port'] = '6379'
gitlab_rails['redis_host'] = '10.1.0.6' # IP/hostname of Redis server
gitlab_rails['redis_password'] = 'Redis Password'
```
1. Run `sudo gitlab-ctl reconfigure` to compile the configuration.
## Primary GitLab application server
As a final step, run the setup rake task on the first GitLab application server.
It is not necessary to run this on additional application servers.
1. Initialize the database by running `sudo gitlab-rake gitlab:setup`.
> **WARNING:** Only run this setup task on **NEW** GitLab instances because it
will wipe any existing data.
> **Note:** When you specify `https` in the `external_url`, as in the example
above, GitLab assumes you have SSL certificates in `/etc/gitlab/ssl/`. If
certificates are not present, Nginx will fail to start. See
[Nginx documentation](http://docs.gitlab.com/omnibus/settings/nginx.html#enable-https)
for more information.
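
The certificate and key locations can also be made explicit rather than relying on the defaults. A sketch, assuming the Omnibus `nginx['ssl_certificate']`/`nginx['ssl_certificate_key']` settings and the hostname from the example `external_url`; the paths shown are the conventional defaults, so adjust them to your environment.

```ruby
# Where the bundled NGINX should find the certificate for gitlab.example.com.
nginx['ssl_certificate']     = '/etc/gitlab/ssl/gitlab.example.com.crt'
nginx['ssl_certificate_key'] = '/etc/gitlab/ssl/gitlab.example.com.key'
```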
## Additional configuration for secondary GitLab application servers
Secondary GitLab servers (servers configured **after** the first GitLab server)
need some additional configuration.
1. Configure shared secrets. These values can be obtained from the primary
GitLab server in `/etc/gitlab/gitlab-secrets.json`. Add these to
`/etc/gitlab/gitlab.rb` **prior to** running the first `reconfigure` in
the steps above.
```ruby
gitlab_shell['secret_token'] = 'fbfb19c355066a9afb030992231c4a363357f77345edd0f2e772359e5be59b02538e1fa6cae8f93f7d23355341cea2b93600dab6d6c3edcdced558fc6d739860'
gitlab_rails['secret_token'] = 'b719fe119132c7810908bba18315259ed12888d4f5ee5430c42a776d840a396799b0a5ef0a801348c8a357f07aa72bbd58e25a84b8f247a25c72f539c7a6c5fa'
gitlab_ci['secret_key_base'] = '6e657410d57c71b4fc3ed0d694e7842b1895a8b401d812c17fe61caf95b48a6d703cb53c112bc01ebd197a85da81b18e29682040e99b4f26594772a4a2c98c6d'
gitlab_ci['db_key_base'] = 'bf2e47b68d6cafaef1d767e628b619365becf27571e10f196f98dc85e7771042b9203199d39aff91fcb6837c8ed83f2a912b278da50999bb11a2fbc0fba52964'
```
1. Run `touch /etc/gitlab/skip-auto-migrations` to prevent database migrations
from running on upgrade. Only the primary GitLab application server should
handle migrations.
## Troubleshooting
- `mount: wrong fs type, bad option, bad superblock on`
You have not installed the necessary NFS client utilities. See step 1 above.
- `mount: mount point /var/opt/gitlab/... does not exist`
This particular directory does not exist on the NFS server. Ensure
the share is exported and exists on the NFS server and try to remount.
---
Read more on high-availability configuration:
1. [Configure the database](database.md)
1. [Configure Redis](redis.md)
1. [Configure NFS](nfs.md)
1. [Configure the load balancers](load_balancer.md)
# Load Balancer for GitLab HA
In an active/active GitLab configuration, you will need a load balancer to route
traffic to the application servers. The specifics on which load balancer to use
or the exact configuration is beyond the scope of GitLab documentation. We hope
that if you're managing HA systems like GitLab you have a load balancer of
choice already. Some examples include HAProxy (open-source), F5 Big-IP LTM,
and Citrix Net Scaler. This documentation will outline what ports and protocols
you need to use with GitLab.
## Basic ports
| LB Port | Backend Port | Protocol |
| ------- | ------------ | -------- |
| 80 | 80 | HTTP |
| 443 | 443 | HTTPS [^1] |
| 22 | 22 | TCP |
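
For the HTTPS row above, footnote [^1] applies: if the load balancer terminates SSL and proxies unencrypted traffic to the application servers, the bundled NGINX has to be told which port to listen on and that it is not handling TLS itself. A sketch, assuming the Omnibus `nginx['listen_port']`/`nginx['listen_https']` settings; the exact port depends on what your load balancer forwards to.

```ruby
external_url 'https://gitlab.example.com'  # public URL stays on HTTPS
nginx['listen_port']  = 443                # port the load balancer forwards to
nginx['listen_https'] = false              # TLS already terminated at the load balancer
```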
## GitLab Pages Ports
If you're using GitLab Pages you will need some additional port configurations.
GitLab Pages requires a separate VIP. Configure DNS to point the
`pages_external_url` from `/etc/gitlab/gitlab.rb` at the new VIP. See the
[GitLab Pages documentation][gitlab-pages] for more information.
| LB Port | Backend Port | Protocol |
| ------- | ------------ | -------- |
| 80 | Varies [^2] | HTTP |
| 443 | Varies [^2] | TCP [^3] |
## Alternate SSH Port
Some organizations have policies against opening SSH port 22. In this case,
it may be helpful to configure an alternate SSH hostname that allows users
to use SSH on port 443. An alternate SSH hostname will require a new VIP
compared to the other GitLab HTTP configuration above.
Configure DNS for an alternate SSH hostname such as altssh.gitlab.example.com.
| LB Port | Backend Port | Protocol |
| ------- | ------------ | -------- |
| 443 | 22 | TCP |
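
The load balancer change only routes the traffic; if the web UI should also advertise the alternate endpoint in SSH clone URLs, the displayed host and port can be overridden. A sketch, assuming the Omnibus `gitlab_rails['gitlab_ssh_host']` and `gitlab_rails['gitlab_shell_ssh_port']` settings, which are not part of this guide.

```ruby
# Advertise the alternate SSH endpoint in clone URLs shown by the web UI.
gitlab_rails['gitlab_ssh_host']       = 'altssh.gitlab.example.com'
gitlab_rails['gitlab_shell_ssh_port'] = 443
```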
---
Read more on high-availability configuration:
1. [Configure the database](database.md)
1. [Configure Redis](redis.md)
1. [Configure NFS](nfs.md)
1. [Configure the GitLab application servers](gitlab.md)
[^1]: When using HTTPS protocol for port 443, you will need to add an SSL
certificate to the load balancers. If you wish to terminate SSL at the
GitLab application server instead, use TCP protocol.
[^2]: The backend port for GitLab Pages depends on the
`gitlab_pages['external_http']` and `gitlab_pages['external_https']`
setting. See [GitLab Pages documentation][gitlab-pages] for more details.
[^3]: Port 443 for GitLab Pages should always use the TCP protocol. Users can
configure custom domains with custom SSL, which would not be possible
if SSL was terminated at the load balancer.
[gitlab-pages]: http://doc.gitlab.com/ee/pages/administration.html
# NFS
## Required NFS Server features
**File locking**: GitLab **requires** file locking which is only supported
natively in NFS version 4. NFSv3 also supports locking as long as
Linux Kernel 2.6.5+ is used. We recommend using version 4 and do not
specifically test NFSv3.
**no_root_squash**: NFS normally changes the `root` user to `nobody`. This is
a good security measure when NFS shares will be accessed by many different
users. However, in this case only GitLab will use the NFS share so it
is safe. GitLab requires the `no_root_squash` setting because we need to
manage file permissions automatically. Without the setting you will receive
errors when the Omnibus package tries to alter permissions. Note that GitLab
and other bundled components do **not** run as `root` but as non-privileged
users. The requirement for `no_root_squash` is to allow the Omnibus package to
set ownership and permissions on files, as needed.
### Recommended options
When you define your NFS exports, we recommend you also add the following
options (see the example export entry after this list):
- `sync` - Force synchronous behavior. Default is asynchronous and under certain
circumstances it could lead to data loss if a failure occurs before data has
synced.
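
For illustration only, an export entry on the NFS server that combines these
recommendations could look like the following; the path and client range are
placeholders for your environment:

```
# /etc/exports on the NFS server
/var/opt/gitlab/git-data 10.1.0.0/24(rw,sync,no_root_squash)
```

After editing `/etc/exports` on the server, run `exportfs -ra` so the changes
take effect.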
## Client mount options
Below is an example of an NFS mount point we use on GitLab.com:
```
10.1.1.1:/var/opt/gitlab/git-data /var/opt/gitlab/git-data nfs4 defaults,soft,rsize=1048576,wsize=1048576,noatime,nobootwait,lookupcache=positive 0 2
```
Notice several options that you should consider using:
| Setting | Description |
| ------- | ----------- |
| `nobootwait` | Don't halt the boot process waiting for this mount to become available
| `lookupcache=positive` | Tells the NFS client to honor `positive` cache results but invalidates any `negative` cache results. Negative cache results cause problems with Git. Specifically, a `git push` can fail to register uniformly across all NFS clients. The negative cache causes the clients to 'remember' that the files did not exist previously.
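
To see which options are actually in effect on a client after mounting, inspect
the active mounts; `nfsstat` ships with the NFS client utilities:

```
# Show the Git data mount and its options
mount | grep /var/opt/gitlab/git-data

# NFS-specific view of all NFS mounts and their options
nfsstat -m
```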
## Mount locations
When using default Omnibus configuration you will need to share 5 data locations
between all GitLab cluster nodes. No other locations should be shared. The
following are the 5 locations you need to mount:
| Location | Description |
| -------- | ----------- |
| `/var/opt/gitlab/git-data` | Git repository data. This will account for a large portion of your data
| `/var/opt/gitlab/.ssh` | SSH `authorized_keys` file and keys used to import repositories from some other Git services
| `/var/opt/gitlab/gitlab-rails/uploads` | User uploaded attachments
| `/var/opt/gitlab/gitlab-rails/shared` | Build artifacts, GitLab Pages, LFS objects, temp files, etc. If you're using LFS this may also account for a large portion of your data
| `/var/opt/gitlab/gitlab-ci/builds` | GitLab CI build traces
Other GitLab directories should not be shared between nodes. They contain
node-specific files and GitLab code that does not need to be shared. To ship
logs to a central location consider using remote syslog. GitLab Omnibus packages
provide configuration for [UDP log shipping][udp-log-shipping].
### Consolidating mount points
If you don't want to configure 5-6 different NFS mount points, you have a few
alternative options.
#### Change default file locations
Omnibus allows you to configure the file locations. With custom configuration
you can specify just one main mountpoint and have all of these locations
as subdirectories. Mount `/gitlab-data` then use the following Omnibus
configuration to move each data location to a subdirectory:
```ruby
user['home'] = '/gitlab-data/home'
git_data_dir '/gitlab-data/git-data'
gitlab_rails['shared_path'] = '/gitlab-data/shared'
gitlab_rails['uploads_directory'] = "/gitlab-data/uploads"
gitlab_ci['builds_directory'] = '/gitlab-data/builds'
```
To move the `git` home directory, all GitLab services must be stopped. Run
`gitlab-ctl stop && initctl stop gitlab-runsvdir`. Then continue with the
reconfigure.
Run `sudo gitlab-ctl reconfigure` to start using the central location. Please
be aware that if you had existing data you will need to manually copy/rsync it
to these new locations and then restart GitLab.
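
Put together, the whole migration might look like the following sketch. It
assumes an Upstart-based system, as in the instructions above; repeat the
`rsync` step for every data location you move:

```
sudo gitlab-ctl stop
sudo initctl stop gitlab-runsvdir

# Apply the new configuration so GitLab uses the new locations
sudo gitlab-ctl reconfigure

# Copy any existing data into the new locations
# (example: Git repository data; repeat for the other locations you moved)
sudo rsync -av /var/opt/gitlab/git-data/ /gitlab-data/git-data/

# Restart GitLab
sudo initctl start gitlab-runsvdir
sudo gitlab-ctl restart
```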
#### Bind mounts
Bind mounts provide a way to specify just one NFS mount and then
bind the default GitLab data locations to the NFS mount. Start by defining your
single NFS mount point as you normally would in `/etc/fstab`. Let's assume your
NFS mount point is `/gitlab-data`. Then, add the following bind mounts in
`/etc/fstab`:
```bash
/gitlab-data/git-data /var/opt/gitlab/git-data none bind 0 0
/gitlab-data/.ssh /var/opt/gitlab/.ssh none bind 0 0
/gitlab-data/uploads /var/opt/gitlab/gitlab-rails/uploads none bind 0 0
/gitlab-data/shared /var/opt/gitlab/gitlab-rails/shared none bind 0 0
/gitlab-data/builds /var/opt/gitlab/gitlab-ci/builds none bind 0 0
```
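
After adding these entries, create the target directories on the NFS mount and
activate the bind mounts; this assumes `/gitlab-data` is already mounted:

```
sudo mkdir -p /gitlab-data/{git-data,.ssh,uploads,shared,builds}
sudo mount -a
```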
---
Read more on high-availability configuration:
1. [Configure the database](database.md)
1. [Configure Redis](redis.md)
1. [Configure the GitLab application servers](gitlab.md)
1. [Configure the load balancers](load_balancer.md)
[udp-log-shipping]: http://doc.gitlab.com/omnibus/settings/logs.html#udp-log-shipping-gitlab-enterprise-edition-only "UDP log shipping"
# Configuring Redis for GitLab HA
You can choose to install and manage Redis yourself, or you can use GitLab
Omnibus packages to help.
## Configure your own Redis server
If you're hosting GitLab on a cloud provider, you can optionally use a
managed service for Redis. For example, AWS offers a managed ElastiCache service
that runs Redis.
> **Note:** Redis does not require authentication by default. See
[Redis Security](http://redis.io/topics/security) documentation for more
information. We recommend using a combination of a Redis password and tight
firewall rules to secure your Redis service.
## Configure using Omnibus
1. Download/install GitLab Omnibus using **steps 1 and 2** from
[GitLab downloads](https://about.gitlab.com/downloads). Do not complete other
steps on the download page.
1. Create/edit `/etc/gitlab/gitlab.rb` and use the following configuration.
Be sure to change the `external_url` to match your eventual GitLab front-end
URL.
```ruby
external_url 'https://gitlab.example.com'
# Disable all components except Redis
redis['enable'] = true
bootstrap['enable'] = false
nginx['enable'] = false
unicorn['enable'] = false
sidekiq['enable'] = false
postgresql['enable'] = false
gitlab_workhorse['enable'] = false
mailroom['enable'] = false
# Redis configuration
redis['port'] = 6379
redis['bind'] = '0.0.0.0'
# If you wish to use Redis authentication (recommended)
redis['password'] = 'Redis Password'
```
1. Run `sudo gitlab-ctl reconfigure` to install and configure Redis (a connectivity check example follows these steps).
> **Note**: This `reconfigure` step will result in some errors.
That's OK - don't be alarmed.
1. Run `touch /etc/gitlab/skip-auto-migrations` to prevent database migrations
from running on upgrade. Only the primary GitLab application server should
handle migrations.
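
After completing the steps above, you can check from one of the GitLab
application servers that Redis is reachable. The host and password below are
placeholders, and the embedded `redis-cli` path applies to Omnibus
installations:

```
/opt/gitlab/embedded/bin/redis-cli -h redis.example.com -a 'Redis Password' ping
# Expected reply: PONG
```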
---
Read more on high-availability configuration:
1. [Configure the database](database.md)
1. [Configure NFS](nfs.md)
1. [Configure the GitLab application servers](gitlab.md)
1. [Configure the load balancers](load_balancer.md)
# Repository checks # Repository checks
>**Note:** >**Note:**
This feature was [introduced][ce-3232] in GitLab 8.7. This feature was [introduced][ce-3232] in GitLab 8.7. It is OFF by
default because it still causes too many false alarms.
Git has a built-in mechanism, [git fsck][git-fsck], to verify the Git has a built-in mechanism, [git fsck][git-fsck], to verify the
integrity of all data committed to a repository. GitLab administrators integrity of all data committed to a repository. GitLab administrators
......
# Troubleshooting Sidekiq
Sidekiq is the background job processor GitLab uses to asynchronously run
tasks. When things go wrong it can be difficult to troubleshoot. These
situations also tend to be high-pressure because a production system job queue
may be filling up. Users will notice when this happens because new branches
may not show up and merge requests may not be updated. The following are some
troubleshooting steps that will help you diagnose the bottleneck.
> **Note:** GitLab administrators/users should consider working through these
debug steps with GitLab Support so the backtraces can be analyzed by our team.
It may reveal a bug or necessary improvement in GitLab.
> **Note:** In any of the backtraces, be wary of suspecting cases where every
thread appears to be waiting in the database, Redis, or waiting to acquire
a mutex. This **may** mean there's contention in the database, for example,
but look for one thread that is different from the rest. This other thread
may be using all available CPU, or holding the Ruby Global Interpreter Lock,
preventing other threads from continuing.
## Thread dump
Send the Sidekiq process ID the `TTIN` signal and it will output thread
backtraces in the log file.
```
kill -TTIN <sidekiq_pid>
```
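
If you are not sure of the Sidekiq PID, on an Omnibus installation you can find
it with either of the following (shown for illustration):

```
# Find the Sidekiq process
ps aux | grep '[s]idekiq'

# Or ask the service supervisor directly
sudo gitlab-ctl status sidekiq
```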
Check in `/var/log/gitlab/sidekiq/current` or `$GITLAB_HOME/log/sidekiq.log` for
the backtrace output. The backtraces will be lengthy and generally start with
several `WARN` level messages. Here's an example of a single thread's backtrace:
```
2016-04-13T06:21:20.022Z 31517 TID-orn4urby0 WARN: ActiveRecord::RecordNotFound: Couldn't find Note with 'id'=3375386
2016-04-13T06:21:20.022Z 31517 TID-orn4urby0 WARN: /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/activerecord-4.2.5.2/lib/active_record/core.rb:155:in `find'
/opt/gitlab/embedded/service/gitlab-rails/app/workers/new_note_worker.rb:7:in `perform'
/opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/sidekiq-4.0.1/lib/sidekiq/processor.rb:150:in `execute_job'
/opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/sidekiq-4.0.1/lib/sidekiq/processor.rb:132:in `block (2 levels) in process'
/opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/sidekiq-4.0.1/lib/sidekiq/middleware/chain.rb:127:in `block in invoke'
/opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/sidekiq_middleware/memory_killer.rb:17:in `call'
/opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/sidekiq-4.0.1/lib/sidekiq/middleware/chain.rb:129:in `block in invoke'
/opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/sidekiq_middleware/arguments_logger.rb:6:in `call'
...
```
In some cases Sidekiq may be hung and unable to respond to the `TTIN` signal.
Move on to other troubleshooting methods if this happens.
## Process profiling with `perf`
Linux has a process profiling tool called `perf` that is helpful when a certain
process is eating up a lot of CPU. If you see high CPU usage and Sidekiq won't
respond to the `TTIN` signal, this is a good next step.
If `perf` is not installed on your system, install it with `apt-get` or `yum`:
```
# Debian
sudo apt-get install linux-tools
# Ubuntu (may require these additional Kernel packages)
sudo apt-get install linux-tools-common linux-tools-generic linux-tools-`uname -r`
# Red Hat/CentOS
sudo yum install perf
```
Run perf against the Sidekiq PID:
```
sudo perf record -p <sidekiq_pid>
```
Let this run for 30-60 seconds and then press Ctrl-C. Then view the perf report:
```
sudo perf report
# Sample output
Samples: 348K of event 'cycles', Event count (approx.): 280908431073
97.69% ruby nokogiri.so [.] xmlXPathNodeSetMergeAndClear
0.18% ruby libruby.so.2.1.0 [.] objspace_malloc_increase
0.12% ruby libc-2.12.so [.] _int_malloc
0.10% ruby libc-2.12.so [.] _int_free
```
Above you see sample output from a perf report. It shows that 97% of the CPU is
being spent inside Nokogiri and `xmlXPathNodeSetMergeAndClear`. For something
this obvious you should then go investigate what job in GitLab would use
Nokogiri and XPath. Combine with `TTIN` or `gdb` output to show the
corresponding Ruby code where this is happening.
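
If you prefer not to stop the recording by hand, or need to share the report,
the following variation can help; the 60-second duration and file name are
arbitrary:

```
# Sample the Sidekiq process for 60 seconds, then stop automatically
sudo perf record -p <sidekiq_pid> -- sleep 60

# Write the report to a plain-text file, e.g. to attach to a support ticket
sudo perf report --stdio > sidekiq-perf-report.txt
```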
## The GNU Project Debugger (gdb)
`gdb` can be another effective tool for debugging Sidekiq. It gives you a more
interactive way to look at each thread and see what's causing problems.
> **Note:** Attaching to a process with `gdb` suspends its normal operation
(Sidekiq will not process jobs while `gdb` is attached).
Start by attaching to the Sidekiq PID:
```
gdb -p <sidekiq_pid>
```
Then gather information on all the threads:
```
info threads
# Example output
30 Thread 0x7fe5fbd63700 (LWP 26060) 0x0000003f7cadf113 in poll () from /lib64/libc.so.6
29 Thread 0x7fe5f2b3b700 (LWP 26533) 0x0000003f7ce0b68c in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
28 Thread 0x7fe5f2a3a700 (LWP 26534) 0x0000003f7ce0ba5e in pthread_cond_timedwait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
27 Thread 0x7fe5f2939700 (LWP 26535) 0x0000003f7ce0b68c in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
26 Thread 0x7fe5f2838700 (LWP 26537) 0x0000003f7ce0b68c in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
25 Thread 0x7fe5f2737700 (LWP 26538) 0x0000003f7ce0b68c in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
24 Thread 0x7fe5f2535700 (LWP 26540) 0x0000003f7ce0b68c in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
23 Thread 0x7fe5f2434700 (LWP 26541) 0x0000003f7ce0b68c in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
22 Thread 0x7fe5f2232700 (LWP 26543) 0x0000003f7ce0b68c in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
21 Thread 0x7fe5f2131700 (LWP 26544) 0x00007fe5f7b570f0 in xmlXPathNodeSetMergeAndClear ()
from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
...
```
If you see a suspicious thread, like the Nokogiri one above, you may want
to get more information:
```
thread 21
bt
# Example output
#0 0x00007ff0d6afe111 in xmlXPathNodeSetMergeAndClear () from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
#1 0x00007ff0d6b0b836 in xmlXPathNodeCollectAndTest () from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
#2 0x00007ff0d6b09037 in xmlXPathCompOpEval () from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
#3 0x00007ff0d6b09017 in xmlXPathCompOpEval () from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
#4 0x00007ff0d6b092e0 in xmlXPathCompOpEval () from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
#5 0x00007ff0d6b0bc37 in xmlXPathRunEval () from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
#6 0x00007ff0d6b0be5f in xmlXPathEvalExpression () from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
#7 0x00007ff0d6a97dc3 in evaluate (argc=2, argv=0x1022d058, self=<value optimized out>) at xml_xpath_context.c:221
#8 0x00007ff0daeab0ea in vm_call_cfunc_with_frame (th=0x1022a4f0, reg_cfp=0x1032b810, ci=<value optimized out>) at vm_insnhelper.c:1510
```
To output a backtrace from all threads at once:
```
thread apply all bt
```
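
For a non-interactive capture, for example to attach to a support issue, `gdb`
can run the same command in batch mode and then detach automatically; the output
file name is arbitrary:

```
gdb -p <sidekiq_pid> -batch -ex 'thread apply all bt' > sidekiq-backtraces.txt
```

When working interactively, remember to run `detach` and `quit` when you are
done so that Sidekiq resumes processing jobs.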
## Check for blocking queries
Sometimes Sidekiq processes jobs so quickly that it causes database contention.
Check for blocking queries when the backtraces above show that many threads are
stuck in the database adapter.
The PostgreSQL wiki has details on the query you can run to see blocking
queries. The query differs based on the PostgreSQL version. See
[Lock Monitoring](https://wiki.postgresql.org/wiki/Lock_Monitoring) for
the query details.
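
As a starting point, and only as a rough sketch (the exact query depends on your
PostgreSQL version, as described on the wiki page above), you can list non-idle
sessions on an Omnibus installation with `gitlab-psql`:

```
sudo gitlab-psql -d gitlabhq_production -c \
  "SELECT pid, state, now() - query_start AS runtime, query
   FROM pg_stat_activity WHERE state <> 'idle' ORDER BY runtime DESC;"
```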
# Downgrading from EE to CE
If you ever decide to downgrade your Enterprise Edition back to the Community
Edition, there are a few steps you need to take before installing the CE package
on top of the current EE package, or, if you installed from source,
before you change remotes and fetch the latest CE code.
## Disable Enterprise-only features
The first thing to do is disable the following features.
### Authentication mechanisms
Kerberos and Atlassian Crowd are only available on the Enterprise Edition, so
you should disable these authentication mechanisms before downgrading and
provide alternative authentication methods to your users.
### Git Annex
Git Annex is also only available on the Enterprise Edition. This means that if
you have repositories that use Git Annex to store large files, these files will
no longer be easily available via Git. You should consider migrating these
repositories to use Git LFS before downgrading to the Community Edition.
### Remove Jenkins CI Service entries from the database
The `JenkinsService` class is only available on the Enterprise Edition codebase,
so if you downgrade to the Community Edition, you'll come across the following
error:
```
Completed 500 Internal Server Error in 497ms (ActiveRecord: 32.2ms)
ActionView::Template::Error (The single-table inheritance mechanism failed to locate the subclass: 'JenkinsService'. This
error is raised because the column 'type' is reserved for storing the class in case of inheritance. Please rename this
column if you didn't intend it to be used for storing the inheritance class or overwrite Service.inheritance_column to
use another column for that information.)
```
All services are created automatically for every project you have, so in order
to avoid getting this error, you need to remove all instances of the
`JenkinsService` from your database:
**Omnibus Installation**
```
$ sudo gitlab-rails runner "Service.where(type: 'JenkinsService').delete_all"
```
**Source Installation**
```
$ bundle exec rails runner "Service.where(type: 'JenkinsService').delete_all" production
```
## Downgrade to CE
After performing the steps above, you are ready to downgrade your
GitLab installation to the Community Edition.
**Omnibus Installation**
To downgrade an Omnibus installation, it is sufficient to install the Community
Edition package on top of the currently installed one. You can do this manually,
by directly [downloading the package](https://packages.gitlab.com/gitlab/gitlab-ce)
you need, or by adding our CE package repository and following the
[CE installation instructions](https://about.gitlab.com/downloads/).
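
For example, a manual downgrade on a Debian/Ubuntu system could look like the
following; the package file name is illustrative and depends on the version you
downloaded:

```
sudo dpkg -i gitlab-ce_8.7.0-ce.0_amd64.deb
sudo gitlab-ctl reconfigure
```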
**Source Installation**
To downgrade a source installation, you need to replace the current remote of
your GitLab installation with the Community Edition's remote, fetch the latest
changes, and checkout the latest stable branch:
```
$ git remote set-url origin git@gitlab.com:gitlab-org/gitlab-ce.git
$ git fetch --all
$ git checkout 8-x-stable
```
Remember to follow the correct [update guides](../update/README.md) to make
sure all dependencies are up to date.
...@@ -59,34 +59,53 @@ This will drop you in to an InfluxDB interactive session. Copy the entire ...@@ -59,34 +59,53 @@ This will drop you in to an InfluxDB interactive session. Copy the entire
contents below and paste it in to the interactive session: contents below and paste it in to the interactive session:
``` ```
CREATE RETENTION POLICY gitlab_30d ON gitlab DURATION 30d REPLICATION 1 DEFAULT CREATE RETENTION POLICY default ON gitlab DURATION 1h REPLICATION 1 DEFAULT
CREATE RETENTION POLICY seven_days ON gitlab DURATION 7d REPLICATION 1 CREATE RETENTION POLICY downsampled ON gitlab DURATION 7d REPLICATION 1
CREATE CONTINUOUS QUERY rails_transaction_counts_seven_days ON gitlab BEGIN SELECT count("duration") AS "count" INTO gitlab.seven_days.rails_transaction_counts FROM rails_transactions GROUP BY time(1m) END; CREATE CONTINUOUS QUERY grape_git_timings_per_action ON gitlab BEGIN SELECT mean("duration") AS duration_mean, percentile("duration", 95) AS duration_95th, percentile("duration", 99) AS duration_99th INTO gitlab.downsampled.grape_git_timings_per_action FROM gitlab."default".rails_method_calls WHERE (action !~ /.+/ OR action =~ /^Grape#/) AND method =~ /^(Rugged|Gitlab::Git)/ GROUP BY time(1m), action END;
CREATE CONTINUOUS QUERY sidekiq_transaction_counts_seven_days ON gitlab BEGIN SELECT count("duration") AS "count" INTO gitlab.seven_days.sidekiq_transaction_counts FROM sidekiq_transactions GROUP BY time(1m) END; CREATE CONTINUOUS QUERY grape_markdown_render_timings_overall ON gitlab BEGIN SELECT mean(banzai_cached_render_real_time) AS cached_real_mean, percentile(banzai_cached_render_real_time, 95) AS cached_real_95th, percentile(banzai_cached_render_real_time, 99) AS cached_real_99th, mean(banzai_cached_render_cpu_time) AS cached_cpu_mean, percentile(banzai_cached_render_cpu_time, 95) AS cached_cpu_95th, percentile(banzai_cached_render_cpu_time, 99) AS cached_cpu_99th, sum(banzai_cached_render_call_count) AS cached_call_count, mean(banzai_cacheless_render_real_time) AS cacheless_real_mean, percentile(banzai_cacheless_render_real_time, 95) AS cacheless_real_95th, percentile(banzai_cacheless_render_real_time, 99) AS cacheless_real_99th, mean(banzai_cacheless_render_cpu_time) AS cacheless_cpu_mean, percentile(banzai_cacheless_render_cpu_time, 95) AS cacheless_cpu_95th, percentile(banzai_cacheless_render_cpu_time, 99) AS cacheless_cpu_99th, sum(banzai_cacheless_render_call_count) AS cacheless_call_count INTO gitlab.downsampled.grape_markdown_render_timings_overall FROM gitlab."default".rails_transactions WHERE (action !~ /.+/ OR action =~ /^Grape#/) AND (banzai_cached_render_call_count > 0 OR banzai_cacheless_render_call_count > 0) GROUP BY time(1m) END;
CREATE CONTINUOUS QUERY rails_method_call_timings_seven_days ON gitlab BEGIN SELECT percentile("duration", 95.000) AS "duration_95th", percentile("duration", 99.000) AS "duration_99th", mean("duration") AS "duration_mean" INTO gitlab.seven_days.rails_method_call_timings FROM rails_method_calls GROUP BY time(1m) END; CREATE CONTINUOUS QUERY grape_markdown_render_timings_per_action ON gitlab BEGIN SELECT mean(banzai_cached_render_real_time) AS cached_real_mean, percentile(banzai_cached_render_real_time, 95) AS cached_real_95th, percentile(banzai_cached_render_real_time, 99) AS cached_real_99th, mean(banzai_cached_render_cpu_time) AS cached_cpu_mean, percentile(banzai_cached_render_cpu_time, 95) AS cached_cpu_95th, percentile(banzai_cached_render_cpu_time, 99) AS cached_cpu_99th, sum(banzai_cached_render_call_count) AS cached_call_count, mean(banzai_cacheless_render_real_time) AS cacheless_real_mean, percentile(banzai_cacheless_render_real_time, 95) AS cacheless_real_95th, percentile(banzai_cacheless_render_real_time, 99) AS cacheless_real_99th, mean(banzai_cacheless_render_cpu_time) AS cacheless_cpu_mean, percentile(banzai_cacheless_render_cpu_time, 95) AS cacheless_cpu_95th, percentile(banzai_cacheless_render_cpu_time, 99) AS cacheless_cpu_99th, sum(banzai_cacheless_render_call_count) AS cacheless_call_count INTO gitlab.downsampled.grape_markdown_render_timings_per_action FROM gitlab."default".rails_transactions WHERE (action !~ /.+/ OR action =~ /^Grape#/) AND (banzai_cached_render_call_count > 0 OR banzai_cacheless_render_call_count > 0) GROUP BY time(1m), action END;
CREATE CONTINUOUS QUERY sidekiq_method_call_timings_seven_days ON gitlab BEGIN SELECT percentile("duration", 95.000) AS "duration_95th", percentile("duration", 99.000) AS "duration_99th", mean("duration") AS "duration_mean" INTO gitlab.seven_days.sidekiq_method_call_timings FROM sidekiq_method_calls GROUP BY time(1m) END; CREATE CONTINUOUS QUERY grape_markdown_timings_overall ON gitlab BEGIN SELECT mean("duration") AS duration_mean, percentile("duration", 95) AS duration_95th, percentile("duration", 99) AS duration_99th INTO gitlab.downsampled.grape_markdown_timings_overall FROM gitlab."default".rails_method_calls WHERE (action !~ /.+/ OR action =~ /^Grape#/) AND method =~ /^Banzai/ GROUP BY time(1m) END;
CREATE CONTINUOUS QUERY rails_method_call_timings_per_method_seven_days ON gitlab BEGIN SELECT percentile("duration", 95.000) AS duration_95th, percentile("duration", 99.000) AS duration_99th, mean("duration") AS duration_mean INTO gitlab.seven_days.rails_method_call_timings_per_method FROM rails_method_calls GROUP BY time(1m), method END; CREATE CONTINUOUS QUERY grape_method_call_timings_per_action_and_method ON gitlab BEGIN SELECT mean("duration") AS duration_mean, percentile("duration", 95) AS duration_95th, percentile("duration", 99) AS duration_99th INTO gitlab.downsampled.grape_method_call_timings_per_action_and_method FROM gitlab."default".rails_method_calls WHERE action !~ /.+/ OR action =~ /^Grape#/ GROUP BY time(1m), method, action END;
CREATE CONTINUOUS QUERY sidekiq_method_call_timings_per_method_seven_days ON gitlab BEGIN SELECT percentile("duration", 95.000) AS duration_95th, percentile("duration", 99.000) AS duration_99th, mean("duration") AS duration_mean INTO gitlab.seven_days.sidekiq_method_call_timings_per_method FROM sidekiq_method_calls GROUP BY time(1m), method END; CREATE CONTINUOUS QUERY grape_method_call_timings_per_method ON gitlab BEGIN SELECT mean("duration") AS duration_mean, percentile("duration", 95) AS duration_95th, percentile("duration", 99) AS duration_99th INTO gitlab.downsampled.grape_method_call_timings_per_method FROM gitlab."default".rails_method_calls WHERE action !~ /.+/ OR action =~ /^Grape#/ GROUP BY time(1m), method END;
CREATE CONTINUOUS QUERY rails_memory_usage_per_minute ON gitlab BEGIN SELECT percentile(value, 95.000) AS memory_95th, percentile(value, 99.000) AS memory_99th, mean(value) AS memory_mean INTO gitlab.seven_days.rails_memory_usage_per_minute FROM rails_memory_usage GROUP BY time(1m) END; CREATE CONTINUOUS QUERY grape_transaction_counts_overall ON gitlab BEGIN SELECT count("duration") AS count INTO gitlab.downsampled.grape_transaction_counts_overall FROM gitlab."default".rails_transactions WHERE action !~ /.+/ OR action =~ /^Grape#/ GROUP BY time(1m) END;
CREATE CONTINUOUS QUERY sidekiq_memory_usage_per_minute ON gitlab BEGIN SELECT percentile(value, 95.000) AS memory_95th, percentile(value, 99.000) AS memory_99th, mean(value) AS memory_mean INTO gitlab.seven_days.sidekiq_memory_usage_per_minute FROM sidekiq_memory_usage GROUP BY time(1m) END; CREATE CONTINUOUS QUERY grape_transaction_counts_per_action ON gitlab BEGIN SELECT count("duration") AS count INTO gitlab.downsampled.grape_transaction_counts_per_action FROM gitlab."default".rails_transactions WHERE action !~ /.+/ OR action =~ /^Grape#/ GROUP BY time(1m), action END;
CREATE CONTINUOUS QUERY sidekiq_file_descriptors_per_minute ON gitlab BEGIN SELECT sum(value) AS value INTO gitlab.seven_days.sidekiq_file_descriptors_per_minute FROM sidekiq_file_descriptors GROUP BY time(1m) END; CREATE CONTINUOUS QUERY grape_transaction_timings_overall ON gitlab BEGIN SELECT mean("duration") AS duration_mean, percentile("duration", 95) AS duration_95th, percentile("duration", 99) AS duration_99th, mean(sql_duration) AS sql_duration_mean, percentile(sql_duration, 95) AS sql_duration_95th, percentile(sql_duration, 99) AS sql_duration_99th, mean(view_duration) AS view_duration_mean, percentile(view_duration, 95) AS view_duration_95th, percentile(view_duration, 99) AS view_duration_99th, mean(cache_read_duration) AS cache_read_duration_mean, percentile(cache_read_duration, 99) AS cache_read_duration_99th, percentile(cache_read_duration, 95) AS cache_read_duration_95th, mean(cache_write_duration) AS cache_write_duration_mean, percentile(cache_write_duration, 99) AS cache_write_duration_99th, percentile(cache_write_duration, 95) AS cache_write_duration_95th, mean(cache_delete_duration) AS cache_delete_duration_mean, percentile(cache_delete_duration, 99) AS cache_delete_duration_99th, percentile(cache_delete_duration, 95) AS cache_delete_duration_95th, mean(cache_exists_duration) AS cache_exists_duration_mean, percentile(cache_exists_duration, 99) AS cache_exists_duration_99th, percentile(cache_exists_duration, 95) AS cache_exists_duration_95th, mean(cache_duration) AS cache_duration_mean, percentile(cache_duration, 99) AS cache_duration_99th, percentile(cache_duration, 95) AS cache_duration_95th, mean(method_duration) AS method_duration_mean, percentile(method_duration, 99) AS method_duration_99th, percentile(method_duration, 95) AS method_duration_95th INTO gitlab.downsampled.grape_transaction_timings_overall FROM gitlab."default".rails_transactions WHERE action !~ /.+/ OR action =~ /^Grape#/ GROUP BY time(1m) END;
CREATE CONTINUOUS QUERY rails_file_descriptors_per_minute ON gitlab BEGIN SELECT sum(value) AS value INTO gitlab.seven_days.rails_file_descriptors_per_minute FROM rails_file_descriptors GROUP BY time(1m) END; CREATE CONTINUOUS QUERY grape_transaction_timings_per_action ON gitlab BEGIN SELECT mean("duration") AS duration_mean, percentile("duration", 95) AS duration_95th, percentile("duration", 99) AS duration_99th, mean(sql_duration) AS sql_duration_mean, percentile(sql_duration, 95) AS sql_duration_95th, percentile(sql_duration, 99) AS sql_duration_99th, mean(view_duration) AS view_duration_mean, percentile(view_duration, 95) AS view_duration_95th, percentile(view_duration, 99) AS view_duration_99th, mean(cache_read_duration) AS cache_read_duration_mean, percentile(cache_read_duration, 99) AS cache_read_duration_99th, percentile(cache_read_duration, 95) AS cache_read_duration_95th, mean(cache_write_duration) AS cache_write_duration_mean, percentile(cache_write_duration, 99) AS cache_write_duration_99th, percentile(cache_write_duration, 95) AS cache_write_duration_95th, mean(cache_delete_duration) AS cache_delete_duration_mean, percentile(cache_delete_duration, 99) AS cache_delete_duration_99th, percentile(cache_delete_duration, 95) AS cache_delete_duration_95th, mean(cache_exists_duration) AS cache_exists_duration_mean, percentile(cache_exists_duration, 99) AS cache_exists_duration_99th, percentile(cache_exists_duration, 95) AS cache_exists_duration_95th, mean(cache_duration) AS cache_duration_mean, percentile(cache_duration, 99) AS cache_duration_99th, percentile(cache_duration, 95) AS cache_duration_95th, mean(method_duration) AS method_duration_mean, percentile(method_duration, 99) AS method_duration_99th, percentile(method_duration, 95) AS method_duration_95th INTO gitlab.downsampled.grape_transaction_timings_per_action FROM gitlab."default".rails_transactions WHERE action !~ /.+/ OR action =~ /^Grape#/ GROUP BY time(1m), action END;
CREATE CONTINUOUS QUERY rails_gc_counts_per_minute ON gitlab BEGIN SELECT sum(count) AS count INTO gitlab.seven_days.rails_gc_counts_per_minute FROM rails_gc_statistics GROUP BY time(1m) END; CREATE CONTINUOUS QUERY rails_file_descriptor_counts ON gitlab BEGIN SELECT sum(value) AS count INTO gitlab.downsampled.rails_file_descriptor_counts FROM gitlab."default".rails_file_descriptors GROUP BY time(1m) END;
CREATE CONTINUOUS QUERY sidekiq_gc_counts_per_minute ON gitlab BEGIN SELECT sum(count) AS count INTO gitlab.seven_days.sidekiq_gc_counts_per_minute FROM sidekiq_gc_statistics GROUP BY time(1m) END; CREATE CONTINUOUS QUERY rails_gc_counts ON gitlab BEGIN SELECT sum(count) AS total, sum(minor_gc_count) AS minor, sum(major_gc_count) AS major INTO gitlab.downsampled.rails_gc_counts FROM gitlab."default".rails_gc_statistics GROUP BY time(1m) END;
CREATE CONTINUOUS QUERY rails_gc_timings_per_minute ON gitlab BEGIN SELECT percentile(total_time, 95.000) AS duration_95th, percentile(total_time, 99.000) AS duration_99th, mean(total_time) AS duration_mean INTO gitlab.seven_days.rails_gc_timings_per_minute FROM rails_gc_statistics GROUP BY time(1m) END; CREATE CONTINUOUS QUERY rails_gc_timings ON gitlab BEGIN SELECT mean(total_time) AS duration_mean, percentile(total_time, 95) AS duration_95th, percentile(total_time, 99) AS duration_99th INTO gitlab.downsampled.rails_gc_timings FROM gitlab."default".rails_gc_statistics GROUP BY time(1m) END;
CREATE CONTINUOUS QUERY sidekiq_gc_timings_per_minute ON gitlab BEGIN SELECT percentile(total_time, 95.000) AS duration_95th, percentile(total_time, 99.000) AS duration_99th, mean(total_time) AS duration_mean INTO gitlab.seven_days.sidekiq_gc_timings_per_minute FROM sidekiq_gc_statistics GROUP BY time(1m) END; CREATE CONTINUOUS QUERY rails_git_timings_per_action ON gitlab BEGIN SELECT mean("duration") AS duration_mean, percentile("duration", 95) AS duration_95th, percentile("duration", 99) AS duration_99th INTO gitlab.downsampled.rails_git_timings_per_action FROM gitlab."default".rails_method_calls WHERE (action =~ /.+/ AND action !~ /^Grape#/) AND method =~ /^(Rugged|Gitlab::Git)/ GROUP BY time(1m), action END;
CREATE CONTINUOUS QUERY rails_gc_major_minor_per_minute ON gitlab BEGIN SELECT sum(major_gc_count) AS major, sum(minor_gc_count) AS minor INTO gitlab.seven_days.rails_gc_major_minor_per_minute FROM rails_gc_statistics GROUP BY time(1m) END; CREATE CONTINUOUS QUERY rails_markdown_render_timings_overall ON gitlab BEGIN SELECT mean(banzai_cached_render_real_time) AS cached_real_mean, percentile(banzai_cached_render_real_time, 95) AS cached_real_95th, percentile(banzai_cached_render_real_time, 99) AS cached_real_99th, mean(banzai_cached_render_cpu_time) AS cached_cpu_mean, percentile(banzai_cached_render_cpu_time, 95) AS cached_cpu_95th, percentile(banzai_cached_render_cpu_time, 99) AS cached_cpu_99th, sum(banzai_cached_render_call_count) AS cached_call_count, mean(banzai_cacheless_render_real_time) AS cacheless_real_mean, percentile(banzai_cacheless_render_real_time, 95) AS cacheless_real_95th, percentile(banzai_cacheless_render_real_time, 99) AS cacheless_real_99th, mean(banzai_cacheless_render_cpu_time) AS cacheless_cpu_mean, percentile(banzai_cacheless_render_cpu_time, 95) AS cacheless_cpu_95th, percentile(banzai_cacheless_render_cpu_time, 99) AS cacheless_cpu_99th, sum(banzai_cacheless_render_call_count) AS cacheless_call_count INTO gitlab.downsampled.rails_markdown_render_timings_overall FROM gitlab."default".rails_transactions WHERE (action =~ /.+/ AND action !~ /^Grape#/) AND (banzai_cached_render_call_count > 0 OR banzai_cacheless_render_call_count > 0) GROUP BY time(1m) END;
CREATE CONTINUOUS QUERY sidekiq_gc_major_minor_per_minute ON gitlab BEGIN SELECT sum(major_gc_count) AS major, sum(minor_gc_count) AS minor INTO gitlab.seven_days.sidekiq_gc_major_minor_per_minute FROM sidekiq_gc_statistics GROUP BY time(1m) END; CREATE CONTINUOUS QUERY rails_markdown_render_timings_per_action ON gitlab BEGIN SELECT mean(banzai_cached_render_real_time) AS cached_real_mean, percentile(banzai_cached_render_real_time, 95) AS cached_real_95th, percentile(banzai_cached_render_real_time, 99) AS cached_real_99th, mean(banzai_cached_render_cpu_time) AS cached_cpu_mean, percentile(banzai_cached_render_cpu_time, 95) AS cached_cpu_95th, percentile(banzai_cached_render_cpu_time, 99) AS cached_cpu_99th, sum(banzai_cached_render_call_count) AS cached_call_count, mean(banzai_cacheless_render_real_time) AS cacheless_real_mean, percentile(banzai_cacheless_render_real_time, 95) AS cacheless_real_95th, percentile(banzai_cacheless_render_real_time, 99) AS cacheless_real_99th, mean(banzai_cacheless_render_cpu_time) AS cacheless_cpu_mean, percentile(banzai_cacheless_render_cpu_time, 95) AS cacheless_cpu_95th, percentile(banzai_cacheless_render_cpu_time, 99) AS cacheless_cpu_99th, sum(banzai_cacheless_render_call_count) AS cacheless_call_count INTO gitlab.downsampled.rails_markdown_render_timings_per_action FROM gitlab."default".rails_transactions WHERE (action =~ /.+/ AND action !~ /^Grape#/) AND (banzai_cached_render_call_count > 0 OR banzai_cacheless_render_call_count > 0) GROUP BY time(1m), action END;
CREATE CONTINUOUS QUERY grape_internal_allowed_request_counts_per_minute ON gitlab BEGIN SELECT count("duration") AS count INTO gitlab.seven_days.grape_internal_allowed_request_counts_per_minute FROM rails_transactions WHERE request_uri = '/api/v3/internal/allowed' GROUP BY time(1m) END; CREATE CONTINUOUS QUERY rails_markdown_timings_overall ON gitlab BEGIN SELECT mean("duration") AS duration_mean, percentile("duration", 95) AS duration_95th, percentile("duration", 99) AS duration_99th INTO gitlab.downsampled.rails_markdown_timings_overall FROM gitlab."default".rails_method_calls WHERE (action =~ /.+/ AND action !~ /^Grape#/) AND method =~ /^Banzai/ GROUP BY time(1m) END;
CREATE CONTINUOUS QUERY grape_internal_allowed_request_timings_per_minute ON gitlab BEGIN SELECT percentile("duration", 95) AS duration_95th, percentile("duration", 99) AS duration_99th, mean("duration") AS duration_mean INTO gitlab.seven_days.grape_internal_allowed_request_timings_per_minute FROM rails_transactions WHERE request_uri = '/api/v3/internal/allowed' GROUP BY time(1m) END; CREATE CONTINUOUS QUERY rails_memory_usage_overall ON gitlab BEGIN SELECT mean(value) AS memory_mean, percentile(value, 95) AS memory_95th, percentile(value, 99) AS memory_99th INTO gitlab.downsampled.rails_memory_usage_overall FROM gitlab."default".rails_memory_usage GROUP BY time(1m) END;
CREATE CONTINUOUS QUERY grape_internal_allowed_sql_timings_per_minute ON gitlab BEGIN SELECT percentile(sql_duration, 95) AS duration_95th, percentile(sql_duration, 99) AS duration_99th, mean(sql_duration) AS duration_mean INTO gitlab.seven_days.grape_internal_allowed_sql_timings_per_minute FROM rails_transactions WHERE request_uri = '/api/v3/internal/allowed' GROUP BY time(1m) END; CREATE CONTINUOUS QUERY rails_method_call_timings_per_action_and_method ON gitlab BEGIN SELECT mean("duration") AS duration_mean, percentile("duration", 95) AS duration_95th, percentile("duration", 99) AS duration_99th INTO gitlab.downsampled.rails_method_call_timings_per_action_and_method FROM gitlab."default".rails_method_calls WHERE action =~ /.+/ AND action !~ /^Grape#/ GROUP BY time(1m), method, action END;
CREATE CONTINUOUS QUERY grape_internal_authorized_keys_request_counts_per_minute ON gitlab BEGIN SELECT count("duration") AS count INTO gitlab.seven_days.grape_internal_authorized_keys_request_counts_per_minute FROM rails_transactions WHERE request_uri = '/api/v3/internal/authorized_keys' GROUP BY time(1m) END; CREATE CONTINUOUS QUERY rails_method_call_timings_per_method ON gitlab BEGIN SELECT mean("duration") AS duration_mean, percentile("duration", 95) AS duration_95th, percentile("duration", 99) AS duration_99th INTO gitlab.downsampled.rails_method_call_timings_per_method FROM gitlab."default".rails_method_calls WHERE action =~ /.+/ AND action !~ /^Grape#/ GROUP BY time(1m), method END;
CREATE CONTINUOUS QUERY grape_internal_authorized_keys_request_timings_per_minute ON gitlab BEGIN SELECT percentile("duration", 95) AS duration_95th, percentile("duration", 99) AS duration_99th, mean("duration") AS duration_mean INTO gitlab.seven_days.grape_internal_authorized_keys_request_timings_per_minute FROM rails_transactions WHERE request_uri = '/api/v3/internal/authorized_keys' GROUP BY time(1m) END; CREATE CONTINUOUS QUERY rails_object_counts_overall ON gitlab BEGIN SELECT sum(count) AS count INTO gitlab.downsampled.rails_object_counts_overall FROM gitlab."default".rails_object_counts GROUP BY time(1m) END;
CREATE CONTINUOUS QUERY grape_internal_authorized_keys_sql_timings_per_minute ON gitlab BEGIN SELECT percentile(sql_duration, 95) AS duration_95th, percentile(sql_duration, 99) AS duration_99th, mean(sql_duration) AS duration_mean INTO gitlab.seven_days.grape_internal_authorized_keys_sql_timings_per_minute FROM rails_transactions WHERE request_uri = '/api/v3/internal/authorized_keys' GROUP BY time(1m) END; CREATE CONTINUOUS QUERY rails_object_counts_per_type ON gitlab BEGIN SELECT sum(count) AS count INTO gitlab.downsampled.rails_object_counts_per_type FROM gitlab."default".rails_object_counts GROUP BY time(1m), type END;
CREATE CONTINUOUS QUERY rails_transaction_timings_seven_days ON gitlab BEGIN SELECT percentile("duration", 95.000) AS duration_95th, percentile("duration", 99.000) AS duration_99th, mean("duration") AS duration_mean, percentile(sql_duration, 95.000) AS sql_duration_95th, percentile(sql_duration, 99.000) AS sql_duration_99th, mean(sql_duration) AS sql_duration_mean, percentile(view_duration, 95.000) AS view_duration_95th, percentile(view_duration, 99.000) AS view_duration_99th, mean(view_duration) AS view_duration_mean, percentile(cache_read_duration, 99) AS cache_read_duration_99th, percentile(cache_read_duration, 95) AS cache_read_duration_95th, mean(cache_read_duration) AS cache_read_duration_mean, percentile(cache_write_duration, 99) AS cache_write_duration_99th, percentile(cache_write_duration, 95) AS cache_write_duration_95th, mean(cache_write_duration) AS cache_write_duration_mean, percentile(cache_delete_duration, 99) AS cache_delete_duration_99th, percentile(cache_delete_duration, 95) AS cache_delete_duration_95th, mean(cache_delete_duration) AS cache_delete_duration_mean, percentile(cache_exists_duration, 99) AS cache_exists_duration_99th, percentile(cache_exists_duration, 95) AS cache_exists_duration_95th, mean(cache_exists_duration) AS cache_exists_duration_mean, percentile(cache_duration, 99) AS cache_duration_99th, percentile(cache_duration, 95) AS cache_duration_95th, mean(cache_duration) AS cache_duration_mean INTO gitlab.seven_days.rails_transaction_timings FROM rails_transactions GROUP BY time(1m) END; CREATE CONTINUOUS QUERY rails_transaction_counts_overall ON gitlab BEGIN SELECT count("duration") AS count INTO gitlab.downsampled.rails_transaction_counts_overall FROM gitlab."default".rails_transactions WHERE action =~ /.+/ AND action !~ /^Grape#/ GROUP BY time(1m) END;
CREATE CONTINUOUS QUERY sidekiq_transaction_timings_seven_days ON gitlab BEGIN SELECT percentile("duration", 95.000) AS duration_95th, percentile("duration", 99.000) AS duration_99th, mean("duration") AS duration_mean, percentile(sql_duration, 95.000) AS sql_duration_95th, percentile(sql_duration, 99.000) AS sql_duration_99th, mean(sql_duration) AS sql_duration_mean, percentile(view_duration, 95.000) AS view_duration_95th, percentile(view_duration, 99.000) AS view_duration_99th, mean(view_duration) AS view_duration_mean, percentile(cache_read_duration, 99) AS cache_read_duration_99th, percentile(cache_read_duration, 95) AS cache_read_duration_95th, mean(cache_read_duration) AS cache_read_duration_mean, percentile(cache_write_duration, 99) AS cache_write_duration_99th, percentile(cache_write_duration, 95) AS cache_write_duration_95th, mean(cache_write_duration) AS cache_write_duration_mean, percentile(cache_delete_duration, 99) AS cache_delete_duration_99th, percentile(cache_delete_duration, 95) AS cache_delete_duration_95th, mean(cache_delete_duration) AS cache_delete_duration_mean, percentile(cache_exists_duration, 99) AS cache_exists_duration_99th, percentile(cache_exists_duration, 95) AS cache_exists_duration_95th, mean(cache_exists_duration) AS cache_exists_duration_mean, percentile(cache_duration, 99) AS cache_duration_99th, percentile(cache_duration, 95) AS cache_duration_95th, mean(cache_duration) AS cache_duration_mean INTO gitlab.seven_days.sidekiq_transaction_timings FROM sidekiq_transactions GROUP BY time(1m) END; CREATE CONTINUOUS QUERY rails_transaction_counts_per_action ON gitlab BEGIN SELECT count("duration") AS count INTO gitlab.downsampled.rails_transaction_counts_per_action FROM gitlab."default".rails_transactions WHERE action =~ /.+/ AND action !~ /^Grape#/ GROUP BY time(1m), action END;
CREATE CONTINUOUS QUERY grape_transaction_counts_seven_days ON gitlab BEGIN SELECT count("duration") AS count INTO gitlab.seven_days.grape_transaction_counts FROM rails_transactions WHERE action !~ /.+/ GROUP BY time(1m) END; CREATE CONTINUOUS QUERY rails_transaction_timings_overall ON gitlab BEGIN SELECT mean("duration") AS duration_mean, percentile("duration", 95) AS duration_95th, percentile("duration", 99) AS duration_99th, mean(sql_duration) AS sql_duration_mean, percentile(sql_duration, 95) AS sql_duration_95th, percentile(sql_duration, 99) AS sql_duration_99th, mean(view_duration) AS view_duration_mean, percentile(view_duration, 95) AS view_duration_95th, percentile(view_duration, 99) AS view_duration_99th, mean(cache_read_duration) AS cache_read_duration_mean, percentile(cache_read_duration, 99) AS cache_read_duration_99th, percentile(cache_read_duration, 95) AS cache_read_duration_95th, mean(cache_write_duration) AS cache_write_duration_mean, percentile(cache_write_duration, 99) AS cache_write_duration_99th, percentile(cache_write_duration, 95) AS cache_write_duration_95th, mean(cache_delete_duration) AS cache_delete_duration_mean, percentile(cache_delete_duration, 99) AS cache_delete_duration_99th, percentile(cache_delete_duration, 95) AS cache_delete_duration_95th, mean(cache_exists_duration) AS cache_exists_duration_mean, percentile(cache_exists_duration, 99) AS cache_exists_duration_99th, percentile(cache_exists_duration, 95) AS cache_exists_duration_95th, mean(cache_duration) AS cache_duration_mean, percentile(cache_duration, 99) AS cache_duration_99th, percentile(cache_duration, 95) AS cache_duration_95th, mean(method_duration) AS method_duration_mean, percentile(method_duration, 99) AS method_duration_99th, percentile(method_duration, 95) AS method_duration_95th INTO gitlab.downsampled.rails_transaction_timings_overall FROM gitlab."default".rails_transactions WHERE action =~ /.+/ AND action !~ /^Grape#/ GROUP BY time(1m) END;
CREATE CONTINUOUS QUERY grape_transaction_timings_seven_days ON gitlab BEGIN SELECT percentile("duration", 95.000) AS duration_95th, percentile("duration", 99.000) AS duration_99th, mean("duration") AS duration_mean, percentile(sql_duration, 95.000) AS sql_duration_95th, percentile(sql_duration, 99.000) AS sql_duration_99th, mean(sql_duration) AS sql_duration_mean, percentile(cache_read_duration, 99) AS cache_read_duration_99th, percentile(cache_read_duration, 95) AS cache_read_duration_95th, mean(cache_read_duration) AS cache_read_duration_mean, percentile(cache_write_duration, 99) AS cache_write_duration_99th, percentile(cache_write_duration, 95) AS cache_write_duration_95th, mean(cache_write_duration) AS cache_write_duration_mean, percentile(cache_delete_duration, 99) AS cache_delete_duration_99th, percentile(cache_delete_duration, 95) AS cache_delete_duration_95th, mean(cache_delete_duration) AS cache_delete_duration_mean, percentile(cache_exists_duration, 99) AS cache_exists_duration_99th, percentile(cache_exists_duration, 95) AS cache_exists_duration_95th, mean(cache_exists_duration) AS cache_exists_duration_mean, percentile(cache_duration, 99) AS cache_duration_99th, percentile(cache_duration, 95) AS cache_duration_95th, mean(cache_duration) AS cache_duration_mean INTO gitlab.seven_days.grape_transaction_timings FROM rails_transactions WHERE action !~ /.+/ GROUP BY time(1m) END; CREATE CONTINUOUS QUERY rails_transaction_timings_per_action ON gitlab BEGIN SELECT mean("duration") AS duration_mean, percentile("duration", 95) AS duration_95th, percentile("duration", 99) AS duration_99th, mean(sql_duration) AS sql_duration_mean, percentile(sql_duration, 95) AS sql_duration_95th, percentile(sql_duration, 99) AS sql_duration_99th, mean(view_duration) AS view_duration_mean, percentile(view_duration, 95) AS view_duration_95th, percentile(view_duration, 99) AS view_duration_99th, mean(cache_read_duration) AS cache_read_duration_mean, percentile(cache_read_duration, 99) AS cache_read_duration_99th, percentile(cache_read_duration, 95) AS cache_read_duration_95th, mean(cache_write_duration) AS cache_write_duration_mean, percentile(cache_write_duration, 99) AS cache_write_duration_99th, percentile(cache_write_duration, 95) AS cache_write_duration_95th, mean(cache_delete_duration) AS cache_delete_duration_mean, percentile(cache_delete_duration, 99) AS cache_delete_duration_99th, percentile(cache_delete_duration, 95) AS cache_delete_duration_95th, mean(cache_exists_duration) AS cache_exists_duration_mean, percentile(cache_exists_duration, 99) AS cache_exists_duration_99th, percentile(cache_exists_duration, 95) AS cache_exists_duration_95th, mean(cache_duration) AS cache_duration_mean, percentile(cache_duration, 99) AS cache_duration_99th, percentile(cache_duration, 95) AS cache_duration_95th, mean(method_duration) AS method_duration_mean, percentile(method_duration, 99) AS method_duration_99th, percentile(method_duration, 95) AS method_duration_95th INTO gitlab.downsampled.rails_transaction_timings_per_action FROM gitlab."default".rails_transactions WHERE action =~ /.+/ AND action !~ /^Grape#/ GROUP BY time(1m), action END;
CREATE CONTINUOUS QUERY rails_view_timings_per_action_and_view ON gitlab BEGIN SELECT mean("duration") AS duration_mean, percentile("duration", 95) AS duration_95th, percentile("duration", 99) AS duration_99th INTO gitlab.downsampled.rails_view_timings_per_action_and_view FROM gitlab."default".rails_views WHERE action =~ /.+/ AND action !~ /^Grape#/ GROUP BY time(1m), action, view END;
CREATE CONTINUOUS QUERY sidekiq_file_descriptor_counts ON gitlab BEGIN SELECT sum(value) AS count INTO gitlab.downsampled.sidekiq_file_descriptor_counts FROM gitlab."default".sidekiq_file_descriptors GROUP BY time(1m) END;
CREATE CONTINUOUS QUERY sidekiq_gc_counts ON gitlab BEGIN SELECT sum(count) AS total, sum(minor_gc_count) AS minor, sum(major_gc_count) AS major INTO gitlab.downsampled.sidekiq_gc_counts FROM gitlab."default".sidekiq_gc_statistics GROUP BY time(1m) END;
CREATE CONTINUOUS QUERY sidekiq_gc_timings ON gitlab BEGIN SELECT mean(total_time) AS duration_mean, percentile(total_time, 95) AS duration_95th, percentile(total_time, 99) AS duration_99th INTO gitlab.downsampled.sidekiq_gc_timings FROM gitlab."default".sidekiq_gc_statistics GROUP BY time(1m) END;
CREATE CONTINUOUS QUERY sidekiq_git_timings_per_action ON gitlab BEGIN SELECT mean("duration") AS duration_mean, percentile("duration", 95) AS duration_95th, percentile("duration", 99) AS duration_99th INTO gitlab.downsampled.sidekiq_git_timings_per_action FROM gitlab."default".sidekiq_method_calls WHERE method =~ /^(Rugged|Gitlab::Git)/ GROUP BY time(1m), action END;
CREATE CONTINUOUS QUERY sidekiq_markdown_render_timings_overall ON gitlab BEGIN SELECT mean(banzai_cached_render_real_time) AS cached_real_mean, percentile(banzai_cached_render_real_time, 95) AS cached_real_95th, percentile(banzai_cached_render_real_time, 99) AS cached_real_99th, mean(banzai_cached_render_cpu_time) AS cached_cpu_mean, percentile(banzai_cached_render_cpu_time, 95) AS cached_cpu_95th, percentile(banzai_cached_render_cpu_time, 99) AS cached_cpu_99th, sum(banzai_cached_render_call_count) AS cached_call_count, mean(banzai_cacheless_render_real_time) AS cacheless_real_mean, percentile(banzai_cacheless_render_real_time, 95) AS cacheless_real_95th, percentile(banzai_cacheless_render_real_time, 99) AS cacheless_real_99th, mean(banzai_cacheless_render_cpu_time) AS cacheless_cpu_mean, percentile(banzai_cacheless_render_cpu_time, 95) AS cacheless_cpu_95th, percentile(banzai_cacheless_render_cpu_time, 99) AS cacheless_cpu_99th, sum(banzai_cacheless_render_call_count) AS cacheless_call_count INTO gitlab.downsampled.sidekiq_markdown_render_timings_overall FROM gitlab."default".sidekiq_transactions WHERE (banzai_cached_render_call_count > 0 OR banzai_cacheless_render_call_count > 0) GROUP BY time(1m) END;
CREATE CONTINUOUS QUERY sidekiq_markdown_render_timings_per_action ON gitlab BEGIN SELECT mean(banzai_cached_render_real_time) AS cached_real_mean, percentile(banzai_cached_render_real_time, 95) AS cached_real_95th, percentile(banzai_cached_render_real_time, 99) AS cached_real_99th, mean(banzai_cached_render_cpu_time) AS cached_cpu_mean, percentile(banzai_cached_render_cpu_time, 95) AS cached_cpu_95th, percentile(banzai_cached_render_cpu_time, 99) AS cached_cpu_99th, sum(banzai_cached_render_call_count) AS cached_call_count, mean(banzai_cacheless_render_real_time) AS cacheless_real_mean, percentile(banzai_cacheless_render_real_time, 95) AS cacheless_real_95th, percentile(banzai_cacheless_render_real_time, 99) AS cacheless_real_99th, mean(banzai_cacheless_render_cpu_time) AS cacheless_cpu_mean, percentile(banzai_cacheless_render_cpu_time, 95) AS cacheless_cpu_95th, percentile(banzai_cacheless_render_cpu_time, 99) AS cacheless_cpu_99th, sum(banzai_cacheless_render_call_count) AS cacheless_call_count INTO gitlab.downsampled.sidekiq_markdown_render_timings_per_action FROM gitlab."default".sidekiq_transactions WHERE (banzai_cached_render_call_count > 0 OR banzai_cacheless_render_call_count > 0) GROUP BY time(1m), action END;
CREATE CONTINUOUS QUERY sidekiq_markdown_timings_overall ON gitlab BEGIN SELECT mean("duration") AS duration_mean, percentile("duration", 95) AS duration_95th, percentile("duration", 99) AS duration_99th INTO gitlab.downsampled.sidekiq_markdown_timings_overall FROM gitlab."default".sidekiq_method_calls WHERE method =~ /^Banzai/ GROUP BY time(1m) END;
CREATE CONTINUOUS QUERY sidekiq_memory_usage_overall ON gitlab BEGIN SELECT mean(value) AS memory_mean, percentile(value, 95) AS memory_95th, percentile(value, 99) AS memory_99th INTO gitlab.downsampled.sidekiq_memory_usage_overall FROM gitlab."default".sidekiq_memory_usage GROUP BY time(1m) END;
CREATE CONTINUOUS QUERY sidekiq_method_call_timings_per_action_and_method ON gitlab BEGIN SELECT mean("duration") AS duration_mean, percentile("duration", 95) AS duration_95th, percentile("duration", 99) AS duration_99th INTO gitlab.downsampled.sidekiq_method_call_timings_per_action_and_method FROM gitlab."default".sidekiq_method_calls GROUP BY time(1m), method, action END;
CREATE CONTINUOUS QUERY sidekiq_method_call_timings_per_method ON gitlab BEGIN SELECT mean("duration") AS duration_mean, percentile("duration", 95) AS duration_95th, percentile("duration", 99) AS duration_99th INTO gitlab.downsampled.sidekiq_method_call_timings_per_method FROM gitlab."default".sidekiq_method_calls GROUP BY time(1m), method END;
CREATE CONTINUOUS QUERY sidekiq_object_counts_overall ON gitlab BEGIN SELECT sum(count) AS count INTO gitlab.downsampled.sidekiq_object_counts_overall FROM gitlab."default".sidekiq_object_counts GROUP BY time(1m) END;
CREATE CONTINUOUS QUERY sidekiq_object_counts_per_type ON gitlab BEGIN SELECT sum(count) AS count INTO gitlab.downsampled.sidekiq_object_counts_per_type FROM gitlab."default".sidekiq_object_counts GROUP BY time(1m), type END;
CREATE CONTINUOUS QUERY sidekiq_transaction_counts_overall ON gitlab BEGIN SELECT count("duration") AS count INTO gitlab.downsampled.sidekiq_transaction_counts_overall FROM gitlab."default".sidekiq_transactions GROUP BY time(1m) END;
CREATE CONTINUOUS QUERY sidekiq_transaction_counts_per_action ON gitlab BEGIN SELECT count("duration") AS count INTO gitlab.downsampled.sidekiq_transaction_counts_per_action FROM gitlab."default".sidekiq_transactions GROUP BY time(1m), action END;
CREATE CONTINUOUS QUERY sidekiq_transaction_timings_overall ON gitlab BEGIN SELECT mean("duration") AS duration_mean, percentile("duration", 95) AS duration_95th, percentile("duration", 99) AS duration_99th, mean(sql_duration) AS sql_duration_mean, percentile(sql_duration, 95) AS sql_duration_95th, percentile(sql_duration, 99) AS sql_duration_99th, mean(view_duration) AS view_duration_mean, percentile(view_duration, 95) AS view_duration_95th, percentile(view_duration, 99) AS view_duration_99th, mean(cache_read_duration) AS cache_read_duration_mean, percentile(cache_read_duration, 99) AS cache_read_duration_99th, percentile(cache_read_duration, 95) AS cache_read_duration_95th, mean(cache_write_duration) AS cache_write_duration_mean, percentile(cache_write_duration, 99) AS cache_write_duration_99th, percentile(cache_write_duration, 95) AS cache_write_duration_95th, mean(cache_delete_duration) AS cache_delete_duration_mean, percentile(cache_delete_duration, 99) AS cache_delete_duration_99th, percentile(cache_delete_duration, 95) AS cache_delete_duration_95th, mean(cache_exists_duration) AS cache_exists_duration_mean, percentile(cache_exists_duration, 99) AS cache_exists_duration_99th, percentile(cache_exists_duration, 95) AS cache_exists_duration_95th, mean(cache_duration) AS cache_duration_mean, percentile(cache_duration, 99) AS cache_duration_99th, percentile(cache_duration, 95) AS cache_duration_95th, mean(method_duration) AS method_duration_mean, percentile(method_duration, 99) AS method_duration_99th, percentile(method_duration, 95) AS method_duration_95th INTO gitlab.downsampled.sidekiq_transaction_timings_overall FROM gitlab."default".sidekiq_transactions GROUP BY time(1m) END;
CREATE CONTINUOUS QUERY sidekiq_transaction_timings_per_action ON gitlab BEGIN SELECT mean("duration") AS duration_mean, percentile("duration", 95) AS duration_95th, percentile("duration", 99) AS duration_99th, mean(sql_duration) AS sql_duration_mean, percentile(sql_duration, 95) AS sql_duration_95th, percentile(sql_duration, 99) AS sql_duration_99th, mean(view_duration) AS view_duration_mean, percentile(view_duration, 95) AS view_duration_95th, percentile(view_duration, 99) AS view_duration_99th, mean(cache_read_duration) AS cache_read_duration_mean, percentile(cache_read_duration, 99) AS cache_read_duration_99th, percentile(cache_read_duration, 95) AS cache_read_duration_95th, mean(cache_write_duration) AS cache_write_duration_mean, percentile(cache_write_duration, 99) AS cache_write_duration_99th, percentile(cache_write_duration, 95) AS cache_write_duration_95th, mean(cache_delete_duration) AS cache_delete_duration_mean, percentile(cache_delete_duration, 99) AS cache_delete_duration_99th, percentile(cache_delete_duration, 95) AS cache_delete_duration_95th, mean(cache_exists_duration) AS cache_exists_duration_mean, percentile(cache_exists_duration, 99) AS cache_exists_duration_99th, percentile(cache_exists_duration, 95) AS cache_exists_duration_95th, mean(cache_duration) AS cache_duration_mean, percentile(cache_duration, 99) AS cache_duration_99th, percentile(cache_duration, 95) AS cache_duration_95th, mean(method_duration) AS method_duration_mean, percentile(method_duration, 99) AS method_duration_99th, percentile(method_duration, 95) AS method_duration_95th INTO gitlab.downsampled.sidekiq_transaction_timings_per_action FROM gitlab."default".sidekiq_transactions GROUP BY time(1m), action END;
CREATE CONTINUOUS QUERY sidekiq_view_timings_per_action_and_view ON gitlab BEGIN SELECT mean("duration") AS duration_mean, percentile("duration", 95) AS duration_95th, percentile("duration", 99) AS duration_99th INTO gitlab.downsampled.sidekiq_view_timings_per_action_and_view FROM gitlab."default".sidekiq_views GROUP BY time(1m), action, view END;
CREATE CONTINUOUS QUERY web_transaction_counts_overall ON gitlab BEGIN SELECT count("duration") AS count INTO gitlab.downsampled.web_transaction_counts_overall FROM gitlab."default".rails_transactions GROUP BY time(1m) END;
```

## Import Dashboards
......
...@@ -45,8 +45,8 @@ sudo -u git -H git checkout 8-7-stable-ee ...@@ -45,8 +45,8 @@ sudo -u git -H git checkout 8-7-stable-ee
```bash ```bash
cd /home/git/gitlab-shell cd /home/git/gitlab-shell
sudo -u git -H git fetch --all sudo -u git -H git fetch --all --tags
sudo -u git -H git checkout v2.7.0 sudo -u git -H git checkout v2.7.2
``` ```
### 5. Update gitlab-workhorse ### 5. Update gitlab-workhorse
......
# Updating GitLab
Depending on the installation method and your GitLab version, there are multiple
update guides.
There are currently 3 official ways to install GitLab:
- Omnibus packages
- Source installation
- Docker installation
Based on your installation, choose a section below that fits your needs.
---
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)*
- [Omnibus Packages](#omnibus-packages)
- [Installation from source](#installation-from-source)
- [Installation using Docker](#installation-using-docker)
- [Upgrading between editions](#upgrading-between-editions)
- [Community to Enterprise Edition](#community-to-enterprise-edition)
- [Enterprise to Community Edition](#enterprise-to-community-edition)
- [Miscellaneous](#miscellaneous)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
## Omnibus Packages

- The [Omnibus update guide](http://doc.gitlab.com/omnibus/update/README.html)
  contains the steps needed to update an Omnibus GitLab package.
## Installation from source

- [Upgrading Community Edition from source][source-ce] - The individual
  upgrade guides are for those who have installed GitLab CE from source.
- [Upgrading Enterprise Edition from source][source-ee] - The individual
  upgrade guides are for those who have installed GitLab EE from source.
- [Patch versions](patch_versions.md) guide includes the steps needed for a
  patch version, e.g. 6.2.0 to 6.2.1, and applies to both Community and
  Enterprise Editions.
## Installation using Docker
GitLab provides official Docker images for both Community and Enterprise
editions. They are based on the Omnibus package and instructions on how to
update them are in [a separate document][omnidocker].
## Upgrading between editions
GitLab comes in two flavors: [Community Edition][ce] which is MIT licensed,
and [Enterprise Edition][ee] which builds on top of the Community Edition and
includes extra features mainly aimed at organizations with more than 100 users.
Below you can find some guides to help you change editions easily.
### Community to Enterprise Edition
>**Note:**
The following guides are for subscribers of the Enterprise Edition only.
If you wish to upgrade your GitLab installation from Community to Enterprise
Edition, follow the guides below based on the installation method:
- [Source CE to EE update guides][source-ee] - Find your version, and follow the
`-ce-to-ee.md` guide. The steps are very similar to a version upgrade: stop
the server, get the code, update config files for the new functionality,
install libraries and do migrations, update the init script, start the
application and check its status.
- [Omnibus CE to EE][omni-ce-ee] - Follow this guide to update your Omnibus
GitLab Community Edition to the Enterprise Edition.
### Enterprise to Community Edition
If you need to downgrade your Enterprise Edition installation back to Community
Edition, you can follow [this guide][ee-ce] to make the process as smooth as
possible.
## Miscellaneous

- [MySQL to PostgreSQL](mysql_to_postgresql.md) guides you through migrating
  your database from MySQL to PostgreSQL.
- [MySQL installation guide](../install/database_mysql.md) contains additional
  information about configuring GitLab to work with a MySQL database.
- [Restoring from backup after a failed upgrade](restore_after_failure.md)
[omnidocker]: http://doc.gitlab.com/omnibus/docker/README.html
[source-ee]: https://gitlab.com/gitlab-org/gitlab-ee/tree/master/doc/update
[source-ce]: https://gitlab.com/gitlab-org/gitlab-ce/tree/master/doc/update
[ee-ce]: ../downgrade_ee_to_ce/README.md
[ce]: https://about.gitlab.com/features/#community
[ee]: https://about.gitlab.com/features/#enterprise
[omni-ce-ee]: http://doc.gitlab.com/omnibus/update/README.html#from-community-edition-to-enterprise-edition
# Cherry-pick changes

>**Note:**
This feature was [introduced][ce-3514] in GitLab 8.7.

---
......
# Import your project from GitHub to GitLab

>**Note:**
In order to enable the GitHub import setting, you should first
enable the [GitHub integration][gh-import] in your GitLab instance.

In its current state, the GitHub importer can import:
...@@ -10,10 +11,13 @@ At its current state, GitHub importer can import: ...@@ -10,10 +11,13 @@ At its current state, GitHub importer can import:
- the issues (introduced in GitLab 7.7)
- the pull requests (introduced in GitLab 8.4)
- the wiki pages (introduced in GitLab 8.4)
- the milestones (introduced in GitLab 8.7)
- the labels (introduced in GitLab 8.7)
With GitLab 8.7+, references to pull requests and issues are preserved.

It is not yet possible to import your cross-repository pull requests (those from
forks). We are working on improving this in the near future.

The importer page is visible when you [create a new project][new-project].
Click on the **GitHub** link and you will be redirected to GitHub for
......
...@@ -103,10 +103,10 @@ module API ...@@ -103,10 +103,10 @@ module API
required_attributes! [:hook_id] required_attributes! [:hook_id]
begin begin
@hook = ProjectHook.find(params[:hook_id]) @hook = user_project.hooks.destroy(params[:hook_id])
@hook.destroy
rescue rescue
# ProjectHook can raise Error if hook_id not found # ProjectHook can raise Error if hook_id not found
not_found!("Error deleting hook #{params[:hook_id]}")
end end
end end
end end
......
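The change above resolves the hook through `user_project.hooks` instead of a global `ProjectHook.find`, so a hook id belonging to another project now surfaces as a 404 rather than being deleted. A toy, framework-free sketch of that access rule (the `Project` struct and `delete_hook!` helper below are illustrative only, not part of the GitLab API):

```ruby
# Resolve the hook only inside the calling project's own collection and
# turn a miss into "not found", instead of deleting by global id.
Project = Struct.new(:id, :hook_ids)

def delete_hook!(project, hook_id)
  unless project.hook_ids.include?(hook_id)
    raise KeyError, "404: hook #{hook_id} not found for project #{project.id}"
  end

  project.hook_ids.delete(hook_id)
  :deleted
end

mine   = Project.new(1, [10, 11])
theirs = Project.new(2, [42])

p delete_hook!(mine, 10)                    # => :deleted

begin
  delete_hook!(mine, theirs.hook_ids.first) # hook 42 belongs to another project
rescue KeyError => e
  puts e.message                            # => 404: hook 42 not found for project 1
end
```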
module Banzai module Banzai
module Filter module Filter
# HTML Filter to add a `rel="nofollow"` attribute to external links # HTML Filter to modify the attributes of external links
#
class ExternalLinkFilter < HTML::Pipeline::Filter class ExternalLinkFilter < HTML::Pipeline::Filter
def call def call
doc.search('a').each do |node| doc.search('a').each do |node|
...@@ -15,7 +14,7 @@ module Banzai ...@@ -15,7 +14,7 @@ module Banzai
# Skip internal links # Skip internal links
next if link.start_with?(internal_url) next if link.start_with?(internal_url)
node.set_attribute('rel', 'nofollow') node.set_attribute('rel', 'nofollow noreferrer')
end end
doc doc
......
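Outside the full Banzai pipeline, the effect of this filter can be reproduced with plain Nokogiri. A rough standalone sketch, with a made-up internal URL and markup, assuming only that external links should gain `rel="nofollow noreferrer"` while links to the instance itself are skipped:

```ruby
require 'nokogiri'

internal_url = 'https://gitlab.example.com'
html = '<a href="https://google.com/">Google</a>' \
       '<a href="https://gitlab.example.com/group/project">Internal</a>'

doc = Nokogiri::HTML.fragment(html)

doc.css('a').each do |node|
  link = node['href']
  next unless link
  # Skip internal links
  next if link.start_with?(internal_url)

  node.set_attribute('rel', 'nofollow noreferrer')
end

puts doc.to_html
# <a href="https://google.com/" rel="nofollow noreferrer">Google</a>
# <a href="https://gitlab.example.com/group/project">Internal</a>
```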
...@@ -61,23 +61,21 @@ module Ci ...@@ -61,23 +61,21 @@ module Ci
@stages = @config[:stages] || @config[:types] @stages = @config[:stages] || @config[:types]
@variables = @config[:variables] || {} @variables = @config[:variables] || {}
@cache = @config[:cache] @cache = @config[:cache]
@jobs = {}
@config.except!(*ALLOWED_YAML_KEYS) @config.except!(*ALLOWED_YAML_KEYS)
@config.each { |name, param| add_job(name, param) }
# anything that doesn't have script is considered as unknown raise ValidationError, "Please define at least one job" if @jobs.none?
@config.each do |name, param|
raise ValidationError, "Unknown parameter: #{name}" unless param.is_a?(Hash) && param.has_key?(:script)
end end
unless @config.values.any?{|job| job.is_a?(Hash)} def add_job(name, job)
raise ValidationError, "Please define at least one job" return if name.to_s.start_with?('.')
end
raise ValidationError, "Unknown parameter: #{name}" unless job.is_a?(Hash) && job.has_key?(:script)
@jobs = {}
@config.each do |key, job|
next if key.to_s.start_with?('.')
stage = job[:stage] || job[:type] || DEFAULT_STAGE stage = job[:stage] || job[:type] || DEFAULT_STAGE
@jobs[key] = { stage: stage }.merge(job) @jobs[name] = { stage: stage }.merge(job)
end
end end
def build_job(name, job) def build_job(name, job)
...@@ -112,8 +110,6 @@ module Ci ...@@ -112,8 +110,6 @@ module Ci
true true
end end
private
def validate_global! def validate_global!
unless validate_array_of_strings(@before_script) unless validate_array_of_strings(@before_script)
raise ValidationError, "before_script should be an array of strings" raise ValidationError, "before_script should be an array of strings"
......
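The refactoring above moves per-job handling into `add_job`, which skips hidden jobs (names starting with a dot) before any validation, so a hidden template without a `script` key no longer breaks parsing. A condensed standalone sketch of that flow (the method name `collect_jobs` and the plain string errors are simplifications, not the exact `Ci::GitlabCiYamlProcessor` API):

```ruby
DEFAULT_STAGE = 'test'

# Hidden jobs ('.name') are skipped before validation; every remaining job
# must be a Hash with a :script key, and at least one visible job must exist.
def collect_jobs(config)
  jobs = {}

  config.each do |name, job|
    next if name.to_s.start_with?('.')

    unless job.is_a?(Hash) && job.has_key?(:script)
      raise "Unknown parameter: #{name}"
    end

    stage = job[:stage] || job[:type] || DEFAULT_STAGE
    jobs[name] = { stage: stage }.merge(job)
  end

  raise 'Please define at least one job' if jobs.none?

  jobs
end

config = {
  '.hidden_job' => { image: 'ruby:2.1' },          # template without :script is now accepted
  'rspec'       => { script: 'bundle exec rspec' }
}

p collect_jobs(config)
# => {"rspec"=>{:stage=>"test", :script=>"bundle exec rspec"}}
```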
...@@ -300,14 +300,6 @@ describe Projects::MergeRequestsController do ...@@ -300,14 +300,6 @@ describe Projects::MergeRequestsController do
expect(response.cookies['diff_view']).to eq('parallel') expect(response.cookies['diff_view']).to eq('parallel')
end end
it 'assigns :view param based on cookie' do
request.cookies['diff_view'] = 'parallel'
go
expect(controller.params[:view]).to eq 'parallel'
end
end end
describe 'GET commits' do describe 'GET commits' do
......
require 'spec_helper'
describe "Dashboard > User filters projects", feature: true do
describe 'filtering personal projects' do
before do
user = create(:user)
project = create(:project, name: "Victorialand", namespace: user.namespace)
project.team << [user, :master]
user2 = create(:user)
project2 = create(:project, name: "Treasure", namespace: user2.namespace)
project2.team << [user, :developer]
login_as(user)
visit dashboard_projects_path
end
it 'filters by projects "Owned by me"' do
click_link "Owned by me"
expect(page).to have_css('.is-active', text: 'Owned by me')
expect(page).to have_content('Victorialand')
expect(page).not_to have_content('Treasure')
end
end
end
require 'rails_helper'
feature 'Issue Sidebar', feature: true do
let(:project) { create(:project) }
let(:issue) { create(:issue, project: project) }
let!(:user) { create(:user)}
before do
create(:label, project: project, title: 'bug')
login_as(user)
end
context 'as an allowed user' do
before do
project.team << [user, :developer]
visit_issue(project, issue)
end
describe 'when clicking on edit labels', js: true do
it 'dropdown has an option to create a new label' do
find('.block.labels .edit-link').click
page.within('.block.labels') do
expect(page).to have_content 'Create new'
end
end
end
context 'creating a new label', js: true do
it 'option to create a new label is present' do
page.within('.block.labels') do
find('.edit-link').click
expect(page).to have_content 'Create new'
end
end
it 'dropdown switches to "create label" section' do
page.within('.block.labels') do
find('.edit-link').click
click_link 'Create new'
expect(page).to have_content 'Create new label'
end
end
it 'new label is added' do
page.within('.block.labels') do
find('.edit-link').click
sleep 1
click_link 'Create new'
fill_in 'new_label_name', with: 'wontfix'
page.find(".suggest-colors a", match: :first).click
click_button 'Create'
page.within('.dropdown-page-one') do
expect(page).to have_content 'wontfix'
end
end
end
end
end
context 'as a guest' do
before do
project.team << [user, :guest]
visit_issue(project, issue)
end
it 'does not have an option to edit labels' do
expect(page).not_to have_selector('.block.labels .edit-link')
end
end
def visit_issue(project, issue)
visit namespace_project_issue_path(project.namespace, project, issue)
end
end
...@@ -178,6 +178,19 @@ describe 'Issues', feature: true do ...@@ -178,6 +178,19 @@ describe 'Issues', feature: true do
expect(first_issue).to include('foo') expect(first_issue).to include('foo')
end end
context 'with a filter on labels' do
let(:label) { create(:label, project: project) }
before { create(:label_link, label: label, target: foo) }
it 'sorts by least recently due date by excluding nil due dates' do
bar.update(due_date: nil)
visit namespace_project_issues_path(project.namespace, project, label_names: [label.name], sort: sort_value_due_date_later)
expect(first_issue).to include('foo')
end
end
end end
describe 'filtering by due date' do describe 'filtering by due date' do
...@@ -304,6 +317,27 @@ describe 'Issues', feature: true do ...@@ -304,6 +317,27 @@ describe 'Issues', feature: true do
expect(issue.reload.assignee).to be_nil expect(issue.reload.assignee).to be_nil
end end
it 'allows user to select an assignee', js: true do
issue2 = create(:issue, project: project, author: @user)
visit namespace_project_issue_path(project.namespace, project, issue2)
page.within('.assignee') do
expect(page).to have_content "No assignee"
end
page.within '.assignee' do
click_link 'Edit'
end
page.within '.dropdown-menu-user' do
click_link @user.name
end
page.within('.assignee') do
expect(page).to have_content @user.name
end
end
end end
context 'by unauthorized user' do context 'by unauthorized user' do
......
...@@ -165,7 +165,12 @@ describe 'GitLab Markdown', feature: true do ...@@ -165,7 +165,12 @@ describe 'GitLab Markdown', feature: true do
describe 'ExternalLinkFilter' do describe 'ExternalLinkFilter' do
it 'adds nofollow to external link' do it 'adds nofollow to external link' do
link = doc.at_css('a:contains("Google")') link = doc.at_css('a:contains("Google")')
expect(link.attr('rel')).to match 'nofollow' expect(link.attr('rel')).to include('nofollow')
end
it 'adds noreferrer to external link' do
link = doc.at_css('a:contains("Google")')
expect(link.attr('rel')).to include('noreferrer')
end end
it 'ignores internal link' do it 'ignores internal link' do
......
require 'spec_helper'
feature 'Projects > Members > Anonymous user sees members', feature: true do
let(:user) { create(:user) }
let(:group) { create(:group, :public) }
let(:project) { create(:empty_project, :public) }
background do
project.team << [user, :master]
create(:project_group_link, project: project, group: group)
end
scenario "anonymous user visits the project's members page and sees the list of members" do
visit namespace_project_project_members_path(project.namespace, project)
expect(current_path).to eq(
namespace_project_project_members_path(project.namespace, project))
expect(page).to have_content(user.name)
end
end
...@@ -13,8 +13,8 @@ feature 'Signup', feature: true do ...@@ -13,8 +13,8 @@ feature 'Signup', feature: true do
fill_in 'user_password_sign_up', with: user.password fill_in 'user_password_sign_up', with: user.password
click_button "Sign up" click_button "Sign up"
expect(current_path).to eq user_session_path expect(current_path).to eq users_almost_there_path
expect(page).to have_content("A message with a confirmation link has been sent to your email address.") expect(page).to have_content("Please check your email to confirm your account")
end end
end end
......
require 'spec_helper' require 'spec_helper'
describe 'Dashboard Todos', feature: true do describe 'Dashboard Todos', feature: true do
let(:user){ create(:user) } let(:user) { create(:user) }
let(:author){ create(:user) } let(:author) { create(:user) }
let(:project){ create(:project) } let(:project) { create(:project) }
let(:issue){ create(:issue) } let(:issue) { create(:issue) }
let(:todos_per_page){ Todo.default_per_page }
let(:todos_total){ todos_per_page + 1 }
describe 'GET /dashboard/todos' do describe 'GET /dashboard/todos' do
context 'User does not have todos' do context 'User does not have todos' do
...@@ -46,31 +44,35 @@ describe 'Dashboard Todos', feature: true do ...@@ -46,31 +44,35 @@ describe 'Dashboard Todos', feature: true do
end end
context 'User has multiple pages of Todos' do context 'User has multiple pages of Todos' do
let(:todo_total_pages){ (todos_total.to_f/todos_per_page).ceil }
before do before do
todos_total.times do allow(Todo).to receive(:default_per_page).and_return(1)
create(:todo, :mentioned, user: user, project: project, target: issue, author: author)
end # Create just enough records to cause us to paginate
create_list(:todo, 2, :mentioned, user: user, project: project, target: issue, author: author)
login_as(user) login_as(user)
visit dashboard_todos_path
end end
it 'is paginated' do it 'is paginated' do
visit dashboard_todos_path
expect(page).to have_selector('.gl-pagination') expect(page).to have_selector('.gl-pagination')
end end
it 'has the right number of pages' do
expect(page).to have_selector('.gl-pagination .page', count: todo_total_pages) visit dashboard_todos_path
expect(page).to have_selector('.gl-pagination .page', count: 2)
end end
describe 'deleting last todo from last page', js: true do describe 'completing last todo from last page', js: true do
it 'redirects to the previous page' do it 'redirects to the previous page' do
page.within('.gl-pagination') do visit dashboard_todos_path(page: 2)
click_link todo_total_pages.to_s expect(page).to have_content(Todo.first.body)
end
first('.done-todo').click click_link('Done')
expect(current_path).to eq dashboard_todos_path
expect(page).to have_content(Todo.last.body) expect(page).to have_content(Todo.last.body)
end end
end end
......
...@@ -11,6 +11,26 @@ describe DiffHelper do ...@@ -11,6 +11,26 @@ describe DiffHelper do
let(:diff_refs) { [commit.parent, commit] } let(:diff_refs) { [commit.parent, commit] }
let(:diff_file) { Gitlab::Diff::File.new(diff, diff_refs) } let(:diff_file) { Gitlab::Diff::File.new(diff, diff_refs) }
describe 'diff_view' do
it 'returns a valid value when cookie is set' do
helper.request.cookies[:diff_view] = 'parallel'
expect(helper.diff_view).to eq 'parallel'
end
it 'returns a default value when cookie is invalid' do
helper.request.cookies[:diff_view] = 'invalid'
expect(helper.diff_view).to eq 'inline'
end
it 'returns a default value when cookie is nil' do
expect(helper.request.cookies).to be_empty
expect(helper.diff_view).to eq 'inline'
end
end
describe 'diff_hard_limit_enabled?' do describe 'diff_hard_limit_enabled?' do
it 'should return true if param is provided' do it 'should return true if param is provided' do
allow(controller).to receive(:params) { { force_show_diff: true } } allow(controller).to receive(:params) { { force_show_diff: true } }
......
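For context, a helper consistent with these expectations could look roughly like the sketch below. This is a hypothetical simplification (taking the cookie jar as an argument), not the actual `DiffHelper#diff_view` implementation:

```ruby
# Fall back to 'inline' whenever the cookie is missing or not one of the
# two known view modes.
def diff_view(cookies)
  view = cookies[:diff_view].to_s
  %w(inline parallel).include?(view) ? view : 'inline'
end

p diff_view({ diff_view: 'parallel' }) # => "parallel"
p diff_view({ diff_view: 'invalid' })  # => "inline"
p diff_view({})                        # => "inline"
```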
...@@ -105,4 +105,30 @@ describe ProjectsHelper do ...@@ -105,4 +105,30 @@ describe ProjectsHelper do
end end
end end
end end
describe '#license_short_name' do
let(:project) { create(:project) }
context 'when project.repository has a license_key' do
it 'returns the nickname of the license if present' do
allow(project.repository).to receive(:license_key).and_return('agpl-3.0')
expect(helper.license_short_name(project)).to eq('GNU AGPLv3')
end
it 'returns the name of the license if nickname is not present' do
allow(project.repository).to receive(:license_key).and_return('mit')
expect(helper.license_short_name(project)).to eq('MIT License')
end
end
context 'when project.repository has no license_key but a license_blob' do
it 'returns LICENSE' do
allow(project.repository).to receive(:license_key).and_return(nil)
expect(helper.license_short_name(project)).to eq('LICENSE')
end
end
end
end end
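A hypothetical helper consistent with these expectations, built on the `licensee` gem that the surrounding specs already use, and simplified to take a license key instead of a project:

```ruby
require 'licensee'

# Prefer the license nickname, fall back to its full name, and return the
# literal 'LICENSE' when no license key was detected.
def license_short_name(license_key)
  return 'LICENSE' unless license_key

  license = Licensee::License.new(license_key)
  license.nickname || license.name
end

p license_short_name('agpl-3.0') # => "GNU AGPLv3"
p license_short_name('mit')      # => "MIT License"
p license_short_name(nil)        # => "LICENSE"
```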
...@@ -24,6 +24,14 @@ describe Banzai::Filter::ExternalLinkFilter, lib: true do ...@@ -24,6 +24,14 @@ describe Banzai::Filter::ExternalLinkFilter, lib: true do
doc = filter(act) doc = filter(act)
expect(doc.at_css('a')).to have_attribute('rel') expect(doc.at_css('a')).to have_attribute('rel')
expect(doc.at_css('a')['rel']).to eq 'nofollow' expect(doc.at_css('a')['rel']).to include 'nofollow'
end
it 'adds rel="noreferrer" to external links' do
act = %q(<a href="https://google.com/">Google</a>)
doc = filter(act)
expect(doc.at_css('a')).to have_attribute('rel')
expect(doc.at_css('a')['rel']).to include 'noreferrer'
end end
end end
...@@ -648,18 +648,11 @@ module Ci ...@@ -648,18 +648,11 @@ module Ci
end end
describe "Hidden jobs" do describe "Hidden jobs" do
let(:config) do
YAML.dump({
'.hidden_job' => { script: 'test' },
'normal_job' => { script: 'test' }
})
end
let(:config_processor) { GitlabCiYamlProcessor.new(config) } let(:config_processor) { GitlabCiYamlProcessor.new(config) }
subject { config_processor.builds_for_stage_and_ref("test", "master") } subject { config_processor.builds_for_stage_and_ref("test", "master") }
it "doesn't create jobs that starts with dot" do shared_examples 'hidden_job_handling' do
it "doesn't create jobs that start with dot" do
expect(subject.size).to eq(1) expect(subject.size).to eq(1)
expect(subject.first).to eq({ expect(subject.first).to eq({
except: nil, except: nil,
...@@ -676,22 +669,40 @@ module Ci ...@@ -676,22 +669,40 @@ module Ci
end end
end end
describe "YAML Alias/Anchor" do context 'when hidden job have a script definition' do
it "is correctly supported for jobs" do let(:config) do
config = <<EOT YAML.dump({
job1: &JOBTMPL '.hidden_job' => { image: 'ruby:2.1', script: 'test' },
script: execute-script-for-job 'normal_job' => { script: 'test' }
})
end
job2: *JOBTMPL it_behaves_like 'hidden_job_handling'
EOT end
config_processor = GitlabCiYamlProcessor.new(config) context "when hidden job doesn't have a script definition" do
let(:config) do
YAML.dump({
'.hidden_job' => { image: 'ruby:2.1' },
'normal_job' => { script: 'test' }
})
end
expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(2) it_behaves_like 'hidden_job_handling'
expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({ end
end
describe "YAML Alias/Anchor" do
let(:config_processor) { GitlabCiYamlProcessor.new(config) }
subject { config_processor.builds_for_stage_and_ref("build", "master") }
shared_examples 'job_templates_handling' do
it "is correctly supported for jobs" do
expect(subject.size).to eq(2)
expect(subject.first).to eq({
except: nil, except: nil,
stage: "test", stage: "build",
stage_idx: 1, stage_idx: 0,
name: :job1, name: :job1,
only: nil, only: nil,
commands: "execute-script-for-job", commands: "execute-script-for-job",
...@@ -700,10 +711,10 @@ EOT ...@@ -700,10 +711,10 @@ EOT
when: "on_success", when: "on_success",
allow_failure: false allow_failure: false
}) })
expect(config_processor.builds_for_stage_and_ref("test", "master").second).to eq({ expect(subject.second).to eq({
except: nil, except: nil,
stage: "test", stage: "build",
stage_idx: 1, stage_idx: 0,
name: :job2, name: :job2,
only: nil, only: nil,
commands: "execute-script-for-job", commands: "execute-script-for-job",
...@@ -715,6 +726,56 @@ EOT ...@@ -715,6 +726,56 @@ EOT
end end
end end
context 'when template is a job' do
let(:config) do
<<EOT
job1: &JOBTMPL
stage: build
script: execute-script-for-job
job2: *JOBTMPL
EOT
end
it_behaves_like 'job_templates_handling'
end
context 'when template is a hidden job' do
let(:config) do
<<EOT
.template: &JOBTMPL
stage: build
script: execute-script-for-job
job1: *JOBTMPL
job2: *JOBTMPL
EOT
end
it_behaves_like 'job_templates_handling'
end
context 'when job adds its own keys to a template definition' do
let(:config) do
<<EOT
.template: &JOBTMPL
stage: build
job1:
<<: *JOBTMPL
script: execute-script-for-job
job2:
<<: *JOBTMPL
script: execute-script-for-job
EOT
end
it_behaves_like 'job_templates_handling'
end
end
describe "Error handling" do describe "Error handling" do
it "fails to parse YAML" do it "fails to parse YAML" do
expect{GitlabCiYamlProcessor.new("invalid: yaml: test")}.to raise_error(Psych::SyntaxError) expect{GitlabCiYamlProcessor.new("invalid: yaml: test")}.to raise_error(Psych::SyntaxError)
......
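The anchor, alias, and merge-key behaviour these shared examples exercise is plain YAML. A quick standalone illustration of a hidden job used as a template via merge keys, mirroring the last context above (with the Ruby/Psych versions of this era, `YAML.load` resolves anchors and merge keys directly):

```ruby
require 'yaml'

config = YAML.load(<<-EOT)
.template: &JOBTMPL
  stage: build

job1:
  <<: *JOBTMPL
  script: execute-script-for-job

job2:
  <<: *JOBTMPL
  script: execute-script-for-job
EOT

p config['job1'] # => {"stage"=>"build", "script"=>"execute-script-for-job"}
p config['job2'] # => {"stage"=>"build", "script"=>"execute-script-for-job"}
```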
...@@ -719,11 +719,8 @@ describe Project, models: true do ...@@ -719,11 +719,8 @@ describe Project, models: true do
with('foo.wiki', project). with('foo.wiki', project).
and_return(wiki) and_return(wiki)
expect(repo).to receive(:expire_cache) expect(repo).to receive(:before_delete)
expect(repo).to receive(:expire_emptiness_caches) expect(wiki).to receive(:before_delete)
expect(wiki).to receive(:expire_cache)
expect(wiki).to receive(:expire_emptiness_caches)
project.expire_caches_before_rename('foo') project.expire_caches_before_rename('foo')
end end
......
...@@ -132,7 +132,6 @@ describe Repository, models: true do ...@@ -132,7 +132,6 @@ describe Repository, models: true do
it { expect(subject.basename).to eq('a/b/c') } it { expect(subject.basename).to eq('a/b/c') }
end end
end end
end end
describe '#license_blob' do describe '#license_blob' do
...@@ -148,39 +147,18 @@ describe Repository, models: true do ...@@ -148,39 +147,18 @@ describe Repository, models: true do
expect(repository.license_blob).to be_nil expect(repository.license_blob).to be_nil
end end
it 'favors license file with no extension' do it 'detects license file with no recognizable open-source license content' do
repository.commit_file(user, 'LICENSE', Licensee::License.new('mit').content, 'Add LICENSE', 'master', false) repository.commit_file(user, 'LICENSE', 'Copyright!', 'Add LICENSE', 'master', false)
repository.commit_file(user, 'LICENSE.md', Licensee::License.new('mit').content, 'Add LICENSE.md', 'master', false)
expect(repository.license_blob.name).to eq('LICENSE') expect(repository.license_blob.name).to eq('LICENSE')
end end
it 'favors .md file to .txt' do %w[LICENSE LICENCE LiCensE LICENSE.md LICENSE.foo COPYING COPYING.md].each do |filename|
repository.commit_file(user, 'LICENSE.md', Licensee::License.new('mit').content, 'Add LICENSE.md', 'master', false) it "detects '#{filename}'" do
repository.commit_file(user, 'LICENSE.txt', Licensee::License.new('mit').content, 'Add LICENSE.txt', 'master', false) repository.commit_file(user, filename, Licensee::License.new('mit').content, "Add #{filename}", 'master', false)
expect(repository.license_blob.name).to eq('LICENSE.md')
end
it 'favors LICENCE to LICENSE' do
repository.commit_file(user, 'LICENSE', Licensee::License.new('mit').content, 'Add LICENSE', 'master', false)
repository.commit_file(user, 'LICENCE', Licensee::License.new('mit').content, 'Add LICENCE', 'master', false)
expect(repository.license_blob.name).to eq('LICENCE')
end
it 'favors LICENSE to COPYING' do
repository.commit_file(user, 'LICENSE', Licensee::License.new('mit').content, 'Add LICENSE', 'master', false)
repository.commit_file(user, 'COPYING', Licensee::License.new('mit').content, 'Add COPYING', 'master', false)
expect(repository.license_blob.name).to eq('LICENSE') expect(repository.license_blob.name).to eq(filename)
end end
it 'favors LICENCE to COPYING' do
repository.commit_file(user, 'LICENCE', Licensee::License.new('mit').content, 'Add LICENCE', 'master', false)
repository.commit_file(user, 'COPYING', Licensee::License.new('mit').content, 'Add COPYING', 'master', false)
expect(repository.license_blob.name).to eq('LICENCE')
end end
end end
...@@ -190,8 +168,14 @@ describe Repository, models: true do ...@@ -190,8 +168,14 @@ describe Repository, models: true do
repository.remove_file(user, 'LICENSE', 'Remove LICENSE', 'master') repository.remove_file(user, 'LICENSE', 'Remove LICENSE', 'master')
end end
it 'returns "no-license" when no license is detected' do it 'returns nil when no license is detected' do
expect(repository.license_key).to eq('no-license') expect(repository.license_key).to be_nil
end
it 'detects license file with no recognizable open-source license content' do
repository.commit_file(user, 'LICENSE', 'Copyright!', 'Add LICENSE', 'master', false)
expect(repository.license_key).to be_nil
end end
it 'returns the license key' do it 'returns the license key' do
......
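The filenames exercised above suggest detection goes by file name rather than by recognisable license content. A rough, illustrative pattern in that spirit; this is not the actual detection code (which also asks Licensee for the license key), just a sketch of the naming rule the spec implies:

```ruby
# Match LICENSE/LICENCE/COPYING in any case, with or without an extension,
# so a license file is still found when its content is not a recognisable
# open-source license (in which case license_key stays nil).
LICENSE_FILENAME = /\A(licen[sc]e|copying)(\..+)?\z/i

%w[LICENSE LICENCE LiCensE LICENSE.md LICENSE.foo COPYING COPYING.md README.md CHANGELOG].each do |name|
  detected = (name =~ LICENSE_FILENAME) ? 'license file' : 'skipped'
  puts format('%-12s %s', name, detected)
end
```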
...@@ -148,14 +148,24 @@ describe API::API, 'ProjectHooks', api: true do ...@@ -148,14 +148,24 @@ describe API::API, 'ProjectHooks', api: true do
expect(response.status).to eq(200) expect(response.status).to eq(200)
end end
it "should return success when deleting non existent hook" do it "should return a 404 error when deleting non existent hook" do
delete api("/projects/#{project.id}/hooks/42", user) delete api("/projects/#{project.id}/hooks/42", user)
expect(response.status).to eq(200) expect(response.status).to eq(404)
end end
it "should return a 405 error if hook id not given" do it "should return a 405 error if hook id not given" do
delete api("/projects/#{project.id}/hooks", user) delete api("/projects/#{project.id}/hooks", user)
expect(response.status).to eq(405) expect(response.status).to eq(405)
end end
it "shold return a 404 if a user attempts to delete project hooks he/she does not own" do
test_user = create(:user)
other_project = create(:project)
other_project.team << [test_user, :master]
delete api("/projects/#{other_project.id}/hooks/#{hook.id}", test_user)
expect(response.status).to eq(404)
expect(WebHook.exists?(hook.id)).to be_truthy
end
end end
end end
...@@ -100,7 +100,7 @@ describe Issues::BulkUpdateService, services: true do ...@@ -100,7 +100,7 @@ describe Issues::BulkUpdateService, services: true do
describe :update_milestone do describe :update_milestone do
before do before do
@milestone = create :milestone @milestone = create(:milestone, project: @project)
@params = { @params = {
issues_ids: [issue.id], issues_ids: [issue.id],
milestone_id: @milestone.id milestone_id: @milestone.id
......
...@@ -3,40 +3,75 @@ require 'spec_helper' ...@@ -3,40 +3,75 @@ require 'spec_helper'
describe Issues::CreateService, services: true do describe Issues::CreateService, services: true do
let(:project) { create(:empty_project) } let(:project) { create(:empty_project) }
let(:user) { create(:user) } let(:user) { create(:user) }
describe '#execute' do
let(:issue) { described_class.new(project, user, opts).execute }
context 'when params are valid' do
let(:assignee) { create(:user) } let(:assignee) { create(:user) }
let(:milestone) { create(:milestone, project: project) }
let(:labels) { create_pair(:label, project: project) }
describe :execute do
context 'valid params' do
before do before do
project.team << [user, :master] project.team << [user, :master]
project.team << [assignee, :master] project.team << [assignee, :master]
end
opts = { let(:opts) do
title: 'Awesome issue', { title: 'Awesome issue',
description: 'please fix', description: 'please fix',
assignee: assignee assignee: assignee,
} label_ids: labels.map(&:id),
milestone_id: milestone.id }
@issue = Issues::CreateService.new(project, user, opts).execute
end end
it { expect(@issue).to be_valid } it { expect(issue).to be_valid }
it { expect(@issue.title).to eq('Awesome issue') } it { expect(issue.title).to eq('Awesome issue') }
it { expect(@issue.assignee).to eq assignee } it { expect(issue.assignee).to eq assignee }
it { expect(issue.labels).to match_array labels }
it { expect(issue.milestone).to eq milestone }
it 'creates a pending todo for new assignee' do it 'creates a pending todo for new assignee' do
attributes = { attributes = {
project: project, project: project,
author: user, author: user,
user: assignee, user: assignee,
target_id: @issue.id, target_id: issue.id,
target_type: @issue.class.name, target_type: issue.class.name,
action: Todo::ASSIGNED, action: Todo::ASSIGNED,
state: :pending state: :pending
} }
expect(Todo.where(attributes).count).to eq 1 expect(Todo.where(attributes).count).to eq 1
end end
context 'when label belongs to different project' do
let(:label) { create(:label) }
let(:opts) do
{ title: 'Title',
description: 'Description',
label_ids: [label.id] }
end
it 'does not assign label' do
expect(issue.labels).to_not include label
end
end
context 'when milestone belongs to different project' do
let(:milestone) { create(:milestone) }
let(:opts) do
{ title: 'Title',
description: 'Description',
milestone_id: milestone.id }
end
it 'does not assign milestone' do
expect(issue.milestone).to_not eq milestone
end
end
end end
end end
end end
...@@ -4,10 +4,15 @@ describe Issues::UpdateService, services: true do ...@@ -4,10 +4,15 @@ describe Issues::UpdateService, services: true do
let(:user) { create(:user) } let(:user) { create(:user) }
let(:user2) { create(:user) } let(:user2) { create(:user) }
let(:user3) { create(:user) } let(:user3) { create(:user) }
let(:issue) { create(:issue, title: 'Old title', assignee_id: user3.id) } let(:project) { create(:empty_project) }
let(:label) { create(:label) } let(:label) { create(:label, project: project) }
let(:label2) { create(:label) } let(:label2) { create(:label) }
let(:project) { issue.project }
let(:issue) do
create(:issue, title: 'Old title',
assignee_id: user3.id,
project: project)
end
before do before do
project.team << [user, :master] project.team << [user, :master]
......
require 'spec_helper' require 'spec_helper'
describe MergeRequests::UpdateService, services: true do describe MergeRequests::UpdateService, services: true do
let(:project) { create(:project) }
let(:user) { create(:user) } let(:user) { create(:user) }
let(:user2) { create(:user) } let(:user2) { create(:user) }
let(:user3) { create(:user) } let(:user3) { create(:user) }
let(:merge_request) { create(:merge_request, :simple, title: 'Old title', assignee_id: user3.id) } let(:label) { create(:label, project: project) }
let(:project) { merge_request.project }
let(:label) { create(:label) }
let(:label2) { create(:label) } let(:label2) { create(:label) }
let(:merge_request) do
create(:merge_request, :simple, title: 'Old title',
assignee_id: user3.id,
source_project: project)
end
before do before do
project.team << [user, :master] project.team << [user, :master]
project.team << [user2, :developer] project.team << [user2, :developer]
......