Commit 4de9710a authored by Filipa Lacerda's avatar Filipa Lacerda

Merge branch 'master' into fl-more-mr-widget

* master: (30 commits)
  Resolve "Link to Clusters in Auto DevOps instead of Kubernetes service"
  Update CHANGELOG.md for 10.4.1
  Add a gRPC health check to ensure Gitaly is up
  Add formatted_data attribute to Git::WikiPage
  Avoid array indices to fixtures in JS specs
  Migrate .batch_lfs_pointers to Gitaly
  Updates `Revert this merge request` text
  Work around a bug in DatabaseCleaner when using the deletion strategy on MySQL
  Use the DatabaseCleaner 'deletion' strategy instead of 'truncation'
  Workaround a recaptcha pop-up that cannot be tested
  Moves more mr widget components into vue files Adds i18n Adds better test coverage
  Execute system hooks after-commit when executing project hooks
  Remove one Spinach job and add one RSpec job
  Migrate repository bundling to Gitaly
  Use limit for search count queries
  Fix offense in runners settings QA page object class
  Wait for runner until it registers itself in QA tests
  Fix static-analysis offenses in QA support class
  Add specific views / selectors for QA runners page
  Add views / selectors for pipeline show page object
  ...
parents 8f6f4216 dcb79741
@@ -322,69 +322,69 @@ setup-test-env:
   paths:
     - tmp/tests

-rspec-pg 0 26: *rspec-metadata-pg
-rspec-pg 1 26: *rspec-metadata-pg
-rspec-pg 2 26: *rspec-metadata-pg
-rspec-pg 3 26: *rspec-metadata-pg
-rspec-pg 4 26: *rspec-metadata-pg
-rspec-pg 5 26: *rspec-metadata-pg
-rspec-pg 6 26: *rspec-metadata-pg
-rspec-pg 7 26: *rspec-metadata-pg
-rspec-pg 8 26: *rspec-metadata-pg
-rspec-pg 9 26: *rspec-metadata-pg
-rspec-pg 10 26: *rspec-metadata-pg
-rspec-pg 11 26: *rspec-metadata-pg
-rspec-pg 12 26: *rspec-metadata-pg
-rspec-pg 13 26: *rspec-metadata-pg
-rspec-pg 14 26: *rspec-metadata-pg
-rspec-pg 15 26: *rspec-metadata-pg
-rspec-pg 16 26: *rspec-metadata-pg
-rspec-pg 17 26: *rspec-metadata-pg
-rspec-pg 18 26: *rspec-metadata-pg
-rspec-pg 19 26: *rspec-metadata-pg
-rspec-pg 20 26: *rspec-metadata-pg
-rspec-pg 21 26: *rspec-metadata-pg
-rspec-pg 22 26: *rspec-metadata-pg
-rspec-pg 23 26: *rspec-metadata-pg
-rspec-pg 24 26: *rspec-metadata-pg
-rspec-pg 25 26: *rspec-metadata-pg
+rspec-pg 0 27: *rspec-metadata-pg
+rspec-pg 1 27: *rspec-metadata-pg
+rspec-pg 2 27: *rspec-metadata-pg
+rspec-pg 3 27: *rspec-metadata-pg
+rspec-pg 4 27: *rspec-metadata-pg
+rspec-pg 5 27: *rspec-metadata-pg
+rspec-pg 6 27: *rspec-metadata-pg
+rspec-pg 7 27: *rspec-metadata-pg
+rspec-pg 8 27: *rspec-metadata-pg
+rspec-pg 9 27: *rspec-metadata-pg
+rspec-pg 10 27: *rspec-metadata-pg
+rspec-pg 11 27: *rspec-metadata-pg
+rspec-pg 12 27: *rspec-metadata-pg
+rspec-pg 13 27: *rspec-metadata-pg
+rspec-pg 14 27: *rspec-metadata-pg
+rspec-pg 15 27: *rspec-metadata-pg
+rspec-pg 16 27: *rspec-metadata-pg
+rspec-pg 17 27: *rspec-metadata-pg
+rspec-pg 18 27: *rspec-metadata-pg
+rspec-pg 19 27: *rspec-metadata-pg
+rspec-pg 20 27: *rspec-metadata-pg
+rspec-pg 21 27: *rspec-metadata-pg
+rspec-pg 22 27: *rspec-metadata-pg
+rspec-pg 23 27: *rspec-metadata-pg
+rspec-pg 24 27: *rspec-metadata-pg
+rspec-pg 25 27: *rspec-metadata-pg
+rspec-pg 26 27: *rspec-metadata-pg
-rspec-mysql 0 26: *rspec-metadata-mysql
-rspec-mysql 1 26: *rspec-metadata-mysql
-rspec-mysql 2 26: *rspec-metadata-mysql
-rspec-mysql 3 26: *rspec-metadata-mysql
-rspec-mysql 4 26: *rspec-metadata-mysql
-rspec-mysql 5 26: *rspec-metadata-mysql
-rspec-mysql 6 26: *rspec-metadata-mysql
-rspec-mysql 7 26: *rspec-metadata-mysql
-rspec-mysql 8 26: *rspec-metadata-mysql
-rspec-mysql 9 26: *rspec-metadata-mysql
-rspec-mysql 10 26: *rspec-metadata-mysql
-rspec-mysql 11 26: *rspec-metadata-mysql
-rspec-mysql 12 26: *rspec-metadata-mysql
-rspec-mysql 13 26: *rspec-metadata-mysql
-rspec-mysql 14 26: *rspec-metadata-mysql
-rspec-mysql 15 26: *rspec-metadata-mysql
-rspec-mysql 16 26: *rspec-metadata-mysql
-rspec-mysql 17 26: *rspec-metadata-mysql
-rspec-mysql 18 26: *rspec-metadata-mysql
-rspec-mysql 19 26: *rspec-metadata-mysql
-rspec-mysql 20 26: *rspec-metadata-mysql
-rspec-mysql 21 26: *rspec-metadata-mysql
-rspec-mysql 22 26: *rspec-metadata-mysql
-rspec-mysql 23 26: *rspec-metadata-mysql
-rspec-mysql 24 26: *rspec-metadata-mysql
-rspec-mysql 25 26: *rspec-metadata-mysql
+rspec-mysql 0 27: *rspec-metadata-mysql
+rspec-mysql 1 27: *rspec-metadata-mysql
+rspec-mysql 2 27: *rspec-metadata-mysql
+rspec-mysql 3 27: *rspec-metadata-mysql
+rspec-mysql 4 27: *rspec-metadata-mysql
+rspec-mysql 5 27: *rspec-metadata-mysql
+rspec-mysql 6 27: *rspec-metadata-mysql
+rspec-mysql 7 27: *rspec-metadata-mysql
+rspec-mysql 8 27: *rspec-metadata-mysql
+rspec-mysql 9 27: *rspec-metadata-mysql
+rspec-mysql 10 27: *rspec-metadata-mysql
+rspec-mysql 11 27: *rspec-metadata-mysql
+rspec-mysql 12 27: *rspec-metadata-mysql
+rspec-mysql 13 27: *rspec-metadata-mysql
+rspec-mysql 14 27: *rspec-metadata-mysql
+rspec-mysql 15 27: *rspec-metadata-mysql
+rspec-mysql 16 27: *rspec-metadata-mysql
+rspec-mysql 17 27: *rspec-metadata-mysql
+rspec-mysql 18 27: *rspec-metadata-mysql
+rspec-mysql 19 27: *rspec-metadata-mysql
+rspec-mysql 20 27: *rspec-metadata-mysql
+rspec-mysql 21 27: *rspec-metadata-mysql
+rspec-mysql 22 27: *rspec-metadata-mysql
+rspec-mysql 23 27: *rspec-metadata-mysql
+rspec-mysql 24 27: *rspec-metadata-mysql
+rspec-mysql 25 27: *rspec-metadata-mysql
+rspec-mysql 26 27: *rspec-metadata-mysql
-spinach-pg 0 4: *spinach-metadata-pg
-spinach-pg 1 4: *spinach-metadata-pg
-spinach-pg 2 4: *spinach-metadata-pg
-spinach-pg 3 4: *spinach-metadata-pg
+spinach-pg 0 3: *spinach-metadata-pg
+spinach-pg 1 3: *spinach-metadata-pg
+spinach-pg 2 3: *spinach-metadata-pg
-spinach-mysql 0 4: *spinach-metadata-mysql
-spinach-mysql 1 4: *spinach-metadata-mysql
-spinach-mysql 2 4: *spinach-metadata-mysql
-spinach-mysql 3 4: *spinach-metadata-mysql
+spinach-mysql 0 3: *spinach-metadata-mysql
+spinach-mysql 1 3: *spinach-metadata-mysql
+spinach-mysql 2 3: *spinach-metadata-mysql

 # Static analysis jobs
 .ruby-static-analysis: &ruby-static-analysis
......
@@ -2,6 +2,21 @@
 documentation](doc/development/changelog.md) for instructions on adding your own
 entry.

+## 10.4.1 (2018-01-24)
+
+### Fixed (4 changes)
+
+- Ensure that users can reclaim a namespace or project path that is blocked by an orphaned route. !16242
+- Correctly escape UTF-8 path elements for uploads. !16560
+- Fix issues when rendering groups and their children. !16584
+- Fix bug in which projects with forks could not change visibility settings from Private to Public. !16595
+
+### Performance (2 changes)
+
+- Rework indexes on redirect_routes.
+- Remove unnecessary query from labels filter.
+
 ## 10.4.0 (2018-01-22)

 ### Security (8 changes, 1 of them is from the community)
......
@@ -406,7 +406,7 @@ group :ed25519 do
 end

 # Gitaly GRPC client
-gem 'gitaly-proto', '~> 0.74.0', require: 'gitaly'
+gem 'gitaly-proto', '~> 0.76.0', require: 'gitaly'

 gem 'toml-rb', '~> 0.3.15', require: false
......
@@ -285,7 +285,7 @@ GEM
       po_to_json (>= 1.0.0)
       rails (>= 3.2.0)
     gherkin-ruby (0.3.2)
-    gitaly-proto (0.74.0)
+    gitaly-proto (0.76.0)
       google-protobuf (~> 3.1)
       grpc (~> 1.0)
     github-linguist (4.7.6)
@@ -1056,7 +1056,7 @@ DEPENDENCIES
   gettext (~> 3.2.2)
   gettext_i18n_rails (~> 1.8.0)
   gettext_i18n_rails_js (~> 1.2.0)
-  gitaly-proto (~> 0.74.0)
+  gitaly-proto (~> 0.76.0)
   github-linguist (~> 4.7.0)
   gitlab-flowdock-git-hook (~> 1.0.1)
   gitlab-markup (~> 1.6.2)
......
import statusIcon from '../mr_widget_status_icon.vue';
export default {
name: 'MRWidgetChecking',
components: {
statusIcon,
},
template: `
<div class="mr-widget-body media">
<status-icon status="loading" :show-disabled-button="true" />
<div class="media-body space-children">
<span class="bold">
Checking ability to merge automatically
</span>
</div>
</div>
`,
};
<script>
import statusIcon from '../mr_widget_status_icon';
export default {
name: 'MRWidgetChecking',
components: {
statusIcon,
},
};
</script>
<template>
<div class="mr-widget-body media">
<status-icon
status="loading"
:show-disabled-button="true"
/>
<div class="media-body space-children">
<span class="bold">
{{ s__("mrWidget|Checking ability to merge automatically") }}
</span>
</div>
</div>
</template>
import mrWidgetAuthorTime from '../../components/mr_widget_author_time';
import statusIcon from '../mr_widget_status_icon.vue';
export default {
name: 'MRWidgetClosed',
props: {
mr: { type: Object, required: true },
},
components: {
'mr-widget-author-and-time': mrWidgetAuthorTime,
statusIcon,
},
template: `
<div class="mr-widget-body media">
<status-icon status="warning" />
<div class="media-body">
<mr-widget-author-and-time
actionText="Closed by"
:author="mr.metrics.closedBy"
:dateTitle="mr.metrics.closedAt"
:dateReadable="mr.metrics.readableClosedAt"
/>
<section class="mr-info-list">
<p>
The changes were not merged into
<a
:href="mr.targetBranchPath"
class="label-branch">
{{mr.targetBranch}}</a>
</p>
</section>
</div>
</div>
`,
};
<script>
import mrWidgetAuthorTime from '../../components/mr_widget_author_time';
import statusIcon from '../mr_widget_status_icon';
export default {
name: 'MRWidgetClosed',
components: {
mrWidgetAuthorTime,
statusIcon,
},
props: {
/* TODO: This is providing all store and service down when it
only needs metrics and targetBranch */
mr: {
type: Object,
required: true,
default: () => ({}),
},
},
};
</script>
<template>
<div class="mr-widget-body media">
<status-icon
status="warning"
/>
<div class="media-body">
<mr-widget-author-time
:action-text="s__('mrWidget|Closed by')"
:author="mr.metrics.closedBy"
:date-title="mr.metrics.closedAt"
:date-readable="mr.metrics.readableClosedAt"
/>
<section class="mr-info-list">
<p>
{{ s__("mrWidget|The changes were not merged into") }}
<a
:href="mr.targetBranchPath"
class="label-branch"
>
{{ mr.targetBranch }}
</a>
</p>
</section>
</div>
</div>
</template>
import statusIcon from '../mr_widget_status_icon.vue';
export default {
name: 'MRWidgetConflicts',
props: {
mr: { type: Object, required: true },
},
components: {
statusIcon,
},
template: `
<div class="mr-widget-body media">
<status-icon
status="warning"
:show-disabled-button="true" />
<div class="media-body space-children">
<span
v-if="mr.shouldBeRebased"
class="bold">
Fast-forward merge is not possible.
To merge this request, first rebase locally.
</span>
<template v-else>
<span class="bold">
There are merge conflicts<span v-if="!mr.canMerge">.</span>
<span v-if="!mr.canMerge">
Resolve these conflicts or ask someone with write access to this repository to merge it locally
</span>
</span>
<a
v-if="mr.canMerge && mr.conflictResolutionPath"
:href="mr.conflictResolutionPath"
class="js-resolve-conflicts-button btn btn-default btn-xs">
Resolve conflicts
</a>
<a
v-if="mr.canMerge"
class="js-merge-locally-button btn btn-default btn-xs"
data-toggle="modal"
href="#modal_merge_info">
Merge locally
</a>
</template>
</div>
</div>
`,
};
<script>
import statusIcon from '../mr_widget_status_icon';
export default {
name: 'MRWidgetConflicts',
components: {
statusIcon,
},
props: {
/* TODO: This is providing all store and service down when it
only needs a few props */
mr: {
type: Object,
required: true,
default: () => ({}),
},
},
};
</script>
<template>
<div class="mr-widget-body media">
<status-icon
status="warning"
:show-disabled-button="true"
/>
<div class="media-body space-children">
<span
v-if="mr.shouldBeRebased"
class="bold"
>
{{ s__(`mrWidget|Fast-forward merge is not possible.
To merge this request, first rebase locally.`) }}
</span>
<template v-else>
<span class="bold">
{{ s__("mrWidget|There are merge conflicts") }}<span v-if="!mr.canMerge">.</span>
<span v-if="!mr.canMerge">
{{ s__(`mrWidget|Resolve these conflicts or ask someone
with write access to this repository to merge it locally`) }}
</span>
</span>
<a
v-if="mr.canMerge && mr.conflictResolutionPath"
:href="mr.conflictResolutionPath"
class="js-resolve-conflicts-button btn btn-default btn-xs"
>
{{ s__("mrWidget|Resolve conflicts") }}
</a>
<button
v-if="mr.canMerge"
class="js-merge-locally-button btn btn-default btn-xs"
data-toggle="modal"
data-target="#modal_merge_info"
>
{{ s__("mrWidget|Merge locally") }}
</button>
</template>
</div>
</div>
</template>
@@ -18,11 +18,11 @@ export { default as WidgetDeployment } from './components/mr_widget_deployment';
 export { default as WidgetRelatedLinks } from './components/mr_widget_related_links';
 export { default as MergedState } from './components/states/mr_widget_merged';
 export { default as FailedToMerge } from './components/states/mr_widget_failed_to_merge';
-export { default as ClosedState } from './components/states/mr_widget_closed';
+export { default as ClosedState } from './components/states/mr_widget_closed.vue';
 export { default as MergingState } from './components/states/mr_widget_merging.vue';
 export { default as WipState } from './components/states/mr_widget_wip';
 export { default as ArchivedState } from './components/states/mr_widget_archived.vue';
-export { default as ConflictsState } from './components/states/mr_widget_conflicts';
+export { default as ConflictsState } from './components/states/mr_widget_conflicts.vue';
 export { default as NothingToMergeState } from './components/states/mr_widget_nothing_to_merge';
 export { default as MissingBranchState } from './components/states/mr_widget_missing_branch';
 export { default as NotAllowedState } from './components/states/mr_widget_not_allowed';
@@ -34,7 +34,7 @@ export { default as PipelineFailedState } from './components/states/mr_widget_pi
 export { default as MergeWhenPipelineSucceedsState } from './components/states/mr_widget_merge_when_pipeline_succeeds';
 export { default as RebaseState } from './components/states/mr_widget_rebase.vue';
 export { default as AutoMergeFailed } from './components/states/mr_widget_auto_merge_failed.vue';
-export { default as CheckingState } from './components/states/mr_widget_checking';
+export { default as CheckingState } from './components/states/mr_widget_checking.vue';
 export { default as MRWidgetStore } from './stores/mr_widget_store';
 export { default as MRWidgetService } from './services/mr_widget_service';
 export { default as eventHub } from './event_hub';
......
@@ -8,7 +8,8 @@ class HealthController < ActionController::Base
     Gitlab::HealthChecks::Redis::CacheCheck,
     Gitlab::HealthChecks::Redis::QueuesCheck,
     Gitlab::HealthChecks::Redis::SharedStateCheck,
-    Gitlab::HealthChecks::FsShardsCheck
+    Gitlab::HealthChecks::FsShardsCheck,
+    Gitlab::HealthChecks::GitalyCheck
   ].freeze

   def readiness
......
@@ -15,6 +15,7 @@
 #   label_name: string
 #   sort: string
 #   my_reaction_emoji: string
+#   public_only: boolean
 #
 class IssuesFinder < IssuableFinder
   CONFIDENTIAL_ACCESS_LEVEL = Gitlab::Access::REPORTER
@@ -40,7 +41,15 @@ class IssuesFinder < IssuableFinder
   private

   def init_collection
-    with_confidentiality_access_check
+    if public_only?
+      Issue.public_only
+    else
+      with_confidentiality_access_check
+    end
+  end
+
+  def public_only?
+    params.fetch(:public_only, false)
   end

   def user_can_see_all_confidential_issues?
......
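For context, a hedged sketch of how a caller can opt into the new flag (the finder API follows the hunk above; the variable names are illustrative):

# Skips the confidentiality access checks entirely and scopes to public issues,
# which is what the limited search counters later in this diff use for a fast
# first pass.
public_issues = IssuesFinder.new(current_user, public_only: true).execute

# Without the flag the previous behaviour is unchanged.
visible_issues = IssuesFinder.new(current_user).execute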
@@ -14,13 +14,13 @@ module AutoDevopsHelper
     if missing_service
       params = {
-        kubernetes: link_to('Kubernetes service', edit_project_service_path(project, 'kubernetes'))
+        kubernetes: link_to('Kubernetes cluster', project_clusters_path(project))
       }

       if missing_domain
-        _('Auto Review Apps and Auto Deploy need a domain name and the %{kubernetes} to work correctly.') % params
+        _('Auto Review Apps and Auto Deploy need a domain name and a %{kubernetes} to work correctly.') % params
       else
-        _('Auto Review Apps and Auto Deploy need the %{kubernetes} to work correctly.') % params
+        _('Auto Review Apps and Auto Deploy need a %{kubernetes} to work correctly.') % params
       end
     elsif missing_domain
       _('Auto Review Apps and Auto Deploy need a domain name to work correctly.')
......
@@ -170,4 +170,8 @@ module SearchHelper
     # Truncato's filtered_tags and filtered_attributes are not quite the same
     sanitize(html, tags: %w(a p ol ul li pre code))
   end
+
+  def limited_count(count, limit = 1000)
+    count > limit ? "#{limit}+" : count
+  end
 end
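A quick illustration of what the new helper returns (the values are made up):

limited_count(42)        # => 42
limited_count(1500)      # => "1000+"
limited_count(70, 50)    # => "50+"  (custom limit)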
@@ -971,9 +971,9 @@ class Project < ActiveRecord::Base
       hooks.hooks_for(hooks_scope).each do |hook|
         hook.async_execute(data, hooks_scope.to_s)
       end
-    end

       SystemHooksService.new.execute_hooks(data, hooks_scope)
+    end
   end

   def execute_services(data, hooks_scope = :push_hooks)
......
@@ -20,6 +20,7 @@ class Repository
   attr_accessor :full_path, :disk_path, :project, :is_wiki

   delegate :ref_name_for_sha, to: :raw_repository
+  delegate :bundle_to_disk, to: :raw_repository

   CreateTreeError = Class.new(StandardError)
......
@@ -8,7 +8,7 @@
       = (_("(checkout the %{link} for information on how to install it).") % { link: link }).html_safe
     %li
       = _("Specify the following URL during the Runner setup:")
-      %code= root_url(only_path: false)
+      %code#coordinator_address= root_url(only_path: false)
     %li
       = _("Use the following registration token during setup:")
       %code#registration_token= registration_token
......
@@ -4,6 +4,7 @@
   - branch_label = s_('ChangeTypeActionLabel|Revert in branch')
   - revert_merge_request = _('Revert this merge request')
   - revert_commit = _('Revert this commit')
+  - description = s_('ChangeTypeAction|This will create a new commit in order to revert the existing changes.')
   - title = commit.merged_merge_request(current_user) ? revert_merge_request : revert_commit
 - when 'cherry-pick'
   - label = s_('ChangeTypeAction|Cherry-pick')
@@ -17,6 +18,8 @@
     %a.close{ href: "#", "data-dismiss" => "modal" } ×
     %h3.page-title= title
   .modal-body
+    - if description
+      %p.append-bottom-20= description
     = form_tag [type.underscore, @project.namespace.becomes(Namespace), @project, commit], method: :post, remote: false, class: "form-horizontal js-#{type}-form js-requires-input" do
       .form-group.branch
         = label_tag 'start_branch', branch_label, class: 'control-label'
......
@@ -57,25 +57,24 @@
           Titles and Filenames
           %span.badge
             = @search_results.snippet_titles_count
   - else
     %li{ class: active_when(@scope == 'projects') }
       = link_to search_filter_path(scope: 'projects') do
         Projects
         %span.badge
-          = @search_results.projects_count
+          = limited_count(@search_results.limited_projects_count)
     %li{ class: active_when(@scope == 'issues') }
       = link_to search_filter_path(scope: 'issues') do
         Issues
         %span.badge
-          = @search_results.issues_count
+          = limited_count(@search_results.limited_issues_count)
     %li{ class: active_when(@scope == 'merge_requests') }
       = link_to search_filter_path(scope: 'merge_requests') do
         Merge requests
         %span.badge
-          = @search_results.merge_requests_count
+          = limited_count(@search_results.limited_merge_requests_count)
     %li{ class: active_when(@scope == 'milestones') }
       = link_to search_filter_path(scope: 'milestones') do
         Milestones
         %span.badge
-          = @search_results.milestones_count
+          = limited_count(@search_results.limited_milestones_count)
@@ -2,7 +2,8 @@
   = render partial: "search/results/empty"
 - else
   .row-content-block
-    = search_entries_info(@search_objects, @scope, @search_term)
+    - unless @search_objects.is_a?(Kaminari::PaginatableWithoutCount)
+      = search_entries_info(@search_objects, @scope, @search_term)
     - unless @show_snippets
       - if @project
         in project #{link_to @project.name_with_namespace, [@project.namespace.becomes(Namespace), @project]}
@@ -22,4 +23,4 @@
   = render partial: "search/results/#{@scope.singularize}", collection: @search_objects
   - if @scope != 'projects'
-    = paginate(@search_objects, theme: 'gitlab')
+    = paginate_collection(@search_objects)
---
title: Changes Revert this merge request text
merge_request: 16611
author: Jacopo Beschi @jacopo-beschi
type: changed
---
-title: rework indexes on redirect_routes
+title: Optimize search queries on the search page by setting a limit for matching records.
merge_request:
author:
type: performance
---
title: Fix bug in which projects with forks could not change visibility settings from
Private to Public
merge_request: 16595
author:
type: fixed
---
title: Link Auto DevOps settings to Clusters page
merge_request: 16641
author:
type: changed
---
title: Correctly escape UTF-8 path elements for uploads
merge_request: 16560
author:
type: fixed
---
title: Fix encoding issue when counting commit count
merge_request: 16637
author:
type: fixed
---
title: Fix issues when rendering groups and their children
merge_request: 16584
author:
type: fixed
---
title: Execute system hooks after-commit when executing project hooks
merge_request:
author:
type: fixed
---
title: Refactors mr widget components into vue files and adds i18n
merge_request:
author:
type: other
---
title: Remove unnecessary query from labels filter
merge_request:
author:
type: performance
---
title: Ensure that users can reclaim a namespace or project path that is blocked by
an orphaned route
merge_request: 16242
author:
type: fixed
---
title: Add a gRPC health check to ensure Gitaly is up
merge_request:
author:
type: added
class AddIndexUpdatedAtToIssues < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_index :issues, :updated_at
end
def down
remove_concurrent_index :issues, :updated_at
end
end
@@ -11,7 +11,7 @@
 #
 # It's strongly recommended that you check this file into your version control system.

-ActiveRecord::Schema.define(version: 20180113220114) do
+ActiveRecord::Schema.define(version: 20180115201419) do

   # These are extensions that must be enabled in order to support this database
   enable_extension "plpgsql"
@@ -886,6 +886,7 @@ ActiveRecord::Schema.define(version: 20180113220114) do
   add_index "issues", ["relative_position"], name: "index_issues_on_relative_position", using: :btree
   add_index "issues", ["state"], name: "index_issues_on_state", using: :btree
   add_index "issues", ["title"], name: "index_issues_on_title_trigram", using: :gin, opclasses: {"title"=>"gin_trgm_ops"}
+  add_index "issues", ["updated_at"], name: "index_issues_on_updated_at", using: :btree
   add_index "issues", ["updated_by_id"], name: "index_issues_on_updated_by_id", where: "(updated_by_id IS NOT NULL)", using: :btree

   create_table "keys", force: :cascade do |t|
......
@@ -66,9 +66,8 @@ To make full use of Auto DevOps, you will need:
    a domain configured with wildcard DNS which is gonna be used by all of your
    Auto DevOps applications. [Read the specifics](#auto-devops-base-domain).
 1. **Kubernetes** (needed for Auto Review Apps, Auto Deploy, and Auto Monitoring) -
-   To enable deployments, you will need Kubernetes 1.5+. The [Kubernetes service][kubernetes-service]
-   integration will need to be enabled for the project, or enabled as a
-   [default service template](../../user/project/integrations/services_templates.md)
+   To enable deployments, you will need Kubernetes 1.5+. You need a [Kubernetes cluster][kubernetes-clusters]
+   for the project, or a Kubernetes [default service template](../../user/project/integrations/services_templates.md)
    for the entire GitLab installation.
 1. **A load balancer** - You can use NGINX ingress by deploying it to your
    Kubernetes cluster using the
@@ -587,7 +586,7 @@ curl --data "value=true" --header "PRIVATE-TOKEN: personal_access_token" https:/
 ```

 [ce-37115]: https://gitlab.com/gitlab-org/gitlab-ce/issues/37115
-[kubernetes-service]: ../../user/project/integrations/kubernetes.md
+[kubernetes-clusters]: ../../user/project/clusters/index.md
 [docker-in-docker]: ../../docker/using_docker_build.md#use-docker-in-docker-executor
 [review-app]: ../../ci/review_apps/index.md
 [container-registry]: ../../user/project/container_registry.md
......
 require 'database_cleaner'

-DatabaseCleaner[:active_record].strategy = :truncation
+DatabaseCleaner[:active_record].strategy = :deletion

 Spinach.hooks.before_scenario do
   DatabaseCleaner.start
......
@@ -175,7 +175,7 @@ module API
       end

       get "/search/:query", requirements: { query: /[^\/]+/ } do
         search_service = Search::GlobalService.new(current_user, search: params[:query]).execute
-        projects = search_service.objects('projects', params[:page])
+        projects = search_service.objects('projects', params[:page], false)
         projects = projects.reorder(params[:order_by] => params[:sort])

         present paginate(projects), with: ::API::V3::Entities::Project
......
@@ -34,7 +34,7 @@ module Gitlab
       def raw(repository, sha)
         Gitlab::GitalyClient.migrate(:git_blob_raw) do |is_enabled|
           if is_enabled
-            Gitlab::GitalyClient::BlobService.new(repository).get_blob(oid: sha, limit: MAX_DATA_DISPLAY_SIZE)
+            repository.gitaly_blob_client.get_blob(oid: sha, limit: MAX_DATA_DISPLAY_SIZE)
           else
             rugged_raw(repository, sha, limit: MAX_DATA_DISPLAY_SIZE)
           end
@@ -70,11 +70,19 @@ module Gitlab
       # Returns array of Gitlab::Git::Blob
       # Does not guarantee blob data will be set
       def batch_lfs_pointers(repository, blob_ids)
-        blob_ids.lazy
-          .select { |sha| possible_lfs_blob?(repository, sha) }
-          .map { |sha| rugged_raw(repository, sha, limit: LFS_POINTER_MAX_SIZE) }
-          .select(&:lfs_pointer?)
-          .force
+        return [] if blob_ids.empty?
+
+        repository.gitaly_migrate(:batch_lfs_pointers) do |is_enabled|
+          if is_enabled
+            repository.gitaly_blob_client.batch_lfs_pointers(blob_ids)
+          else
+            blob_ids.lazy
+              .select { |sha| possible_lfs_blob?(repository, sha) }
+              .map { |sha| rugged_raw(repository, sha, limit: LFS_POINTER_MAX_SIZE) }
+              .select(&:lfs_pointer?)
+              .force
+          end
+        end
       end

       def binary?(data)
@@ -258,7 +266,7 @@ module Gitlab
         Gitlab::GitalyClient.migrate(:git_blob_load_all_data) do |is_enabled|
           @data = begin
             if is_enabled
-              Gitlab::GitalyClient::BlobService.new(repository).get_blob(oid: id, limit: -1).data
+              repository.gitaly_blob_client.get_blob(oid: id, limit: -1).data
             else
               repository.lookup(id).content
             end
......
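A hedged usage sketch for the migrated batch method; `repository` stands for any Gitlab::Git::Repository and the blob IDs are placeholders:

# Returns Gitlab::Git::Blob objects for the IDs that turn out to be LFS pointers.
# With the :batch_lfs_pointers migration enabled this is a single Gitaly
# GetLFSPointers RPC instead of one Rugged read per candidate blob.
pointers = Gitlab::Git::Blob.batch_lfs_pointers(repository, %w[1a2b3c4d 5e6f7a8b])
pointers.map(&:id)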
@@ -1268,6 +1268,18 @@ module Gitlab
       success || gitlab_projects_error
     end

+    def bundle_to_disk(save_path)
+      gitaly_migrate(:bundle_to_disk) do |is_enabled|
+        if is_enabled
+          gitaly_repository_client.create_bundle(save_path)
+        else
+          run_git!(%W(bundle create #{save_path} --all))
+        end
+      end
+
+      true
+    end
+
     # rubocop:disable Metrics/ParameterLists
     def multi_action(
       user, branch_name:, message:, actions:,
@@ -1319,6 +1331,10 @@ module Gitlab
       @gitaly_remote_client ||= Gitlab::GitalyClient::RemoteService.new(self)
     end

+    def gitaly_blob_client
+      @gitaly_blob_client ||= Gitlab::GitalyClient::BlobService.new(self)
+    end
+
     def gitaly_conflicts_client(our_commit_oid, their_commit_oid)
       Gitlab::GitalyClient::ConflictsService.new(self, our_commit_oid, their_commit_oid)
     end
......
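A small, hedged sketch of the new bundle API (the path is illustrative; Repository delegates bundle_to_disk to the raw repository, as shown earlier in this diff):

# Writes a `git bundle` of all refs to the given path, via Gitaly's CreateBundle
# RPC when the :bundle_to_disk migration is enabled, otherwise by running
# `git bundle create <path> --all`. Returns true on success.
project.repository.bundle_to_disk('/tmp/export/project.bundle')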
 module Gitlab
   module Git
     class WikiPage
-      attr_reader :url_path, :title, :format, :path, :version, :raw_data, :name, :text_data, :historical
+      attr_reader :url_path, :title, :format, :path, :version, :raw_data, :name, :text_data, :historical, :formatted_data

       # This class is meant to be serializable so that it can be constructed
       # by Gitaly and sent over the network to GitLab.
@@ -21,6 +21,7 @@ module Gitlab
         @raw_data = gollum_page.raw_data
         @name = gollum_page.name
         @historical = gollum_page.historical?
+        @formatted_data = gollum_page.formatted_data if gollum_page.is_a?(Gollum::Page)

         @version = version
       end
......
 require 'base64'

 require 'gitaly'
+require 'grpc/health/v1/health_pb'
+require 'grpc/health/v1/health_services_pb'

 module Gitlab
   module GitalyClient
@@ -69,14 +71,27 @@ module Gitlab
         @stubs ||= {}
         @stubs[storage] ||= {}
         @stubs[storage][name] ||= begin
-          klass = Gitaly.const_get(name.to_s.camelcase.to_sym).const_get(:Stub)
-          addr = address(storage)
-          addr = addr.sub(%r{^tcp://}, '') if URI(addr).scheme == 'tcp'
+          klass = stub_class(name)
+          addr = stub_address(storage)
           klass.new(addr, :this_channel_is_insecure)
         end
       end
     end

+    def self.stub_class(name)
+      if name == :health_check
+        Grpc::Health::V1::Health::Stub
+      else
+        Gitaly.const_get(name.to_s.camelcase.to_sym).const_get(:Stub)
+      end
+    end
+
+    def self.stub_address(storage)
+      addr = address(storage)
+      addr = addr.sub(%r{^tcp://}, '') if URI(addr).scheme == 'tcp'
+      addr
+    end
+
     def self.clear_stubs!
       MUTEX.synchronize do
         @stubs = nil
......
@@ -32,6 +32,26 @@ module Gitlab
          binary: Gitlab::Git::Blob.binary?(data)
        )
      end
def batch_lfs_pointers(blob_ids)
request = Gitaly::GetLFSPointersRequest.new(
repository: @gitaly_repo,
blob_ids: blob_ids
)
response = GitalyClient.call(@gitaly_repo.storage_name, :blob_service, :get_lfs_pointers, request)
response.flat_map do |message|
message.lfs_pointers.map do |lfs_pointer|
Gitlab::Git::Blob.new(
id: lfs_pointer.oid,
size: lfs_pointer.size,
data: lfs_pointer.data,
binary: Gitlab::Git::Blob.binary?(lfs_pointer.data)
)
end
end
end
    end
  end
end
@@ -125,11 +125,11 @@ module Gitlab
       def commit_count(ref, options = {})
         request = Gitaly::CountCommitsRequest.new(
           repository: @gitaly_repo,
-          revision: ref
+          revision: encode_binary(ref)
         )

         request.after = Google::Protobuf::Timestamp.new(seconds: options[:after].to_i) if options[:after].present?
         request.before = Google::Protobuf::Timestamp.new(seconds: options[:before].to_i) if options[:before].present?
-        request.path = options[:path] if options[:path].present?
+        request.path = encode_binary(options[:path]) if options[:path].present?
         request.max_count = options[:max_count] if options[:max_count].present?

         GitalyClient.call(@repository.storage, :commit_service, :count_commits, request, timeout: GitalyClient.medium_timeout).count
......
module Gitlab
module GitalyClient
class HealthCheckService
def initialize(storage)
@storage = storage
end
# Sends a gRPC health ping to the Gitaly server for the storage shard.
def check
request = Grpc::Health::V1::HealthCheckRequest.new
response = GitalyClient.call(@storage, :health_check, :check, request, timeout: GitalyClient.fast_timeout)
{ success: response&.status == :SERVING }
rescue GRPC::BadStatus => e
{ success: false, message: e.to_s }
end
end
end
end
@@ -161,6 +161,23 @@ module Gitlab
          return response.error.b, 1
        end
      end
def create_bundle(save_path)
request = Gitaly::CreateBundleRequest.new(repository: @gitaly_repo)
response = GitalyClient.call(
@storage,
:repository_service,
:create_bundle,
request,
timeout: GitalyClient.default_timeout
)
File.open(save_path, 'wb') do |f|
response.each do |message|
f.write(message.data)
end
end
end
    end
  end
end
module Gitlab
module HealthChecks
class GitalyCheck
extend BaseAbstractCheck
METRIC_PREFIX = 'gitaly_health_check'.freeze
class << self
def readiness
repository_storages.map do |storage_name|
check(storage_name)
end
end
def metrics
repository_storages.flat_map do |storage_name|
result, elapsed = with_timing { check(storage_name) }
labels = { shard: storage_name }
[
metric("#{metric_prefix}_success", successful?(result) ? 1 : 0, **labels),
metric("#{metric_prefix}_latency_seconds", elapsed, **labels)
].flatten
end
end
def check(storage_name)
serv = Gitlab::GitalyClient::HealthCheckService.new(storage_name)
result = serv.check
HealthChecks::Result.new(result[:success], result[:message], shard: storage_name)
end
private
def metric_prefix
METRIC_PREFIX
end
def successful?(result)
result[:success]
end
def repository_storages
storages.keys
end
def storages
Gitlab.config.repositories.storages
end
end
end
end
end
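A hedged sketch of how the new check reports per-shard results (the shard name and return shapes here are illustrative):

# One HealthChecks::Result per configured repository storage (e.g. "default"),
# each carrying success, an optional message and the shard label.
Gitlab::HealthChecks::GitalyCheck.readiness

# Prometheus-style metrics per shard: gitaly_health_check_success and
# gitaly_health_check_latency_seconds, labelled with the shard name.
Gitlab::HealthChecks::GitalyCheck.metrics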
@@ -11,10 +11,6 @@ module Gitlab
       untar_with_options(archive: archive, dir: dir, options: 'zxf')
     end

-    def git_bundle(repo_path:, bundle_path:)
-      execute(%W(#{git_bin_path} --git-dir=#{repo_path} bundle create #{bundle_path} --all))
-    end
-
     def git_clone_bundle(repo_path:, bundle_path:)
       execute(%W(#{git_bin_path} clone --bare -- #{bundle_path} #{repo_path}))
       Gitlab::Git::Repository.create_hooks(repo_path, File.expand_path(Gitlab.config.gitlab_shell.hooks_path))
......
@@ -21,7 +21,7 @@ module Gitlab
       def bundle_to_disk
         mkdir_p(@shared.export_path)

-        git_bundle(repo_path: path_to_repo, bundle_path: @full_path)
+        @project.repository.bundle_to_disk(@full_path)
       rescue => e
         @shared.error(e)
         false
......
@@ -10,7 +10,7 @@ module Gitlab
       def bundle_to_disk(full_path)
         mkdir_p(@shared.export_path)

-        git_bundle(repo_path: path_to_repo, bundle_path: full_path)
+        @wiki.repository.bundle_to_disk(full_path)
       rescue => e
         @shared.error(e)
         false
......
@@ -20,7 +20,7 @@ module Gitlab
         when 'commits'
           Kaminari.paginate_array(commits).page(page).per(per_page)
         else
-          super
+          super(scope, page, false)
         end
       end
......
@@ -40,19 +40,21 @@ module Gitlab
       @default_project_filter = default_project_filter
     end

-    def objects(scope, page = nil)
-      case scope
+    def objects(scope, page = nil, without_count = true)
+      collection = case scope
       when 'projects'
         projects.page(page).per(per_page)
       when 'issues'
         issues.page(page).per(per_page)
       when 'merge_requests'
         merge_requests.page(page).per(per_page)
       when 'milestones'
         milestones.page(page).per(per_page)
       else
         Kaminari.paginate_array([]).page(page).per(per_page)
       end
+
+      without_count ? collection.without_count : collection
     end

     def projects_count
@@ -71,18 +73,46 @@ module Gitlab
       @milestones_count ||= milestones.count
     end

+    def limited_projects_count
+      @limited_projects_count ||= projects.limit(count_limit).count
+    end
+
+    def limited_issues_count
+      return @limited_issues_count if @limited_issues_count
+
+      # By default getting limited count (e.g. 1000+) is fast on issuable
+      # collections except for issues, where filtering both not confidential
+      # and confidential issues user has access to, is too complex.
+      # It's faster to try to fetch all public issues first, then only
+      # if necessary try to fetch all issues.
+      sum = issues(public_only: true).limit(count_limit).count
+      @limited_issues_count = sum < count_limit ? issues.limit(count_limit).count : sum
+    end
+
+    def limited_merge_requests_count
+      @limited_merge_requests_count ||= merge_requests.limit(count_limit).count
+    end
+
+    def limited_milestones_count
+      @limited_milestones_count ||= milestones.limit(count_limit).count
+    end
+
     def single_commit_result?
       false
     end

+    def count_limit
+      1001
+    end
+
     private

     def projects
       limit_projects.search(query)
     end

-    def issues
-      issues = IssuesFinder.new(current_user).execute
+    def issues(finder_params = {})
+      issues = IssuesFinder.new(current_user, finder_params).execute

       unless default_project_filter
         issues = issues.where(project_id: project_ids_relation)
       end
@@ -94,13 +124,13 @@ module Gitlab
         issues.full_search(query)
       end

-      issues.order('updated_at DESC')
+      issues.reorder('updated_at DESC')
     end

     def milestones
       milestones = Milestone.where(project_id: project_ids_relation)
       milestones = milestones.search(query)
-      milestones.order('updated_at DESC')
+      milestones.reorder('updated_at DESC')
     end

     def merge_requests
@@ -116,7 +146,7 @@ module Gitlab
         merge_requests.full_search(query)
       end

-      merge_requests.order('updated_at DESC')
+      merge_requests.reorder('updated_at DESC')
     end

     def default_scope
......
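To make the new flow concrete, a hedged sketch (assuming the usual Gitlab::SearchResults constructor of current_user, limit_projects and query; the query string is made up):

results = Gitlab::SearchResults.new(current_user, projects, 'wiki')

# Badge counts are now capped at count_limit (1001), so the sidebar can render
# "1000+" via the limited_count helper instead of a full COUNT(*).
results.limited_issues_count    # public issues first, all visible issues only if needed
results.limited_projects_count

# Pagination skips the total count by default; pass false to keep the counted
# behaviour, as the API v3 search endpoint above does.
results.objects('issues', 1)           # Kaminari collection without a count
results.objects('projects', 1, false)  # counted collection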
@@ -16,7 +16,7 @@ module Gitlab
         when 'snippet_blobs'
           snippet_blobs.page(page).per(per_page)
         else
-          super
+          super(scope, nil, false)
         end
       end
......
@@ -186,13 +186,13 @@ msgstr ""
 msgid "Author"
 msgstr ""

-msgid "Auto Review Apps and Auto Deploy need a domain name and the %{kubernetes} to work correctly."
+msgid "Auto Review Apps and Auto Deploy need a domain name and a %{kubernetes} to work correctly."
 msgstr ""

 msgid "Auto Review Apps and Auto Deploy need a domain name to work correctly."
 msgstr ""

-msgid "Auto Review Apps and Auto Deploy need the %{kubernetes} to work correctly."
+msgid "Auto Review Apps and Auto Deploy need a %{kubernetes} to work correctly."
 msgstr ""

 msgid "AutoDevOps|Auto DevOps (Beta)"
......
@@ -28,6 +28,7 @@ module QA
       autoload :Group, 'qa/factory/resource/group'
       autoload :Project, 'qa/factory/resource/project'
       autoload :DeployKey, 'qa/factory/resource/deploy_key'
+      autoload :Runner, 'qa/factory/resource/runner'
       autoload :PersonalAccessToken, 'qa/factory/resource/personal_access_token'
     end
@@ -49,7 +50,7 @@ module QA
     #
     autoload :Bootable, 'qa/scenario/bootable'
     autoload :Actable, 'qa/scenario/actable'
-    autoload :Entrypoint, 'qa/scenario/entrypoint'
+    autoload :Taggable, 'qa/scenario/taggable'
     autoload :Template, 'qa/scenario/template'

     ##
@@ -108,7 +109,14 @@ module QA
       module Settings
         autoload :Common, 'qa/page/project/settings/common'
         autoload :Repository, 'qa/page/project/settings/repository'
+        autoload :CICD, 'qa/page/project/settings/ci_cd'
         autoload :DeployKeys, 'qa/page/project/settings/deploy_keys'
+        autoload :Runners, 'qa/page/project/settings/runners'
+      end
+
+      module Pipeline
+        autoload :Index, 'qa/page/project/pipeline/index'
+        autoload :Show, 'qa/page/project/pipeline/show'
       end
     end
@@ -134,10 +142,13 @@ module QA
     end

     ##
-    # Classes describing shell interaction with GitLab
+    # Classes describing services being part of GitLab and how we can interact
+    # with these services, like through the shell.
     #
-    module Shell
-      autoload :Omnibus, 'qa/shell/omnibus'
+    module Service
+      autoload :Shellout, 'qa/service/shellout'
+      autoload :Omnibus, 'qa/service/omnibus'
+      autoload :Runner, 'qa/service/runner'
     end

     ##
......
@@ -19,7 +19,7 @@ module QA
       project.visit!

       Page::Menu::Side.act do
-        click_repository_setting
+        click_repository_settings
       end

       Page::Project::Settings::Repository.perform do |setting|
......
require 'securerandom'
module QA
module Factory
module Resource
class Runner < Factory::Base
attr_writer :name, :tags
dependency Factory::Resource::Project, as: :project do |project|
project.name = 'project-with-ci-cd'
project.description = 'Project with CI/CD Pipelines'
end
def name
@name || "qa-runner-#{SecureRandom.hex(4)}"
end
def tags
@tags || %w[qa e2e]
end
def fabricate!
project.visit!
Page::Menu::Side.act { click_ci_cd_settings }
Service::Runner.new(name).tap do |runner|
Page::Project::Settings::CICD.perform do |settings|
settings.expand_runners_settings do |runners|
runner.pull
runner.token = runners.registration_token
runner.address = runners.coordinator_address
runner.tags = tags
runner.register!
end
end
end
end
end
end
end
end
@@ -5,18 +5,35 @@ module QA
       view 'app/views/layouts/nav/sidebar/_project.html.haml' do
         element :settings_item
         element :repository_link, "title: 'Repository'"
+        element :pipelines_settings_link, "title: 'CI / CD'"
         element :top_level_items, '.sidebar-top-level-items'
       end

-      def click_repository_setting
-        hover_setting do
-          click_link('Repository')
+      def click_repository_settings
+        hover_settings do
+          within_submenu do
+            click_link('Repository')
+          end
+        end
+      end
+
+      def click_ci_cd_settings
+        hover_settings do
+          within_submenu do
+            click_link('CI / CD')
+          end
+        end
+      end
+
+      def click_ci_cd_pipelines
+        within_sidebar do
+          click_link('CI / CD')
         end
       end

       private

-      def hover_setting
+      def hover_settings
         within_sidebar do
           find('.qa-settings-item').hover
@@ -29,6 +46,12 @@ module QA
           yield
         end
       end
+
+      def within_submenu
+        page.within('.fly-out-list') do
+          yield
+        end
+      end
     end
   end
 end
......
module QA::Page
module Project::Pipeline
class Index < QA::Page::Base
view 'app/assets/javascripts/pipelines/components/pipeline_url.vue' do
element :pipeline_link, 'class="js-pipeline-url-link"'
end
def go_to_latest_pipeline
first('.js-pipeline-url-link').click
end
end
end
end
module QA::Page
module Project::Pipeline
class Show < QA::Page::Base
view 'app/assets/javascripts/vue_shared/components/header_ci_component.vue' do
element :pipeline_header, /header class.*ci-header-container.*/
end
view 'app/assets/javascripts/pipelines/components/graph/graph_component.vue' do
element :pipeline_graph, /class.*pipeline-graph.*/
end
view 'app/assets/javascripts/pipelines/components/graph/job_component.vue' do
element :job_component, /class.*ci-job-component.*/
end
view 'app/assets/javascripts/vue_shared/components/ci_icon.vue' do
element :status_icon, 'ci-status-icon-${status}'
end
def running?
within('.ci-header-container') do
return page.has_content?('running')
end
end
def has_build?(name, status: :success)
within('.pipeline-graph') do
within('.ci-job-component', text: name) do
return has_selector?(".ci-status-icon-#{status}")
end
end
end
end
end
end
module QA
module Page
module Project
module Settings
class CICD < Page::Base
include Common
view 'app/views/projects/settings/ci_cd/show.html.haml' do
element :runners_settings, 'Runners settings'
end
def expand_runners_settings(&block)
expand_section('Runners settings') do
Settings::Runners.perform(&block)
end
end
end
end
end
end
end
@@ -10,6 +10,16 @@ module QA
           yield
         end
       end
+
+      def expand_section(name)
+        page.within('#content-body') do
+          page.within('section', text: name) do
+            click_button 'Expand'
+
+            yield
+          end
+        end
+      end
     end
   end
 end
......
module QA
module Page
module Project
module Settings
class Runners < Page::Base
view 'app/views/ci/runner/_how_to_setup_runner.html.haml' do
element :registration_token, '%code#registration_token'
element :coordinator_address, '%code#coordinator_address'
end
##
# TODO, phase-out CSS classes added in Ruby helpers.
#
view 'app/helpers/runners_helper.rb' do
# rubocop:disable Lint/InterpolationCheck
element :runner_status, 'runner-status-#{status}'
# rubocop:enable Lint/InterpolationCheck
end
def registration_token
find('code#registration_token').text
end
def coordinator_address
find('code#coordinator_address').text
end
def has_online_runner?
page.has_css?('.runner-status-online')
end
end
end
end
end
end
@@ -33,6 +33,7 @@ module QA
       def wait_for_push
         sleep 5
+        refresh
       end
     end
   end
......
module QA
module Scenario
##
# Base class for running the suite against any GitLab instance,
# including staging and on-premises installation.
#
class Entrypoint < Template
include Bootable
def perform(address, *files)
Runtime::Scenario.define(:gitlab_address, address)
##
# Perform before hooks, which are different for CE and EE
#
Runtime::Release.perform_before_hooks
Specs::Runner.perform do |specs|
specs.tty = true
specs.tags = self.class.get_tags
specs.files = files.any? ? files : 'qa/specs/features'
end
end
def self.tags(*tags)
@tags = tags
end
def self.get_tags
@tags
end
end
end
end
module QA
module Scenario
module Taggable
# rubocop:disable Gitlab/ModuleWithInstanceVariables
def tags(*tags)
@tags = tags
end
def focus
@tags.to_a
end
# rubocop:enable Gitlab/ModuleWithInstanceVariables
end
end
end
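A hedged sketch of how Taggable is meant to be used by a scenario class (the class below is hypothetical; Test::Instance further down is the real adopter):

module QA
  module Scenario
    module Test
      # Hypothetical example scenario tagged for a subset of specs.
      class Example < Template
        extend Taggable

        tags :core, :smoke
      end
    end
  end
end

QA::Scenario::Test::Example.focus # => [:core, :smoke], passed to Specs::Runner as RSpec tags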
@@ -2,11 +2,29 @@ module QA
   module Scenario
     module Test
       ##
-      # Run test suite against any GitLab instance,
+      # Base class for running the suite against any GitLab instance,
       # including staging and on-premises installation.
       #
-      class Instance < Entrypoint
+      class Instance < Template
+        include Bootable
+        extend Taggable
+
         tags :core
+
+        def perform(address, *files)
+          Runtime::Scenario.define(:gitlab_address, address)
+
+          ##
+          # Perform before hooks, which are different for CE and EE
+          #
+          Runtime::Release.perform_before_hooks
+
+          Specs::Runner.perform do |specs|
+            specs.tty = true
+            specs.tags = self.class.focus
+            specs.files = files.any? ? files : 'qa/specs/features'
+          end
+        end
       end
     end
   end
......
@@ -6,7 +6,7 @@ module QA
       # Run test suite against any GitLab instance where mattermost is enabled,
       # including staging and on-premises installation.
       #
-      class Mattermost < Scenario::Entrypoint
+      class Mattermost < Test::Instance
         tags :core, :mattermost

         def perform(address, mattermost, *files)
......
module QA
module Service
class Omnibus
include Scenario::Actable
include Service::Shellout
def initialize(container)
@name = container
end
def gitlab_ctl(command, input: nil)
if input.nil?
shell "docker exec #{@name} gitlab-ctl #{command}"
else
shell "docker exec #{@name} bash -c '#{input} | gitlab-ctl #{command}'"
end
end
end
end
end
require 'securerandom'
module QA
module Service
class Runner
include Scenario::Actable
include Service::Shellout
attr_accessor :token, :address, :tags, :image
def initialize(name)
@image = 'gitlab/gitlab-runner:alpine'
@name = name || "qa-runner-#{SecureRandom.hex(4)}"
@network = Runtime::Scenario.attributes[:network] || 'test'
@tags = %w[qa test]
end
def pull
shell "docker pull #{@image}"
end
def register!
shell <<~CMD.tr("\n", ' ')
docker run -d --rm --entrypoint=/bin/sh
--network #{@network} --name #{@name}
-e CI_SERVER_URL=#{@address}
-e REGISTER_NON_INTERACTIVE=true
-e REGISTRATION_TOKEN=#{@token}
-e RUNNER_EXECUTOR=shell
-e RUNNER_TAG_LIST=#{@tags.join(',')}
-e RUNNER_NAME=#{@name}
#{@image} -c 'gitlab-runner register && gitlab-runner run'
CMD
end
def remove!
shell "docker rm -f #{@name}"
end
end
end
end
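A hedged sketch of driving Service::Runner directly; the token and address would normally come from the runners settings page object above, and the values here are placeholders:
runner = QA::Service::Runner.new(nil)        # nil name => autogenerated qa-runner-xxxxxxxx
runner.pull                                  # docker pull gitlab/gitlab-runner:alpine
runner.token = 'REGISTRATION_TOKEN'          # placeholder
runner.address = 'http://gitlab.example.com' # placeholder
runner.tags = %w[qa test]
runner.register!                             # registers and starts the runner container
runner.remove!                               # docker rm -f, as the pipelines spec does in its `after` block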
require 'open3'
module QA
-  module Shell
-    class Omnibus
-      include Scenario::Actable
-      def initialize(container)
-        @name = container
-      end
-      def gitlab_ctl(command, input: nil)
-        if input.nil?
-          shell "docker exec #{@name} gitlab-ctl #{command}"
-        else
-          shell "docker exec #{@name} bash -c '#{input} | gitlab-ctl #{command}'"
-        end
-      end
-      private
+  module Service
+    module Shellout
      ##
      # TODO, make it possible to use generic QA framework classes
      # as a library - gitlab-org/gitlab-qa#94
@@ -30,7 +14,7 @@ module QA
        out.each { |line| puts line }
        if wait.value.exited? && wait.value.exitstatus.nonzero?
-          raise "Docker command `#{command}` failed!"
+          raise "Command `#{command}` failed!"
        end
      end
    end
  end
......
module QA
feature 'CI/CD Pipelines', :core, :docker do
let(:executor) { "qa-runner-#{Time.now.to_i}" }
after do
Service::Runner.new(executor).remove!
end
scenario 'user registers a new specific runner' do
Runtime::Browser.visit(:gitlab, Page::Main::Login)
Page::Main::Login.act { sign_in_using_credentials }
Factory::Resource::Runner.fabricate! do |runner|
runner.name = executor
end
Page::Project::Settings::CICD.perform do |settings|
sleep 5 # Runner should register within 5 seconds
settings.refresh
settings.expand_runners_settings do |page|
expect(page).to have_content(executor)
expect(page).to have_online_runner
end
end
end
scenario 'user creates a new pipeline' do
Runtime::Browser.visit(:gitlab, Page::Main::Login)
Page::Main::Login.act { sign_in_using_credentials }
project = Factory::Resource::Project.fabricate! do |project|
project.name = 'project-with-pipelines'
project.description = 'Project with CI/CD Pipelines.'
end
Factory::Resource::Runner.fabricate! do |runner|
runner.project = project
runner.name = executor
runner.tags = %w[qa test]
end
Factory::Repository::Push.fabricate! do |push|
push.project = project
push.file_name = '.gitlab-ci.yml'
push.commit_message = 'Add .gitlab-ci.yml'
push.file_content = <<~EOF
test-success:
tags:
- qa
- test
script: echo 'OK'
test-failure:
tags:
- qa
- test
script:
- echo 'FAILURE'
- exit 1
test-tags:
tags:
- qa
- docker
script: echo 'NOOP'
test-artifacts:
tags:
- qa
- test
script: echo "CONTENTS" > my-artifacts/artifact.txt
artifacts:
paths:
- my-artifacts/
EOF
end
Page::Project::Show.act { wait_for_push }
expect(page).to have_content('Add .gitlab-ci.yml')
Page::Menu::Side.act { click_ci_cd_pipelines }
expect(page).to have_content('All 1')
expect(page).to have_content('Add .gitlab-ci.yml')
puts 'Waiting for the runner to process the pipeline'
sleep 15 # Runner should process all jobs within 15 seconds.
Page::Project::Pipeline::Index.act { go_to_latest_pipeline }
Page::Project::Pipeline::Show.perform do |pipeline|
expect(pipeline).to be_running
expect(pipeline).to have_build('test-success', status: :success)
expect(pipeline).to have_build('test-failure', status: :failed)
expect(pipeline).to have_build('test-tags', status: :pending)
expect(pipeline).to have_build('test-artifacts', status: :failed)
end
end
end
end
@@ -11,10 +11,7 @@ module QA
          push.commit_message = 'Add README.md'
        end
-       Page::Project::Show.act do
-         wait_for_push
-         refresh
-       end
+       Page::Project::Show.act { wait_for_push }
        expect(page).to have_content('README.md')
        expect(page).to have_content('This is a test project')
......
@@ -19,7 +19,6 @@ describe QA::Factory::Base do
    it 'returns fabrication product' do
      allow(subject).to receive(:new).and_return(factory)
-     allow(factory).to receive(:fabricate!).and_return('something')
      result = subject.fabricate!('something')
......
-describe QA::Scenario::Entrypoint do
+describe QA::Scenario::Test::Instance do
  subject do
-   Class.new(QA::Scenario::Entrypoint) do
+   Class.new(described_class) do
      tags :rspec
    end
  end
......
@@ -22,7 +22,7 @@ feature 'Global search' do
      click_button "Go"
      select_filter("Issues")
-     expect(page).to have_selector('.gl-pagination .page', count: 2)
+     expect(page).to have_selector('.gl-pagination .next')
    end
  end
end end
@@ -34,6 +34,9 @@ describe 'New issue', :js do
      click_button 'Submit issue'
+     # reCAPTCHA alerts when it can't contact the server, so just accept it and move on
+     page.driver.browser.switch_to.alert.accept
      # it is impossible to test recaptcha automatically and there is no possibility to fill in recaptcha
      # recaptcha verification is skipped in test environment and it always returns true
      expect(page).not_to have_content('issue title')
......
@@ -108,7 +108,7 @@ describe 'Merge request > User resolves diff notes and discussions', :js do
    it 'shows resolved discussion when toggled' do
      find(".timeline-content .discussion[data-discussion-id='#{note.discussion_id}'] .discussion-toggle-button").click
-     expect(page.find(".timeline-content #note_#{note.noteable_id}")).to be_visible
+     expect(page.find(".timeline-content #note_#{note.id}")).to be_visible
    end
  end
......
@@ -10,9 +10,10 @@ describe('Pipelines table in Commits and Merge requests', () => {
  preloadFixtures(jsonFixtureName);
  beforeEach(() => {
-   PipelinesTable = Vue.extend(pipelinesTable);
    const pipelines = getJSONFixture(jsonFixtureName).pipelines;
-   pipeline = pipelines.find(p => p.id === 1);
+   PipelinesTable = Vue.extend(pipelinesTable);
+   pipeline = pipelines.find(p => p.user !== null && p.commit !== null);
  });
  describe('successful request', () => {
......
@@ -24,9 +24,10 @@ describe('Pipelines Table Row', () => {
  beforeEach(() => {
    const pipelines = getJSONFixture(jsonFixtureName).pipelines;
-   pipeline = pipelines.find(p => p.id === 1);
-   pipelineWithoutAuthor = pipelines.find(p => p.id === 2);
-   pipelineWithoutCommit = pipelines.find(p => p.id === 3);
+   pipeline = pipelines.find(p => p.user !== null && p.commit !== null);
+   pipelineWithoutAuthor = pipelines.find(p => p.user == null && p.commit !== null);
+   pipelineWithoutCommit = pipelines.find(p => p.user == null && p.commit == null);
  });
  afterEach(() => {
......
@@ -11,9 +11,10 @@ describe('Pipelines Table', () => {
  preloadFixtures(jsonFixtureName);
  beforeEach(() => {
-   PipelinesTableComponent = Vue.extend(pipelinesTableComp);
    const pipelines = getJSONFixture(jsonFixtureName).pipelines;
-   pipeline = pipelines.find(p => p.id === 1);
+   PipelinesTableComponent = Vue.extend(pipelinesTableComp);
+   pipeline = pipelines.find(p => p.user !== null && p.commit !== null);
  });
  describe('table', () => {
......
import Vue from 'vue';
-import checkingComponent from '~/vue_merge_request_widget/components/states/mr_widget_checking';
+import checkingComponent from '~/vue_merge_request_widget/components/states/mr_widget_checking.vue';
+import mountComponent from '../../../helpers/vue_mount_component_helper';
describe('MRWidgetChecking', () => {
-  describe('template', () => {
-    it('should have correct elements', () => {
-      const Component = Vue.extend(checkingComponent);
-      const el = new Component({
-        el: document.createElement('div'),
-      }).$el;
-      expect(el.classList.contains('mr-widget-body')).toBeTruthy();
-      expect(el.querySelector('button').classList.contains('btn-success')).toBeTruthy();
-      expect(el.querySelector('button').disabled).toBeTruthy();
-      expect(el.innerText).toContain('Checking ability to merge automatically');
-      expect(el.querySelector('i')).toBeDefined();
-    });
+  let Component;
+  let vm;
+  beforeEach(() => {
+    Component = Vue.extend(checkingComponent);
+    vm = mountComponent(Component);
+  });
+  afterEach(() => {
+    vm.$destroy();
+  });
+  it('renders disabled button', () => {
+    expect(vm.$el.querySelector('button').getAttribute('disabled')).toEqual('disabled');
+  });
+  it('renders loading icon', () => {
+    expect(vm.$el.querySelector('.mr-widget-icon i').classList).toContain('fa-spinner');
+  });
+  it('renders information about merging', () => {
+    expect(vm.$el.querySelector('.media-body').textContent.trim()).toEqual('Checking ability to merge automatically');
  });
});
import Vue from 'vue';
-import closedComponent from '~/vue_merge_request_widget/components/states/mr_widget_closed';
+import closedComponent from '~/vue_merge_request_widget/components/states/mr_widget_closed.vue';
+import mountComponent from '../../../helpers/vue_mount_component_helper';
-const mr = {
-  targetBranch: 'good-branch',
-  targetBranchPath: '/good-branch',
-  metrics: {
-    mergedBy: {},
-    mergedAt: 'mergedUpdatedAt',
-    closedBy: {
-      name: 'Fatih Acet',
-      username: 'fatihacet',
-    },
-    closedAt: 'closedEventUpdatedAt',
-    readableMergedAt: '',
-    readableClosedAt: '',
-  },
-  updatedAt: 'mrUpdatedAt',
-  closedAt: '1 day ago',
-};
-const createComponent = () => {
-  const Component = Vue.extend(closedComponent);
-  return new Component({
-    el: document.createElement('div'),
-    propsData: { mr },
-  });
-};
describe('MRWidgetClosed', () => {
-  describe('props', () => {
-    it('should have props', () => {
-      const mrProp = closedComponent.props.mr;
-      expect(mrProp.type instanceof Object).toBeTruthy();
-      expect(mrProp.required).toBeTruthy();
-    });
+  let vm;
+  beforeEach(() => {
+    const Component = Vue.extend(closedComponent);
+    vm = mountComponent(Component, { mr: {
+      metrics: {
+        mergedBy: {},
+        closedBy: {
+          name: 'Administrator',
+          username: 'root',
+          webUrl: 'http://localhost:3000/root',
+          avatarUrl: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+        },
+        mergedAt: 'Jan 24, 2018 1:02pm GMT+0000',
+        closedAt: 'Jan 24, 2018 1:02pm GMT+0000',
+        readableMergedAt: '',
+        readableClosedAt: 'less than a minute ago',
+      },
+      targetBranchPath: '/twitter/flight/commits/so_long_jquery',
+      targetBranch: 'so_long_jquery',
+    } });
  });
-  describe('components', () => {
-    it('should have components added', () => {
-      expect(closedComponent.components['mr-widget-author-and-time']).toBeDefined();
-    });
+  afterEach(() => {
+    vm.$destroy();
  });
-  describe('template', () => {
-    let vm;
-    let el;
-    beforeEach(() => {
-      vm = createComponent();
-      el = vm.$el;
-    });
-    afterEach(() => {
-      vm.$destroy();
-    });
-    it('should have correct elements', () => {
-      expect(el.querySelector('h4').textContent).toContain('Closed by');
-      expect(el.querySelector('h4').textContent).toContain(mr.metrics.closedBy.name);
-      expect(el.textContent).toContain('The changes were not merged into');
-      expect(el.querySelector('.label-branch').getAttribute('href')).toEqual(mr.targetBranchPath);
-      expect(el.querySelector('.label-branch').textContent).toContain(mr.targetBranch);
-    });
-    it('should use closedEvent updatedAt as tooltip title', () => {
-      expect(
-        el.querySelector('time').getAttribute('title'),
-      ).toBe('closedEventUpdatedAt');
-    });
+  it('renders warning icon', () => {
+    expect(vm.$el.querySelector('.js-ci-status-icon-warning')).not.toBeNull();
+  });
+  it('renders closed by information with author and time', () => {
+    expect(
+      vm.$el.querySelector('.js-mr-widget-author').textContent.trim().replace(/\s\s+/g, ' '),
+    ).toContain(
+      'Closed by Administrator less than a minute ago',
+    );
+  });
+  it('links to the user that closed the MR', () => {
+    expect(vm.$el.querySelector('.author-link').getAttribute('href')).toEqual('http://localhost:3000/root');
+  });
+  it('renders information about the changes not being merged', () => {
+    expect(
+      vm.$el.querySelector('.mr-info-list').textContent.trim().replace(/\s\s+/g, ' '),
+    ).toContain('The changes were not merged into so_long_jquery');
+  });
+  it('renders link for target branch', () => {
+    expect(vm.$el.querySelector('.label-branch').getAttribute('href')).toEqual('/twitter/flight/commits/so_long_jquery');
  });
});
import Vue from 'vue'; import Vue from 'vue';
import conflictsComponent from '~/vue_merge_request_widget/components/states/mr_widget_conflicts'; import conflictsComponent from '~/vue_merge_request_widget/components/states/mr_widget_conflicts.vue';
import mountComponent from '../../../helpers/vue_mount_component_helper'; import mountComponent from '../../../helpers/vue_mount_component_helper';
const ConflictsComponent = Vue.extend(conflictsComponent);
const path = '/conflicts';
describe('MRWidgetConflicts', () => { describe('MRWidgetConflicts', () => {
describe('props', () => { let Component;
it('should have props', () => { let vm;
const { mr } = conflictsComponent.props; const path = '/conflicts';
expect(mr.type instanceof Object).toBeTruthy(); beforeEach(() => {
expect(mr.required).toBeTruthy(); Component = Vue.extend(conflictsComponent);
});
}); });
describe('template', () => { afterEach(() => {
describe('when allowed to merge', () => { vm.$destroy();
let vm; });
beforeEach(() => {
vm = mountComponent(ConflictsComponent, {
mr: {
canMerge: true,
conflictResolutionPath: path,
},
});
});
afterEach(() => {
vm.$destroy();
});
it('should tell you about conflicts without bothering other people', () => {
expect(vm.$el.textContent).toContain('There are merge conflicts');
expect(vm.$el.textContent).not.toContain('ask someone with write access');
});
it('should allow you to resolve the conflicts', () => {
const resolveButton = vm.$el.querySelector('.js-resolve-conflicts-button');
expect(resolveButton.textContent).toContain('Resolve conflicts'); describe('when allowed to merge', () => {
expect(resolveButton.getAttribute('href')).toEqual(path); beforeEach(() => {
vm = mountComponent(Component, {
mr: {
canMerge: true,
conflictResolutionPath: path,
},
}); });
});
it('should have merge buttons', () => { it('should tell you about conflicts without bothering other people', () => {
const mergeButton = vm.$el.querySelector('.js-disabled-merge-button'); expect(vm.$el.textContent).toContain('There are merge conflicts');
const mergeLocallyButton = vm.$el.querySelector('.js-merge-locally-button'); expect(vm.$el.textContent).not.toContain('ask someone with write access');
expect(mergeButton.textContent).toContain('Merge');
expect(mergeButton.disabled).toBeTruthy();
expect(mergeButton.classList.contains('btn-success')).toEqual(true);
expect(mergeLocallyButton.textContent).toContain('Merge locally');
});
}); });
describe('when user does not have permission to merge', () => { it('should allow you to resolve the conflicts', () => {
let vm; const resolveButton = vm.$el.querySelector('.js-resolve-conflicts-button');
beforeEach(() => { expect(resolveButton.textContent).toContain('Resolve conflicts');
vm = mountComponent(ConflictsComponent, { expect(resolveButton.getAttribute('href')).toEqual(path);
mr: { });
canMerge: false,
},
});
});
afterEach(() => { it('should have merge buttons', () => {
vm.$destroy(); const mergeButton = vm.$el.querySelector('.js-disabled-merge-button');
}); const mergeLocallyButton = vm.$el.querySelector('.js-merge-locally-button');
it('should show proper message', () => { expect(mergeButton.textContent).toContain('Merge');
expect(vm.$el.textContent).toContain('ask someone with write access'); expect(mergeButton.disabled).toBeTruthy();
}); expect(mergeButton.classList.contains('btn-success')).toEqual(true);
expect(mergeLocallyButton.textContent).toContain('Merge locally');
});
});
it('should not have action buttons', () => { describe('when user does not have permission to merge', () => {
expect(vm.$el.querySelector('.js-disabled-merge-button')).toBeDefined(); beforeEach(() => {
expect(vm.$el.querySelector('.js-resolve-conflicts-button')).toBeNull(); vm = mountComponent(Component, {
expect(vm.$el.querySelector('.js-merge-locally-button')).toBeNull(); mr: {
canMerge: false,
},
}); });
}); });
describe('when fast-forward or semi-linear merge enabled', () => { it('should show proper message', () => {
let vm; expect(vm.$el.textContent.trim().replace(/\s\s+/g, ' ')).toContain('ask someone with write access');
});
beforeEach(() => { it('should not have action buttons', () => {
vm = mountComponent(ConflictsComponent, { expect(vm.$el.querySelector('.js-disabled-merge-button')).toBeDefined();
mr: { expect(vm.$el.querySelector('.js-resolve-conflicts-button')).toBeNull();
shouldBeRebased: true, expect(vm.$el.querySelector('.js-merge-locally-button')).toBeNull();
}, });
}); });
});
afterEach(() => { describe('when fast-forward or semi-linear merge enabled', () => {
vm.$destroy(); beforeEach(() => {
vm = mountComponent(Component, {
mr: {
shouldBeRebased: true,
},
}); });
});
it('should tell you to rebase locally', () => { it('should tell you to rebase locally', () => {
expect(vm.$el.textContent).toContain('Fast-forward merge is not possible.'); expect(vm.$el.textContent.trim().replace(/\s\s+/g, ' ')).toContain('Fast-forward merge is not possible.');
expect(vm.$el.textContent).toContain('To merge this request, first rebase locally'); expect(vm.$el.textContent.trim().replace(/\s\s+/g, ' ')).toContain('To merge this request, first rebase locally');
});
}); });
}); });
}); });
require 'spec_helper' require 'spec_helper'
describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :truncate, :migration, schema: 20171114162227 do describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :migration, schema: 20171114162227 do
let(:merge_request_diffs) { table(:merge_request_diffs) } let(:merge_request_diffs) { table(:merge_request_diffs) }
let(:merge_requests) { table(:merge_requests) } let(:merge_requests) { table(:merge_requests) }
......
require 'spec_helper' require 'spec_helper'
describe Gitlab::BackgroundMigration::MigrateSystemUploadsToNewFolder do describe Gitlab::BackgroundMigration::MigrateSystemUploadsToNewFolder, :delete do
let(:migration) { described_class.new } let(:migration) { described_class.new }
before do before do
...@@ -8,7 +8,7 @@ describe Gitlab::BackgroundMigration::MigrateSystemUploadsToNewFolder do ...@@ -8,7 +8,7 @@ describe Gitlab::BackgroundMigration::MigrateSystemUploadsToNewFolder do
end end
describe '#perform' do describe '#perform' do
it 'renames the path of system-uploads', :truncate do it 'renames the path of system-uploads' do
upload = create(:upload, model: create(:project), path: 'uploads/system/project/avatar.jpg') upload = create(:upload, model: create(:project), path: 'uploads/system/project/avatar.jpg')
migration.perform('uploads/system/', 'uploads/-/system/') migration.perform('uploads/system/', 'uploads/-/system/')
......
require 'spec_helper' require 'spec_helper'
describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :truncate do describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :delete do
let(:migration) { FakeRenameReservedPathMigrationV1.new } let(:migration) { FakeRenameReservedPathMigrationV1.new }
let(:subject) { described_class.new(['the-path'], migration) } let(:subject) { described_class.new(['the-path'], migration) }
......
require 'spec_helper' require 'spec_helper'
describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :truncate do describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :delete do
let(:migration) { FakeRenameReservedPathMigrationV1.new } let(:migration) { FakeRenameReservedPathMigrationV1.new }
let(:subject) { described_class.new(['the-path'], migration) } let(:subject) { described_class.new(['the-path'], migration) }
let(:namespace) { create(:group, name: 'the-path') } let(:namespace) { create(:group, name: 'the-path') }
......
require 'spec_helper' require 'spec_helper'
describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :truncate do describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :delete do
let(:migration) { FakeRenameReservedPathMigrationV1.new } let(:migration) { FakeRenameReservedPathMigrationV1.new }
let(:subject) { described_class.new(['the-path'], migration) } let(:subject) { described_class.new(['the-path'], migration) }
let(:project) do let(:project) do
......
...@@ -13,7 +13,7 @@ shared_examples 'renames child namespaces' do |type| ...@@ -13,7 +13,7 @@ shared_examples 'renames child namespaces' do |type|
end end
end end
describe Gitlab::Database::RenameReservedPathsMigration::V1, :truncate do describe Gitlab::Database::RenameReservedPathsMigration::V1, :delete do
let(:subject) { FakeRenameReservedPathMigrationV1.new } let(:subject) { FakeRenameReservedPathMigrationV1.new }
before do before do
......
@@ -260,29 +260,42 @@ describe Gitlab::Git::Blob, seed_helper: true do
      )
    end
+   shared_examples 'fetching batch of LFS pointers' do
      it 'returns a list of Gitlab::Git::Blob' do
        blobs = described_class.batch_lfs_pointers(repository, [lfs_blob.id])
        expect(blobs.count).to eq(1)
        expect(blobs).to all( be_a(Gitlab::Git::Blob) )
      end
      it 'silently ignores tree objects' do
        blobs = described_class.batch_lfs_pointers(repository, [tree_object.oid])
        expect(blobs).to eq([])
      end
      it 'silently ignores non lfs objects' do
        blobs = described_class.batch_lfs_pointers(repository, [non_lfs_blob.id])
        expect(blobs).to eq([])
      end
      it 'avoids loading large blobs into memory' do
+       # This line could call `lookup` on `repository`, so do here before mocking.
+       non_lfs_blob_id = non_lfs_blob.id
        expect(repository).not_to receive(:lookup)
-       described_class.batch_lfs_pointers(repository, [non_lfs_blob.id])
+       described_class.batch_lfs_pointers(repository, [non_lfs_blob_id])
      end
+   end
+   context 'when Gitaly batch_lfs_pointers is enabled' do
+     it_behaves_like 'fetching batch of LFS pointers'
+   end
+   context 'when Gitaly batch_lfs_pointers is disabled', :disable_gitaly do
+     it_behaves_like 'fetching batch of LFS pointers'
+   end
  end
......
...@@ -1926,6 +1926,34 @@ describe Gitlab::Git::Repository, seed_helper: true do ...@@ -1926,6 +1926,34 @@ describe Gitlab::Git::Repository, seed_helper: true do
it { expect(subject.repository_relative_path).to eq(repository.relative_path) } it { expect(subject.repository_relative_path).to eq(repository.relative_path) }
end end
describe '#bundle_to_disk' do
shared_examples 'bundling to disk' do
let(:save_path) { File.join(Dir.tmpdir, "repo-#{SecureRandom.hex}.bundle") }
after do
FileUtils.rm_rf(save_path)
end
it 'saves a bundle to disk' do
repository.bundle_to_disk(save_path)
success = system(
*%W(#{Gitlab.config.git.bin_path} -C #{repository.path} bundle verify #{save_path}),
[:out, :err] => '/dev/null'
)
expect(success).to be true
end
end
context 'when Gitaly bundle_to_disk feature is enabled' do
it_behaves_like 'bundling to disk'
end
context 'when Gitaly bundle_to_disk feature is disabled', :disable_gitaly do
it_behaves_like 'bundling to disk'
end
end
context 'gitlab_projects commands' do context 'gitlab_projects commands' do
let(:gitlab_projects) { repository.gitlab_projects } let(:gitlab_projects) { repository.gitlab_projects }
let(:timeout) { Gitlab.config.gitlab_shell.git_timeout } let(:timeout) { Gitlab.config.gitlab_shell.git_timeout }
......
...@@ -131,6 +131,29 @@ describe Gitlab::GitalyClient::CommitService do ...@@ -131,6 +131,29 @@ describe Gitlab::GitalyClient::CommitService do
end end
end end
describe '#commit_count' do
before do
expect_any_instance_of(Gitaly::CommitService::Stub)
.to receive(:count_commits)
.with(gitaly_request_with_path(storage_name, relative_path),
kind_of(Hash))
.and_return([])
end
it 'sends a commit_count message' do
client.commit_count(revision)
end
context 'with UTF-8 params strings' do
let(:revision) { "branch\u011F" }
let(:path) { "foo/\u011F.txt" }
it 'handles string encodings correctly' do
client.commit_count(revision, path: path)
end
end
end
describe '#find_commit' do describe '#find_commit' do
let(:revision) { '4b825dc642cb6eb9a060e54bf8d69288fbee4904' } let(:revision) { '4b825dc642cb6eb9a060e54bf8d69288fbee4904' }
it 'sends an RPC request' do it 'sends an RPC request' do
......
require 'spec_helper'
describe Gitlab::GitalyClient::HealthCheckService do
let(:project) { create(:project) }
let(:storage_name) { project.repository_storage }
subject { described_class.new(storage_name) }
describe '#check' do
it 'successfully sends a health check request' do
expect(Gitlab::GitalyClient).to receive(:call).with(
storage_name,
:health_check,
:check,
instance_of(Grpc::Health::V1::HealthCheckRequest),
timeout: Gitlab::GitalyClient.fast_timeout).and_call_original
expect(subject.check).to eq({ success: true })
end
it 'receives an unsuccessful health check request' do
expect_any_instance_of(Grpc::Health::V1::Health::Stub)
.to receive(:check)
.and_return(double(status: false))
expect(subject.check).to eq({ success: false })
end
it 'gracefully handles gRPC error' do
expect(Gitlab::GitalyClient).to receive(:call).with(
storage_name,
:health_check,
:check,
instance_of(Grpc::Health::V1::HealthCheckRequest),
timeout: Gitlab::GitalyClient.fast_timeout)
.and_raise(GRPC::Unavailable.new('Connection refused'))
expect(subject.check).to eq({ success: false, message: '14:Connection refused' })
end
end
end
...@@ -3,6 +3,31 @@ require 'spec_helper' ...@@ -3,6 +3,31 @@ require 'spec_helper'
# We stub Gitaly in `spec/support/gitaly.rb` for other tests. We don't want # We stub Gitaly in `spec/support/gitaly.rb` for other tests. We don't want
# those stubs while testing the GitalyClient itself. # those stubs while testing the GitalyClient itself.
describe Gitlab::GitalyClient, skip_gitaly_mock: true do describe Gitlab::GitalyClient, skip_gitaly_mock: true do
describe '.stub_class' do
it 'returns the gRPC health check stub' do
expect(described_class.stub_class(:health_check)).to eq(::Grpc::Health::V1::Health::Stub)
end
it 'returns a Gitaly stub' do
expect(described_class.stub_class(:ref_service)).to eq(::Gitaly::RefService::Stub)
end
end
describe '.stub_address' do
it 'returns the same result after being called multiple times' do
address = 'localhost:9876'
prefixed_address = "tcp://#{address}"
allow(Gitlab.config.repositories).to receive(:storages).and_return({
'default' => { 'gitaly_address' => prefixed_address }
})
2.times do
expect(described_class.stub_address('default')).to eq('localhost:9876')
end
end
end
describe '.stub' do describe '.stub' do
# Notice that this is referring to gRPC "stubs", not rspec stubs # Notice that this is referring to gRPC "stubs", not rspec stubs
before do before do
......
require 'spec_helper'
describe Gitlab::HealthChecks::GitalyCheck do
let(:result_class) { Gitlab::HealthChecks::Result }
let(:repository_storages) { ['default'] }
before do
allow(described_class).to receive(:repository_storages) { repository_storages }
end
describe '#readiness' do
subject { described_class.readiness }
before do
expect(Gitlab::GitalyClient::HealthCheckService).to receive(:new).and_return(gitaly_check)
end
context 'Gitaly server is up' do
let(:gitaly_check) { double(check: { success: true }) }
it { is_expected.to eq([result_class.new(true, nil, shard: 'default')]) }
end
context 'Gitaly server is down' do
let(:gitaly_check) { double(check: { success: false, message: 'Connection refused' }) }
it { is_expected.to eq([result_class.new(false, 'Connection refused', shard: 'default')]) }
end
end
describe '#metrics' do
subject { described_class.metrics }
before do
expect(Gitlab::GitalyClient::HealthCheckService).to receive(:new).and_return(gitaly_check)
end
context 'Gitaly server is up' do
let(:gitaly_check) { double(check: { success: true }) }
it 'provides metrics' do
expect(subject).to all(have_attributes(labels: { shard: 'default' }))
expect(subject).to include(an_object_having_attributes(name: 'gitaly_health_check_success', value: 1))
expect(subject).to include(an_object_having_attributes(name: 'gitaly_health_check_latency_seconds', value: be >= 0))
end
end
context 'Gitaly server is down' do
let(:gitaly_check) { double(check: { success: false, message: 'Connection refused' }) }
it 'provides metrics' do
expect(subject).to include(an_object_having_attributes(name: 'gitaly_health_check_success', value: 0))
expect(subject).to include(an_object_having_attributes(name: 'gitaly_health_check_latency_seconds', value: be >= 0))
end
end
end
end
...@@ -19,6 +19,12 @@ describe Gitlab::SearchResults do ...@@ -19,6 +19,12 @@ describe Gitlab::SearchResults do
project.add_developer(user) project.add_developer(user)
end end
describe '#objects' do
it 'returns without_page collection by default' do
expect(results.objects('projects')).to be_kind_of(Kaminari::PaginatableWithoutCount)
end
end
describe '#projects_count' do describe '#projects_count' do
it 'returns the total amount of projects' do it 'returns the total amount of projects' do
expect(results.projects_count).to eq(1) expect(results.projects_count).to eq(1)
...@@ -43,6 +49,58 @@ describe Gitlab::SearchResults do ...@@ -43,6 +49,58 @@ describe Gitlab::SearchResults do
end end
end end
context "when count_limit is lower than total amount" do
before do
allow(results).to receive(:count_limit).and_return(1)
end
describe '#limited_projects_count' do
it 'returns the limited amount of projects' do
create(:project, name: 'foo2')
expect(results.limited_projects_count).to eq(1)
end
end
describe '#limited_merge_requests_count' do
it 'returns the limited amount of merge requests' do
create(:merge_request, :simple, source_project: project, title: 'foo2')
expect(results.limited_merge_requests_count).to eq(1)
end
end
describe '#limited_milestones_count' do
it 'returns the limited amount of milestones' do
create(:milestone, project: project, title: 'foo2')
expect(results.limited_milestones_count).to eq(1)
end
end
describe '#limited_issues_count' do
it 'runs single SQL query to get the limited amount of issues' do
create(:milestone, project: project, title: 'foo2')
expect(results).to receive(:issues).with(public_only: true).and_call_original
expect(results).not_to receive(:issues).with(no_args).and_call_original
expect(results.limited_issues_count).to eq(1)
end
end
end
context "when count_limit is higher than total amount" do
describe '#limited_issues_count' do
it 'runs multiple queries to get the limited amount of issues' do
expect(results).to receive(:issues).with(public_only: true).and_call_original
expect(results).to receive(:issues).with(no_args).and_call_original
expect(results.limited_issues_count).to eq(1)
end
end
end
it 'includes merge requests from source and target projects' do it 'includes merge requests from source and target projects' do
forked_project = fork_project(project, user) forked_project = fork_project(project, user)
merge_request_2 = create(:merge_request, target_project: project, source_project: forked_project, title: 'foo') merge_request_2 = create(:merge_request, target_project: project, source_project: forked_project, title: 'foo')
......
require 'spec_helper' require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170508170547_add_head_pipeline_for_each_merge_request.rb') require Rails.root.join('db', 'post_migrate', '20170508170547_add_head_pipeline_for_each_merge_request.rb')
describe AddHeadPipelineForEachMergeRequest, :truncate do describe AddHeadPipelineForEachMergeRequest, :delete do
include ProjectForksHelper include ProjectForksHelper
let(:migration) { described_class.new } let(:migration) { described_class.new }
......
...@@ -3,7 +3,7 @@ ...@@ -3,7 +3,7 @@
require 'spec_helper' require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170803090603_calculate_conv_dev_index_percentages.rb') require Rails.root.join('db', 'post_migrate', '20170803090603_calculate_conv_dev_index_percentages.rb')
describe CalculateConvDevIndexPercentages, truncate: true do describe CalculateConvDevIndexPercentages, :delete do
let(:migration) { described_class.new } let(:migration) { described_class.new }
let!(:conv_dev_index) do let!(:conv_dev_index) do
create(:conversational_development_index_metric, create(:conversational_development_index_metric,
......
require 'spec_helper' require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170518231126_fix_wrongly_renamed_routes.rb') require Rails.root.join('db', 'post_migrate', '20170518231126_fix_wrongly_renamed_routes.rb')
describe FixWronglyRenamedRoutes, :truncate, :migration do describe FixWronglyRenamedRoutes, :migration do
let(:subject) { described_class.new } let(:subject) { described_class.new }
let(:namespaces_table) { table(:namespaces) } let(:namespaces_table) { table(:namespaces) }
let(:projects_table) { table(:projects) } let(:projects_table) { table(:projects) }
......
@@ -8,10 +8,10 @@ describe MigrateIssuesToGhostUser, :migration do
  let(:users) { table(:users) }
  before do
-   projects.create!(name: 'gitlab')
+   project = projects.create!(name: 'gitlab')
    user = users.create(email: 'test@example.com')
-   issues.create(title: 'Issue 1', author_id: nil, project_id: 1)
-   issues.create(title: 'Issue 2', author_id: user.id, project_id: 1)
+   issues.create(title: 'Issue 1', author_id: nil, project_id: project.id)
+   issues.create(title: 'Issue 2', author_id: user.id, project_id: project.id)
  end
  context 'when ghost user exists' do
......
...@@ -3,7 +3,7 @@ ...@@ -3,7 +3,7 @@
require 'spec_helper' require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170324160416_migrate_user_activities_to_users_last_activity_on.rb') require Rails.root.join('db', 'post_migrate', '20170324160416_migrate_user_activities_to_users_last_activity_on.rb')
describe MigrateUserActivitiesToUsersLastActivityOn, :clean_gitlab_redis_shared_state, :truncate do describe MigrateUserActivitiesToUsersLastActivityOn, :clean_gitlab_redis_shared_state, :delete do
let(:migration) { described_class.new } let(:migration) { described_class.new }
let!(:user_active_1) { create(:user) } let!(:user_active_1) { create(:user) }
let!(:user_active_2) { create(:user) } let!(:user_active_2) { create(:user) }
......
...@@ -3,7 +3,7 @@ ...@@ -3,7 +3,7 @@
require 'spec_helper' require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170406142253_migrate_user_project_view.rb') require Rails.root.join('db', 'post_migrate', '20170406142253_migrate_user_project_view.rb')
describe MigrateUserProjectView, :truncate do describe MigrateUserProjectView, :delete do
let(:migration) { described_class.new } let(:migration) { described_class.new }
let!(:user) { create(:user, project_view: 'readme') } let!(:user) { create(:user, project_view: 'readme') }
......
require 'spec_helper' require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170815060945_remove_duplicate_mr_events.rb') require Rails.root.join('db', 'post_migrate', '20170815060945_remove_duplicate_mr_events.rb')
describe RemoveDuplicateMrEvents, truncate: true do describe RemoveDuplicateMrEvents, :delete do
let(:migration) { described_class.new } let(:migration) { described_class.new }
describe '#up' do describe '#up' do
......
...@@ -5,8 +5,8 @@ require Rails.root.join('db', 'post_migrate', '20170313133418_rename_more_reserv ...@@ -5,8 +5,8 @@ require Rails.root.join('db', 'post_migrate', '20170313133418_rename_more_reserv
# This migration uses multiple threads, and thus different transactions. This # This migration uses multiple threads, and thus different transactions. This
# means data created in this spec may not be visible to some threads. To work # means data created in this spec may not be visible to some threads. To work
# around this we use the TRUNCATE cleaning strategy. # around this we use the DELETE cleaning strategy.
describe RenameMoreReservedProjectNames, truncate: true do describe RenameMoreReservedProjectNames, :delete do
let(:migration) { described_class.new } let(:migration) { described_class.new }
let!(:project) { create(:project) } let!(:project) { create(:project) }
......
...@@ -5,8 +5,8 @@ require Rails.root.join('db', 'post_migrate', '20161221153951_rename_reserved_pr ...@@ -5,8 +5,8 @@ require Rails.root.join('db', 'post_migrate', '20161221153951_rename_reserved_pr
# This migration uses multiple threads, and thus different transactions. This # This migration uses multiple threads, and thus different transactions. This
# means data created in this spec may not be visible to some threads. To work # means data created in this spec may not be visible to some threads. To work
# around this we use the TRUNCATE cleaning strategy. # around this we use the DELETE cleaning strategy.
describe RenameReservedProjectNames, truncate: true do describe RenameReservedProjectNames, :delete do
let(:migration) { described_class.new } let(:migration) { described_class.new }
let!(:project) { create(:project) } let!(:project) { create(:project) }
......
require 'spec_helper' require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170518200835_rename_users_with_renamed_namespace.rb') require Rails.root.join('db', 'post_migrate', '20170518200835_rename_users_with_renamed_namespace.rb')
describe RenameUsersWithRenamedNamespace, truncate: true do describe RenameUsersWithRenamedNamespace, :delete do
it 'renames a user that had their namespace renamed to the namespace path' do it 'renames a user that had their namespace renamed to the namespace path' do
other_user = create(:user, username: 'kodingu') other_user = create(:user, username: 'kodingu')
other_user1 = create(:user, username: 'api0') other_user1 = create(:user, username: 'api0')
......
require 'spec_helper' require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170503004427_update_retried_for_ci_build.rb') require Rails.root.join('db', 'post_migrate', '20170503004427_update_retried_for_ci_build.rb')
describe UpdateRetriedForCiBuild, truncate: true do describe UpdateRetriedForCiBuild, :delete do
let(:pipeline) { create(:ci_pipeline) } let(:pipeline) { create(:ci_pipeline) }
let!(:build_old) { create(:ci_build, pipeline: pipeline, name: 'test') } let!(:build_old) { create(:ci_build, pipeline: pipeline, name: 'test') }
let!(:build_new) { create(:ci_build, pipeline: pipeline, name: 'test') } let!(:build_new) { create(:ci_build, pipeline: pipeline, name: 'test') }
......
require 'spec_helper'
describe Avatarable do
-  subject { create(:project, avatar: fixture_file_upload(File.join(Rails.root, 'spec/fixtures/dk.png'))) }
+  set(:project) { create(:project, avatar: fixture_file_upload(File.join(Rails.root, 'spec/fixtures/dk.png'))) }
  let(:gitlab_host) { "https://gitlab.example.com" }
  let(:relative_url_root) { "/gitlab" }
-  let(:asset_host) { "https://gitlab-assets.example.com" }
+  let(:asset_host) { 'https://gitlab-assets.example.com' }
  before do
    stub_config_setting(base_url: gitlab_host)
@@ -15,29 +15,32 @@ describe Avatarable do
  describe '#avatar_path' do
    using RSpec::Parameterized::TableSyntax
-   where(:has_asset_host, :visibility_level, :only_path, :avatar_path) do
-     true  | Project::PRIVATE  | true  | [gitlab_host, relative_url_root, subject.avatar.url]
-     true  | Project::PRIVATE  | false | [gitlab_host, relative_url_root, subject.avatar.url]
-     true  | Project::INTERNAL | true  | [gitlab_host, relative_url_root, subject.avatar.url]
-     true  | Project::INTERNAL | false | [gitlab_host, relative_url_root, subject.avatar.url]
-     true  | Project::PUBLIC   | true  | [subject.avatar.url]
-     true  | Project::PUBLIC   | false | [asset_host, subject.avatar.url]
-     false | Project::PRIVATE  | true  | [relative_url_root, subject.avatar.url]
-     false | Project::PRIVATE  | false | [gitlab_host, relative_url_root, subject.avatar.url]
-     false | Project::INTERNAL | true  | [relative_url_root, subject.avatar.url]
-     false | Project::INTERNAL | false | [gitlab_host, relative_url_root, subject.avatar.url]
-     false | Project::PUBLIC   | true  | [relative_url_root, subject.avatar.url]
-     false | Project::PUBLIC   | false | [gitlab_host, relative_url_root, subject.avatar.url]
+   where(:has_asset_host, :visibility_level, :only_path, :avatar_path_prefix) do
+     true  | Project::PRIVATE  | true  | [gitlab_host, relative_url_root]
+     true  | Project::PRIVATE  | false | [gitlab_host, relative_url_root]
+     true  | Project::INTERNAL | true  | [gitlab_host, relative_url_root]
+     true  | Project::INTERNAL | false | [gitlab_host, relative_url_root]
+     true  | Project::PUBLIC   | true  | []
+     true  | Project::PUBLIC   | false | [asset_host]
+     false | Project::PRIVATE  | true  | [relative_url_root]
+     false | Project::PRIVATE  | false | [gitlab_host, relative_url_root]
+     false | Project::INTERNAL | true  | [relative_url_root]
+     false | Project::INTERNAL | false | [gitlab_host, relative_url_root]
+     false | Project::PUBLIC   | true  | [relative_url_root]
+     false | Project::PUBLIC   | false | [gitlab_host, relative_url_root]
    end
    with_them do
      before do
-       allow(ActionController::Base).to receive(:asset_host).and_return(has_asset_host ? asset_host : nil)
-       subject.visibility_level = visibility_level
+       allow(ActionController::Base).to receive(:asset_host) { has_asset_host && asset_host }
+       project.visibility_level = visibility_level
      end
+     let(:avatar_path) { (avatar_path_prefix + [project.avatar.url]).join }
      it 'returns the expected avatar path' do
-       expect(subject.avatar_path(only_path: only_path)).to eq(avatar_path.join)
+       expect(project.avatar_path(only_path: only_path)).to eq(avatar_path)
      end
    end
  end
......
...@@ -488,7 +488,7 @@ describe Member do ...@@ -488,7 +488,7 @@ describe Member do
member.accept_invite!(user) member.accept_invite!(user)
end end
it "refreshes user's authorized projects", :truncate do it "refreshes user's authorized projects", :delete do
project = member.source project = member.source
expect(user.authorized_projects).not_to include(project) expect(user.authorized_projects).not_to include(project)
...@@ -523,7 +523,7 @@ describe Member do ...@@ -523,7 +523,7 @@ describe Member do
end end
end end
describe "destroying a record", :truncate do describe "destroying a record", :delete do
it "refreshes user's authorized projects" do it "refreshes user's authorized projects" do
project = create(:project, :private) project = create(:project, :private)
user = create(:user) user = create(:user)
......
...@@ -30,7 +30,7 @@ describe ProjectGroupLink do ...@@ -30,7 +30,7 @@ describe ProjectGroupLink do
end end
end end
describe "destroying a record", :truncate do describe "destroying a record", :delete do
it "refreshes group users' authorized projects" do it "refreshes group users' authorized projects" do
project = create(:project, :private) project = create(:project, :private)
group = create(:group) group = create(:group)
......
...@@ -3228,5 +3228,22 @@ describe Project do ...@@ -3228,5 +3228,22 @@ describe Project do
project = build(:project) project = build(:project)
project.execute_hooks({ data: 'data' }, :merge_request_hooks) project.execute_hooks({ data: 'data' }, :merge_request_hooks)
end end
it 'executes the system hooks when inside a transaction' do
allow_any_instance_of(WebHookService).to receive(:execute)
create(:system_hook, merge_requests_events: true)
project = build(:project)
# Ideally, we'd test that `WebHookWorker.jobs.size` increased by 1,
# but since the entire spec run takes place in a transaction, we never
# actually get to the `after_commit` hook that queues these jobs.
expect do
project.transaction do
project.execute_hooks({ data: 'data' }, :merge_request_hooks)
end
end.not_to raise_error # Sidekiq::Worker::EnqueueFromTransactionError
end
end end
end end
...@@ -1569,7 +1569,7 @@ describe User do ...@@ -1569,7 +1569,7 @@ describe User do
it { is_expected.to eq([private_group]) } it { is_expected.to eq([private_group]) }
end end
describe '#authorized_projects', :truncate do describe '#authorized_projects', :delete do
context 'with a minimum access level' do context 'with a minimum access level' do
it 'includes projects for which the user is an owner' do it 'includes projects for which the user is an owner' do
user = create(:user) user = create(:user)
......
...@@ -386,6 +386,17 @@ describe WikiPage do ...@@ -386,6 +386,17 @@ describe WikiPage do
end end
end end
describe '#formatted_content' do
it 'returns processed content of the page', :disable_gitaly do
subject.create({ title: "RDoc", content: "*bold*", format: "rdoc" })
page = wiki.find_page('RDoc')
expect(page.formatted_content).to eq("\n<p><strong>bold</strong></p>\n")
destroy_page('RDoc')
end
end
private private
def remove_temp_repo(path) def remove_temp_repo(path)
......
@@ -297,9 +297,11 @@ describe Issues::MoveService do
    end
    context 'project issue hooks' do
-     let(:hook) { create(:project_hook, project: old_project, issues_events: true) }
+     let!(:hook) { create(:project_hook, project: old_project, issues_events: true) }
      it 'executes project issue hooks' do
+       allow_any_instance_of(WebHookService).to receive(:execute)
        # Ideally, we'd test that `WebHookWorker.jobs.size` increased by 1,
        # but since the entire spec run takes place in a transaction, we never
        # actually get to the `after_commit` hook that queues these jobs.
......
require 'database_cleaner/active_record/deletion'
module FakeInformationSchema
# Work around a bug in DatabaseCleaner when using the deletion strategy:
# https://github.com/DatabaseCleaner/database_cleaner/issues/347
#
# On MySQL, if the information schema is said to exist, we use an inaccurate
# row count leading to some tables not being cleaned when they should
def information_schema_exists?(_connection)
false
end
end
DatabaseCleaner::ActiveRecord::Deletion.prepend(FakeInformationSchema)
RSpec.configure do |config|
+  # Ensure all sequences are reset at the start of the suite run
  config.before(:suite) do
    DatabaseCleaner.clean_with(:truncation)
  end
  config.append_after(:context) do
-    DatabaseCleaner.clean_with(:truncation, cache_tables: false)
+    DatabaseCleaner.clean_with(:deletion, cache_tables: false)
  end
  config.before(:each) do
@@ -12,15 +28,15 @@ RSpec.configure do |config|
  end
  config.before(:each, :js) do
-    DatabaseCleaner.strategy = :truncation
+    DatabaseCleaner.strategy = :deletion
  end
-  config.before(:each, :truncate) do
-    DatabaseCleaner.strategy = :truncation
+  config.before(:each, :delete) do
+    DatabaseCleaner.strategy = :deletion
  end
  config.before(:each, :migration) do
-    DatabaseCleaner.strategy = :truncation, { cache_tables: false }
+    DatabaseCleaner.strategy = :deletion, { cache_tables: false }
  end
  config.before(:each) do
......
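For reference, a minimal sketch of how a spec opts into the new strategy through the metadata wired up above; the described class is illustrative:
describe SomeMigration, :delete do   # :delete replaces the old :truncate tag
  it 'runs with the DatabaseCleaner deletion strategy' do
    # example body omitted
  end
end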
@@ -143,15 +143,17 @@ shared_examples 'discussion comments' do |resource_name|
    end
    if resource_name == 'merge request'
+     let(:note_id) { find("#{comments_selector} .note", match: :first)['data-note-id'] }
      it 'shows resolved discussion when toggled' do
        click_button "Resolve discussion"
-       expect(page).to have_selector('.note-row-1', visible: true)
+       expect(page).to have_selector(".note-row-#{note_id}", visible: true)
        refresh
        click_button "Toggle discussion"
-       expect(page).to have_selector('.note-row-1', visible: true)
+       expect(page).to have_selector(".note-row-#{note_id}", visible: true)
      end
    end
  end
......
@@ -8,7 +8,7 @@ describe JobArtifactUploader do
  describe '#store_dir' do
    subject { uploader.store_dir }
-   let(:path) { "#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/#{job_artifact.project_id}/#{job_artifact.id}" }
+   let(:path) { "#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/#{job_artifact.job_id}/#{job_artifact.id}" }
    context 'when using local storage' do
      it { is_expected.to start_with(local_path) }
@@ -45,7 +45,7 @@ describe JobArtifactUploader do
    it { is_expected.to start_with(local_path) }
    it { is_expected.to include("/#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/") }
-   it { is_expected.to include("/#{job_artifact.project_id}/") }
+   it { is_expected.to include("/#{job_artifact.job_id}/#{job_artifact.id}/") }
    it { is_expected.to end_with("ci_build_artifacts.zip") }
  end
end end
@@ -13,8 +13,8 @@ describe 'projects/pipelines_settings/_show' do
      render
      expect(rendered).to have_css('.settings-message')
-     expect(rendered).to have_text('Auto Review Apps and Auto Deploy need a domain name and the')
-     expect(rendered).to have_link('Kubernetes service')
+     expect(rendered).to have_text('Auto Review Apps and Auto Deploy need a domain name and a')
+     expect(rendered).to have_link('Kubernetes cluster')
    end
  end
@@ -27,8 +27,8 @@ describe 'projects/pipelines_settings/_show' do
      render
      expect(rendered).to have_css('.settings-message')
-     expect(rendered).to have_text('Auto Review Apps and Auto Deploy need the')
-     expect(rendered).to have_link('Kubernetes service')
+     expect(rendered).to have_text('Auto Review Apps and Auto Deploy need a')
+     expect(rendered).to have_link('Kubernetes cluster')
    end
  end
end
......