Commit 1cda1a8b authored by Nick Thomas's avatar Nick Thomas

Merge remote-tracking branch 'ce/master' into ce-to-ee

parents 1f298fc4 43d98cd1
...@@ -612,6 +612,16 @@ codequality: ...@@ -612,6 +612,16 @@ codequality:
artifacts: artifacts:
paths: [codeclimate.json] paths: [codeclimate.json]
qa:internal:
stage: test
variables:
SETUP_DB: "false"
services: []
script:
- cd qa/
- bundle install
- bundle exec rspec
coverage: coverage:
<<: *dedicated-runner <<: *dedicated-runner
<<: *except-docs <<: *except-docs
......
...@@ -2,7 +2,10 @@ ...@@ -2,7 +2,10 @@
import pipelineStage from '../../pipelines/components/stage.vue'; import pipelineStage from '../../pipelines/components/stage.vue';
import ciIcon from '../../vue_shared/components/ci_icon.vue'; import ciIcon from '../../vue_shared/components/ci_icon.vue';
import icon from '../../vue_shared/components/icon.vue'; import icon from '../../vue_shared/components/icon.vue';
<<<<<<< HEAD
import linkedPipelinesMiniList from '../../vue_shared/components/linked_pipelines_mini_list.vue'; import linkedPipelinesMiniList from '../../vue_shared/components/linked_pipelines_mini_list.vue';
=======
>>>>>>> ce/master
export default { export default {
name: 'MRWidgetPipeline', name: 'MRWidgetPipeline',
...@@ -26,7 +29,10 @@ ...@@ -26,7 +29,10 @@
pipelineStage, pipelineStage,
ciIcon, ciIcon,
icon, icon,
<<<<<<< HEAD
linkedPipelinesMiniList, linkedPipelinesMiniList,
=======
>>>>>>> ce/master
}, },
computed: { computed: {
hasPipeline() { hasPipeline() {
...@@ -44,6 +50,7 @@ ...@@ -44,6 +50,7 @@
this.pipeline.details.stages && this.pipeline.details.stages &&
this.pipeline.details.stages.length; this.pipeline.details.stages.length;
}, },
<<<<<<< HEAD
/* We typically set defaults ([]) in the store or prop declarations, but because triggered /* We typically set defaults ([]) in the store or prop declarations, but because triggered
* and triggeredBy are appended to `pipeline`, we can't set defaults in the store, and we * and triggeredBy are appended to `pipeline`, we can't set defaults in the store, and we
...@@ -59,6 +66,12 @@ ...@@ -59,6 +66,12 @@
}, },
}; };
</script> </script>
=======
},
};
</script>
>>>>>>> ce/master
<template> <template>
<div <div
v-if="hasPipeline || hasCIError" v-if="hasPipeline || hasCIError"
...@@ -66,8 +79,12 @@ ...@@ -66,8 +79,12 @@
<div class="ci-widget media"> <div class="ci-widget media">
<template v-if="hasCIError"> <template v-if="hasCIError">
<div class="ci-status-icon ci-status-icon-failed ci-error js-ci-error append-right-10"> <div class="ci-status-icon ci-status-icon-failed ci-error js-ci-error append-right-10">
<<<<<<< HEAD
<icon <icon
name="status_failed"/> name="status_failed"/>
=======
<icon name="status_failed" />
>>>>>>> ce/master
</div> </div>
<div class="media-body"> <div class="media-body">
Could not connect to the CI server. Please check your settings and try again Could not connect to the CI server. Please check your settings and try again
...@@ -79,6 +96,10 @@ ...@@ -79,6 +96,10 @@
:href="this.status.details_path"> :href="this.status.details_path">
<ci-icon :status="status" /> <ci-icon :status="status" />
</a> </a>
<<<<<<< HEAD
=======
>>>>>>> ce/master
<div class="media-body"> <div class="media-body">
Pipeline Pipeline
<a <a
...@@ -86,7 +107,13 @@ ...@@ -86,7 +107,13 @@
class="pipeline-id"> class="pipeline-id">
#{{pipeline.id}} #{{pipeline.id}}
</a> </a>
<<<<<<< HEAD
{{pipeline.details.status.label}} for {{pipeline.details.status.label}} for
=======
{{pipeline.details.status.label}} for
>>>>>>> ce/master
<a <a
:href="pipeline.commit.commit_path" :href="pipeline.commit.commit_path"
class="commit-sha js-commit-link"> class="commit-sha js-commit-link">
...@@ -94,15 +121,19 @@ ...@@ -94,15 +121,19 @@
<span class="mr-widget-pipeline-graph"> <span class="mr-widget-pipeline-graph">
<span class="stage-cell"> <span class="stage-cell">
<<<<<<< HEAD
<linked-pipelines-mini-list <linked-pipelines-mini-list
v-if="triggeredBy.length" v-if="triggeredBy.length"
:triggered-by="triggeredBy" :triggered-by="triggeredBy"
/> />
=======
>>>>>>> ce/master
<div <div
v-if="hasStages" v-if="hasStages"
v-for="(stage, i) in pipeline.details.stages" v-for="(stage, i) in pipeline.details.stages"
:key="i" :key="i"
<<<<<<< HEAD
class="stage-container dropdown js-mini-pipeline-graph" class="stage-container dropdown js-mini-pipeline-graph"
:class="{ :class="{
'has-downstream': i === pipeline.details.stages.length - 1 && triggered.length 'has-downstream': i === pipeline.details.stages.length - 1 && triggered.length
...@@ -114,6 +145,11 @@ ...@@ -114,6 +145,11 @@
v-if="triggered.length" v-if="triggered.length"
:triggered="triggered" :triggered="triggered"
/> />
=======
class="stage-container dropdown js-mini-pipeline-graph">
<pipeline-stage :stage="stage" />
</div>
>>>>>>> ce/master
</span> </span>
</span> </span>
......
...@@ -75,7 +75,10 @@ module LfsRequest ...@@ -75,7 +75,10 @@ module LfsRequest
def lfs_upload_access? def lfs_upload_access?
return false unless project.lfs_enabled? return false unless project.lfs_enabled?
return false unless has_authentication_ability?(:push_code) return false unless has_authentication_ability?(:push_code)
<<<<<<< HEAD
return false if project.above_size_limit? || objects_exceed_repo_limit? return false if project.above_size_limit? || objects_exceed_repo_limit?
=======
>>>>>>> ce/master
lfs_deploy_token? || can?(user, :push_code, project) lfs_deploy_token? || can?(user, :push_code, project)
end end
......
...@@ -286,6 +286,7 @@ class User < ActiveRecord::Base ...@@ -286,6 +286,7 @@ class User < ActiveRecord::Base
def find_by_any_email(email) def find_by_any_email(email)
by_any_email(email).take by_any_email(email).take
end end
<<<<<<< HEAD
# Returns a relation containing all the users for the given Email address # Returns a relation containing all the users for the given Email address
def by_any_email(email) def by_any_email(email)
...@@ -298,6 +299,16 @@ class User < ActiveRecord::Base ...@@ -298,6 +299,16 @@ class User < ActiveRecord::Base
def existing_member?(email) def existing_member?(email)
User.where(email: email).any? || Email.where(email: email).any? User.where(email: email).any? || Email.where(email: email).any?
=======
# Returns a relation containing all the users for the given Email address
def by_any_email(email)
users = where(email: email)
emails = joins(:emails).where(emails: { email: email })
union = Gitlab::SQL::Union.new([users, emails])
from("(#{union.to_sql}) #{table_name}")
>>>>>>> ce/master
end end
def filter(filter_name) def filter(filter_name)
......
...@@ -61,7 +61,7 @@ ...@@ -61,7 +61,7 @@
= link_to "Help", help_path = link_to "Help", help_path
%li.divider %li.divider
%li %li
= link_to "Sign out", destroy_user_session_path, method: :delete, class: "sign-out-link" = link_to "Sign out", destroy_user_session_path, class: "sign-out-link"
- if session[:impersonator_id] - if session[:impersonator_id]
%li.impersonation %li.impersonation
= link_to admin_impersonation_path, class: 'impersonation-btn', method: :delete, title: "Stop impersonation", aria: { label: 'Stop impersonation' }, data: { toggle: 'tooltip', placement: 'bottom', container: 'body' } do = link_to admin_impersonation_path, class: 'impersonation-btn', method: :delete, title: "Stop impersonation", aria: { label: 'Stop impersonation' }, data: { toggle: 'tooltip', placement: 'bottom', container: 'body' } do
......
...@@ -33,7 +33,7 @@ ...@@ -33,7 +33,7 @@
= s_('TagsPage|Optionally, add a message to the tag.') = s_('TagsPage|Optionally, add a message to the tag.')
%hr %hr
.form-group .form-group
= label_tag :release_description, 'Release notes', class: 'control-label' = label_tag :release_description, s_('TagsPage|Release notes'), class: 'control-label'
.col-sm-10 .col-sm-10
= render layout: 'projects/md_preview', locals: { url: preview_markdown_path(@project), referenced_users: true } do = render layout: 'projects/md_preview', locals: { url: preview_markdown_path(@project), referenced_users: true } do
= render 'projects/zen', attr: :release_description, classes: 'note-textarea', placeholder: s_('TagsPage|Write your release notes or drag files here...'), current_text: @release_description = render 'projects/zen', attr: :release_description, classes: 'note-textarea', placeholder: s_('TagsPage|Write your release notes or drag files here...'), current_text: @release_description
...@@ -41,6 +41,6 @@ ...@@ -41,6 +41,6 @@
.help-block .help-block
= s_('TagsPage|Optionally, add release notes to the tag. They will be stored in the GitLab database and displayed on the tags page.') = s_('TagsPage|Optionally, add release notes to the tag. They will be stored in the GitLab database and displayed on the tags page.')
.form-actions .form-actions
= button_tag 'Create tag', class: 'btn btn-create', tabindex: 3 = button_tag s_('TagsPage|Create tag'), class: 'btn btn-create', tabindex: 3
= link_to 'Cancel', project_tags_path(@project), class: 'btn btn-cancel' = link_to s_('TagsPage|Cancel'), project_tags_path(@project), class: 'btn btn-cancel'
%script#availableRefs{ type: "application/json" }= @project.repository.ref_names.to_json.html_safe %script#availableRefs{ type: "application/json" }= @project.repository.ref_names.to_json.html_safe
...@@ -27,10 +27,13 @@ class RepositoryImportWorker ...@@ -27,10 +27,13 @@ class RepositoryImportWorker
raise ImportError, result[:message] if result[:status] == :error raise ImportError, result[:message] if result[:status] == :error
project.after_import project.after_import
<<<<<<< HEAD
# Explicitly enqueue mirror for update so # Explicitly enqueue mirror for update so
# that upstream remote is created and fetched # that upstream remote is created and fetched
project.force_import_job! if project.mirror? project.force_import_job! if project.mirror?
=======
>>>>>>> ce/master
rescue ImportError => ex rescue ImportError => ex
fail_import(project, ex.message) fail_import(project, ex.message)
raise raise
......
...@@ -2,10 +2,6 @@ class UpdateMergeRequestsWorker ...@@ -2,10 +2,6 @@ class UpdateMergeRequestsWorker
include Sidekiq::Worker include Sidekiq::Worker
include DedicatedSidekiqQueue include DedicatedSidekiqQueue
def metrics_tags
@metrics_tags || {}
end
def perform(project_id, user_id, oldrev, newrev, ref) def perform(project_id, user_id, oldrev, newrev, ref)
project = Project.find_by(id: project_id) project = Project.find_by(id: project_id)
return unless project return unless project
...@@ -13,11 +9,6 @@ class UpdateMergeRequestsWorker ...@@ -13,11 +9,6 @@ class UpdateMergeRequestsWorker
user = User.find_by(id: user_id) user = User.find_by(id: user_id)
return unless user return unless user
@metrics_tags = {
project_id: project_id,
user_id: user_id
}
MergeRequests::RefreshService.new(project, user).execute(oldrev, newrev, ref) MergeRequests::RefreshService.new(project, user).execute(oldrev, newrev, ref)
end end
end end
---
title: Remove update merge request worker tagging.
merge_request:
author:
type: removed
---
title: Moves mini graph of pipeline to the end of sentence in MR widget. Cleans HTML
and tests
merge_request:
author:
type: fixed
---
title: Change 'Sign Out' route from a DELETE to a GET
merge_request: 39708
author: Joe Marty
type: changed
---
title: Speed up issues list APIs
merge_request:
author:
type: performance
...@@ -195,7 +195,7 @@ Devise.setup do |config| ...@@ -195,7 +195,7 @@ Devise.setup do |config|
config.navigational_formats = [:"*/*", "*/*", :html, :zip] config.navigational_formats = [:"*/*", "*/*", :html, :zip]
# The default HTTP method used to sign out a resource. Default is :delete. # The default HTTP method used to sign out a resource. Default is :delete.
config.sign_out_via = :delete config.sign_out_via = :get
# ==> OmniAuth # ==> OmniAuth
# To configure a new OmniAuth provider copy and edit omniauth.rb.sample # To configure a new OmniAuth provider copy and edit omniauth.rb.sample
......
...@@ -9,7 +9,7 @@ mapping structure from the projects URLs: ...@@ -9,7 +9,7 @@ mapping structure from the projects URLs:
* Project's repository: `#{namespace}/#{project_name}.git` * Project's repository: `#{namespace}/#{project_name}.git`
* Project's wiki: `#{namespace}/#{project_name}.wiki.git` * Project's wiki: `#{namespace}/#{project_name}.wiki.git`
This structure made simple to migrate from existing solutions to GitLab and easy for Administrators to find where the This structure made simple to migrate from existing solutions to GitLab and easy for Administrators to find where the
repository is stored. repository is stored.
...@@ -27,7 +27,7 @@ of load in big installations, and can be even worst if they are using any type o ...@@ -27,7 +27,7 @@ of load in big installations, and can be even worst if they are using any type o
Last, for GitLab Geo, this storage type means we have to synchronize the disk state, replicate renames in the correct Last, for GitLab Geo, this storage type means we have to synchronize the disk state, replicate renames in the correct
order or we may end-up with wrong repository or missing data temporarily. order or we may end-up with wrong repository or missing data temporarily.
This pattern also exists in other objects stored in GitLab, like issue Attachments, GitLab Pages artifacts, This pattern also exists in other objects stored in GitLab, like issue Attachments, GitLab Pages artifacts,
Docker Containers for the integrated Registry, etc. Docker Containers for the integrated Registry, etc.
## Hashed Storage ## Hashed Storage
...@@ -62,9 +62,9 @@ you will never mistakenly restore a repository in the wrong project (considering ...@@ -62,9 +62,9 @@ you will never mistakenly restore a repository in the wrong project (considering
### How to migrate to Hashed Storage ### How to migrate to Hashed Storage
In GitLab, go to **Admin > Settings**, find the **Repository Storage** section and select In GitLab, go to **Admin > Settings**, find the **Repository Storage** section and select
"_Create new projects using hashed storage paths_". "_Create new projects using hashed storage paths_".
To migrate your existing projects to the new storage type, check the specific [rake tasks]. To migrate your existing projects to the new storage type, check the specific [rake tasks].
[ce-28283]: https://gitlab.com/gitlab-org/gitlab-ce/issues/28283 [ce-28283]: https://gitlab.com/gitlab-org/gitlab-ce/issues/28283
...@@ -79,14 +79,14 @@ coverage status below. ...@@ -79,14 +79,14 @@ coverage status below.
Note that things stored in an S3 compatible endpoint will not have the downsides mentioned earlier, if they are not Note that things stored in an S3 compatible endpoint will not have the downsides mentioned earlier, if they are not
prefixed with `#{namespace}/#{project_name}`, which is true for CI Cache and LFS Objects. prefixed with `#{namespace}/#{project_name}`, which is true for CI Cache and LFS Objects.
| Storable Object | Legacy Storage | Hashed Storage | S3 Compatible | GitLab Version | | Storable Object | Legacy Storage | Hashed Storage | S3 Compatible | GitLab Version |
| ----------------| -------------- | -------------- | ------------- | -------------- | | --------------- | -------------- | -------------- | ------------- | -------------- |
| Repository | Yes | Yes | - | 10.0 | | Repository | Yes | Yes | - | 10.0 |
| Attachments | Yes | Yes | - | 10.2 | | Attachments | Yes | Yes | - | 10.2 |
| Avatars | Yes | No | - | - | | Avatars | Yes | No | - | - |
| Pages | Yes | No | - | - | | Pages | Yes | No | - | - |
| Docker Registry | Yes | No | - | - | | Docker Registry | Yes | No | - | - |
| CI Build Logs | No | No | - | - | | CI Build Logs | No | No | - | - |
| CI Artifacts | No | No | - | - | | CI Artifacts | No | No | Yes (EEP) | - |
| CI Cache | No | No | Yes | - | | CI Cache | No | No | Yes | - |
| LFS Objects | Yes | No | Yes (EEP) | - | | LFS Objects | Yes | No | Yes (EEP) | - |
...@@ -58,6 +58,8 @@ Parameters: ...@@ -58,6 +58,8 @@ Parameters:
"project_id": 3, "project_id": 3,
"title": "test1", "title": "test1",
"state": "opened", "state": "opened",
"created_at": "2017-04-29T08:46:00Z",
"updated_at": "2017-04-29T08:46:00Z",
"upvotes": 0, "upvotes": 0,
"downvotes": 0, "downvotes": 0,
"author": { "author": {
...@@ -170,6 +172,8 @@ Parameters: ...@@ -170,6 +172,8 @@ Parameters:
"project_id": 3, "project_id": 3,
"title": "test1", "title": "test1",
"state": "opened", "state": "opened",
"created_at": "2017-04-29T08:46:00Z",
"updated_at": "2017-04-29T08:46:00Z",
"upvotes": 0, "upvotes": 0,
"downvotes": 0, "downvotes": 0,
"author": { "author": {
...@@ -248,6 +252,8 @@ Parameters: ...@@ -248,6 +252,8 @@ Parameters:
"project_id": 3, "project_id": 3,
"title": "test1", "title": "test1",
"state": "merged", "state": "merged",
"created_at": "2017-04-29T08:46:00Z",
"updated_at": "2017-04-29T08:46:00Z",
"upvotes": 0, "upvotes": 0,
"downvotes": 0, "downvotes": 0,
"author": { "author": {
......
# File Storage in GitLab
We use the [CarrierWave] gem to handle file upload, store and retrieval.
There are many places where file uploading is used, according to contexts:
* System
- Instance Logo (logo visible in sign in/sign up pages)
- Header Logo (one displayed in the navigation bar)
* Group
- Group avatars
* User
- User avatars
- User snippet attachments
* Project
- Project avatars
- Issues/MR Markdown attachments
- Issues/MR Legacy Markdown attachments
- CI Build Artifacts
- LFS Objects
## Disk storage
GitLab started saving everything on local disk. While directory location changed from previous versions,
they are still not 100% standardized. You can see them below:
| Description | In DB? | Relative path | Uploader class | model_type |
| ------------------------------------- | ------ | ----------------------------------------------------------- | ---------------------- | ---------- |
| Instance logo | yes | uploads/-/system/appearance/logo/:id/:filename | `AttachmentUploader` | Appearance |
| Header logo | yes | uploads/-/system/appearance/header_logo/:id/:filename | `AttachmentUploader` | Appearance |
| Group avatars | yes | uploads/-/system/group/avatar/:id/:filename | `AvatarUploader` | Group |
| User avatars | yes | uploads/-/system/user/avatar/:id/:filename | `AvatarUploader` | User |
| User snippet attachments | yes | uploads/-/system/personal_snippet/:id/:random_hex/:filename | `PersonalFileUploader` | Snippet |
| Project avatars | yes | uploads/-/system/project/avatar/:id/:filename | `AvatarUploader` | Project |
| Issues/MR Markdown attachments | yes | uploads/:project_path_with_namespace/:random_hex/:filename | `FileUploader` | Project |
| Issues/MR Legacy Markdown attachments | no | uploads/-/system/note/attachment/:id/:filename | `AttachmentUploader` | Note |
| CI Artifacts (CE) | yes | shared/artifacts/:year_:month/:project_id/:id | `ArtifactUploader` | Ci::Build |
| LFS Objects (CE) | yes | shared/lfs-objects/:hex/:hex/:object_hash | `LfsObjectUploader` | LfsObject |
CI Artifacts and LFS Objects behave differently in CE and EE. In CE they inherit the `GitlabUploader`
while in EE they inherit the `ObjectStoreUploader` and store files in and S3 API compatible object store.
In the case of Issues/MR Markdown attachments, there is a different approach using the [Hashed Storage] layout,
instead of basing the path into a mutable variable `:project_path_with_namespace`, it's possible to use the
hash of the project ID instead, if project migrates to the new approach (introduced in 10.2).
[CarrierWave]: https://github.com/carrierwaveuploader/carrierwave
[Hashed Storage]: ../administration/repository_storage_types.md
...@@ -110,7 +110,7 @@ You can mark that content for translation with: ...@@ -110,7 +110,7 @@ You can mark that content for translation with:
In JavaScript we added the `__()` (double underscore parenthesis) function In JavaScript we added the `__()` (double underscore parenthesis) function
for translations. for translations.
### Updating the PO files with the new content ## Updating the PO files with the new content
Now that the new content is marked for translation, we need to update the PO Now that the new content is marked for translation, we need to update the PO
files with the following command: files with the following command:
...@@ -119,23 +119,20 @@ files with the following command: ...@@ -119,23 +119,20 @@ files with the following command:
bundle exec rake gettext:find bundle exec rake gettext:find
``` ```
This command will update the `locale/**/gitlab.edit.po` file with the This command will update the `locale/gitlab.pot` file with the newly externalized
new content that the parser has found. strings and remove any strings that aren't used anymore. You should check this
file in. Once the changes are on master, they will be picked up by
[Crowdin](http://translate.gitlab.com) and be presented for translation.
New translations will be added with their default content and will be marked The command also updates the translation files for each language: `locale/*/gitlab.po`
fuzzy. To use the translation, look for the `#, fuzzy` mention in `gitlab.edit.po` These changes can be discarded, the languange files will be updated by Crowdin
and remove it. automatically.
We need to make sure we remove the `fuzzy` translations before generating the Discard all of them at once like this:
`locale/**/gitlab.po` file. When they aren't removed, the resulting `.po` will
be treated as a binary file which could overwrite translations that were merged
before the new translations.
When we are just preparing a page to be translated, but not actually adding any ```sh
translations. There's no need to generate `.po` files. git checkout locale/*/gitlab.po
```
Translations that aren't used in the source code anymore will be marked with
`~#`; these can be removed to keep our translation files clutter-free.
### Validating PO files ### Validating PO files
......
...@@ -73,7 +73,7 @@ module API ...@@ -73,7 +73,7 @@ module API
desc: 'Return issues for the given scope: `created-by-me`, `assigned-to-me` or `all`' desc: 'Return issues for the given scope: `created-by-me`, `assigned-to-me` or `all`'
end end
get do get do
issues = find_issues issues = paginate(find_issues)
options = { options = {
with: Entities::IssueBasic, with: Entities::IssueBasic,
...@@ -81,7 +81,7 @@ module API ...@@ -81,7 +81,7 @@ module API
issuable_metadata: issuable_meta_data(issues, 'Issue') issuable_metadata: issuable_meta_data(issues, 'Issue')
} }
present paginate(issues), options present issues, options
end end
end end
...@@ -100,7 +100,7 @@ module API ...@@ -100,7 +100,7 @@ module API
get ":id/issues" do get ":id/issues" do
group = find_group!(params[:id]) group = find_group!(params[:id])
issues = find_issues(group_id: group.id) issues = paginate(find_issues(group_id: group.id))
options = { options = {
with: Entities::IssueBasic, with: Entities::IssueBasic,
...@@ -108,7 +108,7 @@ module API ...@@ -108,7 +108,7 @@ module API
issuable_metadata: issuable_meta_data(issues, 'Issue') issuable_metadata: issuable_meta_data(issues, 'Issue')
} }
present paginate(issues), options present issues, options
end end
end end
...@@ -129,7 +129,7 @@ module API ...@@ -129,7 +129,7 @@ module API
get ":id/issues" do get ":id/issues" do
project = find_project!(params[:id]) project = find_project!(params[:id])
issues = find_issues(project_id: project.id) issues = paginate(find_issues(project_id: project.id))
options = { options = {
with: Entities::IssueBasic, with: Entities::IssueBasic,
...@@ -138,7 +138,7 @@ module API ...@@ -138,7 +138,7 @@ module API
issuable_metadata: issuable_meta_data(issues, 'Issue') issuable_metadata: issuable_meta_data(issues, 'Issue')
} }
present paginate(issues), options present issues, options
end end
desc 'Get a single project issue' do desc 'Get a single project issue' do
......
...@@ -48,11 +48,14 @@ module Gitlab ...@@ -48,11 +48,14 @@ module Gitlab
end end
def update_page(page_path, title, format, content, commit_details) def update_page(page_path, title, format, content, commit_details)
assert_type!(format, Symbol) @repository.gitaly_migrate(:wiki_update_page) do |is_enabled|
assert_type!(commit_details, CommitDetails) if is_enabled
gitaly_update_page(page_path, title, format, content, commit_details)
gollum_wiki.update_page(gollum_page_by_path(page_path), title, format, content, commit_details.to_h) gollum_wiki.clear_cache
nil else
gollum_update_page(page_path, title, format, content, commit_details)
end
end
end end
def pages def pages
...@@ -149,6 +152,14 @@ module Gitlab ...@@ -149,6 +152,14 @@ module Gitlab
nil nil
end end
def gollum_update_page(page_path, title, format, content, commit_details)
assert_type!(format, Symbol)
assert_type!(commit_details, CommitDetails)
gollum_wiki.update_page(gollum_page_by_path(page_path), title, format, content, commit_details.to_h)
nil
end
def gollum_find_page(title:, version: nil, dir: nil) def gollum_find_page(title:, version: nil, dir: nil)
if version if version
version = Gitlab::Git::Commit.find(@repository, version).id version = Gitlab::Git::Commit.find(@repository, version).id
...@@ -172,6 +183,10 @@ module Gitlab ...@@ -172,6 +183,10 @@ module Gitlab
gitaly_wiki_client.write_page(name, format, content, commit_details) gitaly_wiki_client.write_page(name, format, content, commit_details)
end end
def gitaly_update_page(page_path, title, format, content, commit_details)
gitaly_wiki_client.update_page(page_path, title, format, content, commit_details)
end
def gitaly_delete_page(page_path, commit_details) def gitaly_delete_page(page_path, commit_details)
gitaly_wiki_client.delete_page(page_path, commit_details) gitaly_wiki_client.delete_page(page_path, commit_details)
end end
......
...@@ -37,6 +37,31 @@ module Gitlab ...@@ -37,6 +37,31 @@ module Gitlab
end end
end end
def update_page(page_path, title, format, content, commit_details)
request = Gitaly::WikiUpdatePageRequest.new(
repository: @gitaly_repo,
page_path: GitalyClient.encode(page_path),
title: GitalyClient.encode(title),
format: format.to_s,
commit_details: gitaly_commit_details(commit_details)
)
strio = StringIO.new(content)
enum = Enumerator.new do |y|
until strio.eof?
chunk = strio.read(MAX_MSG_SIZE)
request.content = GitalyClient.encode(chunk)
y.yield request
request = Gitaly::WikiUpdatePageRequest.new
end
end
GitalyClient.call(@repository.storage, :wiki_service, :wiki_update_page, enum)
end
def delete_page(page_path, commit_details) def delete_page(page_path, commit_details)
request = Gitaly::WikiDeletePageRequest.new( request = Gitaly::WikiDeletePageRequest.new(
repository: @gitaly_repo, repository: @gitaly_repo,
......
...@@ -15,6 +15,7 @@ module Gitlab ...@@ -15,6 +15,7 @@ module Gitlab
# end # end
class Client class Client
attr_reader :octokit attr_reader :octokit
<<<<<<< HEAD
# A single page of data and the corresponding page number. # A single page of data and the corresponding page number.
Page = Struct.new(:objects, :number) Page = Struct.new(:objects, :number)
...@@ -174,6 +175,199 @@ module Gitlab ...@@ -174,6 +175,199 @@ module Gitlab
) )
end end
=======
# A single page of data and the corresponding page number.
Page = Struct.new(:objects, :number)
# The minimum number of requests we want to keep available.
#
# We don't use a value of 0 as multiple threads may be using the same
# token in parallel. This could result in all of them hitting the GitHub
# rate limit at once. The threshold is put in place to not hit the limit
# in most cases.
RATE_LIMIT_THRESHOLD = 50
# token - The GitHub API token to use.
#
# per_page - The number of objects that should be displayed per page.
#
# parallel - When set to true hitting the rate limit will result in a
# dedicated error being raised. When set to `false` we will
# instead just `sleep()` until the rate limit is reset. Setting
# this value to `true` for parallel importing is crucial as
# otherwise hitting the rate limit will result in a thread
# being blocked in a `sleep()` call for up to an hour.
def initialize(token, per_page: 100, parallel: true)
@octokit = Octokit::Client.new(
access_token: token,
per_page: per_page,
api_endpoint: api_endpoint
)
@octokit.connection_options[:ssl] = { verify: verify_ssl }
@parallel = parallel
end
def parallel?
@parallel
end
# Returns the details of a GitHub user.
#
# username - The username of the user.
def user(username)
with_rate_limit { octokit.user(username) }
end
# Returns the details of a GitHub repository.
#
# name - The path (in the form `owner/repository`) of the repository.
def repository(name)
with_rate_limit { octokit.repo(name) }
end
def labels(*args)
each_object(:labels, *args)
end
def milestones(*args)
each_object(:milestones, *args)
end
def releases(*args)
each_object(:releases, *args)
end
# Fetches data from the GitHub API and yields a Page object for every page
# of data, without loading all of them into memory.
#
# method - The Octokit method to use for getting the data.
# args - Arguments to pass to the Octokit method.
#
# rubocop: disable GitlabSecurity/PublicSend
def each_page(method, *args, &block)
return to_enum(__method__, method, *args) unless block_given?
page =
if args.last.is_a?(Hash) && args.last[:page]
args.last[:page]
else
1
end
collection = with_rate_limit { octokit.public_send(method, *args) }
next_url = octokit.last_response.rels[:next]
yield Page.new(collection, page)
while next_url
response = with_rate_limit { next_url.get }
next_url = response.rels[:next]
yield Page.new(response.data, page += 1)
end
end
# Iterates over all of the objects for the given method (e.g. `:labels`).
#
# method - The method to send to Octokit for querying data.
# args - Any arguments to pass to the Octokit method.
def each_object(method, *args, &block)
return to_enum(__method__, method, *args) unless block_given?
each_page(method, *args) do |page|
page.objects.each do |object|
yield object
end
end
end
# Yields the supplied block, responding to any rate limit errors.
#
# The exact strategy used for handling rate limiting errors depends on
# whether we are running in parallel mode or not. For more information see
# `#rate_or_wait_for_rate_limit`.
def with_rate_limit
return yield unless rate_limiting_enabled?
request_count_counter.increment
raise_or_wait_for_rate_limit unless requests_remaining?
begin
yield
rescue Octokit::TooManyRequests
raise_or_wait_for_rate_limit
# This retry will only happen when running in sequential mode as we'll
# raise an error in parallel mode.
retry
end
end
# Returns `true` if we're still allowed to perform API calls.
def requests_remaining?
remaining_requests > RATE_LIMIT_THRESHOLD
end
def remaining_requests
octokit.rate_limit.remaining
end
def raise_or_wait_for_rate_limit
rate_limit_counter.increment
if parallel?
raise RateLimitError
else
sleep(rate_limit_resets_in)
end
end
def rate_limit_resets_in
# We add a few seconds to the rate limit so we don't _immediately_
# resume when the rate limit resets as this may result in us performing
# a request before GitHub has a chance to reset the limit.
octokit.rate_limit.resets_in + 5
end
def rate_limiting_enabled?
@rate_limiting_enabled ||= api_endpoint.include?('.github.com')
end
def api_endpoint
custom_api_endpoint || default_api_endpoint
end
def custom_api_endpoint
github_omniauth_provider.dig('args', 'client_options', 'site')
end
def default_api_endpoint
OmniAuth::Strategies::GitHub.default_options[:client_options][:site]
end
def verify_ssl
github_omniauth_provider.fetch('verify_ssl', true)
end
def github_omniauth_provider
@github_omniauth_provider ||=
Gitlab.config.omniauth.providers
.find { |provider| provider.name == 'github' }
.to_h
end
def rate_limit_counter
@rate_limit_counter ||= Gitlab::Metrics.counter(
:github_importer_rate_limit_hits,
'The number of times we hit the GitHub rate limit when importing projects'
)
end
>>>>>>> ce/master
def request_count_counter def request_count_counter
@request_counter ||= Gitlab::Metrics.counter( @request_counter ||= Gitlab::Metrics.counter(
:github_importer_request_count, :github_importer_request_count,
......
module Gitlab module Gitlab
module IssuableMetadata module IssuableMetadata
def issuable_meta_data(issuable_collection, collection_type) def issuable_meta_data(issuable_collection, collection_type)
# ActiveRecord uses Object#extend for null relations.
if !(issuable_collection.singleton_class < ActiveRecord::NullRelation) &&
issuable_collection.respond_to?(:limit_value) &&
issuable_collection.limit_value.nil?
raise 'Collection must have a limit applied for preloading meta-data'
end
# map has to be used here since using pluck or select will # map has to be used here since using pluck or select will
# throw an error when ordering issuables by priority which inserts # throw an error when ordering issuables by priority which inserts
# a new order into the collection. # a new order into the collection.
......
...@@ -11,8 +11,6 @@ module Gitlab ...@@ -11,8 +11,6 @@ module Gitlab
# Old gitlad-shell messages don't provide enqueued_at/created_at attributes # Old gitlad-shell messages don't provide enqueued_at/created_at attributes
trans.set(:sidekiq_queue_duration, Time.now.to_f - (message['enqueued_at'] || message['created_at'] || 0)) trans.set(:sidekiq_queue_duration, Time.now.to_f - (message['enqueued_at'] || message['created_at'] || 0))
trans.run { yield } trans.run { yield }
worker.metrics_tags.each { |tag, value| trans.add_tag(tag, value) } if worker.respond_to?(:metrics_tags)
rescue Exception => error # rubocop: disable Lint/RescueException rescue Exception => error # rubocop: disable Lint/RescueException
trans.add_event(:sidekiq_exception) trans.add_event(:sidekiq_exception)
......
...@@ -66,11 +66,7 @@ module Gitlab ...@@ -66,11 +66,7 @@ module Gitlab
end end
def whitelisted_routes def whitelisted_routes
logout_route || grack_route || @whitelisted.any? { |path| request.path.include?(path) } || lfs_route || sidekiq_route grack_route || @whitelisted.any? { |path| request.path.include?(path) } || lfs_route || sidekiq_route
end
def logout_route
route_hash[:controller] == 'sessions' && route_hash[:action] == 'destroy'
end end
def sidekiq_route def sidekiq_route
......
...@@ -14,7 +14,9 @@ class GithubImport ...@@ -14,7 +14,9 @@ class GithubImport
end end
def run! def run!
@repo = GithubRepos.new(@options, @current_user, @github_repo).choose_one! @repo = GithubRepos
.new(@options[:token], @current_user, @github_repo)
.choose_one!
raise 'No repo found!' unless @repo raise 'No repo found!' unless @repo
...@@ -28,7 +30,7 @@ class GithubImport ...@@ -28,7 +30,7 @@ class GithubImport
private private
def show_warning! def show_warning!
puts "This will import GitHub #{@repo['full_name'].bright} into GitLab #{@project_path.bright} as #{@current_user.name}" puts "This will import GitHub #{@repo.full_name.bright} into GitLab #{@project_path.bright} as #{@current_user.name}"
puts "Permission checks are ignored. Press any key to continue.".color(:red) puts "Permission checks are ignored. Press any key to continue.".color(:red)
STDIN.getch STDIN.getch
...@@ -65,16 +67,16 @@ class GithubImport ...@@ -65,16 +67,16 @@ class GithubImport
@current_user, @current_user,
name: name, name: name,
path: name, path: name,
description: @repo['description'], description: @repo.description,
namespace_id: namespace.id, namespace_id: namespace.id,
visibility_level: visibility_level, visibility_level: visibility_level,
skip_wiki: @repo['has_wiki'] skip_wiki: @repo.has_wiki
).execute ).execute
project.update!( project.update!(
import_type: 'github', import_type: 'github',
import_source: @repo['full_name'], import_source: @repo.full_name,
import_url: @repo['clone_url'].sub('://', "://#{@options[:token]}@") import_url: @repo.clone_url.sub('://', "://#{@options[:token]}@")
) )
project project
...@@ -93,13 +95,15 @@ class GithubImport ...@@ -93,13 +95,15 @@ class GithubImport
end end
def visibility_level def visibility_level
@repo['private'] ? Gitlab::VisibilityLevel::PRIVATE : Gitlab::CurrentSettings.current_application_settings.default_project_visibility @repo.private ? Gitlab::VisibilityLevel::PRIVATE : Gitlab::CurrentSettings.current_application_settings.default_project_visibility
end end
end end
class GithubRepos class GithubRepos
def initialize(options, current_user, github_repo) def initialize(token, current_user, github_repo)
@options = options @client = Gitlab::GithubImport::Client.new(token)
@client.octokit.auto_paginate = true
@current_user = current_user @current_user = current_user
@github_repo = github_repo @github_repo = github_repo
end end
...@@ -108,17 +112,17 @@ class GithubRepos ...@@ -108,17 +112,17 @@ class GithubRepos
return found_github_repo if @github_repo return found_github_repo if @github_repo
repos.each do |repo| repos.each do |repo|
print "ID: #{repo['id'].to_s.bright}".color(:green) print "ID: #{repo.id.to_s.bright}".color(:green)
print "\tName: #{repo['full_name']}\n".color(:green) print "\tName: #{repo.full_name}\n".color(:green)
end end
print 'ID? '.bright print 'ID? '.bright
repos.find { |repo| repo['id'] == repo_id } repos.find { |repo| repo.id == repo_id }
end end
def found_github_repo def found_github_repo
repos.find { |repo| repo['full_name'] == @github_repo } repos.find { |repo| repo.full_name == @github_repo }
end end
def repo_id def repo_id
...@@ -126,7 +130,7 @@ class GithubRepos ...@@ -126,7 +130,7 @@ class GithubRepos
end end
def repos def repos
Github::Repositories.new(@options).fetch @client.octokit.list_repositories
end end
end end
......
tmp/ tmp/
.ruby-version
...@@ -4,4 +4,4 @@ require_relative '../qa' ...@@ -4,4 +4,4 @@ require_relative '../qa'
QA::Scenario QA::Scenario
.const_get(ARGV.shift) .const_get(ARGV.shift)
.perform(*ARGV) .launch!(*ARGV)
...@@ -18,6 +18,7 @@ module QA ...@@ -18,6 +18,7 @@ module QA
## ##
# Support files # Support files
# #
autoload :Bootable, 'qa/scenario/bootable'
autoload :Actable, 'qa/scenario/actable' autoload :Actable, 'qa/scenario/actable'
autoload :Entrypoint, 'qa/scenario/entrypoint' autoload :Entrypoint, 'qa/scenario/entrypoint'
autoload :Template, 'qa/scenario/template' autoload :Template, 'qa/scenario/template'
......
...@@ -3,7 +3,7 @@ module QA ...@@ -3,7 +3,7 @@ module QA
module Mattermost module Mattermost
class Login < Page::Base class Login < Page::Base
def initialize def initialize
visit(Runtime::Scenario.mattermost + '/login') visit(Runtime::Scenario.mattermost_address + '/login')
end end
def sign_in_using_oauth def sign_in_using_oauth
......
...@@ -3,7 +3,7 @@ module QA ...@@ -3,7 +3,7 @@ module QA
module Mattermost module Mattermost
class Main < Page::Base class Main < Page::Base
def initialize def initialize
visit(Runtime::Scenario.mattermost) visit(Runtime::Scenario.mattermost_address)
end end
end end
end end
......
module QA module QA
module Runtime module Runtime
##
# Singleton approach to global test scenario arguments.
#
module Scenario module Scenario
extend self extend self
attr_accessor :mattermost
attr_reader :attributes
# Registers a global scenario attribute and defines a reader method for
# it on this module.
#
# The generated reader raises ArgumentError when the stored value is
# empty, so a scenario cannot silently run with a missing attribute.
def define(attribute, value)
  key = attribute.to_sym

  @attributes ||= {}
  @attributes[key] = value

  define_singleton_method(attribute) do
    stored = @attributes[key]

    if stored.to_s.empty?
      raise ArgumentError, "Empty `#{attribute}` attribute!"
    end

    stored
  end
end
# Fail loudly when an undefined scenario attribute is accessed, instead
# of silently returning nil.
def method_missing(name, *)
  raise ArgumentError, "Scenario attribute `#{name}` not defined!"
end
end end
end end
end end
require 'optparse'

module QA
  module Scenario
    ##
    # Adds a command-line interface to a scenario class.
    #
    # Including classes can declare CLI options with `attribute` and are
    # then started through `launch!(argv)` instead of calling `perform`
    # directly.
    #
    module Bootable
      Option = Struct.new(:name, :arg, :desc)

      def self.included(base)
        base.extend(ClassMethods)
      end

      module ClassMethods
        # Parses +argv+ and starts the scenario.
        #
        # Every declared attribute found in +argv+ is stored in
        # Runtime::Scenario before the scenario is performed. When
        # attributes are declared the scenario receives them as keyword
        # arguments; otherwise the remaining (post-parse) arguments are
        # passed through positionally.
        def launch!(argv)
          arguments = OptionParser.new do |parser|
            # `options` is already an Array, so no `to_a` conversion is
            # needed before iterating.
            options.each do |opt|
              parser.on(opt.arg, opt.desc) do |value|
                Runtime::Scenario.define(opt.name, value)
              end
            end
          end

          arguments.parse!(argv)

          if has_attributes?
            self.perform(**Runtime::Scenario.attributes)
          else
            self.perform(*argv)
          end
        end

        private

        # Declares a CLI option, e.g.
        #   attribute :address, '--address URL', 'Address of the instance'
        def attribute(name, arg, desc)
          options.push(Option.new(name, arg, desc))
        end

        # All options declared so far (empty when none were declared).
        def options
          @options ||= []
        end

        def has_attributes?
          options.any?
        end
      end
    end
  end
end
...@@ -5,6 +5,8 @@ module QA ...@@ -5,6 +5,8 @@ module QA
# including staging and on-premises installation. # including staging and on-premises installation.
# #
class Entrypoint < Template class Entrypoint < Template
include Bootable
def self.tags(*tags) def self.tags(*tags)
@tags = tags @tags = tags
end end
......
...@@ -7,11 +7,12 @@ module QA ...@@ -7,11 +7,12 @@ module QA
# including staging and on-premises installation. # including staging and on-premises installation.
# #
class Mattermost < Scenario::Entrypoint class Mattermost < Scenario::Entrypoint
tags :mattermost tags :core, :mattermost
def perform(address, mattermost, *files) def perform(address, mattermost, *files)
Runtime::Scenario.mattermost = mattermost Runtime::Scenario.define(:mattermost_address, mattermost)
super(address, files)
super(address, *files)
end end
end end
end end
......
# Specs for the global scenario attribute registry.
describe QA::Runtime::Scenario do
  # Use a fresh anonymous module per example so attributes defined in one
  # example do not leak into another.
  subject do
    Module.new.extend(described_class)
  end

  it 'makes it possible to define global scenario attributes' do
    subject.define(:my_attribute, 'some-value')
    subject.define(:another_attribute, 'another-value')

    expect(subject.my_attribute).to eq 'some-value'
    expect(subject.another_attribute).to eq 'another-value'
    expect(subject.attributes)
      .to eq(my_attribute: 'some-value', another_attribute: 'another-value')
  end

  it 'raises error when attribute is not known' do
    expect { subject.invalid_accessor }
      .to raise_error ArgumentError, /invalid_accessor/
  end

  it 'raises error when attribute is empty' do
    subject.define(:empty_attribute, '')

    expect { subject.empty_attribute }
      .to raise_error ArgumentError, /empty_attribute/
  end
end
# Specs for the command-line bootstrapping mixin.
describe QA::Scenario::Bootable do
  # Build a throwaway scenario class per example so declared attributes
  # do not leak between examples.
  subject do
    Class.new(QA::Scenario::Template)
      .include(described_class)
  end

  it 'makes it possible to define the scenario attribute' do
    subject.class_eval do
      attribute :something, '--something SOMETHING', 'Some attribute'
      attribute :another, '--another ANOTHER', 'Some other attribute'
    end

    # Declared attributes are passed to `perform` as keyword arguments,
    # regardless of the order they appear on the command line.
    expect(subject).to receive(:perform)
      .with(something: 'test', another: 'other')

    subject.launch!(%w[--another other --something test])
  end

  it 'does not require attributes to be defined' do
    # Without declared attributes, arguments pass through positionally.
    expect(subject).to receive(:perform).with('some', 'argv')

    subject.launch!(%w[some argv])
  end
end
import Vue from 'vue';
import SidebarService from '~/sidebar/services/sidebar_service';
import Mock from './mock_data';
// Specs for SidebarService: each endpoint helper should resolve with a
// defined response when the mock interceptor is installed.
describe('Sidebar service', () => {
  beforeEach(() => {
    Vue.http.interceptors.push(Mock.sidebarMockInterceptor);
    // Arrow functions share the surrounding `this` between beforeEach and
    // the examples, which is what makes `this.service` visible below.
    this.service = new SidebarService({
      endpoint: '/gitlab-org/gitlab-shell/issues/5.json',
      toggleSubscriptionEndpoint: '/gitlab-org/gitlab-shell/issues/5/toggle_subscription',
      moveIssueEndpoint: '/gitlab-org/gitlab-shell/issues/5/move',
      projectsAutocompleteEndpoint: '/autocomplete/projects?project_id=15',
    });
  });

  afterEach(() => {
    SidebarService.singleton = null;
    // NOTE(review): `_` (underscore/lodash) is not imported in this file —
    // presumably provided globally by the test environment; verify.
    Vue.http.interceptors = _.without(Vue.http.interceptors, Mock.sidebarMockInterceptor);
  });

  it('gets the data', (done) => {
    // `done` is invoked exactly once, via the trailing `.then(done)` —
    // previously it was also called inside the first `.then`, completing
    // the spec twice.
    this.service.get()
      .then((resp) => {
        expect(resp).toBeDefined();
      })
      .then(done)
      .catch(done.fail);
  });

  it('updates the data', (done) => {
    this.service.update('issue[assignee_ids]', [1])
      .then((resp) => {
        expect(resp).toBeDefined();
      })
      .then(done)
      .catch(done.fail);
  });

  it('gets projects for autocomplete', (done) => {
    this.service.getProjectsAutocomplete()
      .then((resp) => {
        expect(resp).toBeDefined();
      })
      .then(done)
      .catch(done.fail);
  });

  it('moves the issue to another project', (done) => {
    this.service.moveIssue(123)
      .then((resp) => {
        expect(resp).toBeDefined();
      })
      .then(done)
      .catch(done.fail);
  });

  it('toggles the subscription', (done) => {
    this.service.toggleSubscription()
      .then((resp) => {
        expect(resp).toBeDefined();
      })
      .then(done)
      .catch(done.fail);
  });
});
import Vue from 'vue'; import Vue from 'vue';
import pipelineComponent from '~/vue_merge_request_widget/components/mr_widget_pipeline.vue'; import pipelineComponent from '~/vue_merge_request_widget/components/mr_widget_pipeline.vue';
<<<<<<< HEAD
import mockData from '../mock_data'; import mockData from '../mock_data';
import mockLinkedPipelines from '../../pipelines/graph/linked_pipelines_mock_data'; import mockLinkedPipelines from '../../pipelines/graph/linked_pipelines_mock_data';
import mountComponent from '../../helpers/vue_mount_component_helper'; import mountComponent from '../../helpers/vue_mount_component_helper';
=======
import mountComponent from '../../helpers/vue_mount_component_helper';
import mockData from '../mock_data';
>>>>>>> ce/master
describe('MRWidgetPipeline', () => { describe('MRWidgetPipeline', () => {
let vm; let vm;
...@@ -67,6 +72,7 @@ describe('MRWidgetPipeline', () => { ...@@ -67,6 +72,7 @@ describe('MRWidgetPipeline', () => {
hasCi: true, hasCi: true,
ciStatus: null, ciStatus: null,
}); });
<<<<<<< HEAD
expect( expect(
vm.$el.querySelector('.media-body').textContent.trim(), vm.$el.querySelector('.media-body').textContent.trim(),
...@@ -111,9 +117,44 @@ describe('MRWidgetPipeline', () => { ...@@ -111,9 +117,44 @@ describe('MRWidgetPipeline', () => {
expect( expect(
vm.$el.querySelector('.media-body').textContent, vm.$el.querySelector('.media-body').textContent,
).toContain(`Coverage ${mockData.pipeline.coverage}`); ).toContain(`Coverage ${mockData.pipeline.coverage}`);
}); =======
expect(
vm.$el.querySelector('.media-body').textContent.trim(),
).toEqual('Could not connect to the CI server. Please check your settings and try again');
}); });
describe('with a pipeline', () => {
beforeEach(() => {
vm = mountComponent(Component, {
pipeline: mockData.pipeline,
hasCi: true,
ciStatus: 'success',
});
});
it('should render pipeline ID', () => {
expect(
vm.$el.querySelector('.pipeline-id').textContent.trim(),
).toEqual(`#${mockData.pipeline.id}`);
});
it('should render pipeline status and commit id', () => {
expect(
vm.$el.querySelector('.media-body').textContent.trim(),
).toContain(mockData.pipeline.details.status.label);
expect(
vm.$el.querySelector('.js-commit-link').textContent.trim(),
).toEqual(mockData.pipeline.commit.short_id);
expect(
vm.$el.querySelector('.js-commit-link').getAttribute('href'),
).toEqual(mockData.pipeline.commit.commit_path);
>>>>>>> ce/master
});
<<<<<<< HEAD
describe('without coverage', () => { describe('without coverage', () => {
it('should not render a coverage', () => { it('should not render a coverage', () => {
const mockCopy = Object.assign({}, mockData); const mockCopy = Object.assign({}, mockData);
...@@ -143,10 +184,39 @@ describe('MRWidgetPipeline', () => { ...@@ -143,10 +184,39 @@ describe('MRWidgetPipeline', () => {
}); });
expect(vm.$el.querySelector('.js-mini-pipeline-graph')).toEqual(null); expect(vm.$el.querySelector('.js-mini-pipeline-graph')).toEqual(null);
=======
it('should render pipeline graph', () => {
expect(vm.$el.querySelector('.mr-widget-pipeline-graph')).toBeDefined();
expect(vm.$el.querySelectorAll('.stage-container').length).toEqual(mockData.pipeline.details.stages.length);
});
it('should render coverage information', () => {
expect(
vm.$el.querySelector('.media-body').textContent,
).toContain(`Coverage ${mockData.pipeline.coverage}`);
});
});
describe('without coverage', () => {
it('should not render a coverage', () => {
const mockCopy = Object.assign({}, mockData);
delete mockCopy.pipeline.coverage;
vm = mountComponent(Component, {
pipeline: mockCopy.pipeline,
hasCi: true,
ciStatus: 'success',
});
expect(
vm.$el.querySelector('.media-body').textContent,
).not.toContain('Coverage');
>>>>>>> ce/master
}); });
}); });
}); });
<<<<<<< HEAD
describe('when upstream pipelines are passed', () => { describe('when upstream pipelines are passed', () => {
beforeEach(() => { beforeEach(() => {
vm = mountComponent(Component, { vm = mountComponent(Component, {
...@@ -175,6 +245,20 @@ describe('MRWidgetPipeline', () => { ...@@ -175,6 +245,20 @@ describe('MRWidgetPipeline', () => {
}), }),
hasCi: true, hasCi: true,
ciStatus: 'success', ciStatus: 'success',
=======
describe('without a pipeline graph', () => {
it('should not render a pipeline graph', () => {
const mockCopy = Object.assign({}, mockData);
delete mockCopy.pipeline.details.stages;
vm = mountComponent(Component, {
pipeline: mockCopy.pipeline,
hasCi: true,
ciStatus: 'success',
});
expect(vm.$el.querySelector('.js-mini-pipeline-graph')).toEqual(null);
>>>>>>> ce/master
}); });
}); });
......
import Vue from 'vue';
import VueResource from 'vue-resource';
import MRWidgetService from '~/vue_merge_request_widget/services/mr_widget_service';
Vue.use(VueResource);
// Specs for MRWidgetService: constructor wiring and the per-endpoint
// helper methods.
describe('MRWidgetService', () => {
  // Minimal MR shape containing every *Path the service constructor reads.
  const mr = {
    mergePath: './',
    mergeCheckPath: './',
    cancelAutoMergePath: './',
    removeWIPPath: './',
    sourceBranchPath: './',
    ciEnvironmentsStatusPath: './',
    statusPath: './',
    mergeActionsContentPath: './',
    isServiceStore: true,
  };

  it('should have store and resources created in constructor', () => {
    const service = new MRWidgetService(mr);

    [
      'mergeResource',
      'mergeCheckResource',
      'cancelAutoMergeResource',
      'removeWIPResource',
      'removeSourceBranchResource',
      'deploymentsResource',
      'pollResource',
      'mergeActionsContentResource',
    ].forEach((resourceName) => {
      expect(service[resourceName]).toBeDefined();
    });
  });

  it('should have methods defined', () => {
    window.history.pushState({}, null, '/');
    const service = new MRWidgetService(mr);

    [
      service.merge(),
      service.cancelAutomaticMerge(),
      service.removeWIP(),
      service.removeSourceBranch(),
      service.fetchDeployments(),
      service.poll(),
      service.checkStatus(),
      service.fetchMergeActionsContent(),
      MRWidgetService.stopEnvironment(),
    ].forEach((result) => {
      expect(result).toBeDefined();
    });
  });
});
require 'spec_helper' require 'spec_helper'
describe Gitlab::IssuableMetadata do describe Gitlab::IssuableMetadata do
let(:user) { create(:user) } let(:user) { create(:user) }
let!(:project) { create(:project, :public, :repository, creator: user, namespace: user.namespace) } let!(:project) { create(:project, :public, :repository, creator: user, namespace: user.namespace) }
subject { Class.new { include Gitlab::IssuableMetadata }.new } subject { Class.new { include Gitlab::IssuableMetadata }.new }
...@@ -10,6 +10,10 @@ describe Gitlab::IssuableMetadata do ...@@ -10,6 +10,10 @@ describe Gitlab::IssuableMetadata do
expect(subject.issuable_meta_data(Issue.none, 'Issue')).to eq({}) expect(subject.issuable_meta_data(Issue.none, 'Issue')).to eq({})
end end
it 'raises an error when given a collection with no limit' do
expect { subject.issuable_meta_data(Issue.all, 'Issue') }.to raise_error(/must have a limit/)
end
context 'issues' do context 'issues' do
let!(:issue) { create(:issue, author: user, project: project) } let!(:issue) { create(:issue, author: user, project: project) }
let!(:closed_issue) { create(:issue, state: :closed, author: user, project: project) } let!(:closed_issue) { create(:issue, state: :closed, author: user, project: project) }
...@@ -19,7 +23,7 @@ describe Gitlab::IssuableMetadata do ...@@ -19,7 +23,7 @@ describe Gitlab::IssuableMetadata do
let!(:closing_issues) { create(:merge_requests_closing_issues, issue: issue, merge_request: merge_request) } let!(:closing_issues) { create(:merge_requests_closing_issues, issue: issue, merge_request: merge_request) }
it 'aggregates stats on issues' do it 'aggregates stats on issues' do
data = subject.issuable_meta_data(Issue.all, 'Issue') data = subject.issuable_meta_data(Issue.all.limit(10), 'Issue')
expect(data.count).to eq(2) expect(data.count).to eq(2)
expect(data[issue.id].upvotes).to eq(1) expect(data[issue.id].upvotes).to eq(1)
...@@ -42,7 +46,7 @@ describe Gitlab::IssuableMetadata do ...@@ -42,7 +46,7 @@ describe Gitlab::IssuableMetadata do
let!(:note) { create(:note_on_merge_request, author: user, project: project, noteable: merge_request, note: "a comment on a MR") } let!(:note) { create(:note_on_merge_request, author: user, project: project, noteable: merge_request, note: "a comment on a MR") }
it 'aggregates stats on merge requests' do it 'aggregates stats on merge requests' do
data = subject.issuable_meta_data(MergeRequest.all, 'MergeRequest') data = subject.issuable_meta_data(MergeRequest.all.limit(10), 'MergeRequest')
expect(data.count).to eq(2) expect(data.count).to eq(2)
expect(data[merge_request.id].upvotes).to eq(1) expect(data[merge_request.id].upvotes).to eq(1)
......
...@@ -4,32 +4,40 @@ describe Gitlab::Metrics::SidekiqMiddleware do ...@@ -4,32 +4,40 @@ describe Gitlab::Metrics::SidekiqMiddleware do
let(:middleware) { described_class.new } let(:middleware) { described_class.new }
let(:message) { { 'args' => ['test'], 'enqueued_at' => Time.new(2016, 6, 23, 6, 59).to_f } } let(:message) { { 'args' => ['test'], 'enqueued_at' => Time.new(2016, 6, 23, 6, 59).to_f } }
def run(worker, message) describe '#call' do
expect(Gitlab::Metrics::BackgroundTransaction).to receive(:new) it 'tracks the transaction' do
.with(worker.class) worker = double(:worker, class: double(:class, name: 'TestWorker'))
.and_call_original
expect_any_instance_of(Gitlab::Metrics::Transaction).to receive(:set)
.with(:sidekiq_queue_duration, instance_of(Float))
expect_any_instance_of(Gitlab::Metrics::Transaction).to receive(:finish) expect(Gitlab::Metrics::BackgroundTransaction).to receive(:new)
.with(worker.class)
.and_call_original
middleware.call(worker, message, :test) { nil } expect_any_instance_of(Gitlab::Metrics::Transaction).to receive(:set)
end .with(:sidekiq_queue_duration, instance_of(Float))
describe '#call' do expect_any_instance_of(Gitlab::Metrics::Transaction).to receive(:finish)
let(:test_worker_class) { double(:class, name: 'TestWorker') }
let(:worker) { double(:worker, class: test_worker_class) }
it 'tracks the transaction' do middleware.call(worker, message, :test) { nil }
run(worker, message)
end end
it 'tracks the transaction (for messages without `enqueued_at`)' do it 'tracks the transaction (for messages without `enqueued_at`)' do
run(worker, {}) worker = double(:worker, class: double(:class, name: 'TestWorker'))
expect(Gitlab::Metrics::BackgroundTransaction).to receive(:new)
.with(worker.class)
.and_call_original
expect_any_instance_of(Gitlab::Metrics::Transaction).to receive(:set)
.with(:sidekiq_queue_duration, instance_of(Float))
expect_any_instance_of(Gitlab::Metrics::Transaction).to receive(:finish)
middleware.call(worker, {}, :test) { nil }
end end
it 'tracks any raised exceptions' do it 'tracks any raised exceptions' do
worker = double(:worker, class: double(:class, name: 'TestWorker'))
expect_any_instance_of(Gitlab::Metrics::Transaction) expect_any_instance_of(Gitlab::Metrics::Transaction)
.to receive(:run).and_raise(RuntimeError) .to receive(:run).and_raise(RuntimeError)
......
...@@ -91,13 +91,6 @@ describe Gitlab::Middleware::ReadOnly do ...@@ -91,13 +91,6 @@ describe Gitlab::Middleware::ReadOnly do
end end
context 'whitelisted requests' do context 'whitelisted requests' do
it 'expects DELETE request to logout to be allowed' do
response = request.delete('/users/sign_out')
expect(response).not_to be_a_redirect
expect(subject).not_to disallow_request
end
it 'expects a POST internal request to be allowed' do it 'expects a POST internal request to be allowed' do
response = request.post("/api/#{API::API.version}/internal") response = request.post("/api/#{API::API.version}/internal")
......
...@@ -683,6 +683,7 @@ describe 'Git LFS API and storage' do ...@@ -683,6 +683,7 @@ describe 'Git LFS API and storage' do
expect(json_response['objects'].first['actions']['upload']['href']).to eq("#{Gitlab.config.gitlab.url}/#{project.full_path}.git/gitlab-lfs/objects/#{sample_oid}/#{sample_size}") expect(json_response['objects'].first['actions']['upload']['href']).to eq("#{Gitlab.config.gitlab.url}/#{project.full_path}.git/gitlab-lfs/objects/#{sample_oid}/#{sample_size}")
expect(json_response['objects'].first['actions']['upload']['header']).to eq('Authorization' => authorization) expect(json_response['objects'].first['actions']['upload']['header']).to eq('Authorization' => authorization)
end end
<<<<<<< HEAD
## EE-specific context ## EE-specific context
context 'and project is above the limit' do context 'and project is above the limit' do
...@@ -711,6 +712,8 @@ describe 'Git LFS API and storage' do ...@@ -711,6 +712,8 @@ describe 'Git LFS API and storage' do
expect(json_response['message']).to eql('Your push has been rejected, because this repository has exceeded its size limit of 300 MB by 50 MB. Please contact your GitLab administrator for more information.') expect(json_response['message']).to eql('Your push has been rejected, because this repository has exceeded its size limit of 300 MB by 50 MB. Please contact your GitLab administrator for more information.')
end end
end end
=======
>>>>>>> ce/master
end end
describe 'when request is authenticated' do describe 'when request is authenticated' do
......
...@@ -257,8 +257,10 @@ describe "Authentication", "routing" do ...@@ -257,8 +257,10 @@ describe "Authentication", "routing" do
expect(post("/users/sign_in")).to route_to('sessions#create') expect(post("/users/sign_in")).to route_to('sessions#create')
end end
it "DELETE /users/sign_out" do # sign_out with GET instead of DELETE facilitates ad-hoc single-sign-out processes
expect(delete("/users/sign_out")).to route_to('sessions#destroy') # (https://gitlab.com/gitlab-org/gitlab-ce/issues/39708)
it "GET /users/sign_out" do
expect(get("/users/sign_out")).to route_to('sessions#destroy')
end end
it "POST /users/password" do it "POST /users/password" do
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment