Commit 2a030c97 authored by Dmitriy Zaporozhets

Merge branch 'ce-to-ee' into 'master'

CE upstream: Thursday

Closes gitlab-qa#87, gitaly#700, and gitlab-ce#39361

See merge request gitlab-org/gitlab-ee!3328
parents 59adb2e6 e2a2e8c3
......@@ -613,6 +613,16 @@ codequality:
artifacts:
paths: [codeclimate.json]
qa:internal:
stage: test
variables:
SETUP_DB: "false"
services: []
script:
- cd qa/
- bundle install
- bundle exec rspec
coverage:
<<: *dedicated-runner
<<: *except-docs
......
......@@ -66,8 +66,7 @@
<div class="ci-widget media">
<template v-if="hasCIError">
<div class="ci-status-icon ci-status-icon-failed ci-error js-ci-error append-right-10">
<icon
name="status_failed"/>
<icon name="status_failed" />
</div>
<div class="media-body">
Could not connect to the CI server. Please check your settings and try again
......@@ -86,7 +85,9 @@
class="pipeline-id">
#{{pipeline.id}}
</a>
{{pipeline.details.status.label}} for
<a
:href="pipeline.commit.commit_path"
class="commit-sha js-commit-link">
......
......@@ -61,7 +61,7 @@
= link_to "Help", help_path
%li.divider
%li
= link_to "Sign out", destroy_user_session_path, method: :delete, class: "sign-out-link"
= link_to "Sign out", destroy_user_session_path, class: "sign-out-link"
- if session[:impersonator_id]
%li.impersonation
= link_to admin_impersonation_path, class: 'impersonation-btn', method: :delete, title: "Stop impersonation", aria: { label: 'Stop impersonation' }, data: { toggle: 'tooltip', placement: 'bottom', container: 'body' } do
......
......@@ -33,7 +33,7 @@
= s_('TagsPage|Optionally, add a message to the tag.')
%hr
.form-group
= label_tag :release_description, 'Release notes', class: 'control-label'
= label_tag :release_description, s_('TagsPage|Release notes'), class: 'control-label'
.col-sm-10
= render layout: 'projects/md_preview', locals: { url: preview_markdown_path(@project), referenced_users: true } do
= render 'projects/zen', attr: :release_description, classes: 'note-textarea', placeholder: s_('TagsPage|Write your release notes or drag files here...'), current_text: @release_description
......@@ -41,6 +41,6 @@
.help-block
= s_('TagsPage|Optionally, add release notes to the tag. They will be stored in the GitLab database and displayed on the tags page.')
.form-actions
= button_tag 'Create tag', class: 'btn btn-create', tabindex: 3
= link_to 'Cancel', project_tags_path(@project), class: 'btn btn-cancel'
= button_tag s_('TagsPage|Create tag'), class: 'btn btn-create', tabindex: 3
= link_to s_('TagsPage|Cancel'), project_tags_path(@project), class: 'btn btn-cancel'
%script#availableRefs{ type: "application/json" }= @project.repository.ref_names.to_json.html_safe
---
title: Moves mini graph of pipeline to the end of sentence in MR widget. Cleans HTML
and tests
merge_request:
author:
type: fixed
---
title: Change 'Sign Out' route from a DELETE to a GET
merge_request: 39708
author: Joe Marty
type: changed
---
title: Speed up issues list APIs
merge_request:
author:
type: performance
......@@ -195,7 +195,7 @@ Devise.setup do |config|
config.navigational_formats = [:"*/*", "*/*", :html, :zip]
# The default HTTP method used to sign out a resource. Default is :delete.
config.sign_out_via = :delete
config.sign_out_via = :get
# ==> OmniAuth
# To configure a new OmniAuth provider copy and edit omniauth.rb.sample
......
......@@ -9,7 +9,7 @@ mapping structure from the projects URLs:
* Project's repository: `#{namespace}/#{project_name}.git`
* Project's wiki: `#{namespace}/#{project_name}.wiki.git`
This structure made it simple to migrate from existing solutions to GitLab and easy for Administrators to find where the
repository is stored.
......@@ -27,7 +27,7 @@ of load in big installations, and can be even worst if they are using any type o
Last, for GitLab Geo, this storage type means we have to synchronize the disk state, replicate renames in the correct
order, or we may end up with the wrong repository or missing data temporarily.
This pattern also exists in other objects stored in GitLab, like issue Attachments, GitLab Pages artifacts,
Docker Containers for the integrated Registry, etc.
## Hashed Storage
......@@ -62,9 +62,9 @@ you will never mistakenly restore a repository in the wrong project (considering
### How to migrate to Hashed Storage
In GitLab, go to **Admin > Settings**, find the **Repository Storage** section and select
"_Create new projects using hashed storage paths_".
To migrate your existing projects to the new storage type, check the specific [rake tasks].
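For illustration, here is a minimal sketch of how a hashed storage path could be derived. It assumes the scheme described in this document (a SHA-256 hash of the immutable project ID, split into two-character prefixes); the helper is illustrative only, not GitLab's actual implementation.

```ruby
require 'digest'

# Illustrative only: derive the on-disk repository path under hashed storage,
# assuming the path is based on the SHA-256 of the immutable project ID
# rather than the mutable "#{namespace}/#{project_name}".
def hashed_repository_path(project_id)
  hash = Digest::SHA256.hexdigest(project_id.to_s)
  "@hashed/#{hash[0..1]}/#{hash[2..3]}/#{hash}.git"
end

puts hashed_repository_path(1)
# => "@hashed/6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b.git"
```

Because the hash never changes when a project is renamed or transferred, the path stays stable for the life of the project.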
[ce-28283]: https://gitlab.com/gitlab-org/gitlab-ce/issues/28283
......@@ -79,14 +79,14 @@ coverage status below.
Note that objects stored in an S3 compatible endpoint will not have the downsides mentioned earlier, as long as they are not
prefixed with `#{namespace}/#{project_name}`, which is true for CI Cache and LFS Objects.
| Storable Object | Legacy Storage | Hashed Storage | S3 Compatible | GitLab Version |
| --------------- | -------------- | -------------- | ------------- | -------------- |
| Repository | Yes | Yes | - | 10.0 |
| Attachments | Yes | Yes | - | 10.2 |
| Avatars | Yes | No | - | - |
| Pages | Yes | No | - | - |
| Docker Registry | Yes | No | - | - |
| CI Build Logs | No | No | - | - |
| CI Artifacts | No | No | Yes (EEP) | - |
| CI Cache | No | No | Yes | - |
| LFS Objects | Yes | No | Yes (EEP) | - |
......@@ -58,6 +58,8 @@ Parameters:
"project_id": 3,
"title": "test1",
"state": "opened",
"created_at": "2017-04-29T08:46:00Z",
"updated_at": "2017-04-29T08:46:00Z",
"upvotes": 0,
"downvotes": 0,
"author": {
......@@ -170,6 +172,8 @@ Parameters:
"project_id": 3,
"title": "test1",
"state": "opened",
"created_at": "2017-04-29T08:46:00Z",
"updated_at": "2017-04-29T08:46:00Z",
"upvotes": 0,
"downvotes": 0,
"author": {
......@@ -248,6 +252,8 @@ Parameters:
"project_id": 3,
"title": "test1",
"state": "merged",
"created_at": "2017-04-29T08:46:00Z",
"updated_at": "2017-04-29T08:46:00Z",
"upvotes": 0,
"downvotes": 0,
"author": {
......
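As a usage sketch for the `created_at`/`updated_at` fields added above, the following assumes these snippets come from the merge requests API documentation; the host, project ID, and token are placeholders, not values from this merge request.

```ruby
require 'net/http'
require 'json'
require 'uri'

# Hedged sketch: list merge requests through the API and print the
# created_at/updated_at fields documented above.
uri = URI('https://gitlab.example.com/api/v4/projects/3/merge_requests?state=opened')
request = Net::HTTP::Get.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN', 'your-token-here')

response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(request) }
JSON.parse(response.body).each do |mr|
  puts "#{mr['title']}: created #{mr['created_at']}, updated #{mr['updated_at']}"
end
```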
# File Storage in GitLab
We use the [CarrierWave] gem to handle file upload, storage and retrieval.
There are many places where file uploading is used, depending on the context:
* System
- Instance Logo (logo visible in sign in/sign up pages)
- Header Logo (one displayed in the navigation bar)
* Group
- Group avatars
* User
- User avatars
- User snippet attachments
* Project
- Project avatars
- Issues/MR Markdown attachments
- Issues/MR Legacy Markdown attachments
- CI Build Artifacts
- LFS Objects
## Disk storage
GitLab started by saving everything on local disk. While directory locations have changed between versions,
they are still not 100% standardized. You can see them below:
| Description | In DB? | Relative path | Uploader class | model_type |
| ------------------------------------- | ------ | ----------------------------------------------------------- | ---------------------- | ---------- |
| Instance logo | yes | uploads/-/system/appearance/logo/:id/:filename | `AttachmentUploader` | Appearance |
| Header logo | yes | uploads/-/system/appearance/header_logo/:id/:filename | `AttachmentUploader` | Appearance |
| Group avatars | yes | uploads/-/system/group/avatar/:id/:filename | `AvatarUploader` | Group |
| User avatars | yes | uploads/-/system/user/avatar/:id/:filename | `AvatarUploader` | User |
| User snippet attachments | yes | uploads/-/system/personal_snippet/:id/:random_hex/:filename | `PersonalFileUploader` | Snippet |
| Project avatars | yes | uploads/-/system/project/avatar/:id/:filename | `AvatarUploader` | Project |
| Issues/MR Markdown attachments | yes | uploads/:project_path_with_namespace/:random_hex/:filename | `FileUploader` | Project |
| Issues/MR Legacy Markdown attachments | no | uploads/-/system/note/attachment/:id/:filename | `AttachmentUploader` | Note |
| CI Artifacts (CE) | yes | shared/artifacts/:year_:month/:project_id/:id | `ArtifactUploader` | Ci::Build |
| LFS Objects (CE) | yes | shared/lfs-objects/:hex/:hex/:object_hash | `LfsObjectUploader` | LfsObject |
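To give a feel for where the "Relative path" column comes from, here is a minimal CarrierWave uploader sketch; the class and path are invented for illustration and are not one of the uploaders listed above.

```ruby
require 'carrierwave'
require 'active_support/core_ext/string/inflections'

# Hypothetical uploader (not one of the classes in the table above):
# CarrierWave calls #store_dir to decide the relative directory where the
# uploaded file is persisted, which is how the paths above are produced.
class ExampleAttachmentUploader < CarrierWave::Uploader::Base
  storage :file

  def store_dir
    # e.g. "uploads/-/system/appearance/logo/1" for an Appearance record
    "uploads/-/system/#{model.class.to_s.underscore}/#{mounted_as}/#{model.id}"
  end
end
```

In a model this would be wired up with `mount_uploader :logo, ExampleAttachmentUploader`, which is what makes `model` and `mounted_as` available inside the uploader.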
CI Artifacts and LFS Objects behave differently in CE and EE. In CE they inherit from `GitlabUploader`,
while in EE they inherit from `ObjectStoreUploader` and store files in an S3 API compatible object store.
In the case of Issues/MR Markdown attachments, there is a different approach using the [Hashed Storage] layout:
instead of basing the path on the mutable `:project_path_with_namespace`, it's possible to use the
hash of the project ID instead, if the project has migrated to the new approach (introduced in 10.2).
[CarrierWave]: https://github.com/carrierwaveuploader/carrierwave
[Hashed Storage]: ../administration/repository_storage_types.md
......@@ -110,7 +110,7 @@ You can mark that content for translation with:
In JavaScript we added the `__()` (double underscore parenthesis) function
for translations.
### Updating the PO files with the new content
## Updating the PO files with the new content
Now that the new content is marked for translation, we need to update the PO
files with the following command:
......@@ -119,23 +119,20 @@ files with the following command:
bundle exec rake gettext:find
```
This command will update the `locale/**/gitlab.edit.po` file with the
new content that the parser has found.
This command will update the `locale/gitlab.pot` file with the newly externalized
strings and remove any strings that aren't used anymore. You should check this
file in. Once the changes are on master, they will be picked up by
[Crowdin](http://translate.gitlab.com) and be presented for translation.
New translations will be added with their default content and will be marked
fuzzy. To use the translation, look for the `#, fuzzy` mention in `gitlab.edit.po`
and remove it.
The command also updates the translation files for each language: `locale/*/gitlab.po`.
These changes can be discarded; the language files will be updated by Crowdin
automatically.
We need to make sure we remove the `fuzzy` translations before generating the
`locale/**/gitlab.po` file. When they aren't removed, the resulting `.po` will
be treated as a binary file which could overwrite translations that were merged
before the new translations.
Discard all of them at once like this:
When we are just preparing a page to be translated, but not actually adding any
translations, there's no need to generate `.po` files.
Translations that aren't used in the source code anymore will be marked with
`~#`; these can be removed to keep our translation files clutter-free.
```sh
git checkout locale/*/gitlab.po
```
### Validating PO files
......
......@@ -73,7 +73,7 @@ module API
desc: 'Return issues for the given scope: `created-by-me`, `assigned-to-me` or `all`'
end
get do
issues = find_issues
issues = paginate(find_issues)
options = {
with: Entities::IssueBasic,
......@@ -81,7 +81,7 @@ module API
issuable_metadata: issuable_meta_data(issues, 'Issue')
}
present paginate(issues), options
present issues, options
end
end
......@@ -100,7 +100,7 @@ module API
get ":id/issues" do
group = find_group!(params[:id])
issues = find_issues(group_id: group.id)
issues = paginate(find_issues(group_id: group.id))
options = {
with: Entities::IssueBasic,
......@@ -108,7 +108,7 @@ module API
issuable_metadata: issuable_meta_data(issues, 'Issue')
}
present paginate(issues), options
present issues, options
end
end
......@@ -129,7 +129,7 @@ module API
get ":id/issues" do
project = find_project!(params[:id])
issues = find_issues(project_id: project.id)
issues = paginate(find_issues(project_id: project.id))
options = {
with: Entities::IssueBasic,
......@@ -138,7 +138,7 @@ module API
issuable_metadata: issuable_meta_data(issues, 'Issue')
}
present paginate(issues), options
present issues, options
end
desc 'Get a single project issue' do
......
......@@ -48,11 +48,14 @@ module Gitlab
end
def update_page(page_path, title, format, content, commit_details)
assert_type!(format, Symbol)
assert_type!(commit_details, CommitDetails)
gollum_wiki.update_page(gollum_page_by_path(page_path), title, format, content, commit_details.to_h)
nil
@repository.gitaly_migrate(:wiki_update_page) do |is_enabled|
if is_enabled
gitaly_update_page(page_path, title, format, content, commit_details)
gollum_wiki.clear_cache
else
gollum_update_page(page_path, title, format, content, commit_details)
end
end
end
def pages
......@@ -149,6 +152,14 @@ module Gitlab
nil
end
def gollum_update_page(page_path, title, format, content, commit_details)
assert_type!(format, Symbol)
assert_type!(commit_details, CommitDetails)
gollum_wiki.update_page(gollum_page_by_path(page_path), title, format, content, commit_details.to_h)
nil
end
def gollum_find_page(title:, version: nil, dir: nil)
if version
version = Gitlab::Git::Commit.find(@repository, version).id
......@@ -172,6 +183,10 @@ module Gitlab
gitaly_wiki_client.write_page(name, format, content, commit_details)
end
def gitaly_update_page(page_path, title, format, content, commit_details)
gitaly_wiki_client.update_page(page_path, title, format, content, commit_details)
end
def gitaly_delete_page(page_path, commit_details)
gitaly_wiki_client.delete_page(page_path, commit_details)
end
......
......@@ -37,6 +37,31 @@ module Gitlab
end
end
def update_page(page_path, title, format, content, commit_details)
request = Gitaly::WikiUpdatePageRequest.new(
repository: @gitaly_repo,
page_path: GitalyClient.encode(page_path),
title: GitalyClient.encode(title),
format: format.to_s,
commit_details: gitaly_commit_details(commit_details)
)
strio = StringIO.new(content)
enum = Enumerator.new do |y|
until strio.eof?
chunk = strio.read(MAX_MSG_SIZE)
request.content = GitalyClient.encode(chunk)
y.yield request
request = Gitaly::WikiUpdatePageRequest.new
end
end
GitalyClient.call(@repository.storage, :wiki_service, :wiki_update_page, enum)
end
def delete_page(page_path, commit_details)
request = Gitaly::WikiDeletePageRequest.new(
repository: @gitaly_repo,
......
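The new `update_page` client method above streams the page content to Gitaly in fixed-size chunks via an `Enumerator`. Below is a Gitaly-free sketch of that chunking pattern, using a small illustrative chunk size in place of `MAX_MSG_SIZE`.

```ruby
require 'stringio'

# Gitaly-free sketch of the streaming pattern used by #update_page above:
# read the content in fixed-size chunks and yield them lazily, so a large
# payload becomes a stream of small messages instead of one huge request.
CHUNK_SIZE = 16 # illustrative; the real client uses MAX_MSG_SIZE

def chunked(content)
  strio = StringIO.new(content)

  Enumerator.new do |yielder|
    yielder.yield(strio.read(CHUNK_SIZE)) until strio.eof?
  end
end

chunked('a' * 40).each_with_index do |chunk, index|
  puts "message #{index}: #{chunk.bytesize} bytes"
end
# => message 0: 16 bytes, message 1: 16 bytes, message 2: 8 bytes
```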
......@@ -38,7 +38,14 @@ module Gitlab
# otherwise hitting the rate limit will result in a thread
# being blocked in a `sleep()` call for up to an hour.
def initialize(token, per_page: 100, parallel: true)
@octokit = Octokit::Client.new(access_token: token, per_page: per_page)
@octokit = Octokit::Client.new(
access_token: token,
per_page: per_page,
api_endpoint: api_endpoint
)
@octokit.connection_options[:ssl] = { verify: verify_ssl }
@parallel = parallel
end
......@@ -122,6 +129,8 @@ module Gitlab
# whether we are running in parallel mode or not. For more information see
# `#rate_or_wait_for_rate_limit`.
def with_rate_limit
return yield unless rate_limiting_enabled?
request_count_counter.increment
raise_or_wait_for_rate_limit unless requests_remaining?
......@@ -163,8 +172,31 @@ module Gitlab
octokit.rate_limit.resets_in + 5
end
def respond_to_missing?(method, include_private = false)
octokit.respond_to?(method, include_private)
def rate_limiting_enabled?
@rate_limiting_enabled ||= api_endpoint.include?('.github.com')
end
def api_endpoint
custom_api_endpoint || default_api_endpoint
end
def custom_api_endpoint
github_omniauth_provider.dig('args', 'client_options', 'site')
end
def default_api_endpoint
OmniAuth::Strategies::GitHub.default_options[:client_options][:site]
end
def verify_ssl
github_omniauth_provider.fetch('verify_ssl', true)
end
def github_omniauth_provider
@github_omniauth_provider ||=
Gitlab.config.omniauth.providers
.find { |provider| provider.name == 'github' }
.to_h
end
def rate_limit_counter
......
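The `Client#initialize` change above passes a custom `api_endpoint` and an SSL verification setting straight to Octokit. Roughly, and with a placeholder endpoint and token, that amounts to the following sketch.

```ruby
require 'octokit'

# Hedged sketch of the Octokit configuration performed above; the endpoint
# and token are placeholders for a GitHub Enterprise installation.
client = Octokit::Client.new(
  access_token: ENV.fetch('GITHUB_TOKEN', 'token'),
  per_page: 100,
  api_endpoint: 'https://github.example.com/api/v3/'
)
client.connection_options[:ssl] = { verify: false } # e.g. a self-signed certificate

client.repositories.each { |repo| puts repo.full_name }
```

As the spec changes further down note, the importer's own rate-limit handling is skipped when the endpoint is not on `.github.com`.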
module Gitlab
module IssuableMetadata
def issuable_meta_data(issuable_collection, collection_type)
# ActiveRecord uses Object#extend for null relations.
if !(issuable_collection.singleton_class < ActiveRecord::NullRelation) &&
issuable_collection.respond_to?(:limit_value) &&
issuable_collection.limit_value.nil?
raise 'Collection must have a limit applied for preloading meta-data'
end
# map has to be used here since using pluck or select will
# throw an error when ordering issuables by priority which inserts
# a new order into the collection.
......
......@@ -66,11 +66,7 @@ module Gitlab
end
def whitelisted_routes
logout_route || grack_route || @whitelisted.any? { |path| request.path.include?(path) } || lfs_route || sidekiq_route
end
def logout_route
route_hash[:controller] == 'sessions' && route_hash[:action] == 'destroy'
grack_route || @whitelisted.any? { |path| request.path.include?(path) } || lfs_route || sidekiq_route
end
def sidekiq_route
......
......@@ -14,7 +14,9 @@ class GithubImport
end
def run!
@repo = GithubRepos.new(@options, @current_user, @github_repo).choose_one!
@repo = GithubRepos
.new(@options[:token], @current_user, @github_repo)
.choose_one!
raise 'No repo found!' unless @repo
......@@ -28,7 +30,7 @@ class GithubImport
private
def show_warning!
puts "This will import GitHub #{@repo['full_name'].bright} into GitLab #{@project_path.bright} as #{@current_user.name}"
puts "This will import GitHub #{@repo.full_name.bright} into GitLab #{@project_path.bright} as #{@current_user.name}"
puts "Permission checks are ignored. Press any key to continue.".color(:red)
STDIN.getch
......@@ -65,16 +67,16 @@ class GithubImport
@current_user,
name: name,
path: name,
description: @repo['description'],
description: @repo.description,
namespace_id: namespace.id,
visibility_level: visibility_level,
skip_wiki: @repo['has_wiki']
skip_wiki: @repo.has_wiki
).execute
project.update!(
import_type: 'github',
import_source: @repo['full_name'],
import_url: @repo['clone_url'].sub('://', "://#{@options[:token]}@")
import_source: @repo.full_name,
import_url: @repo.clone_url.sub('://', "://#{@options[:token]}@")
)
project
......@@ -93,13 +95,15 @@ class GithubImport
end
def visibility_level
@repo['private'] ? Gitlab::VisibilityLevel::PRIVATE : Gitlab::CurrentSettings.current_application_settings.default_project_visibility
@repo.private ? Gitlab::VisibilityLevel::PRIVATE : Gitlab::CurrentSettings.current_application_settings.default_project_visibility
end
end
class GithubRepos
def initialize(options, current_user, github_repo)
@options = options
def initialize(token, current_user, github_repo)
@client = Gitlab::GithubImport::Client.new(token)
@client.octokit.auto_paginate = true
@current_user = current_user
@github_repo = github_repo
end
......@@ -108,17 +112,17 @@ class GithubRepos
return found_github_repo if @github_repo
repos.each do |repo|
print "ID: #{repo['id'].to_s.bright}".color(:green)
print "\tName: #{repo['full_name']}\n".color(:green)
print "ID: #{repo.id.to_s.bright}".color(:green)
print "\tName: #{repo.full_name}\n".color(:green)
end
print 'ID? '.bright
repos.find { |repo| repo['id'] == repo_id }
repos.find { |repo| repo.id == repo_id }
end
def found_github_repo
repos.find { |repo| repo['full_name'] == @github_repo }
repos.find { |repo| repo.full_name == @github_repo }
end
def repo_id
......@@ -126,7 +130,7 @@ class GithubRepos
end
def repos
Github::Repositories.new(@options).fetch
@client.octokit.list_repositories
end
end
......
......@@ -7,7 +7,7 @@ module QA
# including staging and on-premises installation.
#
class Mattermost < Scenario::Entrypoint
tags :mattermost
tags :core, :mattermost
def perform(address, mattermost, *files)
Runtime::Scenario.define(:mattermost_address, mattermost)
......
import Vue from 'vue';
import SidebarService from '~/sidebar/services/sidebar_service';
import Mock from './mock_data';
describe('Sidebar service', () => {
beforeEach(() => {
Vue.http.interceptors.push(Mock.sidebarMockInterceptor);
this.service = new SidebarService({
endpoint: '/gitlab-org/gitlab-shell/issues/5.json',
toggleSubscriptionEndpoint: '/gitlab-org/gitlab-shell/issues/5/toggle_subscription',
moveIssueEndpoint: '/gitlab-org/gitlab-shell/issues/5/move',
projectsAutocompleteEndpoint: '/autocomplete/projects?project_id=15',
});
});
afterEach(() => {
SidebarService.singleton = null;
Vue.http.interceptors = _.without(Vue.http.interceptors, Mock.sidebarMockInterceptor);
});
it('gets the data', (done) => {
this.service.get()
.then((resp) => {
expect(resp).toBeDefined();
done();
})
.then(done)
.catch(done.fail);
});
it('updates the data', (done) => {
this.service.update('issue[assignee_ids]', [1])
.then((resp) => {
expect(resp).toBeDefined();
})
.then(done)
.catch(done.fail);
});
it('gets projects for autocomplete', (done) => {
this.service.getProjectsAutocomplete()
.then((resp) => {
expect(resp).toBeDefined();
})
.then(done)
.catch(done.fail);
});
it('moves the issue to another project', (done) => {
this.service.moveIssue(123)
.then((resp) => {
expect(resp).toBeDefined();
})
.then(done)
.catch(done.fail);
});
it('toggles the subscription', (done) => {
this.service.toggleSubscription()
.then((resp) => {
expect(resp).toBeDefined();
})
.then(done)
.catch(done.fail);
});
});
......@@ -145,41 +145,41 @@ describe('MRWidgetPipeline', () => {
expect(vm.$el.querySelector('.js-mini-pipeline-graph')).toEqual(null);
});
});
});
describe('when upstream pipelines are passed', () => {
beforeEach(() => {
vm = mountComponent(Component, {
pipeline: Object.assign({}, mockData.pipeline, {
triggered_by: mockLinkedPipelines.triggered_by,
}),
hasCi: true,
ciStatus: 'success',
describe('when upstream pipelines are passed', () => {
beforeEach(() => {
vm = mountComponent(Component, {
pipeline: Object.assign({}, mockData.pipeline, {
triggered_by: mockLinkedPipelines.triggered_by,
}),
hasCi: true,
ciStatus: 'success',
});
});
});
it('should coerce triggeredBy into a collection', () => {
expect(vm.triggeredBy.length).toBe(1);
});
it('should coerce triggeredBy into a collection', () => {
expect(vm.triggeredBy.length).toBe(1);
});
it('should render the linked pipelines mini list', () => {
expect(vm.$el.querySelector('.linked-pipeline-mini-list.is-upstream')).not.toBeNull();
it('should render the linked pipelines mini list', () => {
expect(vm.$el.querySelector('.linked-pipeline-mini-list.is-upstream')).not.toBeNull();
});
});
});
describe('when downstream pipelines are passed', () => {
beforeEach(() => {
vm = mountComponent(Component, {
pipeline: Object.assign({}, mockData.pipeline, {
triggered: mockLinkedPipelines.triggered,
}),
hasCi: true,
ciStatus: 'success',
describe('when downstream pipelines are passed', () => {
beforeEach(() => {
vm = mountComponent(Component, {
pipeline: Object.assign({}, mockData.pipeline, {
triggered: mockLinkedPipelines.triggered,
}),
hasCi: true,
ciStatus: 'success',
});
});
});
it('should render the linked pipelines mini list', () => {
expect(vm.$el.querySelector('.linked-pipeline-mini-list.is-downstream')).not.toBeNull();
it('should render the linked pipelines mini list', () => {
expect(vm.$el.querySelector('.linked-pipeline-mini-list.is-downstream')).not.toBeNull();
});
});
});
});
import Vue from 'vue';
import VueResource from 'vue-resource';
import MRWidgetService from '~/vue_merge_request_widget/services/mr_widget_service';
Vue.use(VueResource);
describe('MRWidgetService', () => {
const mr = {
mergePath: './',
mergeCheckPath: './',
cancelAutoMergePath: './',
removeWIPPath: './',
sourceBranchPath: './',
ciEnvironmentsStatusPath: './',
statusPath: './',
mergeActionsContentPath: './',
isServiceStore: true,
};
it('should have store and resources created in constructor', () => {
const service = new MRWidgetService(mr);
expect(service.mergeResource).toBeDefined();
expect(service.mergeCheckResource).toBeDefined();
expect(service.cancelAutoMergeResource).toBeDefined();
expect(service.removeWIPResource).toBeDefined();
expect(service.removeSourceBranchResource).toBeDefined();
expect(service.deploymentsResource).toBeDefined();
expect(service.pollResource).toBeDefined();
expect(service.mergeActionsContentResource).toBeDefined();
});
it('should have methods defined', () => {
window.history.pushState({}, null, '/');
const service = new MRWidgetService(mr);
expect(service.merge()).toBeDefined();
expect(service.cancelAutomaticMerge()).toBeDefined();
expect(service.removeWIP()).toBeDefined();
expect(service.removeSourceBranch()).toBeDefined();
expect(service.fetchDeployments()).toBeDefined();
expect(service.poll()).toBeDefined();
expect(service.checkStatus()).toBeDefined();
expect(service.fetchMergeActionsContent()).toBeDefined();
expect(MRWidgetService.stopEnvironment()).toBeDefined();
});
});
......@@ -185,6 +185,17 @@ describe Gitlab::GithubImport::Client do
client.with_rate_limit { }
end
it 'ignores rate limiting when disabled' do
expect(client)
.to receive(:rate_limiting_enabled?)
.and_return(false)
expect(client)
.not_to receive(:requests_remaining?)
expect(client.with_rate_limit { 10 }).to eq(10)
end
end
describe '#requests_remaining?' do
......@@ -260,27 +271,122 @@ describe Gitlab::GithubImport::Client do
end
end
describe '#method_missing' do
it 'delegates missing methods to the request method' do
client = described_class.new('foo')
describe '#api_endpoint' do
let(:client) { described_class.new('foo') }
context 'without a custom endpoint configured in Omniauth' do
it 'returns the default API endpoint' do
expect(client)
.to receive(:custom_api_endpoint)
.and_return(nil)
expect(client).to receive(:milestones).with(state: 'all')
expect(client.api_endpoint).to eq('https://api.github.com')
end
end
client.milestones(state: 'all')
context 'with a custom endpoint configured in Omniauth' do
it 'returns the custom endpoint' do
endpoint = 'https://github.kittens.com'
expect(client)
.to receive(:custom_api_endpoint)
.and_return(endpoint)
expect(client.api_endpoint).to eq(endpoint)
end
end
end
describe '#respond_to_missing?' do
it 'returns true for methods supported by Octokit' do
client = described_class.new('foo')
describe '#custom_api_endpoint' do
let(:client) { described_class.new('foo') }
expect(client.respond_to?(:milestones)).to eq(true)
context 'without a custom endpoint' do
it 'returns nil' do
expect(client)
.to receive(:github_omniauth_provider)
.and_return({})
expect(client.custom_api_endpoint).to be_nil
end
end
it 'returns false for methods not supported by Octokit' do
context 'with a custom endpoint' do
it 'returns the API endpoint' do
endpoint = 'https://github.kittens.com'
expect(client)
.to receive(:github_omniauth_provider)
.and_return({ 'args' => { 'client_options' => { 'site' => endpoint } } })
expect(client.custom_api_endpoint).to eq(endpoint)
end
end
end
describe '#default_api_endpoint' do
it 'returns the default API endpoint' do
client = described_class.new('foo')
expect(client.respond_to?(:kittens)).to eq(false)
expect(client.default_api_endpoint).to eq('https://api.github.com')
end
end
describe '#verify_ssl' do
let(:client) { described_class.new('foo') }
context 'without a custom configuration' do
it 'returns true' do
expect(client)
.to receive(:github_omniauth_provider)
.and_return({})
expect(client.verify_ssl).to eq(true)
end
end
context 'with a custom configuration' do
it 'returns the configured value' do
expect(client.verify_ssl).to eq(false)
end
end
end
describe '#github_omniauth_provider' do
let(:client) { described_class.new('foo') }
context 'without a configured provider' do
it 'returns an empty Hash' do
expect(Gitlab.config.omniauth)
.to receive(:providers)
.and_return([])
expect(client.github_omniauth_provider).to eq({})
end
end
context 'with a configured provider' do
it 'returns the provider details as a Hash' do
hash = client.github_omniauth_provider
expect(hash['name']).to eq('github')
expect(hash['url']).to eq('https://github.com/')
end
end
end
describe '#rate_limiting_enabled?' do
let(:client) { described_class.new('foo') }
it 'returns true when using GitHub.com' do
expect(client.rate_limiting_enabled?).to eq(true)
end
it 'returns false for GitHub enterprise installations' do
expect(client)
.to receive(:api_endpoint)
.and_return('https://github.kittens.com/')
expect(client.rate_limiting_enabled?).to eq(false)
end
end
end
require 'spec_helper'
describe Gitlab::IssuableMetadata do
let(:user) { create(:user) }
let!(:project) { create(:project, :public, :repository, creator: user, namespace: user.namespace) }
subject { Class.new { include Gitlab::IssuableMetadata }.new }
......@@ -10,6 +10,10 @@ describe Gitlab::IssuableMetadata do
expect(subject.issuable_meta_data(Issue.none, 'Issue')).to eq({})
end
it 'raises an error when given a collection with no limit' do
expect { subject.issuable_meta_data(Issue.all, 'Issue') }.to raise_error(/must have a limit/)
end
context 'issues' do
let!(:issue) { create(:issue, author: user, project: project) }
let!(:closed_issue) { create(:issue, state: :closed, author: user, project: project) }
......@@ -19,7 +23,7 @@ describe Gitlab::IssuableMetadata do
let!(:closing_issues) { create(:merge_requests_closing_issues, issue: issue, merge_request: merge_request) }
it 'aggregates stats on issues' do
data = subject.issuable_meta_data(Issue.all, 'Issue')
data = subject.issuable_meta_data(Issue.all.limit(10), 'Issue')
expect(data.count).to eq(2)
expect(data[issue.id].upvotes).to eq(1)
......@@ -42,7 +46,7 @@ describe Gitlab::IssuableMetadata do
let!(:note) { create(:note_on_merge_request, author: user, project: project, noteable: merge_request, note: "a comment on a MR") }
it 'aggregates stats on merge requests' do
data = subject.issuable_meta_data(MergeRequest.all, 'MergeRequest')
data = subject.issuable_meta_data(MergeRequest.all.limit(10), 'MergeRequest')
expect(data.count).to eq(2)
expect(data[merge_request.id].upvotes).to eq(1)
......
......@@ -91,13 +91,6 @@ describe Gitlab::Middleware::ReadOnly do
end
context 'whitelisted requests' do
it 'expects DELETE request to logout to be allowed' do
response = request.delete('/users/sign_out')
expect(response).not_to be_a_redirect
expect(subject).not_to disallow_request
end
it 'expects a POST internal request to be allowed' do
response = request.post("/api/#{API::API.version}/internal")
......
......@@ -257,8 +257,10 @@ describe "Authentication", "routing" do
expect(post("/users/sign_in")).to route_to('sessions#create')
end
it "DELETE /users/sign_out" do
expect(delete("/users/sign_out")).to route_to('sessions#destroy')
# sign_out with GET instead of DELETE facilitates ad-hoc single-sign-out processes
# (https://gitlab.com/gitlab-org/gitlab-ce/issues/39708)
it "GET /users/sign_out" do
expect(get("/users/sign_out")).to route_to('sessions#destroy')
end
it "POST /users/password" do
......