Commit ad663560 authored by GitLab Bot

Merge remote-tracking branch 'upstream/master' into ce-to-ee-2018-01-24

# Conflicts:
#	.gitlab-ci.yml
#	app/assets/javascripts/dispatcher.js
#	app/views/search/_category.html.haml
#	features/support/db_cleaner.rb
#	lib/gitlab/git/repository.rb
#	spec/services/issues/move_service_spec.rb
#	spec/support/db_cleaner.rb

[ci skip]
parents ae5fd31f 44728e05
@@ -6,7 +6,7 @@ image: "dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.6-golang-1.9-git
   - gitlab-org
 .default-cache: &default-cache
-  key: "ruby-235-with-yarn"
+  key: "ruby-2.3.6-with-yarn"
   paths:
     - vendor/ruby
     - .yarn-cache/
@@ -345,8 +345,11 @@ setup-test-env:
     paths:
       - tmp/tests
+<<<<<<< HEAD
 rspec-pg geo: *rspec-metadata-pg-geo
+=======
+>>>>>>> upstream/master
 rspec-pg 0 27: *rspec-metadata-pg
 rspec-pg 1 27: *rspec-metadata-pg
 rspec-pg 2 27: *rspec-metadata-pg
...
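For readers skimming the `.gitlab-ci.yml` hunks above: the `&default-cache` definition and the `*rspec-metadata-pg` / `*rspec-metadata-pg-geo` job values rely on plain YAML anchors and aliases to share configuration between jobs. A minimal, illustrative sketch of that pattern follows; the `rspec-metadata-pg` template body shown here is assumed for illustration and is not taken from the actual file.

```yaml
# Reusable cache block, published under the &default-cache anchor.
.default-cache: &default-cache
  key: "ruby-2.3.6-with-yarn"
  paths:
    - vendor/ruby
    - .yarn-cache/

# Hidden job template (leading dot), also published as an anchor.
# This body is assumed for illustration only.
.rspec-metadata-pg: &rspec-metadata-pg
  stage: test
  cache: *default-cache   # alias: pull in the cache block defined above
  script:
    - bundle exec rspec

# Concrete jobs reference the template through the alias, as in the hunk above.
rspec-pg 0 27: *rspec-metadata-pg
rspec-pg 1 27: *rspec-metadata-pg
```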
@@ -422,7 +422,7 @@ group :ed25519 do
 end
 # Gitaly GRPC client
-gem 'gitaly-proto', '~> 0.74.0', require: 'gitaly'
+gem 'gitaly-proto', '~> 0.76.0', require: 'gitaly'
 gem 'toml-rb', '~> 0.3.15', require: false
...
@@ -309,7 +309,7 @@ GEM
       po_to_json (>= 1.0.0)
       rails (>= 3.2.0)
     gherkin-ruby (0.3.2)
-    gitaly-proto (0.74.0)
+    gitaly-proto (0.76.0)
       google-protobuf (~> 3.1)
       grpc (~> 1.0)
     github-linguist (4.7.6)
@@ -1091,7 +1091,7 @@ DEPENDENCIES
   gettext (~> 3.2.2)
   gettext_i18n_rails (~> 1.8.0)
   gettext_i18n_rails_js (~> 1.2.0)
-  gitaly-proto (~> 0.74.0)
+  gitaly-proto (~> 0.76.0)
   github-linguist (~> 4.7.0)
   gitlab-flowdock-git-hook (~> 1.0.1)
   gitlab-license (~> 1.0)
...
 /* eslint-disable func-names, space-before-function-paren, no-var, prefer-arrow-callback, wrap-iife, no-shadow, consistent-return, one-var, one-var-declaration-per-line, camelcase, default-case, no-new, quotes, no-duplicate-case, no-case-declarations, no-fallthrough, max-len */
-import notificationsDropdown from './notifications_dropdown';
-import LineHighlighter from './line_highlighter';
 import MergeRequest from './merge_request';
 import Flash from './flash';
-import BlobViewer from './blob/viewer/index';
 import GfmAutoComplete from './gfm_auto_complete';
-import Star from './star';
 import ZenMode from './zen_mode';
-import PerformanceBar from './performance_bar';
 import initNotes from './init_notes';
 import initIssuableSidebar from './init_issuable_sidebar';
 import { convertPermissionToBoolean } from './lib/utils/common_utils';
@@ -601,6 +596,7 @@ import initLDAPGroupsSelect from 'ee/ldap_groups_select'; // eslint-disable-line
         import('./pages/dashboard/groups/index')
           .then(callDefault)
           .catch(fail);
+<<<<<<< HEAD
       case 'admin:licenses:new':
         import(/* webpackChunkName: "admin_licenses" */ 'ee/pages/admin/licenses/new').then(m => m.default()).catch(fail);
         break;
@@ -609,6 +605,8 @@ import initLDAPGroupsSelect from 'ee/ldap_groups_select'; // eslint-disable-line
         break;
       case 'groups:ldap_group_links:index':
         initLDAPGroupsSelect();
+=======
+>>>>>>> upstream/master
         break;
     }
     switch (path[0]) {
@@ -700,23 +698,12 @@ import initLDAPGroupsSelect from 'ee/ldap_groups_select'; // eslint-disable-line
               .then(callDefault)
               .catch(fail);
             break;
-          case 'show':
-            new Star();
-            notificationsDropdown();
-            break;
           case 'wikis':
             import('./pages/projects/wikis')
               .then(callDefault)
               .catch(fail);
             shortcut_handler = true;
             break;
-          case 'snippets':
-            if (path[2] === 'show') {
-              new ZenMode();
-              new LineHighlighter();
-              new BlobViewer();
-            }
-            break;
         }
         break;
     }
@@ -726,7 +713,9 @@ import initLDAPGroupsSelect from 'ee/ldap_groups_select'; // eslint-disable-line
   }
   if (document.querySelector('#peek')) {
-    new PerformanceBar({ container: '#peek' });
+    import('./performance_bar')
+      .then(m => new m.default({ container: '#peek' })) // eslint-disable-line new-cap
+      .catch(fail);
   }
 };
...
@@ -5,8 +5,12 @@ import TreeView from '~/tree';
 import BlobViewer from '~/blob/viewer/index';
 import Activities from '~/activities';
 import { ajaxGet } from '~/lib/utils/common_utils';
+import Star from '../../../star';
+import notificationsDropdown from '../../../notifications_dropdown';
 export default () => {
+  new Star(); // eslint-disable-line no-new
+  notificationsDropdown();
   new ShortcutsNavigation(); // eslint-disable-line no-new
   new NotificationsForm(); // eslint-disable-line no-new
   new UserCallout({ // eslint-disable-line no-new
...
 import initNotes from '~/init_notes';
 import ZenMode from '~/zen_mode';
+import LineHighlighter from '../../../../line_highlighter';
+import BlobViewer from '../../../../blob/viewer';
 export default function () {
+  new LineHighlighter(); // eslint-disable-line no-new
+  new BlobViewer(); // eslint-disable-line no-new
   initNotes();
   new ZenMode(); // eslint-disable-line no-new
 }
@@ -155,6 +155,8 @@ class ApplicationController < ActionController::Base
       format.html do
         render file: Rails.root.join("public", "404"), layout: false, status: "404"
       end
+      # Prevent the Rails CSRF protector from thinking a missing .js file is a JavaScript file
+      format.js { render json: '', status: :not_found, content_type: 'application/json' }
       format.any { head :not_found }
     end
   end
...
@@ -981,10 +981,10 @@ class Project < ActiveRecord::Base
       hooks.hooks_for(hooks_scope).each do |hook|
        hook.async_execute(data, hooks_scope.to_s)
       end
-    end
-    SystemHooksService.new.execute_hooks(data, hooks_scope)
+      SystemHooksService.new.execute_hooks(data, hooks_scope)
+    end
   end
   def execute_services(data, hooks_scope = :push_hooks)
     # Call only service hooks that are active for this scope
...
@@ -25,6 +25,7 @@ class Repository
   attr_accessor :full_path, :disk_path, :project, :is_wiki
   delegate :ref_name_for_sha, to: :raw_repository
+  delegate :bundle_to_disk, to: :raw_repository
   CreateTreeError = Class.new(StandardError)
...
@@ -43,7 +43,6 @@
     = webpack_bundle_tag "main"
     = webpack_bundle_tag "raven" if current_application_settings.clientside_sentry_enabled
     = webpack_bundle_tag "test" if Rails.env.test?
-    = webpack_bundle_tag 'performance_bar' if performance_bar_enabled?
     - if content_for?(:page_specific_javascripts)
       = yield :page_specific_javascripts
...
@@ -4,6 +4,7 @@
   - branch_label = s_('ChangeTypeActionLabel|Revert in branch')
   - revert_merge_request = _('Revert this merge request')
   - revert_commit = _('Revert this commit')
+  - description = s_('ChangeTypeAction|This will create a new commit in order to revert the existing changes.')
   - title = commit.merged_merge_request(current_user) ? revert_merge_request : revert_commit
 - when 'cherry-pick'
   - label = s_('ChangeTypeAction|Cherry-pick')
@@ -17,6 +18,8 @@
       %a.close{ href: "#", "data-dismiss" => "modal" } ×
       %h3.page-title= title
     .modal-body
+      - if description
+        %p.append-bottom-20= description
       = form_tag [type.underscore, @project.namespace.becomes(Namespace), @project, commit], method: :post, remote: false, class: "form-horizontal js-#{type}-form js-requires-input" do
         .form-group.branch
           = label_tag 'start_branch', branch_label, class: 'control-label'
...
@@ -57,7 +57,6 @@
         Titles and Filenames
         %span.badge
           = @search_results.snippet_titles_count
   - else
     %li{ class: active_when(@scope == 'projects') }
       = link_to search_filter_path(scope: 'projects') do
@@ -79,6 +78,7 @@
         Milestones
         %span.badge
           = limited_count(@search_results.limited_milestones_count)
+<<<<<<< HEAD
     - if current_application_settings.elasticsearch_search?
       %li{ class: active_when(@scope == 'blobs') }
         = link_to search_filter_path(scope: 'blobs') do
@@ -95,3 +95,5 @@
         Wiki
         %span.badge
           = limited_count(@search_results.wiki_blobs_count)
+=======
+>>>>>>> upstream/master
---
title: Add application create API
merge_request: 8160
author: Nicolas Merelli @PNSalocin
---
title: Changes Revert this merge request text
merge_request: 16611
author: Jacopo Beschi @jacopo-beschi
type: changed
---
title: Execute system hooks after-commit when executing project hooks
merge_request:
author:
type: fixed
---
title: Return more consistent values for merge_status on MR APIs
merge_request:
author:
type: fixed
@@ -96,7 +96,6 @@ var config = {
     test: './test.js',
     two_factor_auth: './two_factor_auth.js',
     users: './users/index.js',
-    performance_bar: './performance_bar.js',
     webpack_runtime: './webpack.js',
   },
...
# Applications API
> [Introduced][ce-8160] in GitLab 10.5
[ce-8160]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/8160
## Create an application

Create an application by posting a JSON payload.
The user must be an administrator to do this.

Returns `201` if the request succeeds.
```
POST /applications
```
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `name` | string | yes | The name of the application |
| `redirect_uri` | string | yes | The redirect URI of the application |
| `scopes` | string | yes | The scopes of the application |
```bash
curl --request POST --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" --data "name=MyApplication&redirect_uri=http://redirect.uri&scopes=" https://gitlab.example.com/api/v4/applications
```
Example response:
```json
{
"application_id": "5832fc6e14300a0d962240a8144466eef4ee93ef0d218477e55f11cf12fc3737",
"secret": "ee1dd64b6adc89cf7e2c23099301ccc2c61b441064e9324d963c46902a85ec34",
"callback_url": "http://redirect.uri"
}
```
@@ -111,7 +111,7 @@ future GitLab releases.**
 | `CI_BUILD_MANUAL` | `CI_JOB_MANUAL` |
 | `CI_BUILD_TOKEN` | `CI_JOB_TOKEN` |
-## `.gitlab-ci.yaml` defined variables
+## `.gitlab-ci.yml` defined variables
 >**Note:**
 This feature requires GitLab Runner 0.5.0 or higher and GitLab CI 7.14 or higher.
...
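The renamed heading above covers variables declared directly in `.gitlab-ci.yml` with the `variables:` keyword, either globally or per job. A minimal sketch of that mechanism, with variable names and the job chosen purely for illustration (they are not taken from the documentation page being edited):

```yaml
# Globally defined variables are injected into every job's environment.
variables:
  DATABASE_URL: "postgres://postgres@postgres/my_database"

rspec:
  # Job-level variables extend or override the global ones for this job only.
  variables:
    RAILS_ENV: "test"
  script:
    - echo "Running against $DATABASE_URL in $RAILS_ENV"
```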
 require 'database_cleaner'
+<<<<<<< HEAD
 DatabaseCleaner[:active_record].strategy = :deletion, { except: ['licenses'] }
+=======
+DatabaseCleaner[:active_record].strategy = :deletion
+>>>>>>> upstream/master
 Spinach.hooks.before_scenario do
   DatabaseCleaner.start
...
@@ -115,6 +115,7 @@ module API
       # Keep in alphabetical order
       mount ::API::AccessRequests
+      mount ::API::Applications
       mount ::API::AwardEmoji
       mount ::API::Boards
       mount ::API::Branches
...
module API
  # External applications API
  class Applications < Grape::API
    before { authenticated_as_admin! }

    resource :applications do
      desc 'Create a new application' do
        detail 'This feature was introduced in GitLab 10.5'
        success Entities::ApplicationWithSecret
      end
      params do
        requires :name, type: String, desc: 'Application name'
        requires :redirect_uri, type: String, desc: 'Application redirect URI'
        requires :scopes, type: String, desc: 'Application scopes'
      end
      post do
        application = Doorkeeper::Application.new(declared_params)

        if application.save
          present application, with: Entities::ApplicationWithSecret
        else
          render_validation_error! application
        end
      end
    end
  end
end
@@ -575,7 +575,16 @@ module API
       expose :work_in_progress?, as: :work_in_progress
       expose :milestone, using: Entities::Milestone
       expose :merge_when_pipeline_succeeds
-      expose :merge_status
+      # Ideally we should deprecate `MergeRequest#merge_status` exposure and
+      # use `MergeRequest#mergeable?` instead (boolean).
+      # See https://gitlab.com/gitlab-org/gitlab-ce/issues/42344 for more
+      # information.
+      expose :merge_status do |merge_request|
+        # In order to avoid having a breaking change for users, we keep returning the
+        # expected values from MergeRequest#merge_status state machine.
+        merge_request.mergeable? ? 'can_be_merged' : 'cannot_be_merged'
+      end
       expose :diff_head_sha, as: :sha
       expose :merge_commit_sha
       expose :user_notes_count
@@ -1440,5 +1449,15 @@ module API
         pages_domain
       end
     end
+
+    class Application < Grape::Entity
+      expose :uid, as: :application_id
+      expose :redirect_uri, as: :callback_url
+    end
+
+    # Use with care, this exposes the secret
+    class ApplicationWithSecret < Application
+      expose :secret
+    end
   end
 end
@@ -34,7 +34,7 @@ module Gitlab
       def raw(repository, sha)
         Gitlab::GitalyClient.migrate(:git_blob_raw) do |is_enabled|
           if is_enabled
-            Gitlab::GitalyClient::BlobService.new(repository).get_blob(oid: sha, limit: MAX_DATA_DISPLAY_SIZE)
+            repository.gitaly_blob_client.get_blob(oid: sha, limit: MAX_DATA_DISPLAY_SIZE)
           else
             rugged_raw(repository, sha, limit: MAX_DATA_DISPLAY_SIZE)
           end
@@ -70,12 +70,20 @@ module Gitlab
       # Returns array of Gitlab::Git::Blob
       # Does not guarantee blob data will be set
       def batch_lfs_pointers(repository, blob_ids)
+        return [] if blob_ids.empty?
+
+        repository.gitaly_migrate(:batch_lfs_pointers) do |is_enabled|
+          if is_enabled
+            repository.gitaly_blob_client.batch_lfs_pointers(blob_ids)
+          else
             blob_ids.lazy
                     .select { |sha| possible_lfs_blob?(repository, sha) }
                     .map { |sha| rugged_raw(repository, sha, limit: LFS_POINTER_MAX_SIZE) }
                     .select(&:lfs_pointer?)
                     .force
+          end
+        end
       end

       def binary?(data)
         EncodingHelper.detect_libgit2_binary?(data)
@@ -258,7 +266,7 @@ module Gitlab
         Gitlab::GitalyClient.migrate(:git_blob_load_all_data) do |is_enabled|
           @data = begin
             if is_enabled
-              Gitlab::GitalyClient::BlobService.new(repository).get_blob(oid: id, limit: -1).data
+              repository.gitaly_blob_client.get_blob(oid: id, limit: -1).data
             else
               repository.lookup(id).content
             end
...
@@ -1262,6 +1262,27 @@ module Gitlab
         success || gitlab_projects_error
       end
+<<<<<<< HEAD
+=======
+      def delete_remote_branches(remote_name, branch_names)
+        success = @gitlab_projects.delete_remote_branches(remote_name, branch_names)
+        success || gitlab_projects_error
+      end
+
+      def bundle_to_disk(save_path)
+        gitaly_migrate(:bundle_to_disk) do |is_enabled|
+          if is_enabled
+            gitaly_repository_client.create_bundle(save_path)
+          else
+            run_git!(%W(bundle create #{save_path} --all))
+          end
+        end
+
+        true
+      end
+>>>>>>> upstream/master
       # rubocop:disable Metrics/ParameterLists
       def multi_action(
         user, branch_name:, message:, actions:,
@@ -1313,6 +1334,10 @@ module Gitlab
       def gitaly_remote_client
         @gitaly_remote_client ||= Gitlab::GitalyClient::RemoteService.new(self)
       end
+
+      def gitaly_blob_client
+        @gitaly_blob_client ||= Gitlab::GitalyClient::BlobService.new(self)
+      end
       def gitaly_conflicts_client(our_commit_oid, their_commit_oid)
         Gitlab::GitalyClient::ConflictsService.new(self, our_commit_oid, their_commit_oid)
       end
...
 module Gitlab
   module Git
     class WikiPage
-      attr_reader :url_path, :title, :format, :path, :version, :raw_data, :name, :text_data, :historical
+      attr_reader :url_path, :title, :format, :path, :version, :raw_data, :name, :text_data, :historical, :formatted_data
       # This class is meant to be serializable so that it can be constructed
       # by Gitaly and sent over the network to GitLab.
@@ -21,6 +21,7 @@ module Gitlab
         @raw_data = gollum_page.raw_data
         @name = gollum_page.name
         @historical = gollum_page.historical?
+        @formatted_data = gollum_page.formatted_data if gollum_page.is_a?(Gollum::Page)
         @version = version
       end
...
@@ -32,6 +32,26 @@ module Gitlab
             binary: Gitlab::Git::Blob.binary?(data)
           )
         end
+
+        def batch_lfs_pointers(blob_ids)
+          request = Gitaly::GetLFSPointersRequest.new(
+            repository: @gitaly_repo,
+            blob_ids: blob_ids
+          )
+
+          response = GitalyClient.call(@gitaly_repo.storage_name, :blob_service, :get_lfs_pointers, request)
+
+          response.flat_map do |message|
+            message.lfs_pointers.map do |lfs_pointer|
+              Gitlab::Git::Blob.new(
+                id: lfs_pointer.oid,
+                size: lfs_pointer.size,
+                data: lfs_pointer.data,
+                binary: Gitlab::Git::Blob.binary?(lfs_pointer.data)
+              )
+            end
+          end
+        end
       end
     end
   end
...
@@ -25,6 +25,11 @@ module Gitlab
       def conflicts?
         list_conflict_files.any?
+      rescue GRPC::FailedPrecondition
+        # The server raises this exception when it encounters ConflictSideMissing, which
+        # means a conflict exists but its `theirs` or `ours` data is nil due to a non-existent
+        # file in one of the trees.
+        true
       end

       def resolve_conflicts(target_repository, resolution, source_branch, target_branch)
...
@@ -161,6 +161,23 @@ module Gitlab
             return response.error.b, 1
           end
         end
+
+        def create_bundle(save_path)
+          request = Gitaly::CreateBundleRequest.new(repository: @gitaly_repo)
+          response = GitalyClient.call(
+            @storage,
+            :repository_service,
+            :create_bundle,
+            request,
+            timeout: GitalyClient.default_timeout
+          )
+
+          File.open(save_path, 'wb') do |f|
+            response.each do |message|
+              f.write(message.data)
+            end
+          end
+        end
       end
     end
   end
...
...@@ -11,10 +11,6 @@ module Gitlab ...@@ -11,10 +11,6 @@ module Gitlab
untar_with_options(archive: archive, dir: dir, options: 'zxf') untar_with_options(archive: archive, dir: dir, options: 'zxf')
end end
def git_bundle(repo_path:, bundle_path:)
execute(%W(#{git_bin_path} --git-dir=#{repo_path} bundle create #{bundle_path} --all))
end
def git_clone_bundle(repo_path:, bundle_path:) def git_clone_bundle(repo_path:, bundle_path:)
execute(%W(#{git_bin_path} clone --bare -- #{bundle_path} #{repo_path})) execute(%W(#{git_bin_path} clone --bare -- #{bundle_path} #{repo_path}))
Gitlab::Git::Repository.create_hooks(repo_path, File.expand_path(Gitlab.config.gitlab_shell.hooks_path)) Gitlab::Git::Repository.create_hooks(repo_path, File.expand_path(Gitlab.config.gitlab_shell.hooks_path))
......
@@ -21,7 +21,7 @@ module Gitlab
       def bundle_to_disk
         mkdir_p(@shared.export_path)
-        git_bundle(repo_path: path_to_repo, bundle_path: @full_path)
+        @project.repository.bundle_to_disk(@full_path)
       rescue => e
         @shared.error(e)
         false
...
@@ -10,7 +10,7 @@ module Gitlab
       def bundle_to_disk(full_path)
         mkdir_p(@shared.export_path)
-        git_bundle(repo_path: path_to_repo, bundle_path: full_path)
+        @wiki.repository.bundle_to_disk(full_path)
       rescue => e
         @shared.error(e)
         false
...
@@ -9,11 +9,21 @@ require 'fileutils'
 # called 'bundle install' using a different Gemfile, as happens with
 # gitlab-ce and gitaly.

-dir = 'tmp/tests/gitaly'
-abort 'gitaly build failed' unless system('make', chdir: dir)
+tmp_tests_gitaly_dir = File.expand_path('../tmp/tests/gitaly', __dir__)
+
+# Use the top-level bundle vendor folder so that we don't reinstall gems twice
+bundle_vendor_path = File.expand_path('../vendor', __dir__)
+
+env = {
+  # This ensures the `clean` config set in `scripts/prepare_build.sh` isn't taken into account
+  'BUNDLE_IGNORE_CONFIG' => 'true',
+  'BUNDLE_GEMFILE' => File.join(tmp_tests_gitaly_dir, 'ruby', 'Gemfile'),
+  'BUNDLE_FLAGS' => "--jobs=4 --path=#{bundle_vendor_path} --retry=3"
+}
+
+abort 'gitaly build failed' unless system(env, 'make', chdir: tmp_tests_gitaly_dir)

 # Make the 'gitaly' executable look newer than 'GITALY_SERVER_VERSION'.
 # Without this a gitaly executable created in the setup-test-env job
 # will look stale compared to GITALY_SERVER_VERSION.
-FileUtils.touch(File.join(dir, 'gitaly'), mtime: Time.now + (1 << 24))
+FileUtils.touch(File.join(tmp_tests_gitaly_dir, 'gitaly'), mtime: Time.now + (1 << 24))
@@ -3,7 +3,7 @@
 export SETUP_DB=${SETUP_DB:-true}
 export CREATE_DB_USER=${CREATE_DB_USER:-$SETUP_DB}
 export USE_BUNDLE_INSTALL=${USE_BUNDLE_INSTALL:-true}
-export BUNDLE_INSTALL_FLAGS="--without production --jobs $(nproc) --path vendor --retry 3 --quiet"
+export BUNDLE_INSTALL_FLAGS="--without=production --jobs=$(nproc) --path=vendor --retry=3 --quiet"

 if [ "$USE_BUNDLE_INSTALL" != "false" ]; then
   bundle install --clean $BUNDLE_INSTALL_FLAGS && bundle check
...
@@ -34,6 +34,9 @@ describe 'New issue', :js do
       click_button 'Submit issue'

+      # reCAPTCHA alerts when it can't contact the server, so just accept it and move on
+      page.driver.browser.switch_to.alert.accept
+
       # it is impossible to test recaptcha automatically and there is no possibility to fill in recaptcha
       # recaptcha verification is skipped in test environment and it always returns true
       expect(page).not_to have_content('issue title')
...
@@ -12,6 +12,10 @@ describe('JobMediator', () => {
     mock = new MockAdapter(axios);
   });

+  afterEach(() => {
+    mock.restore();
+  });
+
   it('should set defaults', () => {
     expect(mediator.store).toBeDefined();
     expect(mediator.service).toBeDefined();
@@ -24,10 +28,6 @@ describe('JobMediator', () => {
       mock.onGet().reply(200, job, {});
     });

-    afterEach(() => {
-      mock.restore();
-    });
-
     it('should store received data', (done) => {
       mediator.fetchJob();
       setTimeout(() => {
...
@@ -260,6 +260,7 @@ describe Gitlab::Git::Blob, seed_helper: true do
       )
     end

+    shared_examples 'fetching batch of LFS pointers' do
       it 'returns a list of Gitlab::Git::Blob' do
         blobs = described_class.batch_lfs_pointers(repository, [lfs_blob.id])
@@ -280,9 +281,21 @@ describe Gitlab::Git::Blob, seed_helper: true do
       end

       it 'avoids loading large blobs into memory' do
+        # This line could call `lookup` on `repository`, so do here before mocking.
+        non_lfs_blob_id = non_lfs_blob.id
+
         expect(repository).not_to receive(:lookup)

-        described_class.batch_lfs_pointers(repository, [non_lfs_blob.id])
+        described_class.batch_lfs_pointers(repository, [non_lfs_blob_id])
       end
+    end
+
+    context 'when Gitaly batch_lfs_pointers is enabled' do
+      it_behaves_like 'fetching batch of LFS pointers'
+    end
+
+    context 'when Gitaly batch_lfs_pointers is disabled', :disable_gitaly do
+      it_behaves_like 'fetching batch of LFS pointers'
+    end
   end
 end
...
@@ -1926,6 +1926,34 @@ describe Gitlab::Git::Repository, seed_helper: true do
     it { expect(subject.repository_relative_path).to eq(repository.relative_path) }
   end

+  describe '#bundle_to_disk' do
+    shared_examples 'bundling to disk' do
+      let(:save_path) { File.join(Dir.tmpdir, "repo-#{SecureRandom.hex}.bundle") }
+
+      after do
+        FileUtils.rm_rf(save_path)
+      end
+
+      it 'saves a bundle to disk' do
+        repository.bundle_to_disk(save_path)
+
+        success = system(
+          *%W(#{Gitlab.config.git.bin_path} -C #{repository.path} bundle verify #{save_path}),
+          [:out, :err] => '/dev/null'
+        )
+        expect(success).to be true
+      end
+    end
+
+    context 'when Gitaly bundle_to_disk feature is enabled' do
+      it_behaves_like 'bundling to disk'
+    end
+
+    context 'when Gitaly bundle_to_disk feature is disabled', :disable_gitaly do
+      it_behaves_like 'bundling to disk'
+    end
+  end
+
   context 'gitlab_projects commands' do
     let(:gitlab_projects) { repository.gitlab_projects }
     let(:timeout) { Gitlab.config.gitlab_shell.git_timeout }
...
@@ -3661,5 +3661,22 @@ describe Project do
       project = build(:project)
       project.execute_hooks({ data: 'data' }, :merge_request_hooks)
     end
+
+    it 'executes the system hooks when inside a transaction' do
+      allow_any_instance_of(WebHookService).to receive(:execute)
+
+      create(:system_hook, merge_requests_events: true)
+
+      project = build(:project)
+
+      # Ideally, we'd test that `WebHookWorker.jobs.size` increased by 1,
+      # but since the entire spec run takes place in a transaction, we never
+      # actually get to the `after_commit` hook that queues these jobs.
+      expect do
+        project.transaction do
+          project.execute_hooks({ data: 'data' }, :merge_request_hooks)
+        end
+      end.not_to raise_error # Sidekiq::Worker::EnqueueFromTransactionError
+    end
   end
 end
@@ -365,12 +365,18 @@ describe Repository do
       it { is_expected.to be_truthy }
     end

-    context 'non-mergeable branches' do
+    context 'non-mergeable branches without conflict sides missing' do
       subject { repository.can_be_merged?('bb5206fee213d983da88c47f9cf4cc6caf9c66dc', 'feature') }

       it { is_expected.to be_falsey }
     end

+    context 'non-mergeable branches with conflict sides missing' do
+      subject { repository.can_be_merged?('conflict-missing-side', 'conflict-start') }
+
+      it { is_expected.to be_falsey }
+    end
+
     context 'non merged branch' do
       subject { repository.merged_to_root_ref?('fix') }
...
@@ -386,6 +386,17 @@ describe WikiPage do
     end
   end

+  describe '#formatted_content' do
+    it 'returns processed content of the page', :disable_gitaly do
+      subject.create({ title: "RDoc", content: "*bold*", format: "rdoc" })
+      page = wiki.find_page('RDoc')
+
+      expect(page.formatted_content).to eq("\n<p><strong>bold</strong></p>\n")
+
+      destroy_page('RDoc')
+    end
+  end
+
   private

   def remove_temp_repo(path)
...
require 'spec_helper'

describe API::Applications, :api do
  include ApiHelpers

  let(:admin_user) { create(:user, admin: true) }
  let(:user) { create(:user, admin: false) }

  describe 'POST /applications' do
    context 'authenticated and authorized user' do
      it 'creates and returns an OAuth application' do
        expect do
          post api('/applications', admin_user), name: 'application_name', redirect_uri: 'http://application.url', scopes: ''
        end.to change { Doorkeeper::Application.count }.by 1

        application = Doorkeeper::Application.find_by(name: 'application_name', redirect_uri: 'http://application.url')

        expect(response).to have_http_status 201
        expect(json_response).to be_a Hash
        expect(json_response['application_id']).to eq application.uid
        expect(json_response['secret']).to eq application.secret
        expect(json_response['callback_url']).to eq application.redirect_uri
      end

      it 'does not allow creating an application with the wrong redirect_uri format' do
        expect do
          post api('/applications', admin_user), name: 'application_name', redirect_uri: 'wrong_url_format', scopes: ''
        end.not_to change { Doorkeeper::Application.count }

        expect(response).to have_http_status 400
        expect(json_response).to be_a Hash
        expect(json_response['message']['redirect_uri'][0]).to eq('must be an absolute URI.')
      end

      it 'does not allow creating an application without a name' do
        expect do
          post api('/applications', admin_user), redirect_uri: 'http://application.url', scopes: ''
        end.not_to change { Doorkeeper::Application.count }

        expect(response).to have_http_status 400
        expect(json_response).to be_a Hash
        expect(json_response['error']).to eq('name is missing')
      end

      it 'does not allow creating an application without a redirect_uri' do
        expect do
          post api('/applications', admin_user), name: 'application_name', scopes: ''
        end.not_to change { Doorkeeper::Application.count }

        expect(response).to have_http_status 400
        expect(json_response).to be_a Hash
        expect(json_response['error']).to eq('redirect_uri is missing')
      end

      it 'does not allow creating an application without scopes' do
        expect do
          post api('/applications', admin_user), name: 'application_name', redirect_uri: 'http://application.url'
        end.not_to change { Doorkeeper::Application.count }

        expect(response).to have_http_status 400
        expect(json_response).to be_a Hash
        expect(json_response['error']).to eq('scopes is missing')
      end
    end

    context 'authorized user without authorization' do
      it 'does not create application' do
        expect do
          post api('/applications', user), name: 'application_name', redirect_uri: 'http://application.url', scopes: ''
        end.not_to change { Doorkeeper::Application.count }

        expect(response).to have_http_status 403
      end
    end

    context 'non-authenticated user' do
      it 'does not create application' do
        expect do
          post api('/applications'), name: 'application_name', redirect_uri: 'http://application.url'
        end.not_to change { Doorkeeper::Application.count }

        expect(response).to have_http_status 401
      end
    end
  end
end
@@ -302,6 +302,7 @@ describe Issues::MoveService do
       it 'executes project issue hooks' do
         allow_any_instance_of(WebHookService).to receive(:execute)

+<<<<<<< HEAD
         # Ideally, we'd test that `WebHookWorker.jobs.size` increased by 1,
         # but since the entire spec run takes place in a transaction, we never
         # actually get to the `after_commit` hook that queues these jobs.
@@ -316,6 +317,8 @@ describe Issues::MoveService do
       it 'executes group issue hooks' do
         allow_any_instance_of(WebHookService).to receive(:execute)

+=======
+>>>>>>> upstream/master
         # Ideally, we'd test that `WebHookWorker.jobs.size` increased by 1,
         # but since the entire spec run takes place in a transaction, we never
         # actually get to the `after_commit` hook that queues these jobs.
...
@@ -30,11 +30,19 @@ RSpec.configure do |config|
   end

   config.before(:each, :js) do
+<<<<<<< HEAD
     DatabaseCleaner.strategy = :deletion, { except: %w[licenses] }
   end

   config.before(:each, :delete) do
     DatabaseCleaner.strategy = :deletion, { except: %w[licenses] }
+=======
+    DatabaseCleaner.strategy = :deletion
+  end
+
+  config.before(:each, :delete) do
+    DatabaseCleaner.strategy = :deletion
+>>>>>>> upstream/master
   end

   config.before(:each, :migration) do
...