Commit 1003c04e authored by Marin Jankovski

Merge branch 'ce-to-ee-2018-01-23' into 'master'

CE upstream - Tuesday

Closes gitlab-ce#38634

See merge request gitlab-org/gitlab-ee!4205
parents 5f0dab56 579d8978
This source diff could not be displayed because it is too large.
10.4.0-pre
10.5.0-pre
......@@ -17,7 +17,6 @@ import { convertPermissionToBoolean } from './lib/utils/common_utils';
import GlFieldErrors from './gl_field_errors';
import Shortcuts from './shortcuts';
import ShortcutsIssuable from './shortcuts_issuable';
import U2FAuthenticate from './u2f/authenticate';
import Diff from './diff';
import SearchAutocomplete from './search_autocomplete';
......@@ -602,18 +601,15 @@ import initLDAPGroupsSelect from 'ee/ldap_groups_select'; // eslint-disable-line
}
switch (path[0]) {
case 'sessions':
import('./pages/sessions')
.then(callDefault)
.catch(fail);
break;
case 'omniauth_callbacks':
if (!gon.u2f) break;
const u2fAuthenticate = new U2FAuthenticate(
$('#js-authenticate-u2f'),
'#js-login-u2f-form',
gon.u2f,
document.querySelector('#js-login-2fa-device'),
document.querySelector('.js-2fa-form'),
);
u2fAuthenticate.start();
// needed in rspec
gl.u2fAuthenticate = u2fAuthenticate;
import('./pages/omniauth_callbacks')
.then(callDefault)
.catch(fail);
break;
case 'admin':
import('./pages/admin')
.then(callDefault)
......@@ -672,10 +668,6 @@ import initLDAPGroupsSelect from 'ee/ldap_groups_select'; // eslint-disable-line
break;
}
break;
case 'dashboard':
case 'root':
new UserCallout();
break;
case 'profiles':
import('./pages/profiles/index/')
.then(callDefault)
......
import initU2F from '../../shared/sessions/u2f';
export default () => {
initU2F();
};
import initU2F from '../../shared/sessions/u2f';
export default () => {
initU2F();
};
import U2FAuthenticate from '../../u2f/authenticate';
export default () => {
if (!gon.u2f) return;
const u2fAuthenticate = new U2FAuthenticate(
$('#js-authenticate-u2f'),
'#js-login-u2f-form',
gon.u2f,
document.querySelector('#js-login-2fa-device'),
document.querySelector('.js-2fa-form'),
);
u2fAuthenticate.start();
// needed in rspec
gl.u2fAuthenticate = u2fAuthenticate;
};
......@@ -32,8 +32,8 @@ export default class IssuableTemplateSelector extends TemplateSelector {
this.startLoadingSpinner();
Api.issueTemplate(this.namespacePath, this.projectPath, query.name, this.issuableType, (err, currentTemplate) => {
this.currentTemplate = currentTemplate;
if (err) return; // Error handled by global AJAX error handler
this.stopLoadingSpinner();
if (err) return; // Error handled by global AJAX error handler
this.setInputValueToTemplateContent();
});
return;
......
......@@ -2,7 +2,11 @@ module GroupTree
# rubocop:disable Gitlab/ModuleWithInstanceVariables
def render_group_tree(groups)
@groups = if params[:filter].present?
Gitlab::GroupHierarchy.new(groups.search(params[:filter]))
# We find the ancestors by ID of the search results here.
# Otherwise the ancestors would also have filters applied,
# which would cause them not to be preloaded.
group_ids = groups.search(params[:filter]).select(:id)
Gitlab::GroupHierarchy.new(Group.where(id: group_ids))
.base_and_ancestors
else
# Only show root groups if no parent-id is given
......
......@@ -27,12 +27,16 @@ class GroupDescendantsFinder
end
def execute
# The children array might be extended with the ancestors of projects when
# filtering. In that case, take the maximum so the array does not get limited
# Otherwise, allow paginating through all results
# The children array might be extended with the ancestors of projects and
# subgroups when filtering. In that case, take the maximum so the array does
# not get limited. Otherwise, allow paginating through all results.
#
all_required_elements = children
all_required_elements |= ancestors_for_projects if params[:filter]
if params[:filter]
all_required_elements |= ancestors_of_filtered_subgroups
all_required_elements |= ancestors_of_filtered_projects
end
total_count = [all_required_elements.size, paginator.total_count].max
Kaminari.paginate_array(all_required_elements, total_count: total_count)
......@@ -49,8 +53,11 @@ class GroupDescendantsFinder
end
def paginator
@paginator ||= Gitlab::MultiCollectionPaginator.new(subgroups, projects,
per_page: params[:per_page])
@paginator ||= Gitlab::MultiCollectionPaginator.new(
subgroups,
projects.with_route,
per_page: params[:per_page]
)
end
def direct_child_groups
......@@ -94,15 +101,21 @@ class GroupDescendantsFinder
#
# So when searching 'project', on the 'subgroup' page we want to preload
# 'nested-group' but not 'subgroup' or 'root'
def ancestors_for_groups(base_for_ancestors)
Gitlab::GroupHierarchy.new(base_for_ancestors)
def ancestors_of_groups(base_for_ancestors)
group_ids = base_for_ancestors.except(:select, :sort).select(:id)
Gitlab::GroupHierarchy.new(Group.where(id: group_ids))
.base_and_ancestors(upto: parent_group.id)
end
def ancestors_for_projects
def ancestors_of_filtered_projects
projects_to_load_ancestors_of = projects.where.not(namespace: parent_group)
groups_to_load_ancestors_of = Group.where(id: projects_to_load_ancestors_of.select(:namespace_id))
ancestors_for_groups(groups_to_load_ancestors_of)
ancestors_of_groups(groups_to_load_ancestors_of)
.with_selects_for_list(archived: params[:archived])
end
def ancestors_of_filtered_subgroups
ancestors_of_groups(subgroups)
.with_selects_for_list(archived: params[:archived])
end
......@@ -112,7 +125,7 @@ class GroupDescendantsFinder
# When filtering subgroups, we want to find all matches within the tree of
# descendants to show to the user
groups = if params[:filter]
ancestors_for_groups(subgroups_matching_filter)
subgroups_matching_filter
else
direct_child_groups
end
......@@ -121,8 +134,10 @@ class GroupDescendantsFinder
end
def direct_child_projects
GroupProjectsFinder.new(group: parent_group, current_user: current_user, params: params)
.execute
GroupProjectsFinder.new(group: parent_group,
current_user: current_user,
options: { only_owned: true },
params: params).execute
end
# Finds all projects nested under `parent_group` or any of its descendant
......
......@@ -1042,6 +1042,8 @@ class Project < ActiveRecord::Base
end
def fork_source
return nil unless forked?
forked_from_project || fork_network&.root_project
end
......
......@@ -266,15 +266,7 @@ class Repository
return if kept_around?(sha)
# This will still fail if the file is corrupted (e.g. 0 bytes)
begin
raw_repository.write_ref(keep_around_ref_name(sha), sha, shell: false)
rescue Rugged::ReferenceError => ex
Rails.logger.error "Unable to create #{REF_KEEP_AROUND} reference for repository #{path}: #{ex}"
rescue Rugged::OSError => ex
raise unless ex.message =~ /Failed to create locked file/ && ex.message =~ /File exists/
Rails.logger.error "Unable to create #{REF_KEEP_AROUND} reference for repository #{path}: #{ex}"
end
raw_repository.write_ref(keep_around_ref_name(sha), sha, shell: false)
end
def kept_around?(sha)
......
......@@ -331,6 +331,8 @@ class User < ActiveRecord::Base
#
# Returns an ActiveRecord::Relation.
def search(query)
return none if query.blank?
query = query.downcase
order = <<~SQL
......@@ -354,6 +356,8 @@ class User < ActiveRecord::Base
# This method uses ILIKE on PostgreSQL and LIKE on MySQL.
def search_with_secondary_emails(query)
return none if query.blank?
query = query.downcase
email_table = Email.arel_table
......
xml.entry do
xml.id project_commit_url(@project, id: commit.id)
xml.link href: project_commit_url(@project, id: commit.id)
xml.title truncate(commit.title, length: 80)
xml.title truncate(commit.title, length: 80, escape: false)
xml.updated commit.committed_date.xmlschema
xml.media :thumbnail, width: "40", height: "40", url: image_url(avatar_icon(commit.author_email))
......@@ -10,5 +10,5 @@ xml.entry do
xml.email commit.author_email
end
xml.summary markdown(commit.description, pipeline: :single_line)
xml.summary markdown(commit.description, pipeline: :single_line), type: 'html'
end
- illustration = local_assigns.fetch(:illustration)
- illustration_size = local_assigns.fetch(:illustration_size)
- title = local_assigns.fetch(:title)
- content = local_assigns.fetch(:content, nil)
- content = local_assigns.fetch(:content)
- action = local_assigns.fetch(:action, nil)
.row.empty-state
......@@ -11,8 +11,7 @@
.col-xs-12
.text-content
%h4.text-center= title
- if content
%p= content
%p= content
- if action
.text-center
= action
......@@ -97,12 +97,18 @@
title: _('This job requires a manual action'),
content: _('This job depends on a user to trigger its process. Often they are used to deploy code to production environments'),
action: ( link_to _('Trigger this manual action'), play_project_job_path(@project, @build), method: :post, class: 'btn btn-primary', title: _('Trigger this manual action') )
- elsif @build.created?
= render 'empty_state',
illustration: 'illustrations/job_not_triggered.svg',
illustration_size: 'svg-306',
title: _('This job has not been triggered yet'),
content: _('This job depends on upstream jobs that need to succeed in order for this job to be triggered')
- else
= render 'empty_state',
illustration: 'illustrations/job_not_triggered.svg',
illustration_size: 'svg-306',
title: _('This job has not been triggered yet')
title: _('This job has not started yet'),
content: _('This job is in pending state and is waiting to be picked by a runner')
= render "sidebar"
.js-build-options{ data: javascript_build_options }
......
......@@ -24,6 +24,8 @@
.add-to-tree-dropdown
%ul.dropdown-menu
- if can_edit_tree?
%li.dropdown-header
#{ _('This directory') }
%li
= link_to project_new_blob_path(@project, @id) do
#{ _('New file') }
......@@ -60,6 +62,8 @@
#{ _('New directory') }
%li.divider
%li.dropdown-header
#{ _('This repository') }
%li
= link_to new_project_branch_path(@project) do
#{ _('New branch') }
......
#!/usr/bin/env ruby
require 'optparse'
options = {}
opt_parser = OptionParser.new do |opt|
opt.banner = <<DOCSTRING
Profile a URL on this GitLab instance.
Usage:
#{__FILE__} url --output=<profile-html> --sql=<sql-log> [--user=<user>] [--post=<post-data>]
Example:
#{__FILE__} /dashboard/issues --output=dashboard-profile.html --sql=dashboard.log --user=root
DOCSTRING
opt.separator ''
opt.separator 'Options:'
opt.on('-o', '--output=/tmp/profile.html', 'profile output filename') do |output|
options[:profile_output] = output
end
opt.on('-s', '--sql=/tmp/profile_sql.txt', 'SQL output filename') do |sql|
options[:sql_output] = sql
end
opt.on('-u', '--user=root', 'User to authenticate as') do |username|
options[:username] = username
end
opt.on('-p', "--post='user=john&pass=test'", 'Send HTTP POST data') do |post_data|
options[:post_data] = post_data
end
end
opt_parser.parse!
options[:url] = ARGV[0]
if options[:url].nil? ||
options[:profile_output].nil? ||
options[:sql_output].nil?
puts opt_parser
exit
end
require File.expand_path('../config/environment', File.dirname(__FILE__))
result = Gitlab::Profiler.profile(options[:url],
logger: Logger.new(options[:sql_output]),
post_data: options[:post_data],
user: User.find_by_username(options[:username]),
private_token: ENV['PRIVATE_TOKEN'])
printer = RubyProf::CallStackPrinter.new(result)
file = File.open(options[:profile_output], 'w')
printer.print(file)
file.close
This source diff could not be displayed because it is too large.
---
title: Stop loading spinner on error of issuable templates
merge_request: 16600
author: Takuya Noguchi
type: fixed
---
title: Fix bug in which projects with forks could not change visibility settings from
Private to Public
merge_request: 16595
author:
type: fixed
---
title: Allows html text in commits atom feed
merge_request: 16603
author: Jacopo Beschi @jacopo-beschi
type: fixed
---
title: Fix error on empty query for Members API
merge_request: 16235
author:
type: fixed
---
title: Fix issues when rendering groups and their children
merge_request: 16584
author:
type: fixed
---
title: Add section headers to plus button dropdown
merge_request: 16394
author: George Tsiolis
type: added
---
title: Use has_table_privilege for TRIGGER on PostgreSQL
merge_request:
author:
type: fixed
---
title: Default to Gitaly for 'git push' HTTP/SSH, and make Gitaly mandatory for SSH
pull
merge_request: 16586
author:
type: other
......@@ -36,7 +36,8 @@ graphs/dashboards.
GitLab provides built-in tools to aid the process of improving performance:
* [Sherlock](profiling.md#sherlock)
* [Profiling](profiling.md)
* [Sherlock](profiling.md#sherlock)
* [GitLab Performance Monitoring](../administration/monitoring/performance/index.md)
* [Request Profiling](../administration/monitoring/performance/request_profiling.md)
* [QueryRecorder](query_recorder.md) for preventing `N+1` regressions
......
......@@ -4,6 +4,41 @@ To make it easier to track down performance problems GitLab comes with a set of
profiling tools; some of these are available by default while others need to be
explicitly enabled.
## Profiling a URL
There is a `Gitlab::Profiler.profile` method, and a corresponding
`bin/profile-url` script, for profiling a GET or POST request to a
specific URL, either as an anonymous user (the default) or as a specific user.
When using the script, command-line documentation is available by passing no
arguments.
When using the method in an interactive console session, any changes to the
application code within that console session will be reflected in the profiler
output.
For example:
```ruby
Gitlab::Profiler.profile('/my-user')
# Returns a RubyProf::Profile for the regular operation of this request
class UsersController; def show; sleep 100; end; end
Gitlab::Profiler.profile('/my-user')
# Returns a RubyProf::Profile where 100 seconds is spent in UsersController#show
```
Passing a `logger:` keyword argument to `Gitlab::Profiler.profile` will send
ActiveRecord and ActionController log output to that logger. Further options are
documented with the method source.
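As a rough sketch (the log path, output file, and username below are illustrative assumptions, not part of the documented interface), the logger can be combined with one of ruby-prof's printers, mirroring what `bin/profile-url` does:

```ruby
# Illustrative only: the paths and username are assumptions.
require 'logger'

result = Gitlab::Profiler.profile('/dashboard/issues',
                                  logger: Logger.new('/tmp/profile_sql.log'),
                                  user: User.find_by_username('root'))

# Format the RubyProf::Profile with the same printer bin/profile-url uses.
File.open('/tmp/dashboard-profile.html', 'w') do |file|
  RubyProf::CallStackPrinter.new(result).print(file)
end
```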
[GitLab-Profiler](https://gitlab.com/gitlab-com/gitlab-profiler) is a project
that builds on this to add some additional niceties, such as allowing
configuration with a single YAML file for multiple URLs, and uploading the
profile and log output to S3.
For GitLab.com, you can find the latest results here:
<http://redash.gitlab.com/dashboard/gitlab-profiler-statistics>
## Sherlock
Sherlock is a custom profiling tool built into GitLab. Sherlock is _only_
......@@ -27,13 +62,3 @@ Bullet will log query problems to both the Rails log as well as the Chrome
console.
As a follow-up to finding `N+1` queries with Bullet, consider writing a [QueryRecorder test](query_recorder.md) to prevent a regression.
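A sketch of such a test, assuming a hypothetical `visit_some_page` helper (the `ActiveRecord::QueryRecorder` class and `exceed_query_limit` matcher come from GitLab's spec support):

```ruby
it 'avoids N+1 queries when listing issues' do
  # Record a baseline query count (visit_some_page is a hypothetical helper
  # standing in for the action under test).
  control = ActiveRecord::QueryRecorder.new { visit_some_page }

  create_list(:issue, 5, project: project)

  # Adding more records must not add more queries.
  expect { visit_some_page }.not_to exceed_query_limit(control)
end
```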
## GitLab Profiler
[Gitlab-Profiler](https://gitlab.com/gitlab-com/gitlab-profiler) was built to
help developers understand why specific URLs of their application may be slow
and to provide hard data that can help reduce load times.
For GitLab.com, you can find the latest results here:
<http://redash.gitlab.com/dashboard/gitlab-profiler-statistics>
......@@ -88,6 +88,8 @@ Finished in 34.51 seconds (files took 0.76702 seconds to load)
1 example, 0 failures
```
Note: `live_debug` only works on JavaScript-enabled specs.
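A minimal sketch, assuming a made-up feature spec (only the `:js` tag and the bare `live_debug` call are taken from the guide):

```ruby
it 'shows the job trace', :js do
  visit project_job_path(project, job)

  live_debug # pauses the example so the current page can be inspected in a browser
end
```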
### `let` variables
GitLab's RSpec suite has made extensive use of `let` variables to reduce
......
......@@ -21,10 +21,10 @@ project in an easy and automatic way:
1. [Auto Code Quality](#auto-code-quality)
1. [Auto SAST (Static Application Security Testing)](#auto-sast)
1. [Auto SAST for Docker images](#auto-sast-for-docker-images)
1. [Auto DAST (Dynamic Application Security Testing)](#auto-dast)
1. [Auto Browser Performance Testing](#auto-browser-performance-testing)
1. [Auto Review Apps](#auto-review-apps)
1. [Auto DAST (Dynamic Application Security Testing)](#auto-dast)
1. [Auto Deploy](#auto-deploy)
1. [Auto Browser Performance Testing](#auto-browser-performance-testing)
1. [Auto Monitoring](#auto-monitoring)
As Auto DevOps relies on many different components, it's good to have a basic
......@@ -229,6 +229,32 @@ check out.
In GitLab Enterprise Edition Ultimate, any security warnings are also
[shown in the merge request widget](../../user/project/merge_requests/sast_docker.md).
### Auto Review Apps
NOTE: **Note:**
This is an optional step, since many projects do not have a Kubernetes cluster
available. If the [prerequisites](#prerequisites) are not met, the job will
silently be skipped.
CAUTION: **Caution:**
Your apps should *not* be manipulated outside of Helm (using Kubernetes directly).
This can cause confusion with Helm not detecting the change, and subsequent
deploys with Auto DevOps can undo your changes. Also, if you change something
and want to undo it by deploying again, Helm may not detect that anything changed
in the first place, and thus not realize that it needs to re-apply the old config.
[Review Apps][review-app] are temporary application environments based on the
branch's code so developers, designers, QA, product managers, and other
reviewers can actually see and interact with code changes as part of the review
process. Auto Review Apps create a Review App for each branch.
The Review App will have a unique URL based on the project name, the branch
name, and a unique number, combined with the Auto DevOps base domain. For
example, `user-project-branch-1234.example.com`. A link to the Review App shows
up in the merge request widget for easy discovery. When the branch is deleted,
for example after the merge request is merged, the Review App will automatically
be deleted.
### Auto DAST
> Introduced in [GitLab Enterprise Edition Ultimate][ee] 10.4.
......@@ -257,32 +283,6 @@ Auto Browser Performance Testing utilizes the [Sitespeed.io container](https://h
In GitLab Enterprise Edition Premium, performance differences between the source
and target branches are [shown in the merge request widget](../../user/project/merge_requests/browser_performance_testing.md).
### Auto Review Apps
NOTE: **Note:**
This is an optional step, since many projects do not have a Kubernetes cluster
available. If the [prerequisites](#prerequisites) are not met, the job will
silently be skipped.
CAUTION: **Caution:**
Your apps should *not* be manipulated outside of Helm (using Kubernetes directly.)
This can cause confusion with Helm not detecting the change, and subsequent
deploys with Auto DevOps can undo your changes. Also, if you change something
and want to undo it by deploying again, Helm may not detect that anything changed
in the first place, and thus not realize that it needs to re-apply the old config.
[Review Apps][review-app] are temporary application environments based on the
branch's code so developers, designers, QA, product managers, and other
reviewers can actually see and interact with code changes as part of the review
process. Auto Review Apps create a Review App for each branch.
The Review App will have a unique URL based on the project name, the branch
name, and a unique number, combined with the Auto DevOps base domain. For
example, `user-project-branch-1234.example.com`. A link to the Review App shows
up in the merge request widget for easy discovery. When the branch is deleted,
for example after the merge request is merged, the Review App will automatically
be deleted.
### Auto Deploy
NOTE: **Note:**
......
module API
module Helpers
module InternalHelpers
SSH_GITALY_FEATURES = {
'git-receive-pack' => [:ssh_receive_pack, Gitlab::GitalyClient::MigrationStatus::OPT_IN],
'git-upload-pack' => [:ssh_upload_pack, Gitlab::GitalyClient::MigrationStatus::OPT_OUT]
}.freeze
attr_reader :redirected_path
def wiki?
......@@ -102,8 +97,14 @@ module API
# Return the Gitaly Address if it is enabled
def gitaly_payload(action)
feature, status = SSH_GITALY_FEATURES[action]
return unless feature && Gitlab::GitalyClient.feature_enabled?(feature, status: status)
return unless %w[git-receive-pack git-upload-pack].include?(action)
if action == 'git-receive-pack'
return unless Gitlab::GitalyClient.feature_enabled?(
:ssh_receive_pack,
status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT
)
end
{
repository: repository.gitaly_repository,
......
......@@ -22,7 +22,7 @@ module API
source = find_source(source_type, params[:id])
users = source.users
users = users.merge(User.search(params[:query])) if params[:query]
users = users.merge(User.search(params[:query])) if params[:query].present?
present paginate(users), with: Entities::Member, source: source
end
......
......@@ -23,7 +23,7 @@ module API
source = find_source(source_type, params[:id])
users = source.users
users = users.merge(User.search(params[:query])) if params[:query]
users = users.merge(User.search(params[:query])) if params[:query].present?
present paginate(users), with: ::API::Entities::Member, source: source
end
......
......@@ -12,30 +12,40 @@ module Gitlab
# Returns true if the current user can create and execute triggers on the
# given table.
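#
# Example (illustrative table name):
#
#   Grant.create_and_execute_trigger?('projects') # => true or false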
def self.create_and_execute_trigger?(table)
priv =
if Database.postgresql?
where(privilege_type: 'TRIGGER', table_name: table)
.where('grantee = user')
else
queries = [
Grant.select(1)
.from('information_schema.user_privileges')
.where("PRIVILEGE_TYPE = 'SUPER'")
.where("GRANTEE = CONCAT('\\'', REPLACE(CURRENT_USER(), '@', '\\'@\\''), '\\'')"),
if Database.postgresql?
# We _must not_ use quote_table_name as this will produce double
# quotes on PostgreSQL and for "has_table_privilege" we need single
# quotes.
quoted_table = connection.quote(table)
Grant.select(1)
.from('information_schema.schema_privileges')
.where("PRIVILEGE_TYPE = 'TRIGGER'")
.where('TABLE_SCHEMA = ?', Gitlab::Database.database_name)
.where("GRANTEE = CONCAT('\\'', REPLACE(CURRENT_USER(), '@', '\\'@\\''), '\\'')")
]
begin
from(nil)
.pluck("has_table_privilege(#{quoted_table}, 'TRIGGER')")
.first
rescue ActiveRecord::StatementInvalid
# This error is raised when using a non-existing table name. In this
# case we just want to return false as a user technically can't
# create triggers for such a table.
false
end
else
queries = [
Grant.select(1)
.from('information_schema.user_privileges')
.where("PRIVILEGE_TYPE = 'SUPER'")
.where("GRANTEE = CONCAT('\\'', REPLACE(CURRENT_USER(), '@', '\\'@\\''), '\\'')"),
union = SQL::Union.new(queries).to_sql
Grant.select(1)
.from('information_schema.schema_privileges')
.where("PRIVILEGE_TYPE = 'TRIGGER'")
.where('TABLE_SCHEMA = ?', Gitlab::Database.database_name)
.where("GRANTEE = CONCAT('\\'', REPLACE(CURRENT_USER(), '@', '\\'@\\''), '\\'')")
]
Grant.from("(#{union}) privs")
end
union = SQL::Union.new(queries).to_sql
priv.any?
Grant.from("(#{union}) privs").any?
end
end
end
end
......
......@@ -132,6 +132,8 @@ module Gitlab
end
def find_by_gitaly(repository, sha, path, limit: MAX_DATA_DISPLAY_SIZE)
return unless path
path = path.sub(/\A\/*/, '')
path = '/' if path.empty?
name = File.basename(path)
......@@ -173,6 +175,8 @@ module Gitlab
end
def find_by_rugged(repository, sha, path, limit:)
return unless path
rugged_commit = repository.lookup(sha)
root_tree = rugged_commit.tree
......
......@@ -1091,19 +1091,6 @@ module Gitlab
end
end
def shell_write_ref(ref_path, ref, old_ref)
raise ArgumentError, "invalid ref_path #{ref_path.inspect}" if ref_path.include?(' ')
raise ArgumentError, "invalid ref #{ref.inspect}" if ref.include?("\x00")
raise ArgumentError, "invalid old_ref #{old_ref.inspect}" if !old_ref.nil? && old_ref.include?("\x00")
input = "update #{ref_path}\x00#{ref}\x00#{old_ref}\x00"
run_git!(%w[update-ref --stdin -z]) { |stdin| stdin.write(input) }
end
def rugged_write_ref(ref_path, ref)
rugged.references.create(ref_path, ref, force: true)
end
def fetch_ref(source_repository, source_ref:, target_ref:)
Gitlab::Git.check_namespace!(source_repository)
source_repository = RemoteRepository.new(source_repository) unless source_repository.is_a?(RemoteRepository)
......@@ -1359,6 +1346,25 @@ module Gitlab
private
def shell_write_ref(ref_path, ref, old_ref)
raise ArgumentError, "invalid ref_path #{ref_path.inspect}" if ref_path.include?(' ')
raise ArgumentError, "invalid ref #{ref.inspect}" if ref.include?("\x00")
raise ArgumentError, "invalid old_ref #{old_ref.inspect}" if !old_ref.nil? && old_ref.include?("\x00")
input = "update #{ref_path}\x00#{ref}\x00#{old_ref}\x00"
run_git!(%w[update-ref --stdin -z]) { |stdin| stdin.write(input) }
end
def rugged_write_ref(ref_path, ref)
rugged.references.create(ref_path, ref, force: true)
rescue Rugged::ReferenceError => ex
Rails.logger.error "Unable to create #{ref_path} reference for repository #{path}: #{ex}"
rescue Rugged::OSError => ex
raise unless ex.message =~ /Failed to create locked file/ && ex.message =~ /File exists/
Rails.logger.error "Unable to create #{ref_path} reference for repository #{path}: #{ex}"
end
def fresh_worktree?(path)
File.exist?(path) && !clean_stuck_worktree(path)
end
......
# coding: utf-8
module Gitlab
module Profiler
FILTERED_STRING = '[FILTERED]'.freeze
IGNORE_BACKTRACES = %w[
lib/gitlab/i18n.rb
lib/gitlab/request_context.rb
config/initializers
lib/gitlab/database/load_balancing/
lib/gitlab/etag_caching/
lib/gitlab/metrics/
lib/gitlab/middleware/
lib/gitlab/performance_bar/
lib/gitlab/request_profiler/
lib/gitlab/profiler.rb
].freeze
# Takes a URL to profile (can be a fully-qualified URL, or an absolute path)
# and returns the ruby-prof profile result. Formatting that result is the
# caller's responsibility. Requests are GET requests unless post_data is
# passed.
#
# Optional arguments:
# - logger: will be used for SQL logging, including a summary at the end of
# the log file of the total time spent per model class.
#
# - post_data: a string of raw POST data to use. Changes the HTTP verb to
# POST.
#
# - user: a user to authenticate as. Only works if the user has a valid
# personal access token.
#
# - private_token: instead of providing a user instance, the token can be
# given as a string. Takes precedence over the user option.
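#
# Example (illustrative values for the URL, post data and token):
#
#   Gitlab::Profiler.profile('/api/v4/projects',
#                            post_data: '{"name": "sample"}',
#                            private_token: 'abc123')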
def self.profile(url, logger: nil, post_data: nil, user: nil, private_token: nil)
app = ActionDispatch::Integration::Session.new(Rails.application)
verb = :get
headers = {}
if post_data
verb = :post
headers['Content-Type'] = 'application/json'
end
if user
private_token ||= user.personal_access_tokens.active.pluck(:token).first
end
headers['Private-Token'] = private_token if private_token
logger = create_custom_logger(logger, private_token: private_token)
RequestStore.begin!
# Make an initial call for an asset path in development mode to avoid
# sprockets dominating the profiler output.
ActionController::Base.helpers.asset_path('katex.css') if Rails.env.development?
# Rails loads internationalization files lazily the first time a
# translation is needed. Running this prevents this overhead from showing
# up in profiles.
::I18n.t('.')[:test_string]
# Remove API route mounting from the profile.
app.get('/api/v4/users')
result = with_custom_logger(logger) do
RubyProf.profile { app.public_send(verb, url, post_data, headers) } # rubocop:disable GitlabSecurity/PublicSend
end
RequestStore.end!
log_load_times_by_model(logger)
result
end
def self.create_custom_logger(logger, private_token: nil)
return unless logger
logger.dup.tap do |new_logger|
new_logger.instance_variable_set(:@private_token, private_token)
class << new_logger
attr_reader :load_times_by_model, :private_token
def debug(message, *)
message.gsub!(private_token, FILTERED_STRING) if private_token
_, type, time = *message.match(/(\w+) Load \(([0-9.]+)ms\)/)
if type && time
@load_times_by_model ||= {}
@load_times_by_model[type] ||= 0
@load_times_by_model[type] += time.to_f
end
super
backtrace = Rails.backtrace_cleaner.clean(caller)
backtrace.each do |caller_line|
next if caller_line.match(Regexp.union(IGNORE_BACKTRACES))
stripped_caller_line = caller_line.sub("#{Rails.root}/", '')
super(" ↳ #{stripped_caller_line}")
end
end
end
end
end
def self.with_custom_logger(logger)
original_colorize_logging = ActiveSupport::LogSubscriber.colorize_logging
original_activerecord_logger = ActiveRecord::Base.logger
original_actioncontroller_logger = ActionController::Base.logger
if logger
ActiveSupport::LogSubscriber.colorize_logging = false
ActiveRecord::Base.logger = logger
ActionController::Base.logger = logger
end
result = yield
ActiveSupport::LogSubscriber.colorize_logging = original_colorize_logging
ActiveRecord::Base.logger = original_activerecord_logger
ActionController::Base.logger = original_actioncontroller_logger
result
end
def self.log_load_times_by_model(logger)
return unless logger.respond_to?(:load_times_by_model)
logger.load_times_by_model.to_a.sort_by(&:last).reverse.each do |(model, time)|
logger.info("#{model} total: #{time.round(2)}ms")
end
end
end
end
......@@ -34,7 +34,10 @@ module Gitlab
feature_enabled = case action.to_s
when 'git_receive_pack'
Gitlab::GitalyClient.feature_enabled?(:post_receive_pack)
Gitlab::GitalyClient.feature_enabled?(
:post_receive_pack,
status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT
)
when 'git_upload_pack'
true
when 'info_refs'
......
......@@ -20,4 +20,24 @@ describe Dashboard::GroupsController do
expect(assigns(:groups)).to contain_exactly(member_of_group)
end
context 'when rendering an expanded hierarchy with public groups you are not a member of', :nested_groups do
let!(:top_level_result) { create(:group, name: 'chef-top') }
let!(:top_level_a) { create(:group, name: 'top-a') }
let!(:sub_level_result_a) { create(:group, name: 'chef-sub-a', parent: top_level_a) }
let!(:other_group) { create(:group, name: 'other') }
before do
top_level_result.add_master(user)
top_level_a.add_master(user)
end
it 'renders only groups the user is a member of when searching hierarchy correctly' do
get :index, filter: 'chef', format: :json
expect(response).to have_gitlab_http_status(200)
all_groups = [top_level_result, top_level_a, sub_level_result_a]
expect(assigns(:groups)).to contain_exactly(*all_groups)
end
end
end
......@@ -160,6 +160,30 @@ describe Groups::ChildrenController do
expect(json_response).to eq([])
end
it 'succeeds if multiple pages contain matching subgroups' do
create(:group, parent: group, name: 'subgroup-filter-1')
create(:group, parent: group, name: 'subgroup-filter-2')
# Creating the group-to-nest first so it would be loaded into the
# relation first, before its parents. This is what would cause the
# crash in: https://gitlab.com/gitlab-org/gitlab-ce/issues/40785.
#
# If we create the parent groups first, those would be loaded into the
# collection first, and the pagination would cut off the actual search
# result. In this case the hierarchy can be rendered without crashing,
# it's just incomplete.
group_to_nest = create(:group, parent: group, name: 'subsubgroup-filter-3')
subgroup = create(:group, parent: group)
3.times do |i|
subgroup = create(:group, parent: subgroup)
end
group_to_nest.update!(parent: subgroup)
get :index, group_id: group.to_param, filter: 'filter', per_page: 3, format: :json
expect(response).to have_gitlab_http_status(200)
end
it 'includes pagination headers' do
2.times { |i| create(:group, :public, parent: public_subgroup, name: "filterme#{i}") }
......
require 'spec_helper'
describe Projects::AvatarsController do
let(:project) { create(:project, avatar: fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")) }
let(:project) { create(:project, :repository, avatar: fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")) }
let(:user) { create(:user) }
before do
......@@ -10,6 +10,12 @@ describe Projects::AvatarsController do
controller.instance_variable_set(:@project, project)
end
it 'GET #show' do
get :show, namespace_id: project.namespace.id, project_id: project.id
expect(response).to have_gitlab_http_status(404)
end
it 'removes avatar from DB by calling destroy' do
delete :destroy, namespace_id: project.namespace.id, project_id: project.id
expect(project.avatar.present?).to be_falsey
......
......@@ -41,15 +41,21 @@ describe Projects::CommitsController do
context "when the ref name ends in .atom" do
context "when the ref does not exist with the suffix" do
it "renders as atom" do
before do
get(:show,
namespace_id: project.namespace,
project_id: project,
id: "master.atom")
end
it "renders as atom" do
expect(response).to be_success
expect(response.content_type).to eq('application/atom+xml')
end
it 'renders summary with type=html' do
expect(response.body).to include('<summary type="html">')
end
end
context "when the ref exists with the suffix" do
......
......@@ -395,12 +395,12 @@ feature 'Jobs' do
expect(page).to have_link('Trigger this manual action')
end
it 'plays manual action', :js do
it 'plays manual action and shows pending status', :js do
click_link 'Trigger this manual action'
wait_for_requests
expect(page).to have_content('This job has not been triggered')
expect(page).to have_content('This job is stuck, because the project doesn\'t have any runners online assigned to it.')
expect(page).to have_content('This job has not started yet')
expect(page).to have_content('This job is in pending state and is waiting to be picked by a runner')
expect(page).to have_content('pending')
end
end
......@@ -414,6 +414,20 @@ feature 'Jobs' do
it 'shows empty state' do
expect(page).to have_content('This job has not been triggered yet')
expect(page).to have_content('This job depends on upstream jobs that need to succeed in order for this job to be triggered')
end
end
context 'Pending job' do
let(:job) { create(:ci_build, :pending, pipeline: pipeline) }
before do
visit project_job_path(project, job)
end
it 'shows pending empty state' do
expect(page).to have_content('This job has not started yet')
expect(page).to have_content('This job is in pending state and is waiting to be picked by a runner')
end
end
end
......
......@@ -35,6 +35,15 @@ describe GroupDescendantsFinder do
expect(finder.execute).to contain_exactly(project)
end
it 'does not include projects shared with the group' do
project = create(:project, namespace: group)
other_project = create(:project)
other_project.project_group_links.create(group: group,
group_access: ProjectGroupLink::MASTER)
expect(finder.execute).to contain_exactly(project)
end
context 'when archived is `true`' do
let(:params) { { archived: 'true' } }
......@@ -189,6 +198,17 @@ describe GroupDescendantsFinder do
expect(finder.execute).to contain_exactly(subgroup, matching_project)
end
context 'with a small page size' do
let(:params) { { filter: 'test', per_page: 1 } }
it 'contains all the ancestors of a matching subgroup regardless the page size' do
subgroup = create(:group, :private, parent: group)
matching = create(:group, :private, name: 'testgroup', parent: subgroup)
expect(finder.execute).to contain_exactly(subgroup, matching)
end
end
it 'does not include the parent itself' do
group.update!(name: 'test')
......
require 'spec_helper'
describe ProjectsHelper do
include ProjectForksHelper
describe "#project_status_css_class" do
it "returns appropriate class" do
expect(project_status_css_class("started")).to eq("active")
......@@ -10,9 +12,9 @@ describe ProjectsHelper do
end
describe "can_change_visibility_level?" do
let(:project) { create(:project, :repository) }
let(:project) { create(:project) }
let(:user) { create(:project_member, :reporter, user: create(:user), project: project).user }
let(:fork_project) { Projects::ForkService.new(project, user).execute }
let(:forked_project) { fork_project(project, user) }
it "returns false if there are no appropriate permissions" do
allow(helper).to receive(:can?) { false }
......@@ -26,21 +28,29 @@ describe ProjectsHelper do
expect(helper.can_change_visibility_level?(project, user)).to be_truthy
end
it 'allows visibility level to be changed if the project is forked' do
allow(helper).to receive(:can?).with(user, :change_visibility_level, project) { true }
project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
fork_project(project)
expect(helper.can_change_visibility_level?(project, user)).to be_truthy
end
context "forks" do
it "returns false if there are permissions and origin project is PRIVATE" do
allow(helper).to receive(:can?) { true }
project.update visibility_level: Gitlab::VisibilityLevel::PRIVATE
project.update(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
expect(helper.can_change_visibility_level?(fork_project, user)).to be_falsey
expect(helper.can_change_visibility_level?(forked_project, user)).to be_falsey
end
it "returns true if there are permissions and origin project is INTERNAL" do
allow(helper).to receive(:can?) { true }
project.update visibility_level: Gitlab::VisibilityLevel::INTERNAL
project.update(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
expect(helper.can_change_visibility_level?(fork_project, user)).to be_truthy
expect(helper.can_change_visibility_level?(forked_project, user)).to be_truthy
end
end
end
......
......@@ -16,6 +16,18 @@ describe Gitlab::Git::Blob, seed_helper: true do
end
shared_examples 'finding blobs' do
context 'nil path' do
let(:blob) { Gitlab::Git::Blob.find(repository, SeedRepo::Commit::ID, nil) }
it { expect(blob).to eq(nil) }
end
context 'blank path' do
let(:blob) { Gitlab::Git::Blob.find(repository, SeedRepo::Commit::ID, '') }
it { expect(blob).to eq(nil) }
end
context 'file in subdir' do
let(:blob) { Gitlab::Git::Blob.find(repository, SeedRepo::Commit::ID, "files/ruby/popen.rb") }
......
require 'spec_helper'
describe Gitlab::Profiler do
RSpec::Matchers.define_negated_matcher :not_change, :change
let(:null_logger) { Logger.new('/dev/null') }
let(:private_token) { 'private' }
describe '.profile' do
let(:app) { double(:app) }
before do
allow(ActionDispatch::Integration::Session).to receive(:new).and_return(app)
allow(app).to receive(:get)
end
it 'returns a profile result' do
expect(described_class.profile('/')).to be_an_instance_of(RubyProf::Profile)
end
it 'uses the custom logger given' do
expect(described_class).to receive(:create_custom_logger)
.with(null_logger, private_token: anything)
.and_call_original
described_class.profile('/', logger: null_logger)
end
it 'sends a POST request when data is passed' do
post_data = '{"a":1}'
expect(app).to receive(:post).with(anything, post_data, anything)
described_class.profile('/', post_data: post_data)
end
it 'uses the private_token for auth if given' do
expect(app).to receive(:get).with('/', nil, 'Private-Token' => private_token)
expect(app).to receive(:get).with('/api/v4/users')
described_class.profile('/', private_token: private_token)
end
it 'uses the user for auth if given' do
user = double(:user)
user_token = 'user'
allow(user).to receive_message_chain(:personal_access_tokens, :active, :pluck, :first).and_return(user_token)
expect(app).to receive(:get).with('/', nil, 'Private-Token' => user_token)
expect(app).to receive(:get).with('/api/v4/users')
described_class.profile('/', user: user)
end
it 'uses the private_token for auth if both it and user are set' do
user = double(:user)
user_token = 'user'
allow(user).to receive_message_chain(:personal_access_tokens, :active, :pluck, :first).and_return(user_token)
expect(app).to receive(:get).with('/', nil, 'Private-Token' => private_token)
expect(app).to receive(:get).with('/api/v4/users')
described_class.profile('/', user: user, private_token: private_token)
end
end
describe '.create_custom_logger' do
it 'does nothing when nil is passed' do
expect(described_class.create_custom_logger(nil)).to be_nil
end
context 'the new logger' do
let(:custom_logger) do
described_class.create_custom_logger(null_logger, private_token: private_token)
end
it 'does not affect the existing logger' do
expect(null_logger).not_to receive(:debug)
expect(custom_logger).to receive(:debug).and_call_original
custom_logger.debug('Foo')
end
it 'strips out the private token' do
expect(custom_logger).to receive(:add) do |severity, _progname, message|
expect(severity).to eq(Logger::DEBUG)
expect(message).to include('public').and include(described_class::FILTERED_STRING)
expect(message).not_to include(private_token)
end
custom_logger.debug("public #{private_token}")
end
it 'tracks model load times by model' do
custom_logger.debug('This is not a model load')
custom_logger.debug('User Load (1.2ms)')
custom_logger.debug('User Load (1.3ms)')
custom_logger.debug('Project Load (10.4ms)')
expect(custom_logger.load_times_by_model).to eq('User' => 2.5,
'Project' => 10.4)
end
it 'logs the backtrace, ignoring lines as appropriate' do
# Skip Rails's backtrace cleaning.
allow(Rails.backtrace_cleaner).to receive(:clean, &:itself)
expect(custom_logger).to receive(:add)
.with(Logger::DEBUG,
anything,
a_string_matching(File.basename(__FILE__)))
.twice
expect(custom_logger).not_to receive(:add).with(Logger::DEBUG,
anything,
a_string_matching('lib/gitlab/profiler.rb'))
# Force a part of the backtrace to be in the (ignored) profiler source
# file.
described_class.with_custom_logger(nil) { custom_logger.debug('Foo') }
end
end
end
describe '.with_custom_logger' do
context 'when the logger is set' do
it 'uses the replacement logger for the duration of the block' do
expect(null_logger).to receive(:debug).and_call_original
expect { described_class.with_custom_logger(null_logger) { ActiveRecord::Base.logger.debug('foo') } }
.to not_change { ActiveRecord::Base.logger }
.and not_change { ActionController::Base.logger }
.and not_change { ActiveSupport::LogSubscriber.colorize_logging }
end
it 'returns the result of the block' do
expect(described_class.with_custom_logger(null_logger) { 2 }).to eq(2)
end
end
context 'when the logger is nil' do
it 'returns the result of the block' do
expect(described_class.with_custom_logger(nil) { 2 }).to eq(2)
end
it 'does not modify the standard Rails loggers' do
expect { described_class.with_custom_logger(nil) { } }
.to not_change { ActiveRecord::Base.logger }
.and not_change { ActionController::Base.logger }
.and not_change { ActiveSupport::LogSubscriber.colorize_logging }
end
end
end
end
......@@ -3,12 +3,19 @@ require Rails.root.join('db', 'post_migrate', '20171114104051_remove_empty_fork_
describe RemoveEmptyForkNetworks, :migration do
let!(:fork_networks) { table(:fork_networks) }
let!(:projects) { table(:projects) }
let!(:fork_network_members) { table(:fork_network_members) }
let(:deleted_project) { create(:project) }
let!(:empty_network) { create(:fork_network, id: 1, root_project_id: deleted_project.id) }
let!(:other_network) { create(:fork_network, id: 2, root_project_id: create(:project).id) }
let(:deleted_project) { projects.create! }
let!(:empty_network) { fork_networks.create!(id: 1, root_project_id: deleted_project.id) }
let!(:other_network) { fork_networks.create!(id: 2, root_project_id: projects.create.id) }
before do
fork_network_members.create(fork_network_id: empty_network.id,
project_id: empty_network.root_project_id)
fork_network_members.create(fork_network_id: other_network.id,
project_id: other_network.root_project_id)
deleted_project.destroy!
end
......
......@@ -2273,6 +2273,10 @@ describe Project do
expect(second_fork.fork_source).to eq(project)
end
it 'returns nil if it is the root of the fork network' do
expect(project.fork_source).to be_nil
end
end
describe '#lfs_storage_project' do
......
......@@ -994,6 +994,14 @@ describe User do
expect(described_class.search(user3.username.upcase)).to eq([user3])
end
end
it 'returns no matches for an empty string' do
expect(described_class.search('')).to be_empty
end
it 'returns no matches for nil' do
expect(described_class.search(nil)).to be_empty
end
end
describe '.search_with_secondary_emails' do
......@@ -1048,6 +1056,14 @@ describe User do
it 'does not return users with a matching part of secondary email' do
expect(search_with_secondary_emails(email.email[1..4])).not_to include([email.user])
end
it 'returns no matches for an empty string' do
expect(search_with_secondary_emails('')).to be_empty
end
it 'returns no matches for nil' do
expect(search_with_secondary_emails(nil)).to be_empty
end
end
describe '.find_by_ssh_key_id' do
......
......@@ -317,35 +317,20 @@ describe API::Internal do
end
context "git pull" do
context "gitaly disabled", :disable_gitaly do
it "has the correct payload" do
pull(key, project)
expect(response).to have_gitlab_http_status(200)
expect(json_response["status"]).to be_truthy
expect(json_response["repository_path"]).to eq(project.repository.path_to_repo)
expect(json_response["gl_repository"]).to eq("project-#{project.id}")
expect(json_response["gitaly"]).to be_nil
expect(user).to have_an_activity_record
end
end
context "gitaly enabled" do
it "has the correct payload" do
pull(key, project)
it "has the correct payload" do
pull(key, project)
expect(response).to have_gitlab_http_status(200)
expect(json_response["status"]).to be_truthy
expect(json_response["repository_path"]).to eq(project.repository.path_to_repo)
expect(json_response["gl_repository"]).to eq("project-#{project.id}")
expect(json_response["gitaly"]).not_to be_nil
expect(json_response["gitaly"]["repository"]).not_to be_nil
expect(json_response["gitaly"]["repository"]["storage_name"]).to eq(project.repository.gitaly_repository.storage_name)
expect(json_response["gitaly"]["repository"]["relative_path"]).to eq(project.repository.gitaly_repository.relative_path)
expect(json_response["gitaly"]["address"]).to eq(Gitlab::GitalyClient.address(project.repository_storage))
expect(json_response["gitaly"]["token"]).to eq(Gitlab::GitalyClient.token(project.repository_storage))
expect(user).to have_an_activity_record
end
expect(response).to have_gitlab_http_status(200)
expect(json_response["status"]).to be_truthy
expect(json_response["repository_path"]).to eq(project.repository.path_to_repo)
expect(json_response["gl_repository"]).to eq("project-#{project.id}")
expect(json_response["gitaly"]).not_to be_nil
expect(json_response["gitaly"]["repository"]).not_to be_nil
expect(json_response["gitaly"]["repository"]["storage_name"]).to eq(project.repository.gitaly_repository.storage_name)
expect(json_response["gitaly"]["repository"]["relative_path"]).to eq(project.repository.gitaly_repository.relative_path)
expect(json_response["gitaly"]["address"]).to eq(Gitlab::GitalyClient.address(project.repository_storage))
expect(json_response["gitaly"]["token"]).to eq(Gitlab::GitalyClient.token(project.repository_storage))
expect(user).to have_an_activity_record
end
end
......
......@@ -65,6 +65,16 @@ describe API::Members do
expect(json_response.count).to eq(1)
expect(json_response.first['username']).to eq(master.username)
end
it 'finds all members with no query specified' do
get api("/#{source_type.pluralize}/#{source.id}/members", developer), query: ''
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.count).to eq(2)
expect(json_response.map { |u| u['id'] }).to match_array [master.id, developer.id]
end
end
end
......
......@@ -58,6 +58,16 @@ describe API::V3::Members do
expect(json_response.count).to eq(1)
expect(json_response.first['username']).to eq(master.username)
end
it 'finds all members with no query specified' do
get v3_api("/#{source_type.pluralize}/#{source.id}/members", developer), query: ''
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.count).to eq(2)
expect(json_response.map { |u| u['id'] }).to match_array [master.id, developer.id]
end
end
end
......