Commit 4037a738 authored by James Lopez's avatar James Lopez

Merge branch 'feature/project-import' of gitlab.com:gitlab-org/gitlab-ce into feature/project-export-ui-experimental
parents e5a59331 22ff009a
......@@ -65,7 +65,7 @@ linters:
# Reports when you have an empty rule set.
EmptyRule:
enabled: false
enabled: true
# Reports when you have an @extend directive.
ExtendDirective:
......
Please view this file on the master branch, on stable branches it's out of date.
v 8.8.0 (unreleased)
- Remove future dates from contribution calendar graph.
v 8.7.0 (unreleased)
v 8.7.1 (unreleased)
- Fix .gitlab-ci.yml parsing issue when hidden job is a template without script definition. !3849
- Fix license detection to detect all license files, not only known licenses. !3878
- Use the `can?` helper instead of `current_user.can?`. !3882
- Prevent users from deleting Webhooks they do not own via the API
- Fix Error 500 due to stale cache when projects are renamed or transferred
v 8.7.0
- Gitlab::GitAccess and Gitlab::GitAccessWiki are now instrumented
- Fix vulnerability that made it possible to gain access to private labels and milestones
- The number of InfluxDB points stored per UDP packet can now be configured
......
......@@ -178,7 +178,7 @@ gem 'ruby-fogbugz', '~> 0.2.1'
gem 'd3_rails', '~> 3.5.0'
#cal-heatmap
gem 'cal-heatmap-rails', '~> 3.5.0'
gem 'cal-heatmap-rails', '~> 3.6.0'
# underscore-rails
gem "underscore-rails", "~> 1.8.0"
......
......@@ -103,7 +103,7 @@ GEM
bundler (~> 1.2)
thor (~> 0.18)
byebug (8.2.1)
cal-heatmap-rails (3.5.1)
cal-heatmap-rails (3.6.0)
capybara (2.6.2)
addressable
mime-types (>= 1.16)
......@@ -134,7 +134,7 @@ GEM
execjs
coffee-script-source (1.10.0)
colorize (0.7.7)
concurrent-ruby (1.0.0)
concurrent-ruby (1.0.1)
connection_pool (2.2.0)
coveralls (0.8.13)
json (~> 1.8)
......@@ -629,7 +629,7 @@ GEM
recaptcha (1.0.2)
json
redcarpet (3.3.3)
redis (3.2.2)
redis (3.3.0)
redis-actionpack (4.0.1)
actionpack (~> 4)
redis-rack (~> 1.5.0)
......@@ -736,10 +736,9 @@ GEM
rack
shoulda-matchers (2.8.0)
activesupport (>= 3.0.0)
sidekiq (4.0.1)
sidekiq (4.1.1)
concurrent-ruby (~> 1.0)
connection_pool (~> 2.2, >= 2.2.0)
json (~> 1.0)
redis (~> 3.2, >= 3.2.1)
sidekiq-cron (0.4.0)
redis-namespace (>= 1.5.2)
......@@ -905,7 +904,7 @@ DEPENDENCIES
bullet
bundler-audit
byebug
cal-heatmap-rails (~> 3.5.0)
cal-heatmap-rails (~> 3.6.0)
capybara (~> 2.6.2)
capybara-screenshot (~> 1.0.0)
carrierwave (~> 0.10.0)
......
......@@ -223,7 +223,7 @@ class @LabelsSelect
fieldName: $dropdown.data('field-name')
id: (label) ->
if $dropdown.hasClass("js-filter-submit") and not label.isAny?
label.title
_.escape label.title
else
label.id
......
......@@ -102,7 +102,8 @@ class @Todos
todoLink = $(this).data('url')
return unless todoLink
if e.metaKey
# Allow Meta-Click or Mouse3-click to open in a new tab
if e.metaKey or e.which is 2
e.preventDefault()
window.open(todoLink,'_blank')
else
......
......@@ -54,6 +54,10 @@
fill: #254e77 !important;
}
.future {
visibility: hidden;
}
.domain-background {
fill: none;
shape-rendering: crispedges;
......
......@@ -84,10 +84,6 @@
}
}
&.blob_file {
}
&.blob-no-preview {
background: #eee;
text-shadow: 0 1px 2px #fff;
......
......@@ -121,9 +121,6 @@
}
}
.select2-container-multi .select2-choices .select2-search-choice {
}
.select2-drop-active {
margin-top: 6px;
font-size: 14px;
......
......@@ -111,8 +111,6 @@
.vg { color: #f8f8f2 } /* Name.Variable.Global */
.vi { color: #f8f8f2 } /* Name.Variable.Instance */
.il { color: #ae81ff } /* Literal.Number.Integer.Long */
.gh { } /* Generic Heading & Diff Header */
.gu { color: #75715e; } /* Generic.Subheading & Diff Unified/Comment? */
.gd { color: #f92672; } /* Generic.Deleted & Diff Deleted */
.gi { color: #a6e22e; } /* Generic.Inserted & Diff Inserted */
......
......@@ -18,9 +18,6 @@
}
.graphs {
.graph-author-commits-count {
}
.graph-author-email {
float: right;
color: #777;
......
class Import::GitlabProjectController < Import::BaseController
before_action :verify_gitlab_project_import_enabled
before_action :gitlab_project_auth, except: :callback
rescue_from OAuth::Error, with: :gitlab_project_unauthorized
#TODO permissions stuff
def callback
redirect_to status_import_gitlab_project_url
end
def status
@repos = client.projects
@incompatible_repos = client.incompatible_projects
@already_added_projects = current_user.created_projects.where(import_type: "gitlab_project")
already_added_projects_names = @already_added_projects.pluck(:import_source)
@repos.to_a.reject!{ |repo| already_added_projects_names.include? "#{repo["owner"]}/#{repo["slug"]}" }
end
def jobs
jobs = current_user.created_projects.where(import_type: "gitlab_project").to_json(only: [:id, :import_status])
render json: jobs
end
def create
@file = params[:file]
# @project_name =
repo_owner = current_user.username
@target_namespace = params[:new_namespace].presence || repo_owner
namespace = get_or_create_namespace || (render and return)
@project = Gitlab::ImportExport::ImportService.execute(archive_file: @file, owner: repo_owner)
end
private
def verify_gitlab_project_import_enabled
render_404 unless gitlab_project_import_enabled?
end
end
......@@ -123,6 +123,18 @@ module ProjectsHelper
end
end
def license_short_name(project)
no_license_key = project.repository.license_key.nil? ||
# Back-compat if cache contains 'no-license', can be removed in a few weeks
project.repository.license_key == 'no-license'
return 'LICENSE' if no_license_key
license = Licensee::License.new(project.repository.license_key)
license.nickname || license.name
end
private
def get_project_nav_tabs(project, current_user)
......@@ -320,14 +332,6 @@ module ProjectsHelper
@ref || @repository.try(:root_ref)
end
def license_short_name(project)
license = Licensee::License.new(project.repository.license_key)
license.nickname || license.name
end
private
def filename_path(project, filename)
if project && blob = project.repository.send(filename)
namespace_project_blob_path(
......
......@@ -820,13 +820,11 @@ class Project < ActiveRecord::Base
wiki = Repository.new("#{old_path}.wiki", self)
if repo.exists?
repo.expire_cache
repo.expire_emptiness_caches
repo.before_delete
end
if wiki.exists?
wiki.expire_cache
wiki.expire_emptiness_caches
wiki.before_delete
end
end
......
......@@ -466,8 +466,8 @@ class Repository
return nil if !exists? || empty?
cache.fetch(:license_blob) do
if licensee_project.license
blob_at_branch(root_ref, licensee_project.matched_file.filename)
tree(:head).blobs.find do |file|
file.name =~ /\A(licen[sc]e|copying)(\..+|\z)/i
end
end
end
......@@ -476,7 +476,7 @@ class Repository
return nil if !exists? || empty?
cache.fetch(:license_key) do
licensee_project.license.try(:key) || 'no-license'
Licensee.license(path).try(:key)
end
end
......@@ -959,8 +959,4 @@ class Repository
def cache
@cache ||= RepositoryCache.new(path_with_namespace)
end
def licensee_project
@licensee_project ||= Licensee.project(path)
end
end
......@@ -34,6 +34,8 @@ module Projects
raise TransferError.new("Project with same path in target namespace already exists")
end
project.expire_caches_before_rename(old_path)
# Apply new namespace id and visibility level
project.namespace = new_namespace
project.visibility_level = new_namespace.visibility_level unless project.visibility_level_allowed_by_group?
......
......@@ -8,7 +8,7 @@
group, members with
%strong #{group_links.human_access}
role (#{shared_group_users_count})
- if current_user.can?(:admin_group, shared_group)
- if can?(current_user, :admin_group, shared_group)
.panel-head-actions
= link_to group_group_members_path(shared_group), class: 'btn btn-sm' do
%i.fa.fa-pencil-square-o
......
#!/usr/bin/env ruby
begin
load File.expand_path('../spring', __FILE__)
rescue LoadError => e
raise unless e.message.include?('spring')
end
APP_PATH = File.expand_path('../../config/application', __FILE__)
require_relative '../config/boot'
require 'rails/commands'
#!/usr/bin/env ruby
begin
load File.expand_path('../spring', __FILE__)
rescue LoadError => e
raise unless e.message.include?('spring')
end
require_relative '../config/boot'
require 'rake'
Rake.application.run
#!/usr/bin/env ruby
begin
load File.expand_path("../spring", __FILE__)
rescue LoadError
load File.expand_path('../spring', __FILE__)
rescue LoadError => e
raise unless e.message.include?('spring')
end
require 'bundler/setup'
load Gem.bin_path('rspec-core', 'rspec')
#!/usr/bin/env ruby
begin
load File.expand_path("../spring", __FILE__)
rescue LoadError
load File.expand_path('../spring', __FILE__)
rescue LoadError => e
raise unless e.message.include?('spring')
end
require 'bundler/setup'
load Gem.bin_path('spinach', 'spinach')
......@@ -4,12 +4,12 @@
# It gets overwritten when you run the `spring binstub` command.
unless defined?(Spring)
require "rubygems"
require "bundler"
require 'rubygems'
require 'bundler'
if match = Bundler.default_lockfile.read.match(/^GEM$.*?^ (?: )*spring \((.*?)\)$.*?^$/m)
Gem.paths = { "GEM_PATH" => [Bundler.bundle_path.to_s, *Gem.path].uniq }
gem "spring", match[1]
require "spring/binstub"
if (match = Bundler.default_lockfile.read.match(/^GEM$.*?^ (?: )*spring \((.*?)\)$.*?^$/m))
Gem.paths = { 'GEM_PATH' => [Bundler.bundle_path.to_s, *Gem.path].uniq.join(Gem.path_separator) }
gem 'spring', match[1]
require 'spring/binstub'
end
end
#!/usr/bin/env ruby
begin
load File.expand_path('../spring', __FILE__)
rescue LoadError => e
raise unless e.message.include?('spring')
end
require 'bundler/setup'
load Gem.bin_path('teaspoon', 'teaspoon')
......@@ -156,6 +156,12 @@ Rails.application.routes.draw do
get :new_user_map, path: :user_map
post :create_user_map, path: :user_map
end
resource :gitlab_project, only: [:create, :new], controller: :gitlab_projects do
get :status
get :callback
get :jobs
end
end
#
......
......@@ -41,6 +41,8 @@
- [Git LFS configuration](workflow/lfs/lfs_administration.md)
- [Housekeeping](administration/housekeeping.md) Keep your Git repository tidy and fast.
- [GitLab Performance Monitoring](monitoring/performance/introduction.md) Configure GitLab and InfluxDB for measuring performance metrics
- [Sidekiq Troubleshooting](administration/troubleshooting/sidekiq.md) Debug when Sidekiq appears hung and is not processing jobs
- [High Availability](administration/high_availability/README.md) Configure multiple servers for scaling or high availability
## Contributor documentation
......
# High Availability
GitLab supports several different types of clustering and high availability.
The solution you choose will be based on the level of scalability and
availability you require. The easiest solutions are scalable, but not necessarily
highly available.
## Architecture
### Active/Passive
For pure high-availability/failover with no scaling you can use an
active/passive configuration. This utilizes DRBD (Distributed Replicated
Block Device) to keep all data in sync. DRBD requires a low latency link to
remain in sync. It is not advisable to attempt to run DRBD between data centers
or in different cloud availability zones.
Components/Servers Required:
- 2 servers/virtual machines (one active/one passive)
### Active/Active
This architecture scales easily because all application servers handle
user requests simultaneously. The database, Redis, and GitLab application are
all deployed on separate servers. The configuration is **only** highly available
if the database, Redis, and storage are also configured as such.
**Steps to configure active/active:**
1. [Configure the database](database.md)
1. [Configure Redis](redis.md)
1. [Configure NFS](nfs.md)
1. [Configure the GitLab application servers](gitlab.md)
1. [Configure the load balancers](load_balancer.md)
# Configuring a Database for GitLab HA
You can choose to install and manage a database server (PostgreSQL/MySQL)
yourself, or you can use GitLab Omnibus packages to help. GitLab recommends
PostgreSQL. This is the database that will be installed if you use the
Omnibus package to manage your database.
## Configure your own database server
If you're hosting GitLab on a cloud provider, you can optionally use a
managed service for PostgreSQL. For example, AWS offers a managed Relational
Database Service (RDS) that runs PostgreSQL.
If you use a cloud-managed service, or provide your own PostgreSQL:
1. Set up a `gitlab` username with a password of your choice. The `gitlab` user
needs privileges to create the `gitlabhq_production` database.
1. Configure the GitLab application servers with the appropriate details.
This step is covered in [Configuring GitLab for HA](gitlab.md)
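For example, on a self-managed PostgreSQL server the role could be created with
something along these lines (a minimal sketch only; substitute your own password
and adjust `pg_hba.conf` so the GitLab application servers can connect):
```
# Create the application role; CREATEDB lets it create gitlabhq_production
sudo -u postgres psql -c "CREATE ROLE gitlab WITH LOGIN CREATEDB PASSWORD 'DB password';"
```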
## Configure using Omnibus
1. Download/install GitLab Omnibus using **steps 1 and 2** from
[GitLab downloads](https://about.gitlab.com/downloads). Do not complete other
steps on the download page.
1. Create/edit `/etc/gitlab/gitlab.rb` and use the following configuration.
Be sure to change the `external_url` to match your eventual GitLab front-end
URL.
```ruby
external_url 'https://gitlab.example.com'
# Disable all components except PostgreSQL
postgresql['enable'] = true
bootstrap['enable'] = false
nginx['enable'] = false
unicorn['enable'] = false
sidekiq['enable'] = false
redis['enable'] = false
gitlab_workhorse['enable'] = false
mailroom['enable'] = false
# PostgreSQL configuration
postgresql['sql_password'] = 'DB password'
postgresql['md5_auth_cidr_addresses'] = ['0.0.0.0/0']
postgresql['listen_address'] = '0.0.0.0'
```
1. Run `sudo gitlab-ctl reconfigure` to install and configure PostgreSQL.
> **Note**: This `reconfigure` step will result in some errors.
That's OK - don't be alarmed.
1. Open a database prompt:
```
su - gitlab-psql
/bin/bash
psql -h /var/opt/gitlab/postgresql -d template1
# Output:
psql (9.2.15)
Type "help" for help.
template1=#
```
1. Run the following command at the database prompt and you will be asked to
enter the new password for the PostgreSQL superuser.
```
\password
# Output:
Enter new password:
Enter it again:
```
1. Similarly, set the password for the `gitlab` database user. Use the same
password that you specified in the `/etc/gitlab/gitlab.rb` file for
`postgresql['sql_password']`.
```
\password gitlab
# Output:
Enter new password:
Enter it again:
```
1. Enable the `pg_trgm` extension:
```
CREATE EXTENSION pg_trgm;
# Output:
CREATE EXTENSION
```
1. Exit the database prompt by typing `\q` and Enter.
1. Exit the `gitlab-psql` user by running `exit` twice.
1. Run `sudo gitlab-ctl reconfigure` a final time.
1. Run `touch /etc/gitlab/skip-auto-migrations` to prevent database migrations
from running on upgrade. Only the primary GitLab application server should
handle migrations.
---
Read more on high-availability configuration:
1. [Configure Redis](redis.md)
1. [Configure NFS](nfs.md)
1. [Configure the GitLab application servers](gitlab.md)
1. [Configure the load balancers](load_balancer.md)
# Configuring GitLab for HA
Assuming you have already configured a database, Redis, and NFS, you can
configure the GitLab application server(s) now. Complete the steps below
for each GitLab application server in your environment.
> **Note:** There is some additional configuration near the bottom for
secondary GitLab application servers. It's important to read and understand
these additional steps before proceeding with GitLab installation.
1. If necessary, install the NFS client utility packages using the following
commands:
```
# Ubuntu/Debian
apt-get install nfs-common
# CentOS/Red Hat
yum install nfs-utils nfs-utils-lib
```
1. Specify the necessary NFS shares. Mounts are specified in
`/etc/fstab`. The exact contents of `/etc/fstab` will depend on how you chose
to configure your NFS server. See [NFS documentation](nfs.md) for the various
options. Here is an example snippet to add to `/etc/fstab`:
```
10.1.0.1:/var/opt/gitlab/.ssh /var/opt/gitlab/.ssh nfs defaults,soft,rsize=1048576,wsize=1048576,noatime,nobootwait,lookupcache=positive 0 2
10.1.0.1:/var/opt/gitlab/gitlab-rails/uploads /var/opt/gitlab/gitlab-rails/uploads nfs defaults,soft,rsize=1048576,wsize=1048576,noatime,nobootwait,lookupcache=positive 0 2
10.1.0.1:/var/opt/gitlab/gitlab-rails/shared /var/opt/gitlab/gitlab-rails/shared nfs defaults,soft,rsize=1048576,wsize=1048576,noatime,nobootwait,lookupcache=positive 0 2
10.1.0.1:/var/opt/gitlab/gitlab-ci/builds /var/opt/gitlab/gitlab-ci/builds nfs defaults,soft,rsize=1048576,wsize=1048576,noatime,nobootwait,lookupcache=positive 0 2
10.1.1.1:/var/opt/gitlab/git-data /var/opt/gitlab/git-data nfs defaults,soft,rsize=1048576,wsize=1048576,noatime,nobootwait,lookupcache=positive 0 2
```
1. Create the shared directories. These may be different depending on your NFS
mount locations.
```
mkdir -p /var/opt/gitlab/.ssh /var/opt/gitlab/gitlab-rails/uploads /var/opt/gitlab/gitlab-rails/shared /var/opt/gitlab/gitlab-ci/builds /var/opt/gitlab/git-data
```
1. Download/install GitLab Omnibus using **steps 1 and 2** from
[GitLab downloads](https://about.gitlab.com/downloads). Do not complete other
steps on the download page.
1. Create/edit `/etc/gitlab/gitlab.rb` and use the following configuration.
Be sure to change the `external_url` to match your eventual GitLab front-end
URL. Depending on your NFS configuration, you may need to change some GitLab
data locations. See [NFS documentation](nfs.md) for `/etc/gitlab/gitlab.rb`
configuration values for various scenarios. The example below assumes you've
added NFS mounts in the default data locations.
```ruby
external_url 'https://gitlab.example.com'
# Prevent GitLab from starting if NFS data mounts are not available
high_availability['mountpoint'] = '/var/opt/gitlab/git-data'
# Disable components that will not be on the GitLab application server
postgresql['enable'] = false
redis['enable'] = false
# PostgreSQL connection details
gitlab_rails['db_adapter'] = 'postgresql'
gitlab_rails['db_encoding'] = 'unicode'
gitlab_rails['db_host'] = '10.1.0.5' # IP/hostname of database server
gitlab_rails['db_password'] = 'DB password'
# Redis connection details
gitlab_rails['redis_port'] = '6379'
gitlab_rails['redis_host'] = '10.1.0.6' # IP/hostname of Redis server
gitlab_rails['redis_password'] = 'Redis Password'
```
1. Run `sudo gitlab-ctl reconfigure` to compile the configuration.
## Primary GitLab application server
As a final step, run the setup rake task on the first GitLab application server.
It is not necessary to run this on additional application servers.
1. Initialize the database by running `sudo gitlab-rake gitlab:setup`.
> **WARNING:** Only run this setup task on **NEW** GitLab instances because it
will wipe any existing data.
> **Note:** When you specify `https` in the `external_url`, as in the example
above, GitLab assumes you have SSL certificates in `/etc/gitlab/ssl/`. If
certificates are not present, Nginx will fail to start. See
[Nginx documentation](http://docs.gitlab.com/omnibus/settings/nginx.html#enable-https)
for more information.
## Additional configuration for secondary GitLab application servers
Secondary GitLab servers (servers configured **after** the first GitLab server)
need some additional configuration.
1. Configure shared secrets. These values can be obtained from the primary
GitLab server in `/etc/gitlab/gitlab-secrets.json`. Add these to
`/etc/gitlab/gitlab.rb` **prior to** running the first `reconfigure` in
the steps above.
```ruby
gitlab_shell['secret_token'] = 'fbfb19c355066a9afb030992231c4a363357f77345edd0f2e772359e5be59b02538e1fa6cae8f93f7d23355341cea2b93600dab6d6c3edcdced558fc6d739860'
gitlab_rails['secret_token'] = 'b719fe119132c7810908bba18315259ed12888d4f5ee5430c42a776d840a396799b0a5ef0a801348c8a357f07aa72bbd58e25a84b8f247a25c72f539c7a6c5fa'
gitlab_ci['secret_key_base'] = '6e657410d57c71b4fc3ed0d694e7842b1895a8b401d812c17fe61caf95b48a6d703cb53c112bc01ebd197a85da81b18e29682040e99b4f26594772a4a2c98c6d'
gitlab_ci['db_key_base'] = 'bf2e47b68d6cafaef1d767e628b619365becf27571e10f196f98dc85e7771042b9203199d39aff91fcb6837c8ed83f2a912b278da50999bb11a2fbc0fba52964'
```
1. Run `touch /etc/gitlab/skip-auto-migrations` to prevent database migrations
from running on upgrade. Only the primary GitLab application server should
handle migrations.
## Troubleshooting
- `mount: wrong fs type, bad option, bad superblock on`
You have not installed the necessary NFS client utilities. See step 1 above.
- `mount: mount point /var/opt/gitlab/... does not exist`
This particular directory does not exist on the NFS server. Ensure
the share is exported and exists on the NFS server and try to remount.
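For example, assuming the NFS server address used in the `/etc/fstab` entries
above, you could verify the exports and retry the mounts with something like:
```
# List the shares the NFS server actually exports
showmount -e 10.1.0.1
# Retry all NFS mounts defined in /etc/fstab
sudo mount -a -t nfs
```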
---
Read more on high-availability configuration:
1. [Configure the database](database.md)
1. [Configure Redis](redis.md)
1. [Configure NFS](nfs.md)
1. [Configure the load balancers](load_balancer.md)
# Load Balancer for GitLab HA
In an active/active GitLab configuration, you will need a load balancer to route
traffic to the application servers. The specifics of which load balancer to use,
or its exact configuration, are beyond the scope of GitLab documentation. We hope
that if you're managing HA systems like GitLab you already have a load balancer of
choice. Some examples include HAProxy (open source), F5 BIG-IP LTM, and Citrix
NetScaler. This documentation will outline what ports and protocols you need to
use with GitLab.
## Basic ports
| LB Port | Backend Port | Protocol |
| ------- | ------------ | -------- |
| 80 | 80 | HTTP |
| 443 | 443 | HTTPS [^1] |
| 22 | 22 | TCP |
## GitLab Pages Ports
If you're using GitLab Pages you will need some additional port configurations.
GitLab Pages requires a separate VIP. Configure DNS to point the
`pages_external_url` from `/etc/gitlab/gitlab.rb` at the new VIP. See the
[GitLab Pages documentation][gitlab-pages] for more information.
| LB Port | Backend Port | Protocol |
| ------- | ------------ | -------- |
| 80 | Varies [^2] | HTTP |
| 443 | Varies [^2] | TCP [^3] |
## Alternate SSH Port
Some organizations have policies against opening SSH port 22. In this case,
it may be helpful to configure an alternate SSH hostname that allows users
to use SSH on port 443. An alternate SSH hostname will require a new VIP,
separate from the other GitLab HTTP configuration above.
Configure DNS for an alternate SSH hostname such as altssh.gitlab.example.com.
| LB Port | Backend Port | Protocol |
| ------- | ------------ | -------- |
| 443 | 22 | TCP |
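As an illustration only (hypothetical addresses; your load balancer's syntax will
differ), an HAProxy configuration for the alternate SSH VIP could look roughly
like this:
```
frontend altssh
  bind 203.0.113.10:443
  mode tcp
  default_backend gitlab_altssh
backend gitlab_altssh
  mode tcp
  server gitlab-app-1 10.1.0.11:22 check
  server gitlab-app-2 10.1.0.12:22 check
```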
---
Read more on high-availability configuration:
1. [Configure the database](database.md)
1. [Configure Redis](redis.md)
1. [Configure NFS](nfs.md)
1. [Configure the GitLab application servers](gitlab.md)
[^1]: When using HTTPS protocol for port 443, you will need to add an SSL
certificate to the load balancers. If you wish to terminate SSL at the
GitLab application server instead, use TCP protocol.
[^2]: The backend port for GitLab Pages depends on the
`gitlab_pages['external_http']` and `gitlab_pages['external_https']`
setting. See [GitLab Pages documentation][gitlab-pages] for more details.
[^3]: Port 443 for GitLab Pages should always use the TCP protocol. Users can
configure custom domains with custom SSL, which would not be possible
if SSL was terminated at the load balancer.
[gitlab-pages]: http://doc.gitlab.com/ee/pages/administration.html
# NFS
## Required NFS Server features
**File locking**: GitLab **requires** file locking, which is only supported
natively in NFS version 4. NFSv3 also supports locking as long as
Linux Kernel 2.6.5+ is used. We recommend using version 4 and do not
specifically test NFSv3.
**no_root_squash**: NFS normally changes the `root` user to `nobody`. This is
a good security measure when NFS shares will be accessed by many different
users. However, in this case only GitLab will use the NFS share so it
is safe. GitLab requires the `no_root_squash` setting because we need to
manage file permissions automatically. Without the setting you will receive
errors when the Omnibus package tries to alter permissions. Note that GitLab
and other bundled components do **not** run as `root` but as non-privileged
users. The requirement for `no_root_squash` is to allow the Omnibus package to
set ownership and permissions on files, as needed.
### Recommended options
When you define your NFS exports, we recommend you also add the following
options:
- `sync` - Force synchronous behavior. Default is asynchronous and under certain
circumstances it could lead to data loss if a failure occurs before data has
synced.
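Putting the requirements above together, a hypothetical `/etc/exports` entry for
the Git data share (example client network) could look like:
```
/var/opt/gitlab/git-data 10.1.0.0/24(rw,sync,no_root_squash,no_subtree_check)
```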
## Client mount options
Below is an example of an NFS mount point we use on GitLab.com:
```
10.1.1.1:/var/opt/gitlab/git-data /var/opt/gitlab/git-data nfs4 defaults,soft,rsize=1048576,wsize=1048576,noatime,nobootwait,lookupcache=positive 0 2
```
Notice several options that you should consider using:
| Setting | Description |
| ------- | ----------- |
| `nobootwait` | Don't halt boot process waiting for this mount to become available
| `lookupcache=positive` | Tells the NFS client to honor `positive` cache results but invalidates any `negative` cache results. Negative cache results cause problems with Git. Specifically, a `git push` can fail to register uniformly across all NFS clients. The negative cache causes the clients to 'remember' that the files did not exist previously.
## Mount locations
When using default Omnibus configuration you will need to share 5 data locations
between all GitLab cluster nodes. No other locations should be shared. The
following are the 5 locations you need to mount:
| Location | Description |
| -------- | ----------- |
| `/var/opt/gitlab/git-data` | Git repository data. This will account for a large portion of your data
| `/var/opt/gitlab/.ssh` | SSH `authorized_keys` file and keys used to import repositories from some other Git services
| `/var/opt/gitlab/gitlab-rails/uploads` | User uploaded attachments
| `/var/opt/gitlab/gitlab-rails/shared` | Build artifacts, GitLab Pages, LFS objects, temp files, etc. If you're using LFS this may also account for a large portion of your data
| `/var/opt/gitlab/gitlab-ci/builds` | GitLab CI build traces
Other GitLab directories should not be shared between nodes. They contain
node-specific files and GitLab code that does not need to be shared. To ship
logs to a central location consider using remote syslog. GitLab Omnibus packages
provide configuration for [UDP log shipping][udp-log-shipping].
### Consolidating mount points
If you don't want to configure 5-6 different NFS mount points, you have a few
alternative options.
#### Change default file locations
Omnibus allows you to configure the file locations. With custom configuration
you can specify just one main mountpoint and have all of these locations
as subdirectories. Mount `/gitlab-data` then use the following Omnibus
configuration to move each data location to a subdirectory:
```ruby
user['home'] = '/gitlab-data/home'
git_data_dir '/gitlab-data/git-data'
gitlab_rails['shared_path'] = '/gitlab-data/shared'
gitlab_rails['uploads_directory'] = "/gitlab-data/uploads"
gitlab_ci['builds_directory'] = '/gitlab-data/builds'
```
To move the `git` home directory, all GitLab services must be stopped. Run
`gitlab-ctl stop && initctl stop gitlab-runsvdir`. Then continue with the
reconfigure.
Run `sudo gitlab-ctl reconfigure` to start using the central location. Please
be aware that if you had existing data you will need to manually copy/rsync it
to these new locations and then restart GitLab.
#### Bind mounts
Bind mounts provide a way to specify just one NFS mount and then
bind the default GitLab data locations to the NFS mount. Start by defining your
single NFS mount point as you normally would in `/etc/fstab`. Let's assume your
NFS mount point is `/gitlab-data`. Then, add the following bind mounts in
`/etc/fstab`:
```bash
/gitlab-data/git-data /var/opt/gitlab/git-data none bind 0 0
/gitlab-data/.ssh /var/opt/gitlab/.ssh none bind 0 0
/gitlab-data/uploads /var/opt/gitlab/gitlab-rails/uploads none bind 0 0
/gitlab-data/shared /var/opt/gitlab/gitlab-rails/shared none bind 0 0
/gitlab-data/builds /var/opt/gitlab/gitlab-ci/builds none bind 0 0
```
---
Read more on high-availability configuration:
1. [Configure the database](database.md)
1. [Configure Redis](redis.md)
1. [Configure the GitLab application servers](gitlab.md)
1. [Configure the load balancers](load_balancer.md)
[udp-log-shipping]: http://doc.gitlab.com/omnibus/settings/logs.html#udp-log-shipping-gitlab-enterprise-edition-only "UDP log shipping"
# Configuring Redis for GitLab HA
You can choose to install and manage Redis yourself, or you can use GitLab
Omnibus packages to help.
## Configure your own Redis server
If you're hosting GitLab on a cloud provider, you can optionally use a
managed service for Redis. For example, AWS offers a managed ElastiCache service
that runs Redis.
> **Note:** Redis does not require authentication by default. See
[Redis Security](http://redis.io/topics/security) documentation for more
information. We recommend using a combination of a Redis password and tight
firewall rules to secure your Redis service.
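If you manage Redis yourself, the relevant `redis.conf` settings are roughly the
following (a sketch only; choose your own password and restrict access with
firewall rules):
```
port 6379
bind 0.0.0.0
requirepass "Redis Password"
```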
## Configure using Omnibus
1. Download/install GitLab Omnibus using **steps 1 and 2** from
[GitLab downloads](https://about.gitlab.com/downloads). Do not complete other
steps on the download page.
1. Create/edit `/etc/gitlab/gitlab.rb` and use the following configuration.
Be sure to change the `external_url` to match your eventual GitLab front-end
URL.
```ruby
external_url 'https://gitlab.example.com'
# Disable all components except Redis
redis['enable'] = true
bootstrap['enable'] = false
nginx['enable'] = false
unicorn['enable'] = false
sidekiq['enable'] = false
postgresql['enable'] = false
gitlab_workhorse['enable'] = false
mailroom['enable'] = false
# Redis configuration
redis['port'] = 6379
redis['bind'] = '0.0.0.0'
# If you wish to use Redis authentication (recommended)
redis['password'] = 'Redis Password'
```
1. Run `sudo gitlab-ctl reconfigure` to install and configure Redis.
> **Note**: This `reconfigure` step will result in some errors.
That's OK - don't be alarmed.
1. Run `touch /etc/gitlab/skip-auto-migrations` to prevent database migrations
from running on upgrade. Only the primary GitLab application server should
handle migrations.
---
Read more on high-availability configuration:
1. [Configure the database](database.md)
1. [Configure NFS](nfs.md)
1. [Configure the GitLab application servers](gitlab.md)
1. [Configure the load balancers](load_balancer.md)
# Troubleshooting Sidekiq
Sidekiq is the background job processor GitLab uses to asynchronously run
tasks. When things go wrong it can be difficult to troubleshoot. These
situations also tend to be high-pressure because a production system job queue
may be filling up. Users will notice when this happens because new branches
may not show up and merge requests may not be updated. The following are some
troubleshooting steps that will help you diagnose the bottleneck.
> **Note:** GitLab administrators/users should consider working through these
debug steps with GitLab Support so the backtraces can be analyzed by our team.
It may reveal a bug or necessary improvement in GitLab.
> **Note:** In any of the backtraces, be wary of suspecting cases where every
thread appears to be waiting in the database, Redis, or waiting to acquire
a mutex. This **may** mean there's contention in the database, for example,
but look for one thread that is different from the rest. This other thread
may be using all available CPU, or holding the Ruby Global Interpreter Lock,
preventing other threads from continuing.
## Thread dump
Send the Sidekiq process ID the `TTIN` signal and it will output thread
backtraces in the log file.
```
kill -TTIN <sidekiq_pid>
```
Check in `/var/log/gitlab/sidekiq/current` or `$GITLAB_HOME/log/sidekiq.log` for
the backtrace output. The backtraces will be lengthy and generally start with
several `WARN` level messages. Here's an example of a single thread's backtrace:
```
2016-04-13T06:21:20.022Z 31517 TID-orn4urby0 WARN: ActiveRecord::RecordNotFound: Couldn't find Note with 'id'=3375386
2016-04-13T06:21:20.022Z 31517 TID-orn4urby0 WARN: /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/activerecord-4.2.5.2/lib/active_record/core.rb:155:in `find'
/opt/gitlab/embedded/service/gitlab-rails/app/workers/new_note_worker.rb:7:in `perform'
/opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/sidekiq-4.0.1/lib/sidekiq/processor.rb:150:in `execute_job'
/opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/sidekiq-4.0.1/lib/sidekiq/processor.rb:132:in `block (2 levels) in process'
/opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/sidekiq-4.0.1/lib/sidekiq/middleware/chain.rb:127:in `block in invoke'
/opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/sidekiq_middleware/memory_killer.rb:17:in `call'
/opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/sidekiq-4.0.1/lib/sidekiq/middleware/chain.rb:129:in `block in invoke'
/opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/sidekiq_middleware/arguments_logger.rb:6:in `call'
...
```
In some cases Sidekiq may be hung and unable to respond to the `TTIN` signal.
Move on to other troubleshooting methods if this happens.
## Process profiling with `perf`
Linux has a process profiling tool called `perf` that is helpful when a certain
process is eating up a lot of CPU. If you see high CPU usage and Sidekiq won't
respond to the `TTIN` signal, this is a good next step.
If `perf` is not installed on your system, install it with `apt-get` or `yum`:
```
# Debian
sudo apt-get install linux-tools
# Ubuntu (may require these additional Kernel packages)
sudo apt-get install linux-tools-common linux-tools-generic linux-tools-`uname -r`
# Red Hat/CentOS
sudo yum install perf
```
Run perf against the Sidekiq PID:
```
sudo perf record -p <sidekiq_pid>
```
Let this run for 30-60 seconds and then press Ctrl-C. Then view the perf report:
```
sudo perf report
# Sample output
Samples: 348K of event 'cycles', Event count (approx.): 280908431073
97.69% ruby nokogiri.so [.] xmlXPathNodeSetMergeAndClear
0.18% ruby libruby.so.2.1.0 [.] objspace_malloc_increase
0.12% ruby libc-2.12.so [.] _int_malloc
0.10% ruby libc-2.12.so [.] _int_free
```
Above you see sample output from a perf report. It shows that 97% of the CPU is
being spent inside Nokogiri and `xmlXPathNodeSetMergeAndClear`. For something
this obvious you should then go investigate what job in GitLab would use
Nokogiri and XPath. Combine with `TTIN` or `gdb` output to show the
corresponding Ruby code where this is happening.
## The GNU Project Debugger (gdb)
`gdb` can be another effective tool for debugging Sidekiq. It gives you a more
interactive way to look at each thread and see what's causing problems.
> **Note:** Attaching to a process with `gdb` suspends the normal operation
of the process (Sidekiq will not process jobs while `gdb` is attached).
Start by attaching to the Sidekiq PID:
```
gdb -p <sidekiq_pid>
```
Then gather information on all the threads:
```
info threads
# Example output
30 Thread 0x7fe5fbd63700 (LWP 26060) 0x0000003f7cadf113 in poll () from /lib64/libc.so.6
29 Thread 0x7fe5f2b3b700 (LWP 26533) 0x0000003f7ce0b68c in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
28 Thread 0x7fe5f2a3a700 (LWP 26534) 0x0000003f7ce0ba5e in pthread_cond_timedwait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
27 Thread 0x7fe5f2939700 (LWP 26535) 0x0000003f7ce0b68c in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
26 Thread 0x7fe5f2838700 (LWP 26537) 0x0000003f7ce0b68c in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
25 Thread 0x7fe5f2737700 (LWP 26538) 0x0000003f7ce0b68c in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
24 Thread 0x7fe5f2535700 (LWP 26540) 0x0000003f7ce0b68c in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
23 Thread 0x7fe5f2434700 (LWP 26541) 0x0000003f7ce0b68c in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
22 Thread 0x7fe5f2232700 (LWP 26543) 0x0000003f7ce0b68c in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
21 Thread 0x7fe5f2131700 (LWP 26544) 0x00007fe5f7b570f0 in xmlXPathNodeSetMergeAndClear ()
from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
...
```
If you see a suspicious thread, like the Nokogiri one above, you may want
to get more information:
```
thread 21
bt
# Example output
#0 0x00007ff0d6afe111 in xmlXPathNodeSetMergeAndClear () from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
#1 0x00007ff0d6b0b836 in xmlXPathNodeCollectAndTest () from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
#2 0x00007ff0d6b09037 in xmlXPathCompOpEval () from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
#3 0x00007ff0d6b09017 in xmlXPathCompOpEval () from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
#4 0x00007ff0d6b092e0 in xmlXPathCompOpEval () from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
#5 0x00007ff0d6b0bc37 in xmlXPathRunEval () from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
#6 0x00007ff0d6b0be5f in xmlXPathEvalExpression () from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
#7 0x00007ff0d6a97dc3 in evaluate (argc=2, argv=0x1022d058, self=<value optimized out>) at xml_xpath_context.c:221
#8 0x00007ff0daeab0ea in vm_call_cfunc_with_frame (th=0x1022a4f0, reg_cfp=0x1032b810, ci=<value optimized out>) at vm_insnhelper.c:1510
```
To output a backtrace from all threads at once:
```
thread apply all bt
```
## Check for blocking queries
Sometimes Sidekiq processes jobs so quickly that it causes database contention.
Check for blocking queries when the backtraces above show that many threads are
stuck in the database adapter.
The PostgreSQL wiki has details on the query you can run to see blocking
queries. The query is different based on PostgreSQL version. See
[Lock Monitoring](https://wiki.postgresql.org/wiki/Lock_Monitoring) for
the query details.
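A minimal sketch of such a check for PostgreSQL 9.2 or later (the wiki page above
has more complete variants) is:
```
SELECT blocked.pid          AS blocked_pid,
       blocked_act.query    AS blocked_statement,
       blocking.pid         AS blocking_pid,
       blocking_act.query   AS blocking_statement
FROM pg_locks blocked
JOIN pg_stat_activity blocked_act ON blocked_act.pid = blocked.pid
JOIN pg_locks blocking
  ON blocking.transactionid = blocked.transactionid
 AND blocking.pid <> blocked.pid
JOIN pg_stat_activity blocking_act ON blocking_act.pid = blocking.pid
WHERE NOT blocked.granted;
```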
......@@ -45,8 +45,8 @@ sudo -u git -H git checkout 8-7-stable-ee
```bash
cd /home/git/gitlab-shell
sudo -u git -H git fetch --all
sudo -u git -H git checkout v2.7.0
sudo -u git -H git fetch --all --tags
sudo -u git -H git checkout v2.7.2
```
### 5. Update gitlab-workhorse
......
......@@ -103,10 +103,10 @@ module API
required_attributes! [:hook_id]
begin
@hook = ProjectHook.find(params[:hook_id])
@hook.destroy
@hook = user_project.hooks.destroy(params[:hook_id])
rescue
# ProjectHook can raise Error if hook_id not found
not_found!("Error deleting hook #{params[:hook_id]}")
end
end
end
......
......@@ -61,23 +61,21 @@ module Ci
@stages = @config[:stages] || @config[:types]
@variables = @config[:variables] || {}
@cache = @config[:cache]
@jobs = {}
@config.except!(*ALLOWED_YAML_KEYS)
@config.each { |name, param| add_job(name, param) }
# anything that doesn't have a script is considered unknown
@config.each do |name, param|
raise ValidationError, "Unknown parameter: #{name}" unless param.is_a?(Hash) && param.has_key?(:script)
end
raise ValidationError, "Please define at least one job" if @jobs.none?
end
unless @config.values.any?{|job| job.is_a?(Hash)}
raise ValidationError, "Please define at least one job"
end
def add_job(name, job)
return if name.to_s.start_with?('.')
@jobs = {}
@config.each do |key, job|
next if key.to_s.start_with?('.')
stage = job[:stage] || job[:type] || DEFAULT_STAGE
@jobs[key] = { stage: stage }.merge(job)
end
raise ValidationError, "Unknown parameter: #{name}" unless job.is_a?(Hash) && job.has_key?(:script)
stage = job[:stage] || job[:type] || DEFAULT_STAGE
@jobs[name] = { stage: stage }.merge(job)
end
def build_job(name, job)
......@@ -112,8 +110,6 @@ module Ci
true
end
private
def validate_global!
unless validate_array_of_strings(@before_script)
raise ValidationError, "before_script should be an array of strings"
......
module Projects
module Gitlab
module ImportExport
class ExportService < BaseService
def execute(options = {})
archive_file = options[:archive_file]
Gitlab::ImportExport::Importer.import(archive_file: archive_file, storage_path: storage_path)
class ImportService
def self.execute(*args)
new(*args).execute
end
def initialize(options = {})
@archive_file = options[:archive_file]
@current_user = options[:owner]
end
def execute
Gitlab::ImportExport::Importer.import(archive_file: @archive_file, storage_path: storage_path)
restore_project_tree
restore_repo(project_tree.project)
end
......
require 'spec_helper'
describe "Dashboard projects filters", feature: true, js: true do
describe "Dashboard > User filters projects", feature: true do
context 'filtering personal projects' do
describe 'filtering personal projects' do
before do
user = create(:user)
project = create(:project, name: "Victorialand", namespace: user.namespace)
project.team << [user, :master]
user2 = create(:user)
project2 = create(:project, name: "Treasure", namespace: user2.namespace)
project2.team << [user, :developer]
login_as(user)
visit dashboard_projects_path
open_filter_dropdown
click_link "Owned by me"
end
it 'filters by projects "Owned by me"' do
sleep 1
open_filter_dropdown
page.within('ul.dropdown-menu.dropdown-menu-align-right') do
expect(page).to have_css('.is-active', text: 'Owned by me')
end
end
end
click_link "Owned by me"
def open_filter_dropdown
find('button.dropdown-toggle.btn').click
expect(page).to have_css('.is-active', text: 'Owned by me')
expect(page).to have_content('Victorialand')
expect(page).not_to have_content('Treasure')
end
end
end
require 'spec_helper'
feature 'Projects > Members > Anonymous user sees members', feature: true do
let(:user) { create(:user) }
let(:group) { create(:group, :public) }
let(:project) { create(:empty_project, :public) }
background do
project.team << [user, :master]
create(:project_group_link, project: project, group: group)
end
scenario "anonymous user visits the project's members page and sees the list of members" do
visit namespace_project_project_members_path(project.namespace, project)
expect(current_path).to eq(
namespace_project_project_members_path(project.namespace, project))
expect(page).to have_content(user.name)
end
end
require 'spec_helper'
describe 'Dashboard Todos', feature: true do
let(:user){ create(:user) }
let(:author){ create(:user) }
let(:project){ create(:project) }
let(:issue){ create(:issue) }
let(:todos_per_page){ Todo.default_per_page }
let(:todos_total){ todos_per_page + 1 }
let(:user) { create(:user) }
let(:author) { create(:user) }
let(:project) { create(:project) }
let(:issue) { create(:issue) }
describe 'GET /dashboard/todos' do
context 'User does not have todos' do
......@@ -46,31 +44,35 @@ describe 'Dashboard Todos', feature: true do
end
context 'User has multiple pages of Todos' do
let(:todo_total_pages){ (todos_total.to_f/todos_per_page).ceil }
before do
todos_total.times do
create(:todo, :mentioned, user: user, project: project, target: issue, author: author)
end
allow(Todo).to receive(:default_per_page).and_return(1)
# Create just enough records to cause us to paginate
create_list(:todo, 2, :mentioned, user: user, project: project, target: issue, author: author)
login_as(user)
visit dashboard_todos_path
end
it 'is paginated' do
visit dashboard_todos_path
expect(page).to have_selector('.gl-pagination')
end
it 'has the right number of pages' do
expect(page).to have_selector('.gl-pagination .page', count: todo_total_pages)
visit dashboard_todos_path
expect(page).to have_selector('.gl-pagination .page', count: 2)
end
describe 'deleting last todo from last page', js: true do
describe 'completing last todo from last page', js: true do
it 'redirects to the previous page' do
page.within('.gl-pagination') do
click_link todo_total_pages.to_s
end
first('.done-todo').click
visit dashboard_todos_path(page: 2)
expect(page).to have_content(Todo.first.body)
click_link('Done')
expect(current_path).to eq dashboard_todos_path
expect(page).to have_content(Todo.last.body)
end
end
......
......@@ -105,4 +105,30 @@ describe ProjectsHelper do
end
end
end
describe '#license_short_name' do
let(:project) { create(:project) }
context 'when project.repository has a license_key' do
it 'returns the nickname of the license if present' do
allow(project.repository).to receive(:license_key).and_return('agpl-3.0')
expect(helper.license_short_name(project)).to eq('GNU AGPLv3')
end
it 'returns the name of the license if nickname is not present' do
allow(project.repository).to receive(:license_key).and_return('mit')
expect(helper.license_short_name(project)).to eq('MIT License')
end
end
context 'when project.repository has no license_key but a license_blob' do
it 'returns LICENSE' do
allow(project.repository).to receive(:license_key).and_return(nil)
expect(helper.license_short_name(project)).to eq('LICENSE')
end
end
end
end
......@@ -648,70 +648,131 @@ module Ci
end
describe "Hidden jobs" do
let(:config) do
YAML.dump({
'.hidden_job' => { script: 'test' },
'normal_job' => { script: 'test' }
})
let(:config_processor) { GitlabCiYamlProcessor.new(config) }
subject { config_processor.builds_for_stage_and_ref("test", "master") }
shared_examples 'hidden_job_handling' do
it "doesn't create jobs that start with dot" do
expect(subject.size).to eq(1)
expect(subject.first).to eq({
except: nil,
stage: "test",
stage_idx: 1,
name: :normal_job,
only: nil,
commands: "test",
tag_list: [],
options: {},
when: "on_success",
allow_failure: false
})
end
end
let(:config_processor) { GitlabCiYamlProcessor.new(config) }
context 'when hidden job has a script definition' do
let(:config) do
YAML.dump({
'.hidden_job' => { image: 'ruby:2.1', script: 'test' },
'normal_job' => { script: 'test' }
})
end
subject { config_processor.builds_for_stage_and_ref("test", "master") }
it_behaves_like 'hidden_job_handling'
end
it "doesn't create jobs that starts with dot" do
expect(subject.size).to eq(1)
expect(subject.first).to eq({
except: nil,
stage: "test",
stage_idx: 1,
name: :normal_job,
only: nil,
commands: "test",
tag_list: [],
options: {},
when: "on_success",
allow_failure: false
})
context "when hidden job doesn't have a script definition" do
let(:config) do
YAML.dump({
'.hidden_job' => { image: 'ruby:2.1' },
'normal_job' => { script: 'test' }
})
end
it_behaves_like 'hidden_job_handling'
end
end
describe "YAML Alias/Anchor" do
it "is correctly supported for jobs" do
config = <<EOT
let(:config_processor) { GitlabCiYamlProcessor.new(config) }
subject { config_processor.builds_for_stage_and_ref("build", "master") }
shared_examples 'job_templates_handling' do
it "is correctly supported for jobs" do
expect(subject.size).to eq(2)
expect(subject.first).to eq({
except: nil,
stage: "build",
stage_idx: 0,
name: :job1,
only: nil,
commands: "execute-script-for-job",
tag_list: [],
options: {},
when: "on_success",
allow_failure: false
})
expect(subject.second).to eq({
except: nil,
stage: "build",
stage_idx: 0,
name: :job2,
only: nil,
commands: "execute-script-for-job",
tag_list: [],
options: {},
when: "on_success",
allow_failure: false
})
end
end
context 'when template is a job' do
let(:config) do
<<EOT
job1: &JOBTMPL
stage: build
script: execute-script-for-job
job2: *JOBTMPL
EOT
end
config_processor = GitlabCiYamlProcessor.new(config)
it_behaves_like 'job_templates_handling'
end
expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(2)
expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({
except: nil,
stage: "test",
stage_idx: 1,
name: :job1,
only: nil,
commands: "execute-script-for-job",
tag_list: [],
options: {},
when: "on_success",
allow_failure: false
})
expect(config_processor.builds_for_stage_and_ref("test", "master").second).to eq({
except: nil,
stage: "test",
stage_idx: 1,
name: :job2,
only: nil,
commands: "execute-script-for-job",
tag_list: [],
options: {},
when: "on_success",
allow_failure: false
})
context 'when template is a hidden job' do
let(:config) do
<<EOT
.template: &JOBTMPL
stage: build
script: execute-script-for-job
job1: *JOBTMPL
job2: *JOBTMPL
EOT
end
it_behaves_like 'job_templates_handling'
end
context 'when job adds its own keys to a template definition' do
let(:config) do
<<EOT
.template: &JOBTMPL
stage: build
job1:
<<: *JOBTMPL
script: execute-script-for-job
job2:
<<: *JOBTMPL
script: execute-script-for-job
EOT
end
it_behaves_like 'job_templates_handling'
end
end
......
......@@ -719,11 +719,8 @@ describe Project, models: true do
with('foo.wiki', project).
and_return(wiki)
expect(repo).to receive(:expire_cache)
expect(repo).to receive(:expire_emptiness_caches)
expect(wiki).to receive(:expire_cache)
expect(wiki).to receive(:expire_emptiness_caches)
expect(repo).to receive(:before_delete)
expect(wiki).to receive(:before_delete)
project.expire_caches_before_rename('foo')
end
......
......@@ -132,7 +132,6 @@ describe Repository, models: true do
it { expect(subject.basename).to eq('a/b/c') }
end
end
end
describe '#license_blob' do
......@@ -148,39 +147,18 @@ describe Repository, models: true do
expect(repository.license_blob).to be_nil
end
it 'favors license file with no extension' do
repository.commit_file(user, 'LICENSE', Licensee::License.new('mit').content, 'Add LICENSE', 'master', false)
repository.commit_file(user, 'LICENSE.md', Licensee::License.new('mit').content, 'Add LICENSE.md', 'master', false)
expect(repository.license_blob.name).to eq('LICENSE')
end
it 'favors .md file to .txt' do
repository.commit_file(user, 'LICENSE.md', Licensee::License.new('mit').content, 'Add LICENSE.md', 'master', false)
repository.commit_file(user, 'LICENSE.txt', Licensee::License.new('mit').content, 'Add LICENSE.txt', 'master', false)
expect(repository.license_blob.name).to eq('LICENSE.md')
end
it 'favors LICENCE to LICENSE' do
repository.commit_file(user, 'LICENSE', Licensee::License.new('mit').content, 'Add LICENSE', 'master', false)
repository.commit_file(user, 'LICENCE', Licensee::License.new('mit').content, 'Add LICENCE', 'master', false)
expect(repository.license_blob.name).to eq('LICENCE')
end
it 'favors LICENSE to COPYING' do
repository.commit_file(user, 'LICENSE', Licensee::License.new('mit').content, 'Add LICENSE', 'master', false)
repository.commit_file(user, 'COPYING', Licensee::License.new('mit').content, 'Add COPYING', 'master', false)
it 'detects license file with no recognizable open-source license content' do
repository.commit_file(user, 'LICENSE', 'Copyright!', 'Add LICENSE', 'master', false)
expect(repository.license_blob.name).to eq('LICENSE')
end
it 'favors LICENCE to COPYING' do
repository.commit_file(user, 'LICENCE', Licensee::License.new('mit').content, 'Add LICENCE', 'master', false)
repository.commit_file(user, 'COPYING', Licensee::License.new('mit').content, 'Add COPYING', 'master', false)
%w[LICENSE LICENCE LiCensE LICENSE.md LICENSE.foo COPYING COPYING.md].each do |filename|
it "detects '#{filename}'" do
repository.commit_file(user, filename, Licensee::License.new('mit').content, "Add #{filename}", 'master', false)
expect(repository.license_blob.name).to eq('LICENCE')
expect(repository.license_blob.name).to eq(filename)
end
end
end
......@@ -190,8 +168,14 @@ describe Repository, models: true do
repository.remove_file(user, 'LICENSE', 'Remove LICENSE', 'master')
end
it 'returns "no-license" when no license is detected' do
expect(repository.license_key).to eq('no-license')
it 'returns nil when no license is detected' do
expect(repository.license_key).to be_nil
end
it 'detects license file with no recognizable open-source license content' do
repository.commit_file(user, 'LICENSE', 'Copyright!', 'Add LICENSE', 'master', false)
expect(repository.license_key).to be_nil
end
it 'returns the license key' do
......
......@@ -148,14 +148,24 @@ describe API::API, 'ProjectHooks', api: true do
expect(response.status).to eq(200)
end
it "should return success when deleting non existent hook" do
it "should return a 404 error when deleting non existent hook" do
delete api("/projects/#{project.id}/hooks/42", user)
expect(response.status).to eq(200)
expect(response.status).to eq(404)
end
it "should return a 405 error if hook id not given" do
delete api("/projects/#{project.id}/hooks", user)
expect(response.status).to eq(405)
end
it "shold return a 404 if a user attempts to delete project hooks he/she does not own" do
test_user = create(:user)
other_project = create(:project)
other_project.team << [test_user, :master]
delete api("/projects/#{other_project.id}/hooks/#{hook.id}", test_user)
expect(response.status).to eq(404)
expect(WebHook.exists?(hook.id)).to be_truthy
end
end
end