Commit 223053c1 authored by Micaël Bergeron

Merge remote-tracking branch 'origin/master' into ee-40781-os-to-ce

parents 52783cc7 371de4d9
......@@ -35,15 +35,22 @@
"import/no-commonjs": "error",
"no-multiple-empty-lines": ["error", { "max": 1 }],
"promise/catch-or-return": "error",
"no-underscore-dangle": ["error", { "allow": ["__", "_links"]}],
"vue/html-self-closing": ["error", {
"html": {
"void": "always",
"normal": "never",
"component": "always"
},
"svg": "always",
"math": "always"
}]
"no-underscore-dangle": ["error", { "allow": ["__", "_links"] }],
"no-mixed-operators": 0,
"space-before-function-paren": 0,
"curly": 0,
"arrow-parens": 0,
"vue/html-self-closing": [
"error",
{
"html": {
"void": "always",
"normal": "never",
"component": "always"
},
"svg": "always",
"math": "always"
}
]
}
}
......@@ -11,6 +11,7 @@
eslint-report.html
/.gitlab_shell_secret
.idea
/.vscode/*
/.rbenv-version
.rbx/
/.ruby-gemset
......
......@@ -77,19 +77,32 @@ stages:
# (as many users are still using 9.2).
- postgres:9.2
- redis:alpine
- docker.elastic.co/elasticsearch/elasticsearch:5.5.2
.use-pg-9-6-no-elasticsearch: &use-pg-9-6-no-elasticsearch
.use-mysql: &use-mysql
services:
- mysql:latest
- redis:alpine
# BEGIN EE-only service helpers
.use-pg-9-6: &use-pg-9-6
services:
- postgres:9.6
- redis:alpine
.use-mysql: &use-mysql
.use-pg-with-elasticsearch: &use-pg-with-elasticsearch
services:
- postgres:9.2
- redis:alpine
- docker.elastic.co/elasticsearch/elasticsearch:5.5.2
.use-mysql-with-elasticsearch: &use-mysql-with-elasticsearch
services:
- mysql:latest
- redis:alpine
- docker.elastic.co/elasticsearch/elasticsearch:5.5.2
# END EE-only service helpers
# Skip all jobs except the ones that begin with 'docs/'.
# Used for commits including ONLY documentation changes.
......@@ -179,15 +192,15 @@ stages:
.rspec-ee-pg: &rspec-ee-pg
<<: *rspec-metadata-ee
<<: *use-pg
<<: *use-pg-with-elasticsearch
.rspec-ee-mysql: &rspec-ee-mysql
<<: *rspec-metadata-ee
<<: *use-mysql
<<: *use-mysql-with-elasticsearch
.rspec-geo-pg-9-6: &rspec-metadata-pg-geo
<<: *rspec-metadata
<<: *use-pg-9-6-no-elasticsearch
<<: *use-pg-9-6
stage: test
script:
- export NO_KNAPSACK=1
......
{
"singleQuote": true,
"trailingComma": "all"
}
Please view this file on the master branch; on stable branches it's out of date.
## 10.5.5 (2018-03-15)
### Fixed (1 change)
- Geo: Fix Wiki resync when Wiki repository does not exist.
## 10.5.4 (2018-03-08)
### Fixed (4 changes)
......
......@@ -2,6 +2,20 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.
## 10.5.5 (2018-03-15)
### Fixed (3 changes)
- Fix missing uploads after group transfer. !17658
- Fix code and wiki search results when filename is non-ASCII.
- Remove double caching of Repository#empty?.
### Performance (2 changes)
- Adding missing indexes on taggings table.
- Add index on section_name_id on ci_build_trace_sections table.
## 10.5.4 (2018-03-08)
### Fixed (11 changes)
......
......@@ -34,7 +34,7 @@ gem 'omniauth-gitlab', '~> 1.0.2'
gem 'omniauth-google-oauth2', '~> 0.5.2'
gem 'omniauth-kerberos', '~> 0.3.0', group: :kerberos
gem 'omniauth-oauth2-generic', '~> 0.2.2'
gem 'omniauth-saml', '~> 1.7.0'
gem 'omniauth-saml', '~> 1.10.0'
gem 'omniauth-shibboleth', '~> 1.2.0'
gem 'omniauth-twitter', '~> 1.2.0'
gem 'omniauth_crowd', '~> 2.2.0'
......@@ -162,7 +162,7 @@ end
gem 'state_machines-activerecord', '~> 0.4.0'
# Issue tags
gem 'acts-as-taggable-on', '~> 4.0'
gem 'acts-as-taggable-on', '~> 5.0'
# Background jobs
gem 'sidekiq', '~> 5.0'
......@@ -275,7 +275,7 @@ gem 'base32', '~> 0.3.0'
gem "gitlab-license", "~> 1.0"
# Sentry integration
gem 'sentry-raven', '~> 2.5.3'
gem 'sentry-raven', '~> 2.7'
gem 'premailer-rails', '~> 1.9.7'
......@@ -392,14 +392,14 @@ group :test do
gem 'test-prof', '~> 0.2.5'
end
gem 'octokit', '~> 4.6.2'
gem 'octokit', '~> 4.8'
gem 'mail_room', '~> 0.9.1'
gem 'email_reply_trimmer', '~> 0.1'
gem 'html2text'
gem 'ruby-prof', '~> 0.16.2'
gem 'ruby-prof', '~> 0.17.0'
# OAuth
gem 'oauth2', '~> 1.4'
......@@ -415,7 +415,7 @@ gem 'sys-filesystem', '~> 1.1.6'
gem 'net-ntp'
# SSH host key support
gem 'net-ssh', '~> 4.1.0'
gem 'net-ssh', '~> 4.2.0'
gem 'sshkey', '~> 1.9.0'
# Required for ED25519 SSH host key support
......
......@@ -40,8 +40,8 @@ GEM
minitest (~> 5.1)
thread_safe (~> 0.3, >= 0.3.4)
tzinfo (~> 1.1)
acts-as-taggable-on (4.0.0)
activerecord (>= 4.0)
acts-as-taggable-on (5.0.0)
activerecord (>= 4.2.8)
adamantium (0.2.0)
ice_nine (~> 0.11.0)
memoizable (~> 0.4.0)
......@@ -416,7 +416,7 @@ GEM
thor
tilt
hashdiff (0.3.4)
hashie (3.5.6)
hashie (3.5.7)
hashie-forbidden_attributes (0.1.1)
hashie (>= 3.0)
health_check (2.6.0)
......@@ -443,7 +443,7 @@ GEM
json (~> 1.8)
multi_xml (>= 0.5.2)
httpclient (2.8.3)
i18n (0.9.1)
i18n (0.9.5)
concurrent-ruby (~> 1.0)
ice_nine (0.11.2)
influxdb (0.2.3)
......@@ -539,7 +539,7 @@ GEM
mysql2 (0.4.10)
net-ldap (0.16.0)
net-ntp (2.1.3)
net-ssh (4.1.0)
net-ssh (4.2.0)
netrc (0.11.0)
nokogiri (1.8.2)
mini_portile2 (~> 2.3.0)
......@@ -551,12 +551,12 @@ GEM
multi_json (~> 1.3)
multi_xml (~> 0.5)
rack (>= 1.2, < 3)
octokit (4.6.2)
octokit (4.8.0)
sawyer (~> 0.8.0, >= 0.5.3)
oj (2.17.5)
omniauth (1.4.2)
omniauth (1.4.3)
hashie (>= 1.2, < 4)
rack (>= 1.0, < 3)
rack (>= 1.6.2, < 3)
omniauth-auth0 (1.4.1)
omniauth-oauth2 (~> 1.1)
omniauth-authentiq (0.3.1)
......@@ -595,9 +595,9 @@ GEM
omniauth (~> 1.2)
omniauth-oauth2-generic (0.2.2)
omniauth-oauth2 (~> 1.0)
omniauth-saml (1.7.0)
omniauth (~> 1.3)
ruby-saml (~> 1.4)
omniauth-saml (1.10.0)
omniauth (~> 1.3, >= 1.3.2)
ruby-saml (~> 1.7)
omniauth-shibboleth (1.2.1)
omniauth (>= 1.0.0)
omniauth-twitter (1.2.1)
......@@ -676,7 +676,7 @@ GEM
pry (>= 0.9.10)
public_suffix (3.0.2)
pyu-ruby-sasl (0.0.3.3)
rack (1.6.8)
rack (1.6.9)
rack-accept (0.4.5)
rack (>= 0.4)
rack-attack (4.4.1)
......@@ -829,9 +829,9 @@ GEM
i18n
ruby-fogbugz (0.2.1)
crack (~> 0.4)
ruby-prof (0.16.2)
ruby-prof (0.17.0)
ruby-progressbar (1.9.0)
ruby-saml (1.4.1)
ruby-saml (1.7.2)
nokogiri (>= 1.5.10)
ruby_parser (3.9.0)
sexp_processor (~> 4.1)
......@@ -870,7 +870,7 @@ GEM
selenium-webdriver (3.5.0)
childprocess (~> 0.5)
rubyzip (~> 1.0)
sentry-raven (2.5.3)
sentry-raven (2.7.2)
faraday (>= 0.7.6, < 1.0)
settingslogic (2.0.9)
sexp_processor (4.9.0)
......@@ -959,7 +959,7 @@ GEM
truncato (0.7.10)
htmlentities (~> 4.3.1)
nokogiri (~> 1.8.0, >= 1.7.0)
tzinfo (1.2.4)
tzinfo (1.2.5)
thread_safe (~> 0.1)
u2f (0.2.1)
uber (0.1.0)
......@@ -1020,7 +1020,7 @@ DEPENDENCIES
RedCloth (~> 4.3.2)
ace-rails-ap (~> 4.1.0)
activerecord_sane_schema_dumper (= 0.2)
acts-as-taggable-on (~> 4.0)
acts-as-taggable-on (~> 5.0)
addressable (~> 2.5.2)
akismet (~> 2.0)
allocations (~> 1.0)
......@@ -1140,10 +1140,10 @@ DEPENDENCIES
mysql2 (~> 0.4.10)
net-ldap
net-ntp
net-ssh (~> 4.1.0)
net-ssh (~> 4.2.0)
nokogiri (~> 1.8.2)
oauth2 (~> 1.4)
octokit (~> 4.6.2)
octokit (~> 4.8)
oj (~> 2.17.4)
omniauth (~> 1.4.2)
omniauth-auth0 (~> 1.4.1)
......@@ -1156,7 +1156,7 @@ DEPENDENCIES
omniauth-google-oauth2 (~> 0.5.2)
omniauth-kerberos (~> 0.3.0)
omniauth-oauth2-generic (~> 0.2.2)
omniauth-saml (~> 1.7.0)
omniauth-saml (~> 1.10.0)
omniauth-shibboleth (~> 1.2.0)
omniauth-twitter (~> 1.2.0)
omniauth_crowd (~> 2.2.0)
......@@ -1206,7 +1206,7 @@ DEPENDENCIES
rubocop (~> 0.52.1)
rubocop-rspec (~> 1.22.1)
ruby-fogbugz (~> 0.2.1)
ruby-prof (~> 0.16.2)
ruby-prof (~> 0.17.0)
ruby_parser (~> 3.8)
rufus-scheduler (~> 3.4)
rugged (~> 0.26.0)
......@@ -1216,7 +1216,7 @@ DEPENDENCIES
seed-fu (~> 2.3.7)
select2-rails (~> 3.5.9)
selenium-webdriver (~> 3.5)
sentry-raven (~> 2.5.3)
sentry-raven (~> 2.7)
settingslogic (~> 2.0.9)
sham_rack (~> 1.3.6)
shoulda-matchers (~> 3.1.2)
......
<script>
import $ from 'jquery';
import eventHub from '../eventhub';
import ProjectSelect from 'ee/boards/components/project_select.vue'; // eslint-disable-line import/first
import ProjectSelect from './project_select.vue';
import ListIssue from '../models/issue';
const Store = gl.issueBoards.BoardsStore;
......
......@@ -73,6 +73,7 @@ export default class MergeRequestTabs {
constructor({ action, setUrl, stubLocation } = {}) {
const mergeRequestTabs = document.querySelector('.js-tabs-affix');
const navbar = document.querySelector('.navbar-gitlab');
const peek = document.getElementById('peek');
const paddingTop = 16;
this.diffsLoaded = false;
......@@ -86,6 +87,10 @@ export default class MergeRequestTabs {
this.showTab = this.showTab.bind(this);
this.stickyTop = navbar ? navbar.offsetHeight - paddingTop : 0;
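// If the performance bar (the '#peek' element) is rendered, include its height in the sticky offset.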
if (peek) {
this.stickyTop += peek.offsetHeight;
}
if (mergeRequestTabs) {
this.stickyTop += mergeRequestTabs.offsetHeight;
}
......
import Vue from 'vue';
import commitPipelineStatus from '~/projects/tree/components/commit_pipeline_status_component.vue';
import BlobViewer from '~/blob/viewer/index';
import initBlob from '~/pages/projects/init_blob';
document.addEventListener('DOMContentLoaded', () => {
new BlobViewer(); // eslint-disable-line no-new
initBlob();
const CommitPipelineStatusEl = document.querySelector('.js-commit-pipeline-status');
const statusLink = document.querySelector('.commit-actions .ci-status-link');
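// If a static CI status link is already rendered, swap it for the live-updating Vue component below.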
if (statusLink) {
statusLink.remove();
// eslint-disable-next-line no-new
new Vue({
el: CommitPipelineStatusEl,
components: {
commitPipelineStatus,
},
render(createElement) {
return createElement('commit-pipeline-status', {
props: {
endpoint: CommitPipelineStatusEl.dataset.endpoint,
},
});
},
});
}
});
......@@ -7,29 +7,24 @@ module Storage
raise Gitlab::UpdatePathError.new('Namespace cannot be moved, because at least one project has tags in container registry')
end
expires_full_path_cache
# Move the namespace directory in all storage paths used by member projects
repository_storage_paths.each do |repository_storage_path|
# Ensure old directory exists before moving it
gitlab_shell.add_namespace(repository_storage_path, full_path_was)
# Ensure new directory exists before moving it (if there's a parent)
gitlab_shell.add_namespace(repository_storage_path, parent.full_path) if parent
parent_was = if parent_changed? && parent_id_was.present?
Namespace.find(parent_id_was) # raise NotFound early if needed
end
unless gitlab_shell.mv_namespace(repository_storage_path, full_path_was, full_path)
expires_full_path_cache
Rails.logger.error "Exception moving path #{repository_storage_path} from #{full_path_was} to #{full_path}"
move_repositories
# if we cannot move namespace directory we should rollback
# db changes in order to prevent out of sync between db and fs
raise Gitlab::UpdatePathError.new('namespace directory cannot be moved')
end
if parent_changed?
former_parent_full_path = parent_was&.full_path
parent_full_path = parent&.full_path
Gitlab::UploadsTransfer.new.move_namespace(path, former_parent_full_path, parent_full_path)
Gitlab::PagesTransfer.new.move_namespace(path, former_parent_full_path, parent_full_path)
else
Gitlab::UploadsTransfer.new.rename_namespace(full_path_was, full_path)
Gitlab::PagesTransfer.new.rename_namespace(full_path_was, full_path)
end
Gitlab::UploadsTransfer.new.rename_namespace(full_path_was, full_path)
Gitlab::PagesTransfer.new.rename_namespace(full_path_was, full_path)
remove_exports!
# If repositories moved successfully we need to
......@@ -57,6 +52,26 @@ module Storage
private
def move_repositories
# Move the namespace directory in all storage paths used by member projects
repository_storage_paths.each do |repository_storage_path|
# Ensure old directory exists before moving it
gitlab_shell.add_namespace(repository_storage_path, full_path_was)
# Ensure new directory exists before moving it (if there's a parent)
gitlab_shell.add_namespace(repository_storage_path, parent.full_path) if parent
unless gitlab_shell.mv_namespace(repository_storage_path, full_path_was, full_path)
Rails.logger.error "Exception moving path #{repository_storage_path} from #{full_path_was} to #{full_path}"
# if we cannot move namespace directory we should rollback
# db changes in order to prevent out of sync between db and fs
raise Gitlab::UpdatePathError.new('namespace directory cannot be moved')
end
end
end
def old_repository_storage_paths
@old_repository_storage_paths ||= repository_storage_paths
end
......
......@@ -605,9 +605,10 @@ class MergeRequest < ActiveRecord::Base
return unless open?
old_diff_refs = self.diff_refs
new_diff = create_merge_request_diff
MergeRequests::MergeRequestDiffCacheService.new.execute(self, new_diff)
create_merge_request_diff
MergeRequests::MergeRequestDiffCacheService.new.execute(self)
new_diff_refs = self.diff_refs
update_diff_discussion_positions(
......
module MergeRequests
class MergeRequestDiffCacheService
def execute(merge_request)
def execute(merge_request, new_diff)
# Iterating over the diff files caches all of the highlighted diff information
merge_request.diffs.diff_files.to_a
# Remove cache for all diffs on this MR. Do not use the association on the
# model, as that will interfere with other actions happening when
# reloading the diff.
MergeRequestDiff.where(merge_request: merge_request).each do |merge_request_diff|
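# Keep the cache of the newly created diff; it was just written by the iteration above.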
next if merge_request_diff == new_diff
merge_request_diff.diffs.clear_cache!
end
end
end
end
......@@ -9,6 +9,10 @@
%span.runner-state.runner-state-specific
Specific
- add_to_breadcrumbs _("Runners"), admin_runners_path
- breadcrumb_title "##{@runner.id}"
- @no_container = true
- if @runner.shared?
.bs-callout.bs-callout-success
%h4 This Runner will process jobs from ALL UNASSIGNED projects
......
---
title: Set breadcrumb for admin/runners/show
merge_request: 17431
author: Takuya Noguchi
type: fixed
---
title: Update ruby-saml to 1.7.2 and omniauth-saml to 1.10.0
merge_request: 17734
author: Takuya Noguchi
type: security
---
title: Stop caching highlighted diffs in Redis unnecessarily
merge_request: 17746
author:
type: performance
---
title: Add slash command for moving issues
merge_request:
author: Adam Pahlevi
type: added
---
title: Fix importing multiple assignees from GitLab export
merge_request: 17718
author:
type: fixed
---
title: Remove double caching of Repository#empty?
merge_request:
author:
type: fixed
---
title: Fix relative uri when "#" is in branch name
merge_request:
author: Jan
type: fixed
---
title: Fix code and wiki search results when filename is non-ASCII
merge_request:
author:
type: fixed
---
title: Add realtime pipeline status for adding/viewing files
merge_request: 17705
author:
type: other
# Creates a project with labeled issues for a user.
# Run this single seed file using: rake db:seed_fu FILTER=labeled USER_ID=74.
# If a USER_ID is not provided it will use the last created user.
require './spec/support/sidekiq'
class Gitlab::Seeder::LabeledIssues
include ::Gitlab::Utils
def initialize(user)
@user = user
end
def seed!
Sidekiq::Testing.inline! do
group = create_group
create_projects(group)
create_labels(group)
create_issues(group)
end
print '.'
end
private
def create_group
group_name = "group_of_#{@user.username}_#{SecureRandom.hex(4)}"
group_params = {
name: group_name,
path: group_name,
description: FFaker::Lorem.sentence
}
Groups::CreateService.new(@user, group_params).execute
end
def create_projects(group)
5.times do
project_name = "project_#{SecureRandom.hex(6)}"
params = {
namespace_id: group.id,
name: project_name,
description: FFaker::Lorem.sentence,
visibility_level: Gitlab::VisibilityLevel.values.sample
}
Projects::CreateService.new(@user, params).execute
end
end
def create_labels(group)
group.projects.each do |project|
5.times do
label_title = FFaker::Vehicle.model
Labels::CreateService.new(title: label_title, color: "#69D100").execute(project: project)
end
end
10.times do
label_title = FFaker::Product.brand
Labels::CreateService.new(title: label_title).execute(group: group)
end
end
def create_issues(group)
# Get only group labels
group_labels =
LabelsFinder.new(@user, group_id: group.id).execute.where.not(group_id: nil)
group.projects.each do |project|
label_ids = project.labels.pluck(:id).sample(5)
label_ids.push(*group.labels.sample(4))
20.times do
issue_params = {
title: FFaker::Lorem.sentence(6),
description: FFaker::Lorem.sentence,
state: 'opened',
label_ids: label_ids
}
Issues::CreateService.new(project, @user, issue_params).execute if project.project_feature.present?
end
end
end
end
Gitlab::Seeder.quiet do
user_id = ENV['USER_ID']
user =
if user_id.present?
User.find(user_id)
else
User.last
end
Gitlab::Seeder::LabeledIssues.new(user).seed!
end
......@@ -3,13 +3,15 @@ class CreateUserInteractedProjectsTable < ActiveRecord::Migration
DOWNTIME = false
disable_ddl_transaction!
INDEX_NAME = 'user_interacted_projects_non_unique_index'
def up
create_table :user_interacted_projects, id: false do |t|
t.references :user, null: false
t.references :project, null: false
end
add_index :user_interacted_projects, [:project_id, :user_id], name: INDEX_NAME
end
def down
......
require_relative '../migrate/20180223120443_create_user_interacted_projects_table.rb'
# rubocop:disable AddIndex
# rubocop:disable AddConcurrentForeignKey
class BuildUserInteractedProjectsTable < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
UNIQUE_INDEX_NAME = 'index_user_interacted_projects_on_project_id_and_user_id'
disable_ddl_transaction!
def up
......@@ -13,16 +18,8 @@ class BuildUserInteractedProjectsTable < ActiveRecord::Migration
MysqlStrategy.new
end.up
unless index_exists?(:user_interacted_projects, [:project_id, :user_id])
add_concurrent_index :user_interacted_projects, [:project_id, :user_id], unique: true
end
unless foreign_key_exists?(:user_interacted_projects, :user_id)
add_concurrent_foreign_key :user_interacted_projects, :users, column: :user_id, on_delete: :cascade
end
unless foreign_key_exists?(:user_interacted_projects, :project_id)
add_concurrent_foreign_key :user_interacted_projects, :projects, column: :project_id, on_delete: :cascade
if index_exists_by_name?(:user_interacted_projects, CreateUserInteractedProjectsTable::INDEX_NAME)
remove_concurrent_index_by_name :user_interacted_projects, CreateUserInteractedProjectsTable::INDEX_NAME
end
end
......@@ -37,31 +34,16 @@ class BuildUserInteractedProjectsTable < ActiveRecord::Migration
remove_foreign_key :user_interacted_projects, :projects
end
if index_exists_by_name?(:user_interacted_projects, 'index_user_interacted_projects_on_project_id_and_user_id')
remove_concurrent_index_by_name :user_interacted_projects, 'index_user_interacted_projects_on_project_id_and_user_id'
if index_exists_by_name?(:user_interacted_projects, UNIQUE_INDEX_NAME)
remove_concurrent_index_by_name :user_interacted_projects, UNIQUE_INDEX_NAME
end
end
private
# Rails' index_exists? doesn't work when you only give it a table and index
# name. As such we have to use some extra code to check if an index exists for
# a given name.
def index_exists_by_name?(table, index)
indexes_for_table[table].include?(index)
end
def indexes_for_table
@indexes_for_table ||= Hash.new do |hash, table_name|
hash[table_name] = indexes(table_name).map(&:name)
unless index_exists_by_name?(:user_interacted_projects, CreateUserInteractedProjectsTable::INDEX_NAME)
add_concurrent_index :user_interacted_projects, [:project_id, :user_id], name: CreateUserInteractedProjectsTable::INDEX_NAME
end
end
def foreign_key_exists?(table, column)
foreign_keys(table).any? do |key|
key.options[:column] == column.to_s
end
end
private
class PostgresStrategy < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
......@@ -71,33 +53,86 @@ class BuildUserInteractedProjectsTable < ActiveRecord::Migration
def up
with_index(:events, [:author_id, :project_id], name: 'events_user_interactions_temp', where: 'project_id IS NOT NULL') do
iteration = 0
records = 0
begin
Rails.logger.info "Building user_interacted_projects table, batch ##{iteration}"
result = execute <<~SQL
insert_missing_records
# Do this once without lock to speed up the second invocation
remove_duplicates
with_table_lock(:user_interacted_projects) do
remove_duplicates
create_unique_index
end
remove_without_project
with_table_lock(:user_interacted_projects, :projects) do
remove_without_project
create_fk :user_interacted_projects, :projects, :project_id
end
remove_without_user
with_table_lock(:user_interacted_projects, :users) do
remove_without_user
create_fk :user_interacted_projects, :users, :user_id
end
end
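# Refresh table statistics for the query planner after the bulk insert.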
execute "ANALYZE user_interacted_projects"
end
private
def insert_missing_records
iteration = 0
records = 0
begin
Rails.logger.info "Building user_interacted_projects table, batch ##{iteration}"
result = execute <<~SQL
INSERT INTO user_interacted_projects (user_id, project_id)
SELECT e.user_id, e.project_id
FROM (SELECT DISTINCT author_id AS user_id, project_id FROM events WHERE project_id IS NOT NULL) AS e
LEFT JOIN user_interacted_projects ucp USING (user_id, project_id)
WHERE ucp.user_id IS NULL
LIMIT #{BATCH_SIZE}
SQL
iteration += 1
records += result.cmd_tuples
Rails.logger.info "Building user_interacted_projects table, batch ##{iteration} complete, created #{records} overall"
Kernel.sleep(SLEEP_TIME) if result.cmd_tuples > 0
rescue ActiveRecord::InvalidForeignKey => e
Rails.logger.info "Retry on InvalidForeignKey: #{e}"
retry
end while result.cmd_tuples > 0
end
SQL
iteration += 1
records += result.cmd_tuples
Rails.logger.info "Building user_interacted_projects table, batch ##{iteration} complete, created #{records} overall"
Kernel.sleep(SLEEP_TIME) if result.cmd_tuples > 0
end while result.cmd_tuples > 0
end
execute "ANALYZE user_interacted_projects"
def remove_duplicates
execute <<~SQL
WITH numbered AS (select ctid, ROW_NUMBER() OVER (PARTITION BY (user_id, project_id)) as row_number, user_id, project_id from user_interacted_projects)
DELETE FROM user_interacted_projects WHERE ctid IN (SELECT ctid FROM numbered WHERE row_number > 1);
SQL
end
def remove_without_project
execute "DELETE FROM user_interacted_projects WHERE NOT EXISTS (SELECT 1 FROM projects WHERE id = user_interacted_projects.project_id)"
end
private
def remove_without_user
execute "DELETE FROM user_interacted_projects WHERE NOT EXISTS (SELECT 1 FROM users WHERE id = user_interacted_projects.user_id)"
end
def create_fk(table, target, column)
return if foreign_key_exists?(table, column)
add_foreign_key table, target, column: column, on_delete: :cascade
end
def create_unique_index
return if index_exists_by_name?(:user_interacted_projects, UNIQUE_INDEX_NAME)
add_index :user_interacted_projects, [:project_id, :user_id], unique: true, name: UNIQUE_INDEX_NAME
end
# Protect table against concurrent data changes while still allowing reads
def with_table_lock(*tables)
ActiveRecord::Base.connection.transaction do
execute "LOCK TABLE #{tables.join(", ")} IN SHARE MODE"
yield
end
end
def with_index(*args)
add_concurrent_index(*args) unless index_exists?(*args)
......@@ -118,7 +153,18 @@ class BuildUserInteractedProjectsTable < ActiveRecord::Migration
LEFT JOIN user_interacted_projects ucp USING (user_id, project_id)
WHERE ucp.user_id IS NULL
SQL
unless index_exists?(:user_interacted_projects, [:project_id, :user_id])
add_concurrent_index :user_interacted_projects, [:project_id, :user_id], unique: true, name: UNIQUE_INDEX_NAME
end
unless foreign_key_exists?(:user_interacted_projects, :user_id)
add_concurrent_foreign_key :user_interacted_projects, :users, column: :user_id, on_delete: :cascade
end
unless foreign_key_exists?(:user_interacted_projects, :project_id)
add_concurrent_foreign_key :user_interacted_projects, :projects, column: :project_id, on_delete: :cascade
end
end
end
end
......@@ -29,7 +29,7 @@ in your testing/production environment.
GitLab stores a number of secret values in the `/etc/gitlab/gitlab-secrets.json`
file which *must* match between the primary and secondary nodes. Until there is
a means of automatically replicating these between nodes (see issue [gitlab-org/gitlab-ee#3789]),
they must be manually replicated to the secondary.
1. SSH into the **primary** node, and execute the command below:
......@@ -127,7 +127,11 @@ keys must be manually replicated to the secondary node.
1. Restart sshd:
```bash
service ssh restart
# Debian or Ubuntu installations
sudo service ssh reload
# CentOS installations
sudo service sshd reload
```
### Step 3. Add the secondary GitLab node
......@@ -145,13 +149,13 @@ keys must be manually replicated to the secondary node.
```
gitlab-ctl restart
```
Check if there are any common issues with your Geo setup by running:
```
gitlab-rake gitlab:geo:check
```
1. SSH into your GitLab **primary** server and log in as root to verify that the
secondary is reachable and to check for any common issues with your Geo setup:
......@@ -164,13 +168,13 @@ replicating missing data from the primary in a process known as **backfill**.
Meanwhile, the primary node will start to notify the secondary of any changes, so
that the secondary can act on those notifications immediately.
Make sure the secondary instance is running and accessible.
You can log in to the secondary node with the same credentials as used on the primary.
### Step 4. (Optional) Enabling hashed storage (from GitLab 10.0)
CAUTION: **Warning**:
Hashed storage is in **Beta**. It is not considered production-ready. See
[Hashed Storage] for more detail, and for the latest updates, check
infrastructure issue [gitlab-com/infrastructure#2821].
......
......@@ -47,8 +47,11 @@ There's also a collection of repositories with [example projects](https://gitlab
## Static Application Security Testing (SAST)
- **(Ultimate)** [Scan your code for vulnerabilities](sast.md)
- [Scan your Docker images for vulnerabilities](sast_docker.md)
**(Ultimate)** [Scan your code for vulnerabilities](sast.md)
## Container Scanning
[Scan your Docker images for vulnerabilities](container_scanning.md)
## Dynamic Application Security Testing (DAST)
......
......@@ -16,18 +16,26 @@ codequality:
- docker:dind
script:
- docker pull codeclimate/codeclimate
- docker run --env CODECLIMATE_CODE="$PWD" --volume "$PWD":/code --volume /var/run/docker.sock:/var/run/docker.sock --volume /tmp/cc:/tmp/cc codeclimate/codeclimate:0.69.0 init
- docker run --env CODECLIMATE_CODE="$PWD" --volume "$PWD":/code --volume /var/run/docker.sock:/var/run/docker.sock --volume /tmp/cc:/tmp/cc codeclimate/codeclimate:0.69.0 analyze -f json > codeclimate.json || true
- export SP_VERSION=$(echo "$CI_SERVER_VERSION" | sed 's/^\([0-9]*\)\.\([0-9]*\).*/\1-\2-stable/')
- docker run
--env SOURCE_CODE="$PWD" \
--volume "$PWD":/code \
--volume /var/run/docker.sock:/var/run/docker.sock \
"registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code
artifacts:
paths: [codeclimate.json]
```
This will create a `codequality` job in your CI pipeline and will allow you to
download and analyze the report artifact in JSON format.
The above example will create a `codequality` job in your CI/CD pipeline which
will scan your source code for code quality issues. The report will be saved
as an artifact that you can later download and analyze.
For [GitLab Starter][ee] users, this information can be automatically
extracted and shown right in the merge request widget. [Learn more on code quality
diffs in merge requests](../../user/project/merge_requests/code_quality_diff.md).
TIP: **Tip:**
Starting with [GitLab Starter][ee] 9.3, this information will
be automatically extracted and shown right in the merge request widget. To do
so, the CI/CD job must be named `codequality` and the artifact path must be
`codeclimate.json`.
[Learn more on code quality diffs in merge requests](../../user/project/merge_requests/code_quality_diff.md).
[cli]: https://github.com/codeclimate/codeclimate
[dind]: ../docker/using_docker_build.md#use-docker-in-docker-executor
......
# Container Scanning with GitLab CI/CD
You can check your Docker images (or more precisely the containers) for known
vulnerabilities by using [Clair](https://github.com/coreos/clair) and
[clair-scanner](https://github.com/arminc/clair-scanner), two open source tools
for Vulnerability Static Analysis for containers.
All you need is a GitLab Runner with the Docker executor (the shared Runners on
GitLab.com will work fine). You can then add a new job to `.gitlab-ci.yml`,
called `sast:container`:
```yaml
sast:container:
image: docker:latest
variables:
DOCKER_DRIVER: overlay2
## Define two new variables based on GitLab's CI/CD predefined variables
## https://docs.gitlab.com/ee/ci/variables/#predefined-variables-environment-variables
CI_APPLICATION_REPOSITORY: $CI_REGISTRY_IMAGE/$CI_COMMIT_REF_SLUG
CI_APPLICATION_TAG: $CI_COMMIT_SHA
allow_failure: true
services:
- docker:dind
script:
- docker run -d --name db arminc/clair-db:latest
- docker run -p 6060:6060 --link db:postgres -d --name clair arminc/clair-local-scan:v2.0.1
- apk add -U wget ca-certificates
- docker pull ${CI_APPLICATION_REPOSITORY}:${CI_APPLICATION_TAG}
- wget https://github.com/arminc/clair-scanner/releases/download/v8/clair-scanner_linux_amd64
- mv clair-scanner_linux_amd64 clair-scanner
- chmod +x clair-scanner
- touch clair-whitelist.yml
- ./clair-scanner -c http://docker:6060 --ip $(hostname -i) -r gl-sast-container-report.json -l clair.log -w clair-whitelist.yml ${CI_APPLICATION_REPOSITORY}:${CI_APPLICATION_TAG} || true
artifacts:
paths: [gl-sast-container-report.json]
```
The above example will create a `sast:container` job in your CI/CD pipeline, pull
the image from the [Container Registry](../../user/project/container_registry.md)
(whose name is defined from the two `CI_APPLICATION_` variables) and scan it
for possible vulnerabilities. The report will be saved as an artifact that you
can later download and analyze.
If you want to whitelist some specific vulnerabilities, you can do so by defining
them in a [YAML file](https://github.com/arminc/clair-scanner/blob/master/README.md#example-whitelist-yaml-file),
in our case it's named `clair-whitelist.yml`.
TIP: **Tip:**
Starting with [GitLab Ultimate][ee] 10.4, this information will
be automatically extracted and shown right in the merge request widget. To do
so, the CI/CD job must be named `sast:container` and the artifact path must be
`gl-sast-container-report.json`.
[Learn more on container scanning results shown in merge requests](https://docs.gitlab.com/ee/user/project/merge_requests/container_scanning.html).
[ee]: https://about.gitlab.com/products/
# Static Application Security Testing for Docker containers with GitLab CI/CD
You can check your Docker images (or more precisely the containers) for known
vulnerabilities by using [Clair](https://github.com/coreos/clair) and
[clair-scanner](https://github.com/arminc/clair-scanner), two open source tools
for Vulnerability Static Analysis for containers.
All you need is a GitLab Runner with the Docker executor (the shared Runners on
GitLab.com will work fine). You can then add a new job to `.gitlab-ci.yml`,
called `sast:container`:
```yaml
sast:container:
image: docker:latest
variables:
DOCKER_DRIVER: overlay2
## Define two new variables based on GitLab's CI/CD predefined variables
## https://docs.gitlab.com/ee/ci/variables/#predefined-variables-environment-variables
CI_APPLICATION_REPOSITORY: $CI_REGISTRY_IMAGE/$CI_COMMIT_REF_SLUG
CI_APPLICATION_TAG: $CI_COMMIT_SHA
allow_failure: true
services:
- docker:dind
script:
- docker run -d --name db arminc/clair-db:latest
- docker run -p 6060:6060 --link db:postgres -d --name clair arminc/clair-local-scan:v2.0.1
- apk add -U wget ca-certificates
- docker pull ${CI_APPLICATION_REPOSITORY}:${CI_APPLICATION_TAG}
- wget https://github.com/arminc/clair-scanner/releases/download/v8/clair-scanner_linux_amd64
- mv clair-scanner_linux_amd64 clair-scanner
- chmod +x clair-scanner
- touch clair-whitelist.yml
- ./clair-scanner -c http://docker:6060 --ip $(hostname -i) -r gl-sast-container-report.json -l clair.log -w clair-whitelist.yml ${CI_APPLICATION_REPOSITORY}:${CI_APPLICATION_TAG} || true
artifacts:
paths: [gl-sast-container-report.json]
```
The above example will create a `sast:container` job in your CI/CD pipeline, pull
the image from the [Container Registry](../../user/project/container_registry.md)
(whose name is defined from the two `CI_APPLICATION_` variables) and scan it
for possible vulnerabilities. The report will be saved as an artifact that you
can later download and analyze.
If you want to whitelist some specific vulnerabilities, you can do so by defining
them in a [YAML file](https://github.com/arminc/clair-scanner/blob/master/README.md#example-whitelist-yaml-file),
in our case it's named `clair-whitelist.yml`.
TIP: **Tip:**
Starting with [GitLab Ultimate][ee] 10.4, this information will
be automatically extracted and shown right in the merge request widget. To do
so, the CI/CD job must be named `sast:container` and the artifact path must be
`gl-sast-container-report.json`.
[Learn more on application security testing results shown in merge requests](../../user/project/merge_requests/sast_docker.md).
[ee]: https://about.gitlab.com/products/
This document was moved to [another location](./container_scanning.md).
......@@ -966,7 +966,7 @@ tag including only the files that are untracked by Git:
```yaml
job:
artifacts:
name: "${CI_JOB_NAME}_${CI_COMMIT_REF_NAME}"
name: "$CI_JOB_NAME-$CI_COMMIT_REF_NAME"
untracked: true
```
......@@ -975,7 +975,7 @@ To create an archive with a name of the current [stage](#stages) and branch name
```yaml
job:
artifacts:
name: "${CI_JOB_STAGE}_${CI_COMMIT_REF_NAME}"
name: "$CI_JOB_STAGE-$CI_COMMIT_REF_NAME"
untracked: true
```
......@@ -987,7 +987,7 @@ If you use **Windows Batch** to run your shell scripts you need to replace
```yaml
job:
artifacts:
name: "%CI_JOB_STAGE%_%CI_COMMIT_REF_NAME%"
name: "%CI_JOB_STAGE%-%CI_COMMIT_REF_NAME%"
untracked: true
```
......@@ -997,7 +997,7 @@ If you use **Windows PowerShell** to run your shell scripts you need to replace
```yaml
job:
artifacts:
name: "$env:CI_JOB_STAGE_$env:CI_COMMIT_REF_NAME"
name: "$env:CI_JOB_STAGE-$env:CI_COMMIT_REF_NAME"
untracked: true
```
......
......@@ -17,10 +17,13 @@ are very appreciative of the work done by translators and proofreaders!
- French
- Rémy Coutable - [GitLab](https://gitlab.com/rymai), [Crowdin](https://crowdin.com/profile/rymai)
- German
- Indonesian
- Ahmad Naufal Mukhtar - [GitLab](https://gitlab.com/anaufalm), [Crowdin](https://crowdin.com/profile/anaufalm)
- Italian
- Paolo Falomo - [GitLab](https://gitlab.com/paolofalomo), [Crowdin](https://crowdin.com/profile/paolo.falomo)
- Japanese
- Korean
- Chang-Ho Cha - [GitLab](https://gitlab.com/changho-cha), [Crowdin](https://crowdin.com/profile/zzazang)
- Huang Tao - [GitLab](https://gitlab.com/htve), [Crowdin](https://crowdin.com/profile/htve)
- Polish
- Filip Mech - [GitLab](https://gitlab.com/mehenz), [Crowdin](https://crowdin.com/profile/mehenz)
......
......@@ -19,7 +19,7 @@ Guidance on topics related to development.
Learn about all the dependencies that make up our frontend, including some of our own custom built libraries.
## [Style](style/index.md)
## [Style guides](style/index.md)
Style guides to keep our code consistent.
......
# Principles
> TODO: Add principles
These principles will ensure that your frontend contribution starts off in the right direction.
## Discuss architecture before implementation
Discuss your architecture design in an issue before writing code. This helps decrease the review time and also provides good practice for writing and thinking about system design.
## Be consistent
There are multiple ways of writing code to accomplish the same results. We should be as consistent as possible in how we write code across our codebases. This will make it easier for us to maintain our code across GitLab.
## Enhance progressively
Whenever you work with existing code that does not follow our current style guide, update it proactively. Refrain from changing everything at once, but each merge request should progressively enhance our codebase and reduce technical debt.
## When to use Vue
- Use Vue for features that make use of heavy DOM manipulation
- Use Vue for reusable components
## When to use jQuery
- Use jQuery to interact with Bootstrap JavaScript components
- Avoid jQuery when a better alternative exists. We are slowly moving away from it ([#43559][jquery-future])
## Mixing Vue and jQuery
- Mixing Vue and jQuery is not recommended.
- If you need to use a specific jQuery plugin in Vue, [create a wrapper around it][select2].
- It is acceptable for Vue to listen to existing jQuery events using jQuery event listeners.
- It is not recommended to add new jQuery events for Vue to interact with jQuery.
[jquery-future]: https://gitlab.com/gitlab-org/gitlab-ce/issues/43559
[select2]: https://vuejs.org/v2/examples/select2.html
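As an illustration of listening to an existing jQuery event from Vue (acceptable per the guidance above), a minimal sketch could look like this (the `todo:toggle` event name and payload are made up for this example):
```javascript
import $ from 'jquery';

export default {
  data() {
    return { count: 0 };
  },
  mounted() {
    // Listen to an event that existing jQuery code already triggers.
    // 'todo:toggle' is a hypothetical event name used only for this sketch.
    $(document).on('todo:toggle', this.updateCount);
  },
  beforeDestroy() {
    // Remove the jQuery listener again so the handler does not leak.
    $(document).off('todo:toggle', this.updateCount);
  },
  methods: {
    updateCount(e, count) {
      this.count = count;
    },
  },
};
```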
# HTML style guide
> TODO: Add content
## Buttons
<a name="button-type"></a><a name="1.1"></a>
- [1.1](#button-type) **Use button type** Button tags require a `type` attribute according to the [W3C HTML specification][button-type-spec].
```
// bad
<button></button>
// good
<button type="button"></button>
```
<a name="button-role"></a><a name="1.2"></a>
- [1.2](#button-role) **Use button role for non buttons** If an HTML element has an onClick handler but is not a button, it should have `role="button"`. This is more [accessible][button-role-accessible].
```
// bad
<div onClick="doSomething"></div>
// good
<div role="button" onClick="doSomething"></div>
```
## Links
<a name="blank-links"></a><a name="2.1"></a>
- [2.1](#blank-links) **Use rel for target blank** Use `rel="noopener noreferrer"` whenever your links open in a new window, i.e. `target="_blank"`. This prevents [the following][jitbit-target-blank] security vulnerability documented by JitBit.
```
// bad
<a href="url" target="_blank"></a>
// good
<a href="url" target="_blank" rel="noopener noreferrer"></a>
```
<a name="fake-links"></a><a name="2.2"></a>
- [2.2](#fake-links) **Do not use fake links** Use a button tag if a link only invokes JavaScript click event handlers. This is more semantic.
```
// bad
<a class="js-do-something" href="#"></a>
// good
<button class="js-do-something" type="button"></button>
```
[button-type-spec]: https://www.w3.org/TR/2011/WD-html5-20110525/the-button-element.html#dom-button-type
[button-role-accessible]: https://developer.mozilla.org/en-US/docs/Web/Accessibility/ARIA/ARIA_Techniques/Using_the_button_role
[jitbit-target-blank]: https://www.jitbit.com/alexblog/256-targetblank---the-most-underestimated-vulnerability-ever/
# Style
# Style guides
## [HTML style guide](html.md)
......@@ -7,3 +7,9 @@
## [JavaScript style guide](javascript.md)
## [Vue style guide](vue.md)
# Tooling
## [Prettier](prettier.md)
Our code is automatically formatted with [Prettier](https://prettier.io) to follow our guidelines.
# Formatting with Prettier
Our code is automatically formatted with [Prettier](https://prettier.io) to follow our style guides. Prettier takes care of formatting `.js`, `.vue`, and `.scss` files based on the standard Prettier rules. You can find all settings for Prettier in `.prettierrc`.
## Editor
The easiest way to include Prettier in your workflow is to set up your preferred editor (all major editors are supported) accordingly. We suggest setting up Prettier to run automatically when each file is saved. See the [Prettier editor documentation](https://prettier.io/docs/en/editors.html) for the best way to set it up in your preferred editor.
Please take care that you only let Prettier format the same file types as the global Yarn script does (`.js`, `.vue`, and `.scss`). In VSCode, for example, you can easily exclude file formats in your settings file:
```
"prettier.disableLanguages": [
"json",
"markdown"
],
```
## Yarn Script
The following yarn scripts are available to do global formatting:
```
yarn prettier-staged-save
```
Updates all currently staged files (based on `git diff`) with Prettier and saves the needed changes.
```
yarn prettier-staged
```
Checks all currently staged files (based on `git diff`) with Prettier and logs which files would need manual updating to the console.
```
yarn prettier-all
```
Checks all files with Prettier and logs which files need manual updating to the console.
```
yarn prettier-all-save
```
Formats all files in the repository with Prettier. (This should only be used to test global rule updates; otherwise you would end up with huge MRs.)
The source of these Yarn scripts can be found in `/scripts/frontend/prettier.js`.
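As a rough, hypothetical sketch of what such a staged-files check could look like using Prettier's Node API (this is not the actual script), consider:
```javascript
// Hypothetical sketch only; the real logic lives in /scripts/frontend/prettier.js.
const { execSync } = require('child_process');
const fs = require('fs');
const prettier = require('prettier');

const shouldSave = process.argv.includes('--save');

// Staged files, limited to the extensions the global Yarn script formats.
const stagedFiles = execSync('git diff --cached --name-only --diff-filter=ACM', { encoding: 'utf8' })
  .split('\n')
  .filter(file => /\.(js|vue|scss)$/.test(file));

stagedFiles.forEach(file => {
  const input = fs.readFileSync(file, 'utf8');
  const options = Object.assign({}, prettier.resolveConfig.sync(file), { filepath: file });

  if (shouldSave) {
    // "save" mode: rewrite the file with the formatted output.
    fs.writeFileSync(file, prettier.format(input, options));
  } else if (!prettier.check(input, options)) {
    // check mode: only report files that would need manual updating.
    console.log(`${file} would need manual updating`);
  }
});
```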
......@@ -15,9 +15,10 @@ Taking the trigger term as `project-name`, the commands are:
| `/project-name issue new <title> <shift+return> <description>` | Creates a new issue with title `<title>` and description `<description>` |
| `/project-name issue show <id>` | Shows the issue with id `<id>` |
| `/project-name issue search <query>` | Shows up to 5 issues matching `<query>` |
| `/project-name issue move <id> to <project>` | Moves issue ID `<id>` to `<project>` |
| `/project-name deploy <from> to <to>` | Deploy from the `<from>` environment to the `<to>` environment |
Note that if you are using the [GitLab Slack application](https://docs.gitlab.com/ee/user/project/integrations/gitlab_slack_application.html) for
your GitLab.com projects, you need to [add the `gitlab` keyword at the beginning of the command](https://docs.gitlab.com/ee/user/project/integrations/gitlab_slack_application.html#usage).
## Issue commands
......
......@@ -20,7 +20,7 @@ project in an easy and automatic way:
1. [Auto Test](#auto-test)
1. [Auto Code Quality](#auto-code-quality)
1. [Auto SAST (Static Application Security Testing)](#auto-sast)
1. [Auto SAST for Docker images](#auto-sast-for-docker-images)
1. [Auto Container Scanning](#auto-container-scanning)
1. [Auto Review Apps](#auto-review-apps)
1. [Auto DAST (Dynamic Application Security Testing)](#auto-dast)
1. [Auto Deploy](#auto-deploy)
......@@ -95,7 +95,7 @@ Auto Deploy, and Auto Monitoring will be silently skipped.
The Auto DevOps base domain is required if you want to make use of [Auto
Review Apps](#auto-review-apps) and [Auto Deploy](#auto-deploy). It is defined
either under the project's CI/CD settings while
[enabling Auto DevOps](#enabling-auto-devops) or in instance-wide settings in
the CI/CD section.
It can also be set at the project or group level as a variable, `AUTO_DEVOPS_DOMAIN`.
......@@ -217,7 +217,7 @@ check out.
In GitLab Ultimate, any security warnings are also
[shown in the merge request widget](../../user/project/merge_requests/sast.md).
### Auto SAST for Docker images
### Auto Container Scanning
> Introduced in GitLab 10.4.
......@@ -228,7 +228,7 @@ created, it's uploaded as an artifact which you can later download and
check out.
In GitLab Ultimate, any security warnings are also
[shown in the merge request widget](../../user/project/merge_requests/sast_docker.md).
[shown in the merge request widget](../../user/project/merge_requests/container_scanning.md).
### Auto Review Apps
......
# Container Scanning
> [Introduced][ee-3672] in [GitLab Ultimate][ee] 10.4.
## Overview
If you are using [GitLab CI/CD][ci], you can analyze your Docker images for known
vulnerabilities using [Clair](https://github.com/coreos/clair),
a Vulnerability Static Analysis tool for containers.
Going a step further, GitLab can show the vulnerability list right in the merge
request widget area.
![Container Scanning Widget](img/container_scanning.png)
## Use cases
If you distribute your application with Docker, then there's a great chance
that your image is based on other Docker images that may in turn contain some
known vulnerabilities that could be exploited.
Having an extra job in your pipeline that checks for those vulnerabilities,
and the fact that they are displayed inside a merge request, makes it very easy
to perform audits for your Docker-based apps.
## How it works
In order for the report to show in the merge request, you need to specify a
`sast:container` job (exact name) that will analyze the code and upload the
resulting `gl-sast-container-report.json` file as an artifact (exact filename).
GitLab will then check this file and show the information inside the merge request.
For more information on what the `sast:container` job should look like, check the
example on [analyzing a Docker image for vulnerabilities][cc-docs].
[ee-3672]: https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/3672
[ee]: https://about.gitlab.com/products/
[ci]: ../../../ci/README.md
[cc-docs]: ../../../ci/examples/container_scanning.md
......@@ -228,7 +228,7 @@ merge request widget area.
[Read more about Static Application Security Testing reports.](sast.md)
## Static Application Security Testing for Docker containers
## Container Scanning
> Introduced in [GitLab Ultimate][products] 10.4.
......@@ -237,7 +237,7 @@ vulnerabilities.
Going a step further, GitLab can show the vulnerability report right in the
merge request widget area.
[Read more about SAST Docker images reports.](sast_docker.md)
[Read more about Container Scanning reports.](container_scanning.md)
## Dynamic Application Security Testing
......
# Static Application Security Testing for Docker containers
> [Introduced][ee-3672] in [GitLab Ultimate][ee] 10.4.
## Overview
If you are using [GitLab CI/CD][ci], you can analyze your Docker images for known
vulnerabilities using [Clair](https://github.com/coreos/clair),
a Vulnerability Static Analysis tool for containers.
Going a step further, GitLab can show the vulnerability list right in the merge
request widget area.
![SAST Docker Widget](img/sast_docker.png)
## Use cases
If you distribute your application with Docker, then there's a great chance
that your image is based on other Docker images that may in turn contain some
known vulnerabilities that could be exploited.
Having an extra job in your pipeline that checks for those vulnerabilities,
and the fact that they are displayed inside a merge request, makes it very easy
to perform audits for your Docker-based apps.
## How it works
In order for the report to show in the merge request, you need to specify a
`sast:container` job (exact name) that will analyze the code and upload the
resulting `gl-sast-container-report.json` file as an artifact (exact filename).
GitLab will then check this file and show the information inside the merge request.
For more information on what the `sast:container` job should look like, check the
example on [analyzing a Docker image for vulnerabilities][cc-docs].
[ee-3672]: https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/3672
[ee]: https://about.gitlab.com/products/
[ci]: ../../../ci/README.md
[cc-docs]: ../../../ci/examples/sast_docker.md
This document was moved to [another location](./container_scanning.md).
\ No newline at end of file
<script>
/* global ListIssue */
import $ from 'jquery';
import _ from 'underscore';
import eventHub from '~/boards/eventhub';
import loadingIcon from '~/vue_shared/components/loading_icon.vue';
import Api from '~/api';
export default {
name: 'BoardProjectSelect',
components: {
loadingIcon,
},
props: {
groupId: {
type: Number,
required: true,
default: 0,
},
},
data() {
return {
loading: true,
selectedProject: {},
};
},
computed: {
selectedProjectName() {
return this.selectedProject.name || 'Select a project';
},
},
mounted() {
$(this.$refs.projectsDropdown).glDropdown({
filterable: true,
filterRemote: true,
search: {
fields: ['name_with_namespace'],
},
clicked: ({ $el, e }) => {
e.preventDefault();
this.selectedProject = {
id: $el.data('project-id'),
name: $el.data('project-name'),
};
eventHub.$emit('setSelectedProject', this.selectedProject);
},
selectable: true,
data: (term, callback) => {
this.loading = true;
return Api.groupProjects(this.groupId, term, (projects) => {
this.loading = false;
callback(projects);
});
},
renderRow(project) {
return `
<li>
<a href='#' class='dropdown-menu-link' data-project-id="${project.id}" data-project-name="${project.name}">
${_.escape(project.name)}
</a>
</li>
`;
},
text: project => project.name,
});
},
};
</script>
<template>
<div>
<label class="label-light prepend-top-10">
Project
</label>
<div
ref="projectsDropdown"
class="dropdown"
>
<button
class="dropdown-menu-toggle wide"
type="button"
data-toggle="dropdown"
aria-expanded="false"
>
{{ selectedProjectName }}
<i
class="fa fa-chevron-down"
aria-hidden="true"
>
</i>
</button>
<div class="dropdown-menu dropdown-menu-selectable dropdown-menu-full-width">
<div class="dropdown-title">
<span>Projects</span>
<button
aria-label="Close"
type="button"
class="dropdown-title-button dropdown-menu-close"
>
<i
aria-hidden="true"
data-hidden="true"
class="fa fa-times dropdown-menu-close-icon"
>
</i>
</button>
</div>
<div class="dropdown-input">
<input
class="dropdown-input-field"
type="search"
placeholder="Search projects"
/>
<i
aria-hidden="true"
data-hidden="true"
class="fa fa-search dropdown-input-search"
>
</i>
</div>
<div class="dropdown-content"></div>
<div class="dropdown-loading">
<loading-icon />
</div>
</div>
</div>
</div>
</template>
......@@ -133,11 +133,11 @@ module EE
end
def elasticsearch_indexing_column_exists?
ActiveRecord::Base.connection.column_exists?(:application_settings, :elasticsearch_indexing)
::Gitlab::Database.cached_column_exists?(:application_settings, :elasticsearch_indexing)
end
def elasticsearch_search_column_exists?
ActiveRecord::Base.connection.column_exists?(:application_settings, :elasticsearch_search)
::Gitlab::Database.cached_column_exists?(:application_settings, :elasticsearch_search)
end
end
end
---
title: Cache column_exists? for Elasticsearch columns
merge_request:
author:
type: performance
---
title: 'Geo: Fix Wiki resync when Wiki repository does not exist'
merge_request:
author:
type: fixed
require 'spec_helper'
describe 'Project elastic search', :js, :elastic do
let(:user) { create(:user) }
let(:project) { create(:project, :repository, namespace: user.namespace) }
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
project.add_master(user)
sign_in(user)
end
describe 'searching' do
it 'finds issues' do
create(:issue, project: project, title: 'Test searching for an issue')
expect_search_result(scope: 'Issues', term: 'Test', result: 'Test searching for an issue')
end
it 'finds merge requests' do
create(:merge_request, source_project: project, target_project: project, title: 'Test searching for an MR')
expect_search_result(scope: 'Merge requests', term: 'Test', result: 'Test searching for an MR')
end
it 'finds milestones' do
create(:milestone, project: project, title: 'Test searching for a milestone')
expect_search_result(scope: 'Milestones', term: 'Test', result: 'Test searching for a milestone')
end
it 'finds wiki pages' do
project.wiki.create_page('test.md', 'Test searching for a wiki page')
expect_search_result(scope: 'Wiki', term: 'Test', result: 'Test searching for a wiki page')
end
it 'finds notes' do
create(:note, project: project, note: 'Test searching for a note')
search(scope: 'Comments', term: 'Test')
expect(page).to have_content(/showing (\d+) - (\d+) of (\d+) notes/i)
expect(page).to have_content('Test searching for a note')
end
it 'finds commits' do
project.repository.index_commits
search(scope: 'Commits', term: 'initial')
expect(page).to have_content(/showing (\d+) - (\d+) of (\d+) commits/i)
expect(page).to have_content('Initial commit')
end
it 'finds blobs' do
project.repository.index_blobs
search(scope: 'Code', term: 'def')
expect(page).to have_content(/showing (\d+) - (\d+) of (\d+) blobs/i)
expect(page).to have_content('def username_regex')
end
end
def search(scope:, term:)
visit project_path(project)
fill_in('search', with: term)
find('#search').native.send_keys(:enter)
page.within '.search-filter' do
click_link scope
end
end
def expect_search_result(scope:, term:, result:)
search(scope: scope, term: term)
expect(page).to have_content(/showing (\d+) - (\d+) of (\d+) #{Regexp.escape(scope)}/i)
expect(page).to have_content(result)
end
end
require 'spec_helper'
describe 'Snippet elastic search', :js, :elastic do
let(:user) { create(:user) }
let(:project) { create(:project, namespace: user.namespace) }
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
project.add_master(user)
sign_in(user)
end
describe 'searching' do
it 'finds a personal snippet' do
create(:personal_snippet, author: user, content: 'Test searching for personal snippets')
visit explore_snippets_path
fill_in 'search', with: 'Test'
click_button 'Go'
expect(page).to have_content('Test searching for personal snippets')
end
it 'finds a project snippet' do
create(:project_snippet, project: project, content: 'Test searching for personal snippets')
visit explore_snippets_path
fill_in 'search', with: 'Test'
click_button 'Go'
expect(page).to have_content('Test searching for personal snippets')
end
end
end
require 'spec_helper'
describe SearchHelper do
describe '#parse_search_result_from_elastic' do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
it "returns parsed result" do
project = create :project, :repository
project.repository.index_blobs
Gitlab::Elastic::Helper.refresh_index
result = project.repository.search(
'def popen',
type: :blob,
options: { highlight: true }
)[:blobs][:results][0]
parsed_result = helper.parse_search_result(result)
expect(parsed_result.ref).to eq('b83d6e391c22777fca1ed3012fce84f633d7fed0')
expect(parsed_result.filename).to eq('files/ruby/popen.rb')
expect(parsed_result.startline).to eq(2)
expect(parsed_result.data).to include("Popen")
end
end
end
require 'spec_helper'
describe Issue, elastic: true do
describe Issue, :elastic do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
let(:project) { create :project }
......
require 'spec_helper'
describe MergeRequest, elastic: true do
describe MergeRequest, :elastic do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
it "searches merge requests" do
......
require 'spec_helper'
describe Milestone, elastic: true do
describe Milestone, :elastic do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
it "searches milestones" do
......
require 'spec_helper'
describe Note, elastic: true do
describe Note, :elastic do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
it "searches notes" do
......
require 'spec_helper'
describe Project, elastic: true do
describe Project, :elastic do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
it "finds projects" do
......
require 'spec_helper'
describe ProjectWiki, elastic: true do
describe ProjectWiki, :elastic do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
it "searches wiki page" do
......
require 'spec_helper'
describe Repository, elastic: true do
describe Repository, :elastic do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
def index!(project)
......
require 'spec_helper'
describe Snippet, elastic: true do
describe Snippet, :elastic do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
context 'searching snippets by code' do
......
......@@ -1312,4 +1312,48 @@ describe Project do
expect(project.user_can_push_to_empty_repo?(user)).to be_falsey
end
end
describe 'project import state transitions' do
context 'state transition: [:started] => [:finished]' do
context 'elasticsearch indexing disabled' do
before do
stub_ee_application_setting(elasticsearch_indexing: false)
end
it 'does not index the repository' do
project = create(:project, :import_started, import_type: :github)
expect(ElasticCommitIndexerWorker).not_to receive(:perform_async)
project.import_finish
end
end
context 'elasticsearch indexing enabled' do
let(:project) { create(:project, :import_started, import_type: :github) }
before do
stub_ee_application_setting(elasticsearch_indexing: true)
end
context 'no index status' do
it 'schedules a full index of the repository' do
expect(ElasticCommitIndexerWorker).to receive(:perform_async).with(project.id, nil)
project.import_finish
end
end
context 'with index status' do
let!(:index_status) { project.create_index_status!(indexed_at: Time.now, last_commit: 'foo') }
it 'schedules a progressive index of the repository' do
expect(ElasticCommitIndexerWorker).to receive(:perform_async).with(project.id, index_status.last_commit)
project.import_finish
end
end
end
end
end
end
......@@ -26,4 +26,40 @@ describe Repository do
repository.after_sync
end
end
describe "Elastic search", :elastic do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
end
describe "class method find_commits_by_message_with_elastic" do
it "returns commits" do
project = create :project, :repository
project1 = create :project, :repository
project.repository.index_commits
project1.repository.index_commits
Gitlab::Elastic::Helper.refresh_index
expect(described_class.find_commits_by_message_with_elastic('initial').first).to be_a(Commit)
expect(described_class.find_commits_by_message_with_elastic('initial').count).to eq(2)
expect(described_class.find_commits_by_message_with_elastic('initial').total_count).to eq(2)
end
end
describe "find_commits_by_message_with_elastic" do
it "returns commits" do
project = create :project, :repository
project.repository.index_commits
Gitlab::Elastic::Helper.refresh_index
expect(project.repository.find_commits_by_message_with_elastic('initial').first).to be_a(Commit)
expect(project.repository.find_commits_by_message_with_elastic('initial').count).to eq(1)
expect(project.repository.find_commits_by_message_with_elastic('initial').total_count).to eq(1)
end
end
end
end
require 'spec_helper'
describe API::Search do
set(:user) { create(:user) }
set(:group) { create(:group) }
let(:project) { create(:project, :public, name: 'awesome project', group: group) }
let(:repo_project) { create(:project, :public, :repository, group: group) }
shared_examples 'response is correct' do |schema:, size: 1|
it { expect(response).to have_gitlab_http_status(200) }
it { expect(response).to match_response_schema(schema) }
it { expect(response).to include_limited_pagination_headers }
it { expect(json_response.size).to eq(size) }
end
shared_examples 'elasticsearch disabled' do
it 'returns 400 error for wiki_blobs scope' do
get api(endpoint, user), scope: 'wiki_blobs', search: 'awesome'
expect(response).to have_gitlab_http_status(400)
end
it 'returns 400 error for blobs scope' do
get api(endpoint, user), scope: 'blobs', search: 'monitors'
expect(response).to have_gitlab_http_status(400)
end
it 'returns 400 error for commits scope' do
get api(endpoint, user), scope: 'commits', search: 'folder'
expect(response).to have_gitlab_http_status(400)
end
end
shared_examples 'elasticsearch enabled' do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
context 'for wiki_blobs scope' do
before do
wiki = create(:project_wiki, project: project)
create(:wiki_page, wiki: wiki, attrs: { title: 'home', content: "Awesome page" })
project.wiki.index_blobs
Gitlab::Elastic::Helper.refresh_index
get api(endpoint, user), scope: 'wiki_blobs', search: 'awesome'
end
it_behaves_like 'response is correct', schema: 'public_api/v4/blobs'
end
context 'for commits scope' do
before do
repo_project.repository.index_commits
Gitlab::Elastic::Helper.refresh_index
get api(endpoint, user), scope: 'commits', search: 'folder'
end
it_behaves_like 'response is correct', schema: 'public_api/v4/commits_details', size: 2
end
context 'for blobs scope' do
before do
repo_project.repository.index_blobs
Gitlab::Elastic::Helper.refresh_index
get api(endpoint, user), scope: 'blobs', search: 'monitors'
end
it_behaves_like 'response is correct', schema: 'public_api/v4/blobs'
end
end
describe 'GET /search' do
context 'with correct params' do
context 'when elasticsearch is disabled' do
it_behaves_like 'elasticsearch disabled' do
let(:endpoint) { '/search' }
end
end
context 'when elasticsearch is enabled' do
it_behaves_like 'elasticsearch enabled' do
let(:endpoint) { '/search' }
end
end
end
end
describe "GET /groups/:id/-/search" do
context 'with correct params' do
context 'when elasticsearch is disabled' do
it_behaves_like 'elasticsearch disabled' do
let(:endpoint) { "/groups/#{group.id}/-/search" }
end
end
context 'when elasticsearch is enabled' do
it_behaves_like 'elasticsearch enabled' do
let(:endpoint) { "/groups/#{group.id}/-/search" }
end
end
end
end
end
require 'spec_helper'
describe Search::GroupService do
shared_examples_for 'group search' do
context 'finding projects by name' do
let(:user) { create(:user) }
let(:term) { "Project Name" }
let(:nested_group) { create(:group, :nested) }
# These projects shouldn't be found
let!(:outside_project) { create(:project, :public, name: "Outside #{term}") }
let!(:private_project) { create(:project, :private, namespace: nested_group, name: "Private #{term}" )}
let!(:other_project) { create(:project, :public, namespace: nested_group, name: term.reverse) }
# These projects should be found
let!(:project1) { create(:project, :internal, namespace: nested_group, name: "Inner #{term} 1") }
let!(:project2) { create(:project, :internal, namespace: nested_group, name: "Inner #{term} 2") }
let!(:project3) { create(:project, :internal, namespace: nested_group.parent, name: "Outer #{term}") }
let(:results) { described_class.new(user, search_group, search: term).execute }
subject { results.objects('projects') }
context 'in parent group' do
let(:search_group) { nested_group.parent }
it { is_expected.to match_array([project1, project2, project3]) }
end
context 'in subgroup' do
let(:search_group) { nested_group }
it { is_expected.to match_array([project1, project2]) }
end
end
end
describe 'elasticsearch' do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
# Ensure these are present when the index is refreshed
_ = [
outside_project, private_project, other_project,
project1, project2, project3
]
Gitlab::Elastic::Helper.refresh_index
end
after do
Gitlab::Elastic::Helper.delete_index
end
include_examples 'group search'
end
end
RSpec.configure do |config|
config.before(:each, :elastic) do
Gitlab::Elastic::Helper.create_empty_index
end
config.after(:each, :elastic) do
Gitlab::Elastic::Helper.delete_index
end
end
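With the shared :elastic hooks above in place, a spec no longer has to create and delete the index itself. A minimal sketch of such a spec (the model and expectation are illustrative only; it uses only helpers already shown in this diff):

  describe Milestone, :elastic do
    before do
      stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
    end

    it 'relies on the shared hooks for index setup and teardown' do
      # Gitlab::Elastic::Helper.create_empty_index already ran via config.before(:each, :elastic),
      # and Gitlab::Elastic::Helper.delete_index will run via config.after(:each, :elastic).
      create(:milestone, title: 'bla-bla initial')
      Gitlab::Elastic::Helper.refresh_index
    end
  end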
require 'spec_helper'
describe ElasticIndexerWorker, elastic: true do
describe ElasticIndexerWorker, :elastic do
subject { described_class.new }
before do
......@@ -8,12 +8,6 @@ describe ElasticIndexerWorker, elastic: true do
Elasticsearch::Model.client =
Gitlab::Elastic::Client.build(Gitlab::CurrentSettings.elasticsearch_config)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
end
it 'returns true if ES disabled' do
......
Feature: Global Search
Background:
Given I sign in as a user
And I own project "Shop"
And Elasticsearch is enabled
Scenario: I search through all projects
Given project has all data available for the search
And I visit dashboard page
Then I search "initial"
And I find an Issue
And I find a Merge Request
And I find a Milestone
\ No newline at end of file
Feature: Project Search
Background:
Given I sign in as a user
And I own project "Shop"
And Elasticsearch is enabled
Scenario: I search through all project items
Given project has all data available for the search
And I visit my project's home page
Then I search "initial"
And I find an Issue
And I find a Merge Request
And I find a Milestone
And I find a Comment
And I find a Commit
And I find a Wiki Page
Then I visit my project's home page
Then I search "def"
And I find a Code
Feature: Snippets Search
Background:
Given I sign in as a user
And Elasticsearch is enabled
Scenario: I search through the snippets
Given there is a snippet "index" with "php rocks" string
And there is a snippet "php" with "benefits" string
And I visit snippets page
Then I search "php"
And I find "index" snippet
Then I select search by titles and filenames
And I find "php" snippet
\ No newline at end of file
require_dependency Rails.root.join('spec', 'support', 'stub_configuration')
class Spinach::Features::GlobalSearch < Spinach::FeatureSteps
include SharedAuthentication
include SharedPaths
include SharedProject
include SharedElastic
include StubConfiguration
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
step 'project has all data available for the search' do
@project = create :project
@project.add_master(current_user)
@issue = create :issue, title: 'bla-bla initial', project: @project
@merge_request = create :merge_request, title: 'bla-bla initial', source_project: @project
@milestone = create :milestone, title: 'bla-bla initial', project: @project
end
end
class Spinach::Features::ProjectSearch < Spinach::FeatureSteps
include SharedAuthentication
include SharedPaths
include SharedProject
include SharedElastic
include StubConfiguration
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
step 'project has all data available for the search' do
@project = create :project, :repository
@project.add_master(current_user)
@issue = create :issue, title: 'bla-bla initial', project: @project
@merge_request = create :merge_request, title: 'bla-bla initial', source_project: @project
@milestone = create :milestone, title: 'bla-bla initial', project: @project
@note = create :note, note: 'bla-bla initial', project: @project, noteable: @issue
@project.repository.index_blobs
@project.repository.index_commits
@project.wiki.create_page("index_page", "Bla bla initial")
end
step 'I search "def"' do
fill_in "search", with: "def"
click_button "Go"
end
step 'I find a Comment' do
select_filter("Comments")
expect(page.find('.search-result-row')).to have_content(@note.note)
end
step 'I find a Wiki Page' do
select_filter("Wiki")
expect(page.find('.blob-result')).to have_content('Bla bla init')
end
step 'I find a Commit' do
select_filter("Commits")
expect(page.find('.commit-content .item-title')).to have_content("Initial commit")
end
step 'I find a Code' do
expect(page.first('.blob-result')).to have_content("def")
end
end
class Spinach::Features::SnippetsSearch < Spinach::FeatureSteps
include SharedAuthentication
include SharedPaths
include SharedProject
include SharedElastic
include StubConfiguration
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
step 'there is a snippet "index" with "php rocks" string' do
create :personal_snippet, :public, content: "php rocks", title: "index"
Gitlab::Elastic::Helper.refresh_index
end
step 'there is a snippet "php" with "benefits" string' do
create :personal_snippet, :public, content: "benefits", title: "php"
Gitlab::Elastic::Helper.refresh_index
end
step 'I search "php"' do
fill_in "search", with: "php"
click_button "Go"
end
step 'I find "index" snippet' do
expect(page.find('.file-holder')).to have_content("php rocks")
end
step 'I select search by titles and filenames' do
select_filter("Titles and Filenames")
end
step 'I find "php" snippet' do
expect(page.find('.search-result-row')).to have_content("php")
end
end
module SharedElastic
include Spinach::DSL
step 'I search "initial"' do
fill_in "search", with: "initial"
click_button "Go"
end
step 'I find an Issue' do
select_filter("Issues")
expect(page.find('.search-result-row')).to have_content(@issue.title)
end
step 'I find a Merge Request' do
select_filter("Merge requests")
expect(page.find('.search-result-row')).to have_content(@merge_request.title)
end
step 'I find a Milestone' do
select_filter("Milestones")
expect(page.find('.search-result-row')).to have_content(@milestone.title)
end
step 'Elasticsearch is enabled' do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
end
def select_filter(name)
find(:xpath, "//ul[contains(@class, 'search-filter')]//a[contains(.,'#{name}')]").click
end
end
......@@ -84,7 +84,7 @@ module Banzai
relative_url_root,
project.full_path,
uri_type(file_path),
Addressable::URI.escape(ref),
Addressable::URI.escape(ref).gsub('#', '%23'),
Addressable::URI.escape(file_path)
].compact.join('/').squeeze('/').chomp('/')
......
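The extra gsub is needed because Addressable::URI.escape leaves '#' untouched (it is a legal fragment delimiter), so a ref containing a hash would otherwise break the generated path. A quick illustration, assuming the addressable gem is loaded:

  require 'addressable/uri'

  Addressable::URI.escape('mark#down')                  # => "mark#down"  ('#' is not escaped)
  Addressable::URI.escape('mark#down').gsub('#', '%23') # => "mark%23down"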
......@@ -199,6 +199,10 @@ module Gitlab
ActiveRecord::Base.connection
end
def self.cached_column_exists?(table_name, column_name)
connection.schema_cache.columns_hash(table_name).has_key?(column_name.to_s)
end
private_class_method :connection
def self.database_version
......
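The new cached_column_exists? helper answers column checks from the schema cache instead of hitting the database each time, so it can be called on hot paths. A minimal sketch of a hypothetical call site (the column name is illustrative only):

  # Guard behaviour on a column that may not exist yet on this installation,
  # without issuing a schema query per call.
  if Gitlab::Database.cached_column_exists?(:projects, :merge_requests_rebase_enabled)
    # read or write the column
  end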
......@@ -859,6 +859,19 @@ into similar problems in the future (e.g. when new tables are created).
BackgroundMigrationWorker.perform_in(delay_interval * index, job_class_name, [start_id, end_id])
end
end
def foreign_key_exists?(table, column)
foreign_keys(table).any? do |key|
key.options[:column] == column.to_s
end
end
# Rails' index_exists? doesn't work when you only give it a table and index
# name. As such we have to use some extra code to check if an index exists for
# a given name.
def index_exists_by_name?(table, index)
indexes(table).map(&:name).include?(index)
end
end
end
end
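The two new helpers make idempotent migrations easier to write. A minimal sketch of a hypothetical migration using both (table, column and index names are illustrative; add_concurrent_foreign_key is assumed to be the existing helper in this module):

  class ExampleGuardedMigration < ActiveRecord::Migration
    include Gitlab::Database::MigrationHelpers

    disable_ddl_transaction!

    def up
      unless foreign_key_exists?(:issues, :project_id)
        add_concurrent_foreign_key :issues, :projects, column: :project_id
      end
    end

    def down
      if index_exists_by_name?(:issues, 'index_issues_on_project_id')
        remove_index :issues, name: 'index_issues_on_project_id'
      end
    end
  end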
......@@ -29,6 +29,14 @@ module Gitlab
@merge_request_diff.real_size
end
def clear_cache!
Rails.cache.delete(cache_key)
end
def cache_key
[@merge_request_diff, 'highlighted-diff-files', diff_options]
end
private
def highlight_diff_file_from_cache!(diff_file, cache_diff_lines)
......@@ -64,16 +72,12 @@ module Gitlab
end
def store_highlight_cache
Rails.cache.write(cache_key, highlight_cache) if @highlight_cache_was_empty
Rails.cache.write(cache_key, highlight_cache, expires_in: 1.week) if @highlight_cache_was_empty
end
def cacheable?(diff_file)
@merge_request_diff.present? && diff_file.text? && diff_file.diffable?
end
def cache_key
[@merge_request_diff, 'highlighted-diff-files', diff_options]
end
end
end
end
......
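Exposing cache_key and clear_cache! lets callers drop a stale highlighted-diff cache explicitly, and the expires_in: 1.week added to store_highlight_cache keeps abandoned entries from living in Rails.cache forever. A hypothetical call site, assuming merge_request.diffs returns this Gitlab::Diff::FileCollection::MergeRequestDiff collection:

  # After the diff is regenerated, remove the cached entry keyed on
  # [@merge_request_diff, 'highlighted-diff-files', diff_options].
  diff_collection = merge_request.diffs
  diff_collection.clear_cache!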
......@@ -70,6 +70,7 @@ module Gitlab
update_user_references
update_project_references
remove_duplicate_assignees
reset_tokens!
remove_encrypted_attributes!
......@@ -83,6 +84,14 @@ module Gitlab
end
end
def remove_duplicate_assignees
return unless @relation_hash['issue_assignees']
# When an assignee did not exist in the members mapper, the importer is
# assigned. We only need to assign each user once.
@relation_hash['issue_assignees'].uniq!(&:user_id)
end
def setup_note
set_note_author
# attachment is deprecated and note uploads are handled by Markdown uploader
......
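Because uniq! is given a block here, duplicates are detected by user_id rather than by full object equality. A tiny standalone illustration of that behaviour:

  require 'ostruct'

  assignees = [OpenStruct.new(user_id: 1), OpenStruct.new(user_id: 1), OpenStruct.new(user_id: 6)]
  assignees.uniq!(&:user_id)
  assignees.map(&:user_id) # => [1, 6]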
module Gitlab
# This class is used to move local, unhashed files owned by projects to their new location
class ProjectTransfer
def move_project(project_path, namespace_path_was, namespace_path)
new_namespace_folder = File.join(root_dir, namespace_path)
FileUtils.mkdir_p(new_namespace_folder) unless Dir.exist?(new_namespace_folder)
from = File.join(root_dir, namespace_path_was, project_path)
to = File.join(root_dir, namespace_path, project_path)
# nil parent_path (or parent_path_was) represents a root namespace
def move_namespace(path, parent_path_was, parent_path)
parent_path_was ||= ''
parent_path ||= ''
new_parent_folder = File.join(root_dir, parent_path)
FileUtils.mkdir_p(new_parent_folder)
from = File.join(root_dir, parent_path_was, path)
to = File.join(root_dir, parent_path, path)
move(from, to, "")
end
alias_method :move_project, :move_namespace
def rename_project(path_was, path, namespace_path)
base_dir = File.join(root_dir, namespace_path)
move(path_was, path, base_dir)
......
......@@ -5,6 +5,7 @@ module Gitlab
Gitlab::SlashCommands::IssueShow,
Gitlab::SlashCommands::IssueNew,
Gitlab::SlashCommands::IssueSearch,
Gitlab::SlashCommands::IssueMove,
Gitlab::SlashCommands::Deploy,
Gitlab::SlashCommands::Run
].freeze
......
module Gitlab
module SlashCommands
class IssueMove < IssueCommand
def self.match(text)
%r{
\A # the beginning of a string
issue\s+move\s+ # the command
\#?(?<iid>\d+)\s+ # the issue id, optionally preceded by a hash sign
(to\s+)? # optional "to", so the command reads more naturally
(?<project_path>[^\s]+) # named group for the destination project path
}x.match(text)
end
def self.help_message
'issue move <issue_id> (to)? <project_path>'
end
def self.allowed?(project, user)
can?(user, :admin_issue, project)
end
def execute(match)
old_issue = find_by_iid(match[:iid])
target_project = Project.find_by_full_path(match[:project_path])
unless current_user.can?(:read_project, target_project) && old_issue
return Gitlab::SlashCommands::Presenters::Access.new.not_found
end
new_issue = Issues::MoveService.new(project, current_user)
.execute(old_issue, target_project)
presenter(new_issue).present(old_issue)
rescue Issues::MoveService::MoveError => e
presenter(old_issue).display_move_error(e.message)
end
private
def presenter(issue)
Gitlab::SlashCommands::Presenters::IssueMove.new(issue)
end
end
end
end
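Given the regular expression above, both the '#' prefix and the 'to' keyword are optional. A quick illustration of what the named captures return:

  match = Gitlab::SlashCommands::IssueMove.match('issue move #123 to gitlab-org/gitlab-ce')
  match[:iid]          # => "123"
  match[:project_path] # => "gitlab-org/gitlab-ce"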
# coding: utf-8
module Gitlab
module SlashCommands
module Presenters
class IssueMove < Presenters::Base
include Presenters::IssueBase
def present(old_issue)
in_channel_response(moved_issue(old_issue))
end
def display_move_error(error)
message = header_with_list("The action was not successful, because:", [error])
ephemeral_response(text: message)
end
private
def moved_issue(old_issue)
{
attachments: [
{
title: "#{@resource.title} · #{@resource.to_reference}",
title_link: resource_url,
author_name: author.name,
author_icon: author.avatar_url,
fallback: "Issue #{@resource.to_reference}: #{@resource.title}",
pretext: pretext(old_issue),
color: color(@resource),
fields: fields,
mrkdwn_in: [
:title,
:pretext,
:text,
:fields
]
}
]
}
end
def pretext(old_issue)
"Moved issue *#{issue_link(old_issue)}* to *#{issue_link(@resource)}*"
end
def issue_link(issue)
"[#{issue.to_reference}](#{project_issue_url(issue.project, issue)})"
end
end
end
end
end
......@@ -8,6 +8,10 @@
"karma": "karma start config/karma.config.js --single-run",
"karma-coverage": "BABEL_ENV=coverage karma start config/karma.config.js --single-run",
"karma-start": "karma start config/karma.config.js",
"prettier-staged": "node ./scripts/frontend/prettier.js",
"prettier-staged-save": "node ./scripts/frontend/prettier.js save",
"prettier-all": "node ./scripts/frontend/prettier.js check-all",
"prettier-all-save": "node ./scripts/frontend/prettier.js save-all",
"webpack": "webpack --config config/webpack.config.js",
"webpack-prod": "NODE_ENV=production webpack --config config/webpack.config.js"
},
......@@ -116,7 +120,7 @@
"karma-sourcemap-loader": "^0.3.7",
"karma-webpack": "2.0.7",
"nodemon": "^1.15.1",
"prettier": "1.9.2",
"prettier": "1.11.1",
"webpack-dev-server": "^2.11.2"
}
}
/* eslint import/no-commonjs: "off" */
const execFileSync = require('child_process').execFileSync;
const exec = (command, args) => {
const options = {
cwd: process.cwd(),
env: process.env,
encoding: 'utf-8',
};
return execFileSync(command, args, options);
};
const execGitCmd = args =>
exec('git', args)
.trim()
.toString()
.split('\n');
module.exports = {
getStagedFiles: fileExtensionFilter => {
const gitOptions = [
'diff',
'--name-only',
'--cached',
'--diff-filter=ACMRTUB',
];
if (fileExtensionFilter) gitOptions.push(...fileExtensionFilter);
return execGitCmd(gitOptions);
},
};
/* eslint import/no-commonjs: "off", import/no-extraneous-dependencies: "off", no-console: "off" */
const glob = require('glob');
const prettier = require('prettier');
const fs = require('fs');
const getStagedFiles = require('./frontend_script_utils').getStagedFiles;
const mode = process.argv[2] || 'check';
const shouldSave = mode === 'save' || mode === 'save-all';
const allFiles = mode === 'check-all' || mode === 'save-all';
const config = {
patterns: ['**/*.js', '**/*.vue', '**/*.scss'],
ignore: ['**/node_modules/**', '**/vendor/**', '**/public/**'],
parsers: {
js: 'babylon',
vue: 'vue',
scss: 'css',
},
};
const availableExtensions = Object.keys(config.parsers);
console.log(`Loading ${allFiles ? 'All' : 'Staged'} Files ...`);
const stagedFiles = allFiles
? null
: getStagedFiles(availableExtensions.map(ext => `*.${ext}`));
if (stagedFiles) {
if (!stagedFiles.length || (stagedFiles.length === 1 && !stagedFiles[0])) {
console.log('No matching staged files.');
return;
}
console.log(`Matching staged Files : ${stagedFiles.length}`);
}
let didWarn = false;
let didError = false;
let files;
if (allFiles) {
const ignore = config.ignore;
const patterns = config.patterns;
const globPattern =
patterns.length > 1 ? `{${patterns.join(',')}}` : `${patterns.join(',')}`;
files = glob
.sync(globPattern, { ignore })
.filter(f => allFiles || stagedFiles.includes(f));
} else {
files = stagedFiles.filter(f =>
availableExtensions.includes(f.split('.').pop()),
);
}
if (!files.length) {
console.log('No Files found to process with Prettier');
return;
}
console.log(`${shouldSave ? 'Updating' : 'Checking'} ${files.length} file(s)`);
prettier
.resolveConfig('.')
.then(options => {
console.log('Found options : ', options);
files.forEach(file => {
try {
const fileExtension = file.split('.').pop();
Object.assign(options, {
parser: config.parsers[fileExtension],
});
const input = fs.readFileSync(file, 'utf8');
if (shouldSave) {
const output = prettier.format(input, options);
if (output !== input) {
fs.writeFileSync(file, output, 'utf8');
console.log(`Prettified : ${file}`);
}
} else if (!prettier.check(input, options)) {
if (!didWarn) {
console.log(
'\n===============================\nGitLab uses Prettier to format all JavaScript code.\nPlease format each file listed below or run "yarn prettier-staged-save"\n===============================\n',
);
didWarn = true;
}
console.log(`Prettify Manually : ${file}`);
}
} catch (error) {
didError = true;
console.log(`\n\nError with ${file}: ${error.message}`);
}
});
if (didWarn || didError) {
process.exit(1);
}
})
.catch(e => {
console.log(`Error on loading the Config File: ${e.message}`);
process.exit(1);
});
......@@ -39,6 +39,7 @@ describe 'CI Lint', :js do
it 'displays information about an error' do
expect(page).to have_content('Status: syntax is incorrect')
expect(page).to have_selector('.ace_content', text: yaml_content)
end
end
......
......@@ -527,4 +527,29 @@ feature 'File blob', :js do
end
end
end
context 'realtime pipelines' do
before do
Files::CreateService.new(
project,
project.creator,
start_branch: 'feature',
branch_name: 'feature',
commit_message: "Add ruby file",
file_path: 'files/ruby/test.rb',
file_content: "# Awesome content"
).execute
create(:ci_pipeline, status: 'running', project: project, ref: 'feature', sha: project.commit('feature').sha)
visit_blob('files/ruby/test.rb', ref: 'feature')
end
it 'should show the realtime pipeline status' do
page.within('.commit-actions') do
expect(page).to have_css('.ci-status-icon')
expect(page).to have_css('.ci-status-icon-running')
expect(page).to have_css('.js-ci-status-icon-running')
end
end
end
end
......@@ -6,39 +6,6 @@ describe SearchHelper do
str
end
describe '#parse_search_result_from_elastic' do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
it "returns parsed result" do
project = create :project, :repository
project.repository.index_blobs
Gitlab::Elastic::Helper.refresh_index
result = project.repository.search(
'def popen',
type: :blob,
options: { highlight: true }
)[:blobs][:results][0]
parsed_result = helper.parse_search_result(result)
expect(parsed_result.ref).to eq('b83d6e391c22777fca1ed3012fce84f633d7fed0')
expect(parsed_result.filename).to eq('files/ruby/popen.rb')
expect(parsed_result.startline).to eq(2)
expect(parsed_result.data).to include("Popen")
end
end
describe 'search_autocomplete_source' do
context "with no current user" do
before do
......
......@@ -217,6 +217,23 @@ describe Banzai::Filter::RelativeLinkFilter do
end
end
context 'when ref name contains special chars' do
let(:ref) {'mark#\'@],+;-._/#@!$&()+down'}
it 'correctly escapes the ref' do
# Addressable won't escape the '#', so we do this manually
ref_escaped = 'mark%23\'@%5D,+;-._/%23@!$&()+down'
# Stub this method so the branch doesn't actually need to be in the repo
allow_any_instance_of(described_class).to receive(:uri_type).and_return(:raw)
doc = filter(link('files/images/logo-black.png'))
expect(doc.at_css('a')['href'])
.to eq "/#{project_path}/raw/#{ref_escaped}/files/images/logo-black.png"
end
end
context 'when requested path is a directory with space in the repo' do
let(:ref) { 'master' }
let(:commit) { project.commit('38008cb17ce1466d8fec2dfa6f6ab8dcfe5cf49e') }
......
......@@ -287,6 +287,17 @@ describe Gitlab::Database do
end
end
describe '.cached_column_exists?' do
it 'only retrieves data once' do
expect(ActiveRecord::Base.connection).to receive(:columns).once.and_call_original
2.times do
expect(described_class.cached_column_exists?(:projects, :id)).to be_truthy
expect(described_class.cached_column_exists?(:projects, :bogus_column)).to be_falsey
end
end
end
describe '#true_value' do
it 'returns correct value for PostgreSQL' do
expect(described_class).to receive(:postgresql?).and_return(true)
......
......@@ -12,7 +12,7 @@ describe Gitlab::Diff::FileCollection::MergeRequestDiff do
diff_files
end
it 'does not files marked as undiffable in .gitattributes' do
it 'does not highlight files marked as undiffable in .gitattributes' do
allow_any_instance_of(Gitlab::Diff::File).to receive(:diffable?).and_return(false)
expect_any_instance_of(Gitlab::Diff::File).not_to receive(:highlighted_diff_lines)
......
......@@ -43,7 +43,6 @@
{
"id": 40,
"title": "Voluptatem",
"assignee_id": 1,
"author_id": 22,
"project_id": 5,
"created_at": "2016-06-14T15:02:08.340Z",
......@@ -61,7 +60,23 @@
"issue_assignees": [
{
"user_id": 1,
"issue_id": 1
"issue_id": 40
},
{
"user_id": 15,
"issue_id": 40
},
{
"user_id": 16,
"issue_id": 40
},
{
"user_id": 16,
"issue_id": 40
},
{
"user_id": 6,
"issue_id": 40
}
],
"milestone": {
......@@ -319,8 +334,7 @@
},
{
"id": 39,
"title": "Delectus veniam ratione in eos culpa et natus molestiae earum aut.",
"assignee_id": 20,
"title": "Issue without assignees",
"author_id": 22,
"project_id": 5,
"created_at": "2016-06-14T15:02:08.233Z",
......@@ -334,6 +348,7 @@
"confidential": false,
"due_date": null,
"moved_to_id": null,
"issue_assignees": [],
"milestone": {
"id": 1,
"title": "test milestone",
......@@ -539,7 +554,6 @@
{
"id": 38,
"title": "Quasi adipisci non cupiditate dolorem quo qui earum sed.",
"assignee_id": 1,
"author_id": 6,
"project_id": 5,
"created_at": "2016-06-14T15:02:08.154Z",
......@@ -756,7 +770,6 @@
{
"id": 37,
"title": "Cupiditate quo aut ducimus minima molestiae vero numquam possimus.",
"assignee_id": 15,
"author_id": 20,
"project_id": 5,
"created_at": "2016-06-14T15:02:08.051Z",
......@@ -952,7 +965,6 @@
{
"id": 36,
"title": "Necessitatibus dolor est enim quia rem suscipit quidem voluptas ullam.",
"assignee_id": 20,
"author_id": 16,
"project_id": 5,
"created_at": "2016-06-14T15:02:07.958Z",
......@@ -1148,7 +1160,6 @@
{
"id": 35,
"title": "Repellat praesentium deserunt maxime incidunt harum porro qui.",
"assignee_id": 6,
"author_id": 20,
"project_id": 5,
"created_at": "2016-06-14T15:02:07.832Z",
......@@ -1344,7 +1355,6 @@
{
"id": 34,
"title": "Ullam expedita deserunt libero consequatur quia dolor harum perferendis facere quidem.",
"assignee_id": 20,
"author_id": 1,
"project_id": 5,
"created_at": "2016-06-14T15:02:07.717Z",
......@@ -1540,7 +1550,6 @@
{
"id": 33,
"title": "Numquam accusamus eos iste exercitationem magni non inventore.",
"assignee_id": 15,
"author_id": 26,
"project_id": 5,
"created_at": "2016-06-14T15:02:07.611Z",
......@@ -1736,7 +1745,6 @@
{
"id": 32,
"title": "Necessitatibus magnam qui at velit consequatur perspiciatis.",
"assignee_id": 22,
"author_id": 15,
"project_id": 5,
"created_at": "2016-06-14T15:02:07.431Z",
......@@ -1932,7 +1940,6 @@
{
"id": 31,
"title": "Libero nam magnam incidunt eaque placeat error et.",
"assignee_id": 1,
"author_id": 16,
"project_id": 5,
"created_at": "2016-06-14T15:02:07.280Z",
......
......@@ -4,7 +4,12 @@ include ImportExport::CommonUtil
describe Gitlab::ImportExport::ProjectTreeRestorer do
describe 'restore project tree' do
before(:context) do
@user = create(:user)
# Using an admin for import, so we can check assignment of existing members
@user = create(:admin)
@existing_members = [
create(:user, username: 'bernard_willms'),
create(:user, username: 'saul_will')
]
RSpec::Mocks.with_temporary_scope do
@project = create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project')
......@@ -63,8 +68,9 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
expect(issue.reload.updated_at.to_s).to eq('2016-06-14 15:02:47 UTC')
end
it 'has issue assignees' do
expect(Issue.where(title: 'Voluptatem').first.issue_assignees).not_to be_empty
it 'has multiple issue assignees' do
expect(Issue.find_by(title: 'Voluptatem').assignees).to contain_exactly(@user, *@existing_members)
expect(Issue.find_by(title: 'Issue without assignees').assignees).to be_empty
end
it 'contains the merge access levels on a protected branch' do
......
......@@ -21,30 +21,77 @@ describe Gitlab::ProjectTransfer do
describe '#move_project' do
it "moves project upload to another namespace" do
FileUtils.mkdir_p(File.join(@root_dir, @namespace_path_was, @project_path))
path_to_be_moved = File.join(@root_dir, @namespace_path_was, @project_path)
expected_path = File.join(@root_dir, @namespace_path, @project_path)
FileUtils.mkdir_p(path_to_be_moved)
@project_transfer.move_project(@project_path, @namespace_path_was, @namespace_path)
expected_path = File.join(@root_dir, @namespace_path, @project_path)
expect(Dir.exist?(expected_path)).to be_truthy
end
end
describe '#move_namespace' do
context 'when moving namespace from root into another namespace' do
it "moves namespace projects' upload" do
child_namespace = 'test_child_namespace'
path_to_be_moved = File.join(@root_dir, child_namespace, @project_path)
expected_path = File.join(@root_dir, @namespace_path, child_namespace, @project_path)
FileUtils.mkdir_p(path_to_be_moved)
@project_transfer.move_namespace(child_namespace, nil, @namespace_path)
expect(Dir.exist?(expected_path)).to be_truthy
end
end
context 'when moving namespace from one parent to another' do
it "moves namespace projects' upload" do
child_namespace = 'test_child_namespace'
path_to_be_moved = File.join(@root_dir, @namespace_path_was, child_namespace, @project_path)
expected_path = File.join(@root_dir, @namespace_path, child_namespace, @project_path)
FileUtils.mkdir_p(path_to_be_moved)
@project_transfer.move_namespace(child_namespace, @namespace_path_was, @namespace_path)
expect(Dir.exist?(expected_path)).to be_truthy
end
end
context 'when moving namespace from having a parent to root' do
it "moves namespace projects' upload" do
child_namespace = 'test_child_namespace'
path_to_be_moved = File.join(@root_dir, @namespace_path_was, child_namespace, @project_path)
expected_path = File.join(@root_dir, child_namespace, @project_path)
FileUtils.mkdir_p(path_to_be_moved)
@project_transfer.move_namespace(child_namespace, @namespace_path_was, nil)
expect(Dir.exist?(expected_path)).to be_truthy
end
end
end
describe '#rename_project' do
it "renames project" do
FileUtils.mkdir_p(File.join(@root_dir, @namespace_path, @project_path_was))
path_to_be_moved = File.join(@root_dir, @namespace_path, @project_path_was)
expected_path = File.join(@root_dir, @namespace_path, @project_path)
FileUtils.mkdir_p(path_to_be_moved)
@project_transfer.rename_project(@project_path_was, @project_path, @namespace_path)
expected_path = File.join(@root_dir, @namespace_path, @project_path)
expect(Dir.exist?(expected_path)).to be_truthy
end
end
describe '#rename_namespace' do
it "renames namespace" do
FileUtils.mkdir_p(File.join(@root_dir, @namespace_path_was, @project_path))
path_to_be_moved = File.join(@root_dir, @namespace_path_was, @project_path)
expected_path = File.join(@root_dir, @namespace_path, @project_path)
FileUtils.mkdir_p(path_to_be_moved)
@project_transfer.rename_namespace(@namespace_path_was, @namespace_path)
expected_path = File.join(@root_dir, @namespace_path, @project_path)
expect(Dir.exist?(expected_path)).to be_truthy
end
end
......
......@@ -108,5 +108,10 @@ describe Gitlab::SlashCommands::Command do
it { is_expected.to eq(Gitlab::SlashCommands::IssueSearch) }
end
context 'IssueMove is triggered' do
let(:params) { { text: 'issue move #78291 to gitlab/gitlab-ci' } }
it { is_expected.to eq(Gitlab::SlashCommands::IssueMove) }
end
end
end
require 'spec_helper'
describe Gitlab::SlashCommands::IssueMove, service: true do
describe '#match' do
shared_examples_for 'move command' do |text_command|
it 'can be parsed to extract the needed fields' do
match_data = described_class.match(text_command)
expect(match_data['iid']).to eq('123456')
expect(match_data['project_path']).to eq('gitlab/gitlab-ci')
end
end
it_behaves_like 'move command', 'issue move #123456 to gitlab/gitlab-ci'
it_behaves_like 'move command', 'issue move #123456 gitlab/gitlab-ci'
it_behaves_like 'move command', 'issue move #123456 gitlab/gitlab-ci '
it_behaves_like 'move command', 'issue move 123456 to gitlab/gitlab-ci'
it_behaves_like 'move command', 'issue move 123456 gitlab/gitlab-ci'
it_behaves_like 'move command', 'issue move 123456 gitlab/gitlab-ci '
end
describe '#execute' do
set(:user) { create(:user) }
set(:issue) { create(:issue) }
set(:chat_name) { create(:chat_name, user: user) }
set(:project) { issue.project }
set(:other_project) { create(:project, namespace: project.namespace) }
before do
[project, other_project].each { |prj| prj.add_master(user) }
end
subject { described_class.new(project, chat_name) }
def process_message(message)
subject.execute(described_class.match(message))
end
context 'when the user can move the issue' do
context 'when the move fails' do
it 'returns the error message' do
message = "issue move #{issue.iid} #{project.full_path}"
expect(process_message(message)).to include(response_type: :ephemeral,
text: a_string_matching('Cannot move issue'))
end
end
context 'when the move succeeds' do
let(:message) { "issue move #{issue.iid} #{other_project.full_path}" }
it 'moves the issue to the new destination' do
expect { process_message(message) }.to change { Issue.count }.by(1)
new_issue = issue.reload.moved_to
expect(new_issue.state).to eq('opened')
expect(new_issue.project_id).to eq(other_project.id)
expect(new_issue.author_id).to eq(issue.author_id)
expect(issue.state).to eq('closed')
expect(issue.project_id).to eq(project.id)
end
it 'returns the new issue' do
expect(process_message(message))
.to include(response_type: :in_channel,
attachments: [a_hash_including(title_link: a_string_including(other_project.full_path))])
end
it 'mentions the old issue' do
expect(process_message(message))
.to include(attachments: [a_hash_including(pretext: a_string_including(project.full_path))])
end
end
end
context 'when the issue does not exist' do
it 'returns not found' do
message = "issue move #{issue.iid.succ} #{other_project.full_path}"
expect(process_message(message)).to include(response_type: :ephemeral,
text: a_string_matching('not found'))
end
end
context 'when the target project does not exist' do
it 'returns not found' do
message = "issue move #{issue.iid} #{other_project.full_path}/foo"
expect(process_message(message)).to include(response_type: :ephemeral,
text: a_string_matching('not found'))
end
end
context 'when the user cannot see the target project' do
it 'returns not found' do
message = "issue move #{issue.iid} #{other_project.full_path}"
other_project.team.truncate
expect(process_message(message)).to include(response_type: :ephemeral,
text: a_string_matching('not found'))
end
end
context 'when the user does not have the required permissions on the target project' do
it 'returns the error message' do
message = "issue move #{issue.iid} #{other_project.full_path}"
other_project.team.truncate
other_project.team.add_guest(user)
expect(process_message(message)).to include(response_type: :ephemeral,
text: a_string_matching('Cannot move issue'))
end
end
end
end