Commit 186ab901 authored by Filipa Lacerda

Merge branch 'master' into dz-sec-report-base-head

* master: (54 commits)
  Add Go Back link to WebIDE
  Resolve conflicts
  Docs: refactor doc general guidelines and style guidelines
  Docs: refactor doc general guidelines and style guidelines
  fixed specs & eslint
  Add version available info to integrity check rake task docs
  Extract constant for LfsPointerFile::VERSION_LINE
  dont log any errors with event listeners in tests
  Resolve CE to EE merge conflicts on documentation
  added default editor options
  Bump fog-google to 1.3.3 to fix LFS upload
  removed shadow on modified edior changed selection background color
  improve styling of diff viewer
  fixed karma spec
  fixed scss-lint added pointer cursor to diff overview
  change editor when opening file from sidebar
  fixed SCSS-lint
  added border to overview hide viewport display
  change background of diff overview fix content not resetting when discarded
  remove hacky margin added to hide diff overview added comment re. try/catch in dispose
  ...
parents e7721aa9 e58fce3f
...@@ -77,19 +77,32 @@ stages: ...@@ -77,19 +77,32 @@ stages:
# (as many users are still using 9.2). # (as many users are still using 9.2).
- postgres:9.2 - postgres:9.2
- redis:alpine - redis:alpine
- docker.elastic.co/elasticsearch/elasticsearch:5.5.2
.use-pg-9-6-no-elasticsearch: &use-pg-9-6-no-elasticsearch .use-mysql: &use-mysql
services:
- mysql:latest
- redis:alpine
# BEGIN EE-only service helpers
.use-pg-9-6: &use-pg-9-6
services: services:
- postgres:9.6 - postgres:9.6
- redis:alpine - redis:alpine
.use-mysql: &use-mysql .use-pg-with-elasticsearch: &use-pg-with-elasticsearch
services:
- postgres:9.2
- redis:alpine
- docker.elastic.co/elasticsearch/elasticsearch:5.5.2
.use-mysql-with-elasticsearch: &use-mysql-with-elasticsearch
services: services:
- mysql:latest - mysql:latest
- redis:alpine - redis:alpine
- docker.elastic.co/elasticsearch/elasticsearch:5.5.2 - docker.elastic.co/elasticsearch/elasticsearch:5.5.2
# END EE-only service helpers
# Skip all jobs except the ones that begin with 'docs/'. # Skip all jobs except the ones that begin with 'docs/'.
# Used for commits including ONLY documentation changes. # Used for commits including ONLY documentation changes.
...@@ -179,15 +192,15 @@ stages: ...@@ -179,15 +192,15 @@ stages:
.rspec-ee-pg: &rspec-ee-pg .rspec-ee-pg: &rspec-ee-pg
<<: *rspec-metadata-ee <<: *rspec-metadata-ee
<<: *use-pg <<: *use-pg-with-elasticsearch
.rspec-ee-mysql: &rspec-ee-mysql .rspec-ee-mysql: &rspec-ee-mysql
<<: *rspec-metadata-ee <<: *rspec-metadata-ee
<<: *use-mysql <<: *use-mysql-with-elasticsearch
.rspec-geo-pg-9-6: &rspec-metadata-pg-geo .rspec-geo-pg-9-6: &rspec-metadata-pg-geo
<<: *rspec-metadata <<: *rspec-metadata
<<: *use-pg-9-6-no-elasticsearch <<: *use-pg-9-6
stage: test stage: test
script: script:
- export NO_KNAPSACK=1 - export NO_KNAPSACK=1
......
...@@ -109,7 +109,7 @@ gem 'dropzonejs-rails', '~> 0.7.1' ...@@ -109,7 +109,7 @@ gem 'dropzonejs-rails', '~> 0.7.1'
# for backups # for backups
gem 'fog-aws', '~> 2.0' gem 'fog-aws', '~> 2.0'
gem 'fog-core', '~> 1.44' gem 'fog-core', '~> 1.44'
gem 'fog-google', '~> 1.3.2' gem 'fog-google', '~> 1.3.3'
gem 'fog-local', '~> 0.3' gem 'fog-local', '~> 0.3'
gem 'fog-openstack', '~> 0.1' gem 'fog-openstack', '~> 0.1'
gem 'fog-rackspace', '~> 0.1.1' gem 'fog-rackspace', '~> 0.1.1'
...@@ -371,7 +371,7 @@ group :development, :test do ...@@ -371,7 +371,7 @@ group :development, :test do
gem 'benchmark-ips', '~> 2.3.0', require: false gem 'benchmark-ips', '~> 2.3.0', require: false
gem 'license_finder', '~> 3.1', require: false gem 'license_finder', '~> 3.1', require: false
gem 'knapsack', '~> 1.11.0' gem 'knapsack', '~> 1.16'
gem 'activerecord_sane_schema_dumper', '0.2' gem 'activerecord_sane_schema_dumper', '0.2'
...@@ -435,9 +435,9 @@ gem 'google-protobuf', '= 3.5.1' ...@@ -435,9 +435,9 @@ gem 'google-protobuf', '= 3.5.1'
gem 'toml-rb', '~> 1.0.0', require: false gem 'toml-rb', '~> 1.0.0', require: false
# Feature toggles # Feature toggles
gem 'flipper', '~> 0.11.0' gem 'flipper', '~> 0.13.0'
gem 'flipper-active_record', '~> 0.11.0' gem 'flipper-active_record', '~> 0.13.0'
gem 'flipper-active_support_cache_store', '~> 0.11.0' gem 'flipper-active_support_cache_store', '~> 0.13.0'
# Structured logging # Structured logging
gem 'lograge', '~> 0.5' gem 'lograge', '~> 0.5'
......
...@@ -242,13 +242,13 @@ GEM ...@@ -242,13 +242,13 @@ GEM
path_expander (~> 1.0) path_expander (~> 1.0)
ruby_parser (~> 3.0) ruby_parser (~> 3.0)
sexp_processor (~> 4.0) sexp_processor (~> 4.0)
flipper (0.11.0) flipper (0.13.0)
flipper-active_record (0.11.0) flipper-active_record (0.13.0)
activerecord (>= 3.2, < 6) activerecord (>= 3.2, < 6)
flipper (~> 0.11.0) flipper (~> 0.13.0)
flipper-active_support_cache_store (0.11.0) flipper-active_support_cache_store (0.13.0)
activesupport (>= 3.2, < 6) activesupport (>= 3.2, < 6)
flipper (~> 0.11.0) flipper (~> 0.13.0)
flowdock (0.7.1) flowdock (0.7.1)
httparty (~> 0.7) httparty (~> 0.7)
multi_json multi_json
...@@ -266,7 +266,7 @@ GEM ...@@ -266,7 +266,7 @@ GEM
builder builder
excon (~> 0.58) excon (~> 0.58)
formatador (~> 0.2) formatador (~> 0.2)
fog-google (1.3.2) fog-google (1.3.3)
fog-core fog-core
fog-json fog-json
fog-xml fog-xml
...@@ -479,9 +479,8 @@ GEM ...@@ -479,9 +479,8 @@ GEM
kaminari-core (= 1.0.1) kaminari-core (= 1.0.1)
kaminari-core (1.0.1) kaminari-core (1.0.1)
kgio (2.10.0) kgio (2.10.0)
knapsack (1.11.0) knapsack (1.16.0)
rake rake
timecop (>= 0.1.0)
kubeclient (2.2.0) kubeclient (2.2.0)
http (= 0.9.8) http (= 0.9.8)
recursive-open-struct (= 1.0.0) recursive-open-struct (= 1.0.0)
...@@ -1075,13 +1074,13 @@ DEPENDENCIES ...@@ -1075,13 +1074,13 @@ DEPENDENCIES
fast_blank fast_blank
ffaker (~> 2.4) ffaker (~> 2.4)
flay (~> 2.10.0) flay (~> 2.10.0)
flipper (~> 0.11.0) flipper (~> 0.13.0)
flipper-active_record (~> 0.11.0) flipper-active_record (~> 0.13.0)
flipper-active_support_cache_store (~> 0.11.0) flipper-active_support_cache_store (~> 0.13.0)
fog-aliyun (~> 0.2.0) fog-aliyun (~> 0.2.0)
fog-aws (~> 2.0) fog-aws (~> 2.0)
fog-core (~> 1.44) fog-core (~> 1.44)
fog-google (~> 1.3.2) fog-google (~> 1.3.3)
fog-local (~> 0.3) fog-local (~> 0.3)
fog-openstack (~> 0.1) fog-openstack (~> 0.1)
fog-rackspace (~> 0.1.1) fog-rackspace (~> 0.1.1)
...@@ -1126,7 +1125,7 @@ DEPENDENCIES ...@@ -1126,7 +1125,7 @@ DEPENDENCIES
json-schema (~> 2.8.0) json-schema (~> 2.8.0)
jwt (~> 1.5.6) jwt (~> 1.5.6)
kaminari (~> 1.0) kaminari (~> 1.0)
knapsack (~> 1.11.0) knapsack (~> 1.16)
kubeclient (~> 2.2.0) kubeclient (~> 2.2.0)
letter_opener_web (~> 1.3.0) letter_opener_web (~> 1.3.0)
license_finder (~> 3.1) license_finder (~> 3.1)
......
...@@ -73,6 +73,7 @@ export default class MergeRequestTabs { ...@@ -73,6 +73,7 @@ export default class MergeRequestTabs {
constructor({ action, setUrl, stubLocation } = {}) { constructor({ action, setUrl, stubLocation } = {}) {
const mergeRequestTabs = document.querySelector('.js-tabs-affix'); const mergeRequestTabs = document.querySelector('.js-tabs-affix');
const navbar = document.querySelector('.navbar-gitlab'); const navbar = document.querySelector('.navbar-gitlab');
const peek = document.getElementById('peek');
const paddingTop = 16; const paddingTop = 16;
this.diffsLoaded = false; this.diffsLoaded = false;
...@@ -86,6 +87,10 @@ export default class MergeRequestTabs { ...@@ -86,6 +87,10 @@ export default class MergeRequestTabs {
this.showTab = this.showTab.bind(this); this.showTab = this.showTab.bind(this);
this.stickyTop = navbar ? navbar.offsetHeight - paddingTop : 0; this.stickyTop = navbar ? navbar.offsetHeight - paddingTop : 0;
if (peek) {
this.stickyTop += peek.offsetHeight;
}
if (mergeRequestTabs) { if (mergeRequestTabs) {
this.stickyTop += mergeRequestTabs.offsetHeight; this.stickyTop += mergeRequestTabs.offsetHeight;
} }
......
...@@ -132,13 +132,35 @@ ...@@ -132,13 +132,35 @@
.multi-file-tabs { .multi-file-tabs {
display: flex; display: flex;
overflow-x: auto;
background-color: $white-normal; background-color: $white-normal;
box-shadow: inset 0 -1px $white-dark; box-shadow: inset 0 -1px $white-dark;
> li { > ul {
display: flex;
overflow-x: auto;
}
li {
position: relative; position: relative;
} }
.dropdown {
display: flex;
margin-left: auto;
margin-bottom: 1px;
padding: 0 $grid-size;
border-left: 1px solid $white-dark;
background-color: $white-light;
&.shadow {
box-shadow: 0 0 10px $dropdown-shadow-color;
}
.btn {
margin-top: auto;
margin-bottom: auto;
}
}
} }
.multi-file-tab { .multi-file-tab {
...@@ -207,6 +229,70 @@ ...@@ -207,6 +229,70 @@
.vertical-center { .vertical-center {
min-height: auto; min-height: auto;
} }
.monaco-editor .lines-content .cigr {
display: none;
}
.monaco-diff-editor.vs {
.editor.modified {
box-shadow: none;
}
.diagonal-fill {
display: none !important;
}
.diffOverview {
background-color: $white-light;
border-left: 1px solid $white-dark;
cursor: ns-resize;
}
.diffViewport {
display: none;
}
.char-insert {
background-color: $line-added-dark;
}
.char-delete {
background-color: $line-removed-dark;
}
.line-numbers {
color: $black-transparent;
}
.view-overlays {
.line-insert {
background-color: $line-added;
}
.line-delete {
background-color: $line-removed;
}
}
.margin {
background-color: $gray-light;
border-right: 1px solid $white-normal;
.line-insert {
border-right: 1px solid $line-added-dark;
}
.line-delete {
border-right: 1px solid $line-removed-dark;
}
}
.margin-view-overlays .insert-sign,
.margin-view-overlays .delete-sign {
opacity: .4;
}
}
} }
.multi-file-editor-holder { .multi-file-editor-holder {
...@@ -596,11 +682,6 @@ ...@@ -596,11 +682,6 @@
padding-bottom: 0; padding-bottom: 0;
} }
.multi-file-commit-panel .multi-file-commit-panel-inner-scroll {
max-height: calc(100vh - #{$header-height + $context-header-height});
min-height: calc(100vh - #{$header-height + $context-header-height});
}
&.flash-shown { &.flash-shown {
.content-wrapper { .content-wrapper {
margin-top: 0; margin-top: 0;
...@@ -609,10 +690,11 @@ ...@@ -609,10 +690,11 @@
.ide-view { .ide-view {
height: calc(100vh - #{$header-height + $flash-height}); height: calc(100vh - #{$header-height + $flash-height});
} }
}
.multi-file-commit-panel .multi-file-commit-panel-inner-scroll { .projects-sidebar {
max-height: calc(100vh - #{$header-height + $flash-height + $context-header-height}); .multi-file-commit-panel-inner-scroll {
min-height: calc(100vh - #{$header-height + $flash-height + $context-header-height}); flex: 1;
} }
} }
} }
...@@ -632,11 +714,6 @@ ...@@ -632,11 +714,6 @@
height: calc(100vh - #{$header-height + $performance-bar-height}); height: calc(100vh - #{$header-height + $performance-bar-height});
} }
.multi-file-commit-panel .multi-file-commit-panel-inner-scroll {
max-height: calc(100vh - #{$header-height + $performance-bar-height + 60});
min-height: calc(100vh - #{$header-height + $performance-bar-height + 60});
}
&.flash-shown { &.flash-shown {
.content-wrapper { .content-wrapper {
margin-top: 0; margin-top: 0;
...@@ -645,11 +722,6 @@ ...@@ -645,11 +722,6 @@
.ide-view { .ide-view {
height: calc(100vh - #{$header-height + $performance-bar-height + $flash-height}); height: calc(100vh - #{$header-height + $performance-bar-height + $flash-height});
} }
.multi-file-commit-panel .multi-file-commit-panel-inner-scroll {
max-height: calc(100vh - #{$header-height + $performance-bar-height + $flash-height + $context-header-height});
min-height: calc(100vh - #{$header-height + $performance-bar-height + $flash-height + $context-header-height});
}
} }
} }
...@@ -684,3 +756,29 @@ ...@@ -684,3 +756,29 @@
.ide-commit-new-branch { .ide-commit-new-branch {
margin-left: 25px; margin-left: 25px;
} }
.ide-external-links {
p {
margin: 0;
}
}
.ide-sidebar-link {
padding: $gl-padding-8 $gl-padding;
background: $indigo-700;
color: $white-light;
text-decoration: none;
display: flex;
align-items: center;
&:focus,
&:hover {
color: $white-light;
text-decoration: underline;
background: $indigo-500;
}
&:active {
background: $indigo-800;
}
}
...@@ -348,15 +348,15 @@ class ApplicationSetting < ActiveRecord::Base ...@@ -348,15 +348,15 @@ class ApplicationSetting < ActiveRecord::Base
end end
def home_page_url_column_exists? def home_page_url_column_exists?
ActiveRecord::Base.connection.column_exists?(:application_settings, :home_page_url) ::Gitlab::Database.cached_column_exists?(:application_settings, :home_page_url)
end end
def help_page_support_url_column_exists? def help_page_support_url_column_exists?
ActiveRecord::Base.connection.column_exists?(:application_settings, :help_page_support_url) ::Gitlab::Database.cached_column_exists?(:application_settings, :help_page_support_url)
end end
def sidekiq_throttling_column_exists? def sidekiq_throttling_column_exists?
ActiveRecord::Base.connection.column_exists?(:application_settings, :sidekiq_throttling_enabled) ::Gitlab::Database.cached_column_exists?(:application_settings, :sidekiq_throttling_enabled)
end end
def domain_whitelist_raw def domain_whitelist_raw
......
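The change above swaps per-call `ActiveRecord::Base.connection.column_exists?` lookups for GitLab's cached helper. As a rough illustration of the idea (a minimal sketch using Rails' schema cache; the module and method below are hypothetical and not the actual `::Gitlab::Database` implementation):

```ruby
# Illustrative sketch only. Rails keeps column metadata in an in-memory
# schema cache, so repeated existence checks avoid the extra queries that
# connection.column_exists? issues on every call.
module CachedSchemaLookup
  def self.cached_column_exists?(table_name, column_name)
    ActiveRecord::Base.connection
      .schema_cache
      .columns_hash(table_name.to_s)
      .key?(column_name.to_s)
  end
end

# Usage, mirroring the ApplicationSetting predicates above (hypothetical):
# CachedSchemaLookup.cached_column_exists?(:application_settings, :home_page_url)
```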
...@@ -161,11 +161,6 @@ class JiraService < IssueTrackerService ...@@ -161,11 +161,6 @@ class JiraService < IssueTrackerService
add_comment(data, jira_issue) add_comment(data, jira_issue)
end end
# reason why service cannot be tested
def disabled_title
"Please fill in Password and Username."
end
def test(_) def test(_)
result = test_settings result = test_settings
success = result.present? success = result.present?
......
...@@ -39,10 +39,6 @@ class PipelinesEmailService < Service ...@@ -39,10 +39,6 @@ class PipelinesEmailService < Service
project.pipelines.any? project.pipelines.any?
end end
def disabled_title
'Please setup a pipeline on your repository.'
end
def test_data(project, user) def test_data(project, user)
data = Gitlab::DataBuilder::Pipeline.build(project.pipelines.last) data = Gitlab::DataBuilder::Pipeline.build(project.pipelines.last)
data[:user] = user.hook_attrs data[:user] = user.hook_attrs
......
...@@ -163,11 +163,6 @@ class Service < ActiveRecord::Base ...@@ -163,11 +163,6 @@ class Service < ActiveRecord::Base
true true
end end
# reason why service cannot be tested
def disabled_title
"Please setup a project repository."
end
# Provide convenient accessor methods # Provide convenient accessor methods
# for each serialized property. # for each serialized property.
# Also keep track of updated properties in a similar way as ActiveModel::Dirty # Also keep track of updated properties in a similar way as ActiveModel::Dirty
......
module Files module Files
class CreateService < Files::BaseService class CreateService < Files::BaseService
def create_commit! def create_commit!
handler = Lfs::FileModificationHandler.new(project, @branch_name) transformer = Lfs::FileTransformer.new(project, @branch_name)
handler.new_file(@file_path, @file_content) do |content_or_lfs_pointer| result = transformer.new_file(@file_path, @file_content)
create_transformed_commit(content_or_lfs_pointer)
end create_transformed_commit(result.content)
end end
private private
......
...@@ -3,11 +3,33 @@ module Files ...@@ -3,11 +3,33 @@ module Files
UPDATE_FILE_ACTIONS = %w(update move delete).freeze UPDATE_FILE_ACTIONS = %w(update move delete).freeze
def create_commit! def create_commit!
transformer = Lfs::FileTransformer.new(project, @branch_name)
actions = actions_after_lfs_transformation(transformer, params[:actions])
commit_actions!(actions)
end
private
def actions_after_lfs_transformation(transformer, actions)
actions.map do |action|
if action[:action] == 'create'
result = transformer.new_file(action[:file_path], action[:content], encoding: action[:encoding])
action[:content] = result.content
action[:encoding] = result.encoding
end
action
end
end
def commit_actions!(actions)
repository.multi_action( repository.multi_action(
current_user, current_user,
message: @commit_message, message: @commit_message,
branch_name: @branch_name, branch_name: @branch_name,
actions: params[:actions], actions: actions,
author_email: @author_email, author_email: @author_email,
author_name: @author_name, author_name: @author_name,
start_project: @start_project, start_project: @start_project,
...@@ -17,8 +39,6 @@ module Files ...@@ -17,8 +39,6 @@ module Files
raise_error(e) raise_error(e)
end end
private
def validate! def validate!
super super
......
module Lfs module Lfs
class FileModificationHandler # Usage: Calling `new_file` checks to see if a file should be in LFS and
# return a transformed result with `content` and `encoding` to commit.
#
# For LFS an LfsObject linked to the project is stored and an LFS
# pointer returned. If the file isn't in LFS the untransformed content
# is returned to save in the commit.
#
# transformer = Lfs::FileTransformer.new(project, @branch_name)
# content_or_lfs_pointer = transformer.new_file(file_path, content).content
# create_transformed_commit(content_or_lfs_pointer)
#
class FileTransformer
attr_reader :project, :branch_name attr_reader :project, :branch_name
delegate :repository, to: :project delegate :repository, to: :project
...@@ -9,24 +20,37 @@ module Lfs ...@@ -9,24 +20,37 @@ module Lfs
@branch_name = branch_name @branch_name = branch_name
end end
def new_file(file_path, file_content) def new_file(file_path, file_content, encoding: nil)
if project.lfs_enabled? && lfs_file?(file_path) if project.lfs_enabled? && lfs_file?(file_path)
file_content = Base64.decode64(file_content) if encoding == 'base64'
lfs_pointer_file = Gitlab::Git::LfsPointerFile.new(file_content) lfs_pointer_file = Gitlab::Git::LfsPointerFile.new(file_content)
lfs_object = create_lfs_object!(lfs_pointer_file, file_content) lfs_object = create_lfs_object!(lfs_pointer_file, file_content)
content = lfs_pointer_file.pointer
success = yield(content) link_lfs_object!(lfs_object)
link_lfs_object!(lfs_object) if success Result.new(content: lfs_pointer_file.pointer, encoding: 'text')
else else
yield(file_content) Result.new(content: file_content, encoding: encoding)
end
end
class Result
attr_reader :content, :encoding
def initialize(content:, encoding:)
@content = content
@encoding = encoding
end end
end end
private private
def lfs_file?(file_path) def lfs_file?(file_path)
repository.attributes_at(branch_name, file_path)['filter'] == 'lfs' cached_attributes.attributes(file_path)['filter'] == 'lfs'
end
def cached_attributes
@cached_attributes ||= Gitlab::Git::AttributesAtRefParser.new(repository, branch_name)
end end
def create_lfs_object!(lfs_pointer_file, file_content) def create_lfs_object!(lfs_pointer_file, file_content)
......
...@@ -15,11 +15,6 @@ ...@@ -15,11 +15,6 @@
.footer-block.row-content-block .footer-block.row-content-block
= service_save_button(@service) = service_save_button(@service)
&nbsp; &nbsp;
- if @service.valid? && @service.activated?
- unless @service.can_test?
- disabled_class = 'disabled'
- disabled_title = @service.disabled_title
= link_to 'Cancel', project_settings_integrations_path(@project), class: 'btn btn-cancel' = link_to 'Cancel', project_settings_integrations_path(@project), class: 'btn btn-cancel'
- if lookup_context.template_exists?('show', "projects/services/#{@service.to_param}", true) - if lookup_context.template_exists?('show', "projects/services/#{@service.to_param}", true)
......
---
title: Update knapsack to 1.16.0
merge_request: 17735
author: Takuya Noguchi
type: other
---
title: Create commit API and Web IDE obey LFS filters
merge_request: 16718
author:
type: fixed
---
title: Cache column_exists? for application settings
merge_request:
author:
type: performance
---
title: Cache table_exists?('application_settings') to reduce repeated schema reloads
merge_request:
author:
type: performance
...@@ -29,7 +29,7 @@ in your testing/production environment. ...@@ -29,7 +29,7 @@ in your testing/production environment.
GitLab stores a number of secret values in the `/etc/gitlab/gitlab-secrets.json` GitLab stores a number of secret values in the `/etc/gitlab/gitlab-secrets.json`
file which *must* match between the primary and secondary nodes. Until there is file which *must* match between the primary and secondary nodes. Until there is
a means of automatically replicating these between nodes (see issue [gitlab-org/gitlab-ee#3789]), a means of automatically replicating these between nodes (see issue [gitlab-org/gitlab-ee#3789]),
they must be manually replicated to the secondary. they must be manually replicated to the secondary.
1. SSH into the **primary** node, and execute the command below: 1. SSH into the **primary** node, and execute the command below:
...@@ -127,7 +127,11 @@ keys must be manually replicated to the secondary node. ...@@ -127,7 +127,11 @@ keys must be manually replicated to the secondary node.
1. Restart sshd: 1. Restart sshd:
```bash ```bash
service ssh restart # Debian or Ubuntu installations
sudo service ssh reload
# CentOS installations
sudo service sshd reload
``` ```
### Step 3. Add the secondary GitLab node ### Step 3. Add the secondary GitLab node
...@@ -145,13 +149,13 @@ keys must be manually replicated to the secondary node. ...@@ -145,13 +149,13 @@ keys must be manually replicated to the secondary node.
``` ```
gitlab-ctl restart gitlab-ctl restart
``` ```
Check if there are any common issues with your Geo setup by running: Check if there are any common issues with your Geo setup by running:
``` ```
gitlab-rake gitlab:geo:check gitlab-rake gitlab:geo:check
``` ```
1. SSH into your GitLab **primary** server and login as root to verify the 1. SSH into your GitLab **primary** server and login as root to verify the
secondary is reachable or there are any common issues with your Geo setup: secondary is reachable or there are any common issues with your Geo setup:
...@@ -164,13 +168,13 @@ replicating missing data from the primary in a process known as **backfill**. ...@@ -164,13 +168,13 @@ replicating missing data from the primary in a process known as **backfill**.
Meanwhile, the primary node will start to notify the secondary of any changes, so Meanwhile, the primary node will start to notify the secondary of any changes, so
that the secondary can act on those notifications immediately. that the secondary can act on those notifications immediately.
Make sure the secondary instance is running and accessible. Make sure the secondary instance is running and accessible.
You can log in to the secondary node with the same credentials as used in the primary. You can log in to the secondary node with the same credentials as used in the primary.
### Step 4. (Optional) Enabling hashed storage (from GitLab 10.0) ### Step 4. (Optional) Enabling hashed storage (from GitLab 10.0)
CAUTION: **Warning**: CAUTION: **Warning**:
Hashed storage is in **Beta**. It is not considered production-ready. See Hashed storage is in **Beta**. It is not considered production-ready. See
[Hashed Storage] for more detail, and for the latest updates, check [Hashed Storage] for more detail, and for the latest updates, check
infrastructure issue [gitlab-com/infrastructure#2821]. infrastructure issue [gitlab-com/infrastructure#2821].
......
...@@ -84,9 +84,9 @@ checks using those checksums can be run. These checks also detect missing files. ...@@ -84,9 +84,9 @@ checks using those checksums can be run. These checks also detect missing files.
Currently, integrity checks are supported for the following types of file: Currently, integrity checks are supported for the following types of file:
* CI artifacts * CI artifacts (Available from version 10.7.0)
* LFS objects * LFS objects (Available from version 10.6.0)
* User uploads * User uploads (Available from version 10.6.0)
**Omnibus Installation** **Omnibus Installation**
......
...@@ -16,18 +16,26 @@ codequality: ...@@ -16,18 +16,26 @@ codequality:
- docker:dind - docker:dind
script: script:
- docker pull codeclimate/codeclimate - docker pull codeclimate/codeclimate
- docker run --env CODECLIMATE_CODE="$PWD" --volume "$PWD":/code --volume /var/run/docker.sock:/var/run/docker.sock --volume /tmp/cc:/tmp/cc codeclimate/codeclimate:0.69.0 init - export SP_VERSION=$(echo "$CI_SERVER_VERSION" | sed 's/^\([0-9]*\)\.\([0-9]*\).*/\1-\2-stable/')
- docker run --env CODECLIMATE_CODE="$PWD" --volume "$PWD":/code --volume /var/run/docker.sock:/var/run/docker.sock --volume /tmp/cc:/tmp/cc codeclimate/codeclimate:0.69.0 analyze -f json > codeclimate.json || true - docker run
--env SOURCE_CODE="$PWD" \
--volume "$PWD":/code \
--volume /var/run/docker.sock:/var/run/docker.sock \
"registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code
artifacts: artifacts:
paths: [codeclimate.json] paths: [codeclimate.json]
``` ```
This will create a `codequality` job in your CI pipeline and will allow you to The above example will create a `codequality` job in your CI/CD pipeline which
download and analyze the report artifact in JSON format. will scan your source code for code quality issues. The report will be saved
as an artifact that you can later download and analyze.
For [GitLab Starter][ee] users, this information can be automatically TIP: **Tip:**
extracted and shown right in the merge request widget. [Learn more on code quality Starting with [GitLab Starter][ee] 9.3, this information will
diffs in merge requests](../../user/project/merge_requests/code_quality_diff.md). be automatically extracted and shown right in the merge request widget. To do
so, the CI/CD job must be named `codequality` and the artifact path must be
`codeclimate.json`.
[Learn more on code quality diffs in merge requests](https://docs.gitlab.com/ee/user/project/merge_requests/code_quality_diff.html).
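For reference, the `SP_VERSION` export in the job above turns the running GitLab version into a "major-minor-stable" image tag. A small Ruby equivalent of that `sed` expression, for illustration only (the job itself uses `sed`):

```ruby
# Mirrors 's/^\([0-9]*\)\.\([0-9]*\).*/\1-\2-stable/' from the job above.
def sp_version(ci_server_version)
  ci_server_version.sub(/\A(\d+)\.(\d+).*/, '\1-\2-stable')
end

sp_version('10.6.2-ee') # => "10-6-stable"
```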
[cli]: https://github.com/codeclimate/codeclimate [cli]: https://github.com/codeclimate/codeclimate
[dind]: ../docker/using_docker_build.md#use-docker-in-docker-executor [dind]: ../docker/using_docker_build.md#use-docker-in-docker-executor
......
...@@ -70,7 +70,7 @@ To downgrade an Omnibus installation, it is sufficient to install the Community ...@@ -70,7 +70,7 @@ To downgrade an Omnibus installation, it is sufficient to install the Community
Edition package on top of the currently installed one. You can do this manually, Edition package on top of the currently installed one. You can do this manually,
by directly [downloading the package](https://packages.gitlab.com/gitlab/gitlab-ce) by directly [downloading the package](https://packages.gitlab.com/gitlab/gitlab-ce)
you need, or by adding our CE package repository and following the you need, or by adding our CE package repository and following the
[CE installation instructions](https://about.gitlab.com/downloads/?version=ce). [CE installation instructions](https://about.gitlab.com/installation/?version=ce).
**Source Installation** **Source Installation**
......
...@@ -24,8 +24,11 @@ ...@@ -24,8 +24,11 @@
methods: { methods: {
...mapActions([ ...mapActions([
'discardFileChanges', 'discardFileChanges',
'updateViewer',
]), ]),
openFileInEditor(file) { openFileInEditor(file) {
this.updateViewer('diff');
router.push(`/project${file.url}`); router.push(`/project${file.url}`);
}, },
}, },
......
<script>
import Icon from '~/vue_shared/components/icon.vue';
export default {
components: {
Icon,
},
props: {
hasChanges: {
type: Boolean,
required: false,
default: false,
},
viewer: {
type: String,
required: true,
},
showShadow: {
type: Boolean,
required: true,
},
},
methods: {
changeMode(mode) {
this.$emit('click', mode);
},
},
};
</script>
<template>
<div
class="dropdown"
:class="{
shadow: showShadow,
}"
>
<button
type="button"
class="btn btn-primary btn-sm"
:class="{
'btn-inverted': hasChanges,
}"
data-toggle="dropdown"
>
<template v-if="viewer === 'editor'">
{{ __('Editing') }}
</template>
<template v-else>
{{ __('Reviewing') }}
</template>
<icon
name="angle-down"
:size="12"
css-classes="caret-down"
/>
</button>
<div class="dropdown-menu dropdown-menu-selectable dropdown-open-left">
<ul>
<li>
<a
href="#"
@click.prevent="changeMode('editor')"
:class="{
'is-active': viewer === 'editor',
}"
>
<strong class="dropdown-menu-inner-title">{{ __('Editing') }}</strong>
<span class="dropdown-menu-inner-content">
{{ __('View and edit lines') }}
</span>
</a>
</li>
<li>
<a
href="#"
@click.prevent="changeMode('diff')"
:class="{
'is-active': viewer === 'diff',
}"
>
<strong class="dropdown-menu-inner-title">{{ __('Reviewing') }}</strong>
<span class="dropdown-menu-inner-content">
{{ __('Compare changes with the last commit') }}
</span>
</a>
</li>
</ul>
</div>
</div>
</template>
<script>
import icon from '~/vue_shared/components/icon.vue';
export default {
components: {
icon,
},
props: {
projectUrl: {
type: String,
required: true,
},
},
computed: {
goBackUrl() {
return document.referrer || this.projectUrl;
},
},
};
</script>
<template>
<nav
class="ide-external-links"
v-once
>
<p>
<a
:href="goBackUrl"
class="ide-sidebar-link"
>
<icon
:size="16"
class="append-right-8"
name="go-back"
/>
<span class="ide-external-links-text">
{{ s__('Go back') }}
</span>
</a>
</p>
</nav>
</template>
<script> <script>
import projectAvatarImage from '~/vue_shared/components/project_avatar/image.vue'; import projectAvatarImage from '~/vue_shared/components/project_avatar/image.vue';
import branchesTree from './ide_project_branches_tree.vue'; import branchesTree from './ide_project_branches_tree.vue';
import externalLinks from './ide_external_links.vue';
export default { export default {
components: { components: {
branchesTree, branchesTree,
externalLinks,
projectAvatarImage, projectAvatarImage,
}, },
props: { props: {
...@@ -37,6 +39,9 @@ export default { ...@@ -37,6 +39,9 @@ export default {
</div> </div>
</a> </a>
</div> </div>
<external-links
:project-url="project.web_url"
/>
<div class="multi-file-commit-panel-inner-scroll"> <div class="multi-file-commit-panel-inner-scroll">
<branches-tree <branches-tree
v-for="branch in project.branches" v-for="branch in project.branches"
......
...@@ -15,6 +15,8 @@ export default { ...@@ -15,6 +15,8 @@ export default {
'leftPanelCollapsed', 'leftPanelCollapsed',
'rightPanelCollapsed', 'rightPanelCollapsed',
'panelResizing', 'panelResizing',
'viewer',
'delayViewerUpdated',
]), ]),
shouldHideEditor() { shouldHideEditor() {
return this.activeFile && this.activeFile.binary && !this.activeFile.raw; return this.activeFile && this.activeFile.binary && !this.activeFile.raw;
...@@ -37,6 +39,9 @@ export default { ...@@ -37,6 +39,9 @@ export default {
this.editor.updateDimensions(); this.editor.updateDimensions();
} }
}, },
viewer() {
this.createEditorInstance();
},
}, },
beforeDestroy() { beforeDestroy() {
this.editor.dispose(); this.editor.dispose();
...@@ -59,6 +64,8 @@ export default { ...@@ -59,6 +64,8 @@ export default {
'setFileLanguage', 'setFileLanguage',
'setEditorPosition', 'setEditorPosition',
'setFileEOL', 'setFileEOL',
'updateViewer',
'updateDelayViewerUpdated',
]), ]),
initMonaco() { initMonaco() {
if (this.shouldHideEditor) return; if (this.shouldHideEditor) return;
...@@ -67,16 +74,34 @@ export default { ...@@ -67,16 +74,34 @@ export default {
this.getRawFileData(this.activeFile) this.getRawFileData(this.activeFile)
.then(() => { .then(() => {
this.editor.createInstance(this.$refs.editor); const viewerPromise = this.delayViewerUpdated ? this.updateViewer('editor') : Promise.resolve();
return viewerPromise;
})
.then(() => {
this.updateDelayViewerUpdated(false);
this.createEditorInstance();
}) })
.then(() => this.setupEditor())
.catch((err) => { .catch((err) => {
flash('Error setting up monaco. Please try again.', 'alert', document, null, false, true); flash('Error setting up monaco. Please try again.', 'alert', document, null, false, true);
throw err; throw err;
}); });
}, },
createEditorInstance() {
this.editor.dispose();
this.$nextTick(() => {
if (this.viewer === 'editor') {
this.editor.createInstance(this.$refs.editor);
} else {
this.editor.createDiffInstance(this.$refs.editor);
}
this.setupEditor();
});
},
setupEditor() { setupEditor() {
if (!this.activeFile) return; if (!this.activeFile || !this.editor.instance) return;
this.model = this.editor.createModel(this.activeFile); this.model = this.editor.createModel(this.activeFile);
......
<script> <script>
import { mapState } from 'vuex'; import { mapActions, mapState } from 'vuex';
import timeAgoMixin from '~/vue_shared/mixins/timeago'; import timeAgoMixin from '~/vue_shared/mixins/timeago';
import skeletonLoadingContainer from '~/vue_shared/components/skeleton_loading_container.vue'; import skeletonLoadingContainer from '~/vue_shared/components/skeleton_loading_container.vue';
...@@ -70,6 +70,9 @@ ...@@ -70,6 +70,9 @@
} }
}, },
methods: { methods: {
...mapActions([
'updateDelayViewerUpdated',
]),
clickFile(row) { clickFile(row) {
// Manual Action if a tree is selected/opened // Manual Action if a tree is selected/opened
if (this.file.type === 'tree' && this.$router.currentRoute.path === `/project${row.url}`) { if (this.file.type === 'tree' && this.$router.currentRoute.path === `/project${row.url}`) {
...@@ -78,7 +81,13 @@ ...@@ -78,7 +81,13 @@
tree: this.file, tree: this.file,
}); });
} }
this.$router.push(`/project${row.url}`);
const delayPromise = this.file.changed ?
Promise.resolve() : this.updateDelayViewerUpdated(true);
return delayPromise.then(() => {
this.$router.push(`/project${row.url}`);
});
}, },
}, },
}; };
......
<script> <script>
import { mapState } from 'vuex'; import { mapActions, mapGetters, mapState } from 'vuex';
import RepoTab from './repo_tab.vue'; import RepoTab from './repo_tab.vue';
import EditorMode from './editor_mode_dropdown.vue';
export default { export default {
components: { components: {
'repo-tab': RepoTab, RepoTab,
EditorMode,
},
data() {
return {
showShadow: false,
};
}, },
computed: { computed: {
...mapGetters([
'hasChanges',
]),
...mapState([ ...mapState([
'openFiles', 'openFiles',
'viewer',
]),
},
updated() {
if (!this.$refs.tabsScroller) return;
this.showShadow = this.$refs.tabsScroller.scrollWidth > this.$refs.tabsScroller.offsetWidth;
},
methods: {
...mapActions([
'updateViewer',
]), ]),
}, },
}; };
</script> </script>
<template> <template>
<ul <div class="multi-file-tabs">
class="multi-file-tabs list-unstyled append-bottom-0" <ul
> class="list-unstyled append-bottom-0"
<repo-tab ref="tabsScroller"
v-for="tab in openFiles" >
:key="tab.key" <repo-tab
:tab="tab" v-for="tab in openFiles"
:key="tab.key"
:tab="tab"
/>
</ul>
<editor-mode
:viewer="viewer"
:show-shadow="showShadow"
:has-changes="hasChanges"
@click="updateViewer"
/> />
</ul> </div>
</template> </template>
...@@ -26,6 +26,9 @@ export default class Model { ...@@ -26,6 +26,9 @@ export default class Model {
this.events = new Map(); this.events = new Map();
this.updateContent = this.updateContent.bind(this); this.updateContent = this.updateContent.bind(this);
this.dispose = this.dispose.bind(this);
eventHub.$on(`editor.update.model.dispose.${this.file.path}`, this.dispose);
eventHub.$on(`editor.update.model.content.${this.file.path}`, this.updateContent); eventHub.$on(`editor.update.model.content.${this.file.path}`, this.updateContent);
} }
...@@ -75,6 +78,7 @@ export default class Model { ...@@ -75,6 +78,7 @@ export default class Model {
this.disposable.dispose(); this.disposable.dispose();
this.events.clear(); this.events.clear();
eventHub.$off(`editor.update.model.dispose.${this.file.path}`, this.dispose);
eventHub.$off(`editor.update.model.content.${this.file.path}`, this.updateContent); eventHub.$off(`editor.update.model.content.${this.file.path}`, this.updateContent);
} }
} }
import eventHub from 'ee/ide/eventhub';
import Disposable from './disposable'; import Disposable from './disposable';
import Model from './model'; import Model from './model';
...@@ -25,9 +26,17 @@ export default class ModelManager { ...@@ -25,9 +26,17 @@ export default class ModelManager {
this.models.set(model.path, model); this.models.set(model.path, model);
this.disposable.add(model); this.disposable.add(model);
eventHub.$on(`editor.update.model.dispose.${file.path}`, this.removeCachedModel.bind(this, file));
return model; return model;
} }
removeCachedModel(file) {
this.models.delete(file.path);
eventHub.$off(`editor.update.model.dispose.${file.path}`, this.removeCachedModel);
}
dispose() { dispose() {
// dispose of all the models // dispose of all the models
this.disposable.dispose(); this.disposable.dispose();
......
...@@ -27,6 +27,8 @@ export default class DecorationsController { ...@@ -27,6 +27,8 @@ export default class DecorationsController {
} }
decorate(model) { decorate(model) {
if (!this.editor.instance) return;
const decorations = this.getAllDecorationsForModel(model); const decorations = this.getAllDecorationsForModel(model);
const oldDecorations = this.editorDecorations.get(model.url) || []; const oldDecorations = this.editorDecorations.get(model.url) || [];
......
...@@ -3,9 +3,16 @@ import DecorationsController from './decorations/controller'; ...@@ -3,9 +3,16 @@ import DecorationsController from './decorations/controller';
import DirtyDiffController from './diff/controller'; import DirtyDiffController from './diff/controller';
import Disposable from './common/disposable'; import Disposable from './common/disposable';
import ModelManager from './common/model_manager'; import ModelManager from './common/model_manager';
import editorOptions from './editor_options'; import editorOptions, { defaultEditorOptions } from './editor_options';
import gitlabTheme from './themes/gl_theme';
import gitlabTheme from 'ee/ide/lib/themes/gl_theme'; // eslint-disable-line import/first export const clearDomElement = el => {
if (!el || !el.firstChild) return;
while (el.firstChild) {
el.removeChild(el.firstChild);
}
};
export default class Editor { export default class Editor {
static create(monaco) { static create(monaco) {
...@@ -34,19 +41,31 @@ export default class Editor { ...@@ -34,19 +41,31 @@ export default class Editor {
createInstance(domElement) { createInstance(domElement) {
if (!this.instance) { if (!this.instance) {
clearDomElement(domElement);
this.disposable.add(
(this.instance = this.monaco.editor.create(domElement, {
...defaultEditorOptions,
})),
(this.dirtyDiffController = new DirtyDiffController(
this.modelManager,
this.decorationsController,
)),
);
window.addEventListener('resize', this.debouncedUpdate, false);
}
}
createDiffInstance(domElement) {
if (!this.instance) {
clearDomElement(domElement);
this.disposable.add( this.disposable.add(
this.instance = this.monaco.editor.create(domElement, { (this.instance = this.monaco.editor.createDiffEditor(domElement, {
model: null, ...defaultEditorOptions,
readOnly: false, readOnly: true,
contextmenu: true, })),
scrollBeyondLastLine: false,
minimap: {
enabled: false,
},
}),
this.dirtyDiffController = new DirtyDiffController(
this.modelManager, this.decorationsController,
),
); );
window.addEventListener('resize', this.debouncedUpdate, false); window.addEventListener('resize', this.debouncedUpdate, false);
...@@ -58,25 +77,39 @@ export default class Editor { ...@@ -58,25 +77,39 @@ export default class Editor {
} }
attachModel(model) { attachModel(model) {
if (this.instance.getEditorType() === 'vs.editor.IDiffEditor') {
this.instance.setModel({
original: model.getOriginalModel(),
modified: model.getModel(),
});
return;
}
this.instance.setModel(model.getModel()); this.instance.setModel(model.getModel());
if (this.dirtyDiffController) this.dirtyDiffController.attachModel(model); if (this.dirtyDiffController) this.dirtyDiffController.attachModel(model);
this.currentModel = model; this.currentModel = model;
this.instance.updateOptions(editorOptions.reduce((acc, obj) => { this.instance.updateOptions(
Object.keys(obj).forEach((key) => { editorOptions.reduce((acc, obj) => {
Object.assign(acc, { Object.keys(obj).forEach(key => {
[key]: obj[key](model), Object.assign(acc, {
[key]: obj[key](model),
});
}); });
}); return acc;
return acc; }, {}),
}, {})); );
if (this.dirtyDiffController) this.dirtyDiffController.reDecorate(model); if (this.dirtyDiffController) this.dirtyDiffController.reDecorate(model);
} }
setupMonacoTheme() { setupMonacoTheme() {
this.monaco.editor.defineTheme(gitlabTheme.themeName, gitlabTheme.monacoTheme); this.monaco.editor.defineTheme(
gitlabTheme.themeName,
gitlabTheme.monacoTheme,
);
this.monaco.editor.setTheme('gitlab'); this.monaco.editor.setTheme('gitlab');
} }
...@@ -88,12 +121,21 @@ export default class Editor { ...@@ -88,12 +121,21 @@ export default class Editor {
} }
dispose() { dispose() {
this.disposable.dispose();
window.removeEventListener('resize', this.debouncedUpdate); window.removeEventListener('resize', this.debouncedUpdate);
// dispose main monaco instance // catch any potential errors with disposing the editor
if (this.instance) { // this is mainly for tests caused by elements not existing
try {
this.disposable.dispose();
this.instance = null; this.instance = null;
} catch (e) {
this.instance = null;
if (process.env.NODE_ENV !== 'test') {
// eslint-disable-next-line no-console
console.error(e);
}
} }
} }
...@@ -113,6 +155,8 @@ export default class Editor { ...@@ -113,6 +155,8 @@ export default class Editor {
} }
onPositionChange(cb) { onPositionChange(cb) {
if (!this.instance.onDidChangeCursorPosition) return;
this.disposable.add( this.disposable.add(
this.instance.onDidChangeCursorPosition(e => cb(this.instance, e)), this.instance.onDidChangeCursorPosition(e => cb(this.instance, e)),
); );
......
export default [{ export const defaultEditorOptions = {
readOnly: model => !!model.file.file_lock, model: null,
}]; readOnly: false,
contextmenu: true,
scrollBeyondLastLine: false,
minimap: {
enabled: false,
},
};
export default [
{
readOnly: model => !!model.file.file_lock,
},
];
...@@ -6,6 +6,9 @@ export default { ...@@ -6,6 +6,9 @@ export default {
rules: [], rules: [],
colors: { colors: {
'editorLineNumber.foreground': '#CCCCCC', 'editorLineNumber.foreground': '#CCCCCC',
'diffEditor.insertedTextBackground': '#ddfbe6',
'diffEditor.removedTextBackground': '#f9d7dc',
'editor.selectionBackground': '#aad6f8',
}, },
}, },
}; };
...@@ -84,6 +84,14 @@ export const scrollToTab = () => { ...@@ -84,6 +84,14 @@ export const scrollToTab = () => {
}); });
}; };
export const updateViewer = ({ commit }, viewer) => {
commit(types.UPDATE_VIEWER, viewer);
};
export const updateDelayViewerUpdated = ({ commit }, delay) => {
commit(types.UPDATE_DELAY_VIEWER_CHANGE, delay);
};
export * from './actions/tree'; export * from './actions/tree';
export * from './actions/file'; export * from './actions/file';
export * from './actions/project'; export * from './actions/project';
......
import { normalizeHeaders } from '~/lib/utils/common_utils'; import { normalizeHeaders } from '~/lib/utils/common_utils';
import flash from '~/flash'; import flash from '~/flash';
import eventHub from 'ee/ide/eventhub';
import service from '../../services'; import service from '../../services';
import * as types from '../mutation_types'; import * as types from '../mutation_types';
import router from '../../ide_router'; import router from '../../ide_router';
...@@ -27,6 +28,8 @@ export const closeFile = ({ commit, state, dispatch }, file) => { ...@@ -27,6 +28,8 @@ export const closeFile = ({ commit, state, dispatch }, file) => {
} }
dispatch('getLastCommitData'); dispatch('getLastCommitData');
eventHub.$emit(`editor.update.model.dispose.${file.path}`);
}; };
export const setFileActive = ({ commit, state, getters, dispatch }, file) => { export const setFileActive = ({ commit, state, getters, dispatch }, file) => {
...@@ -150,4 +153,6 @@ export const discardFileChanges = ({ commit }, file) => { ...@@ -150,4 +153,6 @@ export const discardFileChanges = ({ commit }, file) => {
if (file.tempFile && file.opened) { if (file.tempFile && file.opened) {
commit(types.TOGGLE_FILE_OPEN, file); commit(types.TOGGLE_FILE_OPEN, file);
} }
eventHub.$emit(`editor.update.model.content.${file.path}`, file.raw);
}; };
...@@ -15,3 +15,5 @@ export const canEditFile = (state) => { ...@@ -15,3 +15,5 @@ export const canEditFile = (state) => {
export const addedFiles = state => state.changedFiles.filter(f => f.tempFile); export const addedFiles = state => state.changedFiles.filter(f => f.tempFile);
export const modifiedFiles = state => state.changedFiles.filter(f => !f.tempFile); export const modifiedFiles = state => state.changedFiles.filter(f => !f.tempFile);
export const hasChanges = state => !!state.changedFiles.length;
...@@ -46,3 +46,6 @@ export const SET_EDIT_MODE = 'SET_EDIT_MODE'; ...@@ -46,3 +46,6 @@ export const SET_EDIT_MODE = 'SET_EDIT_MODE';
export const TOGGLE_EDIT_MODE = 'TOGGLE_EDIT_MODE'; export const TOGGLE_EDIT_MODE = 'TOGGLE_EDIT_MODE';
export const SET_CURRENT_BRANCH = 'SET_CURRENT_BRANCH'; export const SET_CURRENT_BRANCH = 'SET_CURRENT_BRANCH';
export const UPDATE_VIEWER = 'UPDATE_VIEWER';
export const UPDATE_DELAY_VIEWER_CHANGE = 'UPDATE_DELAY_VIEWER_CHANGE';
...@@ -57,6 +57,16 @@ export default { ...@@ -57,6 +57,16 @@ export default {
lastCommitMsg, lastCommitMsg,
}); });
}, },
[types.UPDATE_VIEWER](state, viewer) {
Object.assign(state, {
viewer,
});
},
[types.UPDATE_DELAY_VIEWER_CHANGE](state, delayViewerUpdated) {
Object.assign(state, {
delayViewerUpdated,
});
},
...projectMutations, ...projectMutations,
...fileMutations, ...fileMutations,
...treeMutations, ...treeMutations,
......
...@@ -20,4 +20,6 @@ export default () => ({ ...@@ -20,4 +20,6 @@ export default () => ({
leftPanelCollapsed: false, leftPanelCollapsed: false,
rightPanelCollapsed: false, rightPanelCollapsed: false,
panelResizing: false, panelResizing: false,
viewer: 'editor',
delayViewerUpdated: false,
}); });
---
title: Add a Go back button to WebIDE to allow returning to where it was launched
from
merge_request:
author:
type: added
require 'spec_helper'
describe 'Project elastic search', :js, :elastic do
let(:user) { create(:user) }
let(:project) { create(:project, :repository, namespace: user.namespace) }
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
project.add_master(user)
sign_in(user)
end
describe 'searching' do
it 'finds issues' do
create(:issue, project: project, title: 'Test searching for an issue')
expect_search_result(scope: 'Issues', term: 'Test', result: 'Test searching for an issue')
end
it 'finds merge requests' do
create(:merge_request, source_project: project, target_project: project, title: 'Test searching for an MR')
expect_search_result(scope: 'Merge requests', term: 'Test', result: 'Test searching for an MR')
end
it 'finds milestones' do
create(:milestone, project: project, title: 'Test searching for a milestone')
expect_search_result(scope: 'Milestones', term: 'Test', result: 'Test searching for a milestone')
end
it 'finds wiki pages' do
project.wiki.create_page('test.md', 'Test searching for a wiki page')
expect_search_result(scope: 'Wiki', term: 'Test', result: 'Test searching for a wiki page')
end
it 'finds notes' do
create(:note, project: project, note: 'Test searching for a note')
search(scope: 'Comments', term: 'Test')
expect(page).to have_content(/showing (\d+) - (\d+) of (\d+) notes/i)
expect(page).to have_content('Test searching for a note')
end
it 'finds commits' do
project.repository.index_commits
search(scope: 'Commits', term: 'initial')
expect(page).to have_content(/showing (\d+) - (\d+) of (\d+) commits/i)
expect(page).to have_content('Initial commit')
end
it 'finds blobs' do
project.repository.index_blobs
search(scope: 'Code', term: 'def')
expect(page).to have_content(/showing (\d+) - (\d+) of (\d+) blobs/i)
expect(page).to have_content('def username_regex')
end
end
def search(scope:, term:)
visit project_path(project)
fill_in('search', with: term)
find('#search').native.send_keys(:enter)
page.within '.search-filter' do
click_link scope
end
end
def expect_search_result(scope:, term:, result:)
search(scope: scope, term: term)
expect(page).to have_content(/showing (\d+) - (\d+) of (\d+) #{Regexp.escape(scope)}/i)
expect(page).to have_content(result)
end
end
require 'spec_helper'
describe 'Snippet elastic search', :js, :elastic do
let(:user) { create(:user) }
let(:project) { create(:project, namespace: user.namespace) }
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
project.add_master(user)
sign_in(user)
end
describe 'searching' do
it 'finds a personal snippet' do
create(:personal_snippet, author: user, content: 'Test searching for personal snippets')
visit explore_snippets_path
fill_in 'search', with: 'Test'
click_button 'Go'
expect(page).to have_content('Test searching for personal snippets')
end
it 'finds a project snippet' do
create(:project_snippet, project: project, content: 'Test searching for personal snippets')
visit explore_snippets_path
fill_in 'search', with: 'Test'
click_button 'Go'
expect(page).to have_content('Test searching for personal snippets')
end
end
end
require 'spec_helper'
describe SearchHelper do
describe '#parse_search_result_from_elastic' do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
it "returns parsed result" do
project = create :project, :repository
project.repository.index_blobs
Gitlab::Elastic::Helper.refresh_index
result = project.repository.search(
'def popen',
type: :blob,
options: { highlight: true }
)[:blobs][:results][0]
parsed_result = helper.parse_search_result(result)
expect(parsed_result.ref).to eq('b83d6e391c22777fca1ed3012fce84f633d7fed0')
expect(parsed_result.filename).to eq('files/ruby/popen.rb')
expect(parsed_result.startline).to eq(2)
expect(parsed_result.data).to include("Popen")
end
end
end
require 'spec_helper' require 'spec_helper'
describe Issue, elastic: true do describe Issue, :elastic do
before do before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true) stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end end
let(:project) { create :project } let(:project) { create :project }
......
require 'spec_helper' require 'spec_helper'
describe MergeRequest, elastic: true do describe MergeRequest, :elastic do
before do before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true) stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end end
it "searches merge requests" do it "searches merge requests" do
......
require 'spec_helper' require 'spec_helper'
describe Milestone, elastic: true do describe Milestone, :elastic do
before do before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true) stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end end
it "searches milestones" do it "searches milestones" do
......
require 'spec_helper' require 'spec_helper'
describe Note, elastic: true do describe Note, :elastic do
before do before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true) stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end end
it "searches notes" do it "searches notes" do
......
require 'spec_helper' require 'spec_helper'
describe Project, elastic: true do describe Project, :elastic do
before do before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true) stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end end
it "finds projects" do it "finds projects" do
......
require 'spec_helper' require 'spec_helper'
describe ProjectWiki, elastic: true do describe ProjectWiki, :elastic do
before do before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true) stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end end
it "searches wiki page" do it "searches wiki page" do
......
require 'spec_helper' require 'spec_helper'
describe Repository, elastic: true do describe Repository, :elastic do
before do before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true) stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end end
def index!(project) def index!(project)
......
require 'spec_helper' require 'spec_helper'
describe Snippet, elastic: true do describe Snippet, :elastic do
before do before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true) stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end end
context 'searching snippets by code' do context 'searching snippets by code' do
......
...@@ -1312,4 +1312,48 @@ describe Project do ...@@ -1312,4 +1312,48 @@ describe Project do
expect(project.user_can_push_to_empty_repo?(user)).to be_falsey expect(project.user_can_push_to_empty_repo?(user)).to be_falsey
end end
end end
describe 'project import state transitions' do
context 'state transition: [:started] => [:finished]' do
context 'elasticsearch indexing disabled' do
before do
stub_ee_application_setting(elasticsearch_indexing: false)
end
it 'does not index the repository' do
project = create(:project, :import_started, import_type: :github)
expect(ElasticCommitIndexerWorker).not_to receive(:perform_async)
project.import_finish
end
end
context 'elasticsearch indexing enabled' do
let(:project) { create(:project, :import_started, import_type: :github) }
before do
stub_ee_application_setting(elasticsearch_indexing: true)
end
context 'no index status' do
it 'schedules a full index of the repository' do
expect(ElasticCommitIndexerWorker).to receive(:perform_async).with(project.id, nil)
project.import_finish
end
end
context 'with index status' do
let!(:index_status) { project.create_index_status!(indexed_at: Time.now, last_commit: 'foo') }
it 'schedules a progressive index of the repository' do
expect(ElasticCommitIndexerWorker).to receive(:perform_async).with(project.id, index_status.last_commit)
project.import_finish
end
end
end
end
end
end end
...@@ -26,4 +26,40 @@ describe Repository do ...@@ -26,4 +26,40 @@ describe Repository do
repository.after_sync repository.after_sync
end end
end end
describe "Elastic search", :elastic do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
end
describe "class method find_commits_by_message_with_elastic" do
it "returns commits" do
project = create :project, :repository
project1 = create :project, :repository
project.repository.index_commits
project1.repository.index_commits
Gitlab::Elastic::Helper.refresh_index
expect(described_class.find_commits_by_message_with_elastic('initial').first).to be_a(Commit)
expect(described_class.find_commits_by_message_with_elastic('initial').count).to eq(2)
expect(described_class.find_commits_by_message_with_elastic('initial').total_count).to eq(2)
end
end
describe "find_commits_by_message_with_elastic" do
it "returns commits" do
project = create :project, :repository
project.repository.index_commits
Gitlab::Elastic::Helper.refresh_index
expect(project.repository.find_commits_by_message_with_elastic('initial').first).to be_a(Commit)
expect(project.repository.find_commits_by_message_with_elastic('initial').count).to eq(1)
expect(project.repository.find_commits_by_message_with_elastic('initial').total_count).to eq(1)
end
end
end
end end
require 'spec_helper'
describe API::Search do
set(:user) { create(:user) }
set(:group) { create(:group) }
let(:project) { create(:project, :public, name: 'awesome project', group: group) }
let(:repo_project) { create(:project, :public, :repository, group: group) }
shared_examples 'response is correct' do |schema:, size: 1|
it { expect(response).to have_gitlab_http_status(200) }
it { expect(response).to match_response_schema(schema) }
it { expect(response).to include_limited_pagination_headers }
it { expect(json_response.size).to eq(size) }
end
shared_examples 'elasticsearch disabled' do
it 'returns 400 error for wiki_blobs scope' do
get api(endpoint, user), scope: 'wiki_blobs', search: 'awesome'
expect(response).to have_gitlab_http_status(400)
end
it 'returns 400 error for blobs scope' do
get api(endpoint, user), scope: 'blobs', search: 'monitors'
expect(response).to have_gitlab_http_status(400)
end
it 'returns 400 error for commits scope' do
get api(endpoint, user), scope: 'commits', search: 'folder'
expect(response).to have_gitlab_http_status(400)
end
end
shared_examples 'elasticsearch enabled' do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
context 'for wiki_blobs scope' do
before do
wiki = create(:project_wiki, project: project)
create(:wiki_page, wiki: wiki, attrs: { title: 'home', content: "Awesome page" })
project.wiki.index_blobs
Gitlab::Elastic::Helper.refresh_index
get api(endpoint, user), scope: 'wiki_blobs', search: 'awesome'
end
it_behaves_like 'response is correct', schema: 'public_api/v4/blobs'
end
context 'for commits scope' do
before do
repo_project.repository.index_commits
Gitlab::Elastic::Helper.refresh_index
get api(endpoint, user), scope: 'commits', search: 'folder'
end
it_behaves_like 'response is correct', schema: 'public_api/v4/commits_details', size: 2
end
context 'for blobs scope' do
before do
repo_project.repository.index_blobs
Gitlab::Elastic::Helper.refresh_index
get api(endpoint, user), scope: 'blobs', search: 'monitors'
end
it_behaves_like 'response is correct', schema: 'public_api/v4/blobs'
end
end
describe 'GET /search' do
context 'with correct params' do
context 'when elasticsearch is disabled' do
it_behaves_like 'elasticsearch disabled' do
let(:endpoint) { '/search' }
end
end
context 'when elasticsearch is enabled' do
it_behaves_like 'elasticsearch enabled' do
let(:endpoint) { '/search' }
end
end
end
end
describe "GET /groups/:id/-/search" do
context 'with correct params' do
context 'when elasticsearch is disabled' do
it_behaves_like 'elasticsearch disabled' do
let(:endpoint) { "/groups/#{group.id}/-/search" }
end
end
context 'when elasticsearch is enabled' do
it_behaves_like 'elasticsearch enabled' do
let(:endpoint) { "/groups/#{group.id}/-/search" }
end
end
end
end
end
require 'spec_helper'
describe Search::GroupService do
shared_examples_for 'group search' do
context 'finding projects by name' do
let(:user) { create(:user) }
let(:term) { "Project Name" }
let(:nested_group) { create(:group, :nested) }
# These projects shouldn't be found
let!(:outside_project) { create(:project, :public, name: "Outside #{term}") }
let!(:private_project) { create(:project, :private, namespace: nested_group, name: "Private #{term}" )}
let!(:other_project) { create(:project, :public, namespace: nested_group, name: term.reverse) }
# These projects should be found
let!(:project1) { create(:project, :internal, namespace: nested_group, name: "Inner #{term} 1") }
let!(:project2) { create(:project, :internal, namespace: nested_group, name: "Inner #{term} 2") }
let!(:project3) { create(:project, :internal, namespace: nested_group.parent, name: "Outer #{term}") }
let(:results) { described_class.new(user, search_group, search: term).execute }
subject { results.objects('projects') }
context 'in parent group' do
let(:search_group) { nested_group.parent }
it { is_expected.to match_array([project1, project2, project3]) }
end
context 'in subgroup' do
let(:search_group) { nested_group }
it { is_expected.to match_array([project1, project2]) }
end
end
end
describe 'elasticsearch' do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
# Ensure these are present when the index is refreshed
_ = [
outside_project, private_project, other_project,
project1, project2, project3
]
Gitlab::Elastic::Helper.refresh_index
end
after do
Gitlab::Elastic::Helper.delete_index
end
include_examples 'group search'
end
end
RSpec.configure do |config|
config.before(:each, :elastic) do
Gitlab::Elastic::Helper.create_empty_index
end
config.after(:each, :elastic) do
Gitlab::Elastic::Helper.delete_index
end
end
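A minimal sketch of how a spec opts in to this shared hook; it mirrors the Repository example above and uses only helpers already present in this change (stub_ee_application_setting, the factories, and Gitlab::Elastic::Helper):

require 'spec_helper'

# The :elastic tag triggers the hooks above, so the example group no longer
# creates or deletes the index itself.
describe Repository, :elastic do
  before do
    stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
  end

  it 'finds commits by message' do
    project = create(:project, :repository)
    project.repository.index_commits
    Gitlab::Elastic::Helper.refresh_index

    expect(described_class.find_commits_by_message_with_elastic('initial').count).to eq(1)
  end
end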
require 'spec_helper' require 'spec_helper'
describe ElasticIndexerWorker, elastic: true do describe ElasticIndexerWorker, :elastic do
subject { described_class.new } subject { described_class.new }
before do before do
...@@ -8,12 +8,6 @@ describe ElasticIndexerWorker, elastic: true do ...@@ -8,12 +8,6 @@ describe ElasticIndexerWorker, elastic: true do
Elasticsearch::Model.client = Elasticsearch::Model.client =
Gitlab::Elastic::Client.build(Gitlab::CurrentSettings.elasticsearch_config) Gitlab::Elastic::Client.build(Gitlab::CurrentSettings.elasticsearch_config)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
end end
it 'returns true if ES disabled' do it 'returns true if ES disabled' do
......
Feature: Global Search
Background:
Given I sign in as a user
And I own project "Shop"
And Elasticsearch is enabled
Scenario: I search through all projects
Given project has all data available for the search
And I visit dashboard page
Then I search "initial"
And I find an Issue
And I find a Merge Request
And I find a Milestone
\ No newline at end of file
Feature: Project Search
Background:
Given I sign in as a user
And I own project "Shop"
And Elasticsearch is enabled
Scenario: I search through all project items
Given project has all data available for the search
And I visit my project's home page
Then I search "initial"
And I find an Issue
And I find a Merge Request
And I find a Milestone
And I find a Comment
And I find a Commit
And I find a Wiki Page
Then I visit my project's home page
Then I search "def"
And I find a Code
Feature: Snippets Search
Background:
Given I sign in as a user
And Elasticsearch is enabled
Scenario: I search through the snippets
Given there is a snippet "index" with "php rocks" string
And there is a snippet "php" with "benefits" string
And I visit snippets page
Then I search "php"
And I find "index" snippet
Then I select search by titles and filenames
And I find "php" snippet
\ No newline at end of file
require_dependency Rails.root.join('spec', 'support', 'stub_configuration')
class Spinach::Features::GlobalSearch < Spinach::FeatureSteps
include SharedAuthentication
include SharedPaths
include SharedProject
include SharedElastic
include StubConfiguration
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
step 'project has all data available for the search' do
@project = create :project
@project.add_master(current_user)
@issue = create :issue, title: 'bla-bla initial', project: @project
@merge_request = create :merge_request, title: 'bla-bla initial', source_project: @project
@milestone = create :milestone, title: 'bla-bla initial', project: @project
end
end
class Spinach::Features::ProjectSearch < Spinach::FeatureSteps
include SharedAuthentication
include SharedPaths
include SharedProject
include SharedElastic
include StubConfiguration
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
step 'project has all data available for the search' do
@project = create :project, :repository
@project.add_master(current_user)
@issue = create :issue, title: 'bla-bla initial', project: @project
@merge_request = create :merge_request, title: 'bla-bla initial', source_project: @project
@milestone = create :milestone, title: 'bla-bla initial', project: @project
@note = create :note, note: 'bla-bla initial', project: @project, noteable: @issue
@project.repository.index_blobs
@project.repository.index_commits
@project.wiki.create_page("index_page", "Bla bla initial")
end
step 'I search "def"' do
fill_in "search", with: "def"
click_button "Go"
end
step 'I find a Comment' do
select_filter("Comments")
expect(page.find('.search-result-row')).to have_content(@note.note)
end
step 'I find a Wiki Page' do
select_filter("Wiki")
expect(page.find('.blob-result')).to have_content('Bla bla init')
end
step 'I find a Commit' do
select_filter("Commits")
expect(page.find('.commit-content .item-title')).to have_content("Initial commit")
end
step 'I find a Code' do
expect(page.first('.blob-result')).to have_content("def")
end
end
class Spinach::Features::SnippetsSearch < Spinach::FeatureSteps
include SharedAuthentication
include SharedPaths
include SharedProject
include SharedElastic
include StubConfiguration
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
step 'there is a snippet "index" with "php rocks" string' do
create :personal_snippet, :public, content: "php rocks", title: "index"
Gitlab::Elastic::Helper.refresh_index
end
step 'there is a snippet "php" with "benefits" string' do
create :personal_snippet, :public, content: "benefits", title: "php"
Gitlab::Elastic::Helper.refresh_index
end
step 'I search "php"' do
fill_in "search", with: "php"
click_button "Go"
end
step 'I find "index" snippet' do
expect(page.find('.file-holder')).to have_content("php rocks")
end
step 'I select search by titles and filenames' do
select_filter("Titles and Filenames")
end
step 'I find "php" snippet' do
expect(page.find('.search-result-row')).to have_content("php")
end
end
module SharedElastic
include Spinach::DSL
step 'I search "initial"' do
fill_in "search", with: "initial"
click_button "Go"
end
step 'I find an Issue' do
select_filter("Issues")
expect(page.find('.search-result-row')).to have_content(@issue.title)
end
step 'I find a Merge Request' do
select_filter("Merge requests")
expect(page.find('.search-result-row')).to have_content(@merge_request.title)
end
step 'I find a Milestone' do
select_filter("Milestones")
expect(page.find('.search-result-row')).to have_content(@milestone.title)
end
step 'Elasticsearch is enabled' do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
end
def select_filter(name)
find(:xpath, "//ul[contains(@class, 'search-filter')]//a[contains(.,'#{name}')]").click
end
end
...@@ -70,7 +70,7 @@ module Gitlab ...@@ -70,7 +70,7 @@ module Gitlab
active_db_connection = ActiveRecord::Base.connection.active? rescue false active_db_connection = ActiveRecord::Base.connection.active? rescue false
active_db_connection && active_db_connection &&
ActiveRecord::Base.connection.table_exists?('application_settings') Gitlab::Database.cached_table_exists?('application_settings')
rescue ActiveRecord::NoDatabaseError rescue ActiveRecord::NoDatabaseError
false false
end end
......
...@@ -203,6 +203,11 @@ module Gitlab ...@@ -203,6 +203,11 @@ module Gitlab
connection.schema_cache.columns_hash(table_name).has_key?(column_name.to_s) connection.schema_cache.columns_hash(table_name).has_key?(column_name.to_s)
end end
def self.cached_table_exists?(table_name)
# Rails 5 uses data_source_exists? instead of table_exists?
connection.schema_cache.table_exists?(table_name)
end
private_class_method :connection private_class_method :connection
def self.database_version def self.database_version
......
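A small hedged sketch of the caching behaviour this helper is expected to provide; the spec further down asserts that the underlying table_exists? call happens only once per table name:

# Assumed console usage, not part of this change: the first lookup warms the
# schema cache, repeat lookups are answered without a new query.
Gitlab::Database.cached_table_exists?('application_settings') # queried once
Gitlab::Database.cached_table_exists?('application_settings') # served from the schema cache
Gitlab::Database.cached_table_exists?('bogus_table_name')     # => false, also cached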
module Gitlab module Gitlab
module Git module Git
class LfsPointerFile class LfsPointerFile
VERSION = "https://git-lfs.github.com/spec/v1".freeze
VERSION_LINE = "version #{VERSION}".freeze
def initialize(data) def initialize(data)
@data = data @data = data
end end
def pointer def pointer
@pointer ||= <<~FILE @pointer ||= <<~FILE
version https://git-lfs.github.com/spec/v1 #{VERSION_LINE}
oid sha256:#{sha256} oid sha256:#{sha256}
size #{size} size #{size}
FILE FILE
...@@ -20,6 +23,10 @@ module Gitlab ...@@ -20,6 +23,10 @@ module Gitlab
def sha256 def sha256
@sha256 ||= Digest::SHA256.hexdigest(@data) @sha256 ||= Digest::SHA256.hexdigest(@data)
end end
def inspect
"#<#{self.class}:#{object_id} @size=#{size}, @sha256=#{sha256.inspect}>"
end
end end
end end
end end
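A minimal sketch of building a pointer with this class, using only the methods shown above (pointer, size, sha256, inspect) and the new VERSION_LINE constant:

# Assumed console usage, not part of this change.
pointer_file = Gitlab::Git::LfsPointerFile.new("Test file content")

pointer_file.pointer.start_with?(Gitlab::Git::LfsPointerFile::VERSION_LINE) # => true
pointer_file.sha256  # SHA256 hex digest of the data
pointer_file.size    # size of the data
pointer_file.inspect # omits the raw data, showing only size and digest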
...@@ -1006,8 +1006,9 @@ module Gitlab ...@@ -1006,8 +1006,9 @@ module Gitlab
# This only checks the root .gitattributes file, # This only checks the root .gitattributes file,
# it does not traverse subfolders to find additional .gitattributes files # it does not traverse subfolders to find additional .gitattributes files
# #
# This method is around 30 times slower than `attributes`, # This method is around 30 times slower than `attributes`, which uses
# which uses `$GIT_DIR/info/attributes` # `$GIT_DIR/info/attributes`. Consider caching AttributesAtRefParser
# and reusing that for multiple calls instead of this method.
def attributes_at(ref, file_path) def attributes_at(ref, file_path)
parser = AttributesAtRefParser.new(self, ref) parser = AttributesAtRefParser.new(self, ref)
parser.attributes(file_path) parser.attributes(file_path)
......
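A hedged sketch of the caching the comment above suggests; the parser construction and the attributes call match the lines shown here, while the caller, the repository variable (a Gitlab::Git::Repository), and the 'filter' key lookup are assumptions:

# Hypothetical caller: build the parser once per ref and query many paths
# against it, instead of calling attributes_at (which re-parses .gitattributes)
# once per path.
parser = Gitlab::Git::AttributesAtRefParser.new(repository, 'lfs')

%w[test_file.lfs another_file.lfs other.filetype].each do |path|
  attributes = parser.attributes(path)
  # e.g. attributes['filter'] == 'lfs' when the path matches an LFS rule (assumed key)
end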
...@@ -32,7 +32,7 @@ branch_to_fetch = 'master' ...@@ -32,7 +32,7 @@ branch_to_fetch = 'master'
if ls_remote_output.include?(minimal_ce_branch_name) if ls_remote_output.include?(minimal_ce_branch_name)
remote_to_fetch = ce_repo_url remote_to_fetch = ce_repo_url
branch_to_fetch = ls_remote_output.split("refs/heads/").last.strip branch_to_fetch = ls_remote_output.scan(%r{(?<=refs/heads/).+}).sort_by(&:size).first
puts puts
puts "💪 We found the branch '#{branch_to_fetch}' in the #{ce_repo_url} repository. We will fetch it." puts "💪 We found the branch '#{branch_to_fetch}' in the #{ce_repo_url} repository. We will fetch it."
else else
......
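Illustrative only, with hypothetical branch names: given two candidate refs in the ls-remote output, the new scan plus sort_by(&:size) picks the shortest matching branch name instead of whatever follows the last 'refs/heads/' occurrence:

ls_remote_output = <<~OUTPUT
  abc123\trefs/heads/fix-ce-ee-docs
  def456\trefs/heads/fix-ce-ee-docs-2
OUTPUT

ls_remote_output.scan(%r{(?<=refs/heads/).+}).sort_by(&:size).first
# => "fix-ce-ee-docs"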
...@@ -6,39 +6,6 @@ describe SearchHelper do ...@@ -6,39 +6,6 @@ describe SearchHelper do
str str
end end
describe '#parse_search_result_from_elastic' do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
it "returns parsed result" do
project = create :project, :repository
project.repository.index_blobs
Gitlab::Elastic::Helper.refresh_index
result = project.repository.search(
'def popen',
type: :blob,
options: { highlight: true }
)[:blobs][:results][0]
parsed_result = helper.parse_search_result(result)
expect(parsed_result.ref). to eq('b83d6e391c22777fca1ed3012fce84f633d7fed0')
expect(parsed_result.filename).to eq('files/ruby/popen.rb')
expect(parsed_result.startline).to eq(2)
expect(parsed_result.data).to include("Popen")
end
end
describe 'search_autocomplete_source' do describe 'search_autocomplete_source' do
context "with no current user" do context "with no current user" do
before do before do
......
...@@ -36,6 +36,7 @@ describe('Multi-file editor commit sidebar list item', () => { ...@@ -36,6 +36,7 @@ describe('Multi-file editor commit sidebar list item', () => {
it('opens a closed file in the editor when clicking the file path', () => { it('opens a closed file in the editor when clicking the file path', () => {
spyOn(vm, 'openFileInEditor').and.callThrough(); spyOn(vm, 'openFileInEditor').and.callThrough();
spyOn(vm, 'updateViewer');
spyOn(router, 'push'); spyOn(router, 'push');
vm.$el.querySelector('.multi-file-commit-list-path').click(); vm.$el.querySelector('.multi-file-commit-list-path').click();
...@@ -44,6 +45,16 @@ describe('Multi-file editor commit sidebar list item', () => { ...@@ -44,6 +45,16 @@ describe('Multi-file editor commit sidebar list item', () => {
expect(router.push).toHaveBeenCalled(); expect(router.push).toHaveBeenCalled();
}); });
it('calls updateViewer with diff when clicking file', () => {
spyOn(vm, 'openFileInEditor').and.callThrough();
spyOn(vm, 'updateViewer');
spyOn(router, 'push');
vm.$el.querySelector('.multi-file-commit-list-path').click();
expect(vm.updateViewer).toHaveBeenCalledWith('diff');
});
describe('computed', () => { describe('computed', () => {
describe('iconName', () => { describe('iconName', () => {
it('returns modified when not a tempFile', () => { it('returns modified when not a tempFile', () => {
......
import Vue from 'vue';
import ideExternalLinks from 'ee/ide/components/ide_external_links.vue';
import createComponent from 'spec/helpers/vue_mount_component_helper';
describe('ide external links component', () => {
let vm;
let fakeReferrer;
let Component;
const fakeProjectUrl = '/project/';
beforeEach(() => {
Component = Vue.extend(ideExternalLinks);
});
afterEach(() => {
vm.$destroy();
});
describe('goBackUrl', () => {
it('renders the Go Back link with the referrer when present', () => {
fakeReferrer = '/example/README.md';
spyOnProperty(document, 'referrer').and.returnValue(fakeReferrer);
vm = createComponent(Component, {
projectUrl: fakeProjectUrl,
}).$mount();
expect(vm.goBackUrl).toEqual(fakeReferrer);
});
it('renders the Go Back link with the project url when referrer is not present', () => {
fakeReferrer = '';
spyOnProperty(document, 'referrer').and.returnValue(fakeReferrer);
vm = createComponent(Component, {
projectUrl: fakeProjectUrl,
}).$mount();
expect(vm.goBackUrl).toEqual(fakeProjectUrl);
});
});
});
...@@ -61,6 +61,34 @@ describe('RepoEditor', () => { ...@@ -61,6 +61,34 @@ describe('RepoEditor', () => {
}); });
}); });
describe('createEditorInstance', () => {
it('calls createInstance when viewer is editor', (done) => {
spyOn(vm.editor, 'createInstance');
vm.createEditorInstance();
vm.$nextTick(() => {
expect(vm.editor.createInstance).toHaveBeenCalled();
done();
});
});
it('calls createDiffInstance when viewer is diff', (done) => {
vm.$store.state.viewer = 'diff';
spyOn(vm.editor, 'createDiffInstance');
vm.createEditorInstance();
vm.$nextTick(() => {
expect(vm.editor.createDiffInstance).toHaveBeenCalled();
done();
});
});
});
describe('setupEditor', () => { describe('setupEditor', () => {
it('creates new model', () => { it('creates new model', () => {
spyOn(vm.editor, 'createModel').and.callThrough(); spyOn(vm.editor, 'createModel').and.callThrough();
......
...@@ -7,15 +7,17 @@ describe('RepoTabs', () => { ...@@ -7,15 +7,17 @@ describe('RepoTabs', () => {
const openedFiles = [file('open1'), file('open2')]; const openedFiles = [file('open1'), file('open2')];
let vm; let vm;
function createComponent() { function createComponent(el = null) {
const RepoTabs = Vue.extend(repoTabs); const RepoTabs = Vue.extend(repoTabs);
return new RepoTabs({ return new RepoTabs({
store, store,
}).$mount(); }).$mount(el);
} }
afterEach(() => { afterEach(() => {
vm.$destroy();
resetStore(vm.$store); resetStore(vm.$store);
}); });
...@@ -34,4 +36,44 @@ describe('RepoTabs', () => { ...@@ -34,4 +36,44 @@ describe('RepoTabs', () => {
done(); done();
}); });
}); });
describe('updated', () => {
it('sets showShadow as true when scroll width is larger than width', (done) => {
const el = document.createElement('div');
el.innerHTML = '<div id="test-app"></div>';
document.body.appendChild(el);
const style = document.createElement('style');
style.innerText = `
.multi-file-tabs {
width: 100px;
}
.multi-file-tabs .list-unstyled {
display: flex;
overflow-x: auto;
}
`;
document.head.appendChild(style);
vm = createComponent('#test-app');
openedFiles[0].active = true;
vm.$nextTick()
.then(() => {
expect(vm.showShadow).toBeFalsy();
vm.$store.state.openFiles = openedFiles;
})
.then(vm.$nextTick)
.then(() => {
expect(vm.showShadow).toBeTruthy();
style.remove();
el.remove();
})
.then(done)
.catch(done.fail);
});
});
}); });
/* global monaco */ /* global monaco */
import eventHub from 'ee/ide/eventhub';
import monacoLoader from 'ee/ide/monaco_loader'; import monacoLoader from 'ee/ide/monaco_loader';
import ModelManager from 'ee/ide/lib/common/model_manager'; import ModelManager from 'ee/ide/lib/common/model_manager';
import { file } from '../../helpers'; import { file } from '../../helpers';
...@@ -47,6 +48,15 @@ describe('Multi-file editor library model manager', () => { ...@@ -47,6 +48,15 @@ describe('Multi-file editor library model manager', () => {
expect(instance.models.get).toHaveBeenCalled(); expect(instance.models.get).toHaveBeenCalled();
}); });
it('adds eventHub listener', () => {
const f = file();
spyOn(eventHub, '$on').and.callThrough();
instance.addModel(f);
expect(eventHub.$on).toHaveBeenCalledWith(`editor.update.model.dispose.${f.path}`, jasmine.anything());
});
}); });
describe('hasCachedModel', () => { describe('hasCachedModel', () => {
...@@ -69,6 +79,30 @@ describe('Multi-file editor library model manager', () => { ...@@ -69,6 +79,30 @@ describe('Multi-file editor library model manager', () => {
}); });
}); });
describe('removeCachedModel', () => {
let f;
beforeEach(() => {
f = file();
instance.addModel(f);
});
it('clears cached model', () => {
instance.removeCachedModel(f);
expect(instance.models.size).toBe(0);
});
it('removes eventHub listener', () => {
spyOn(eventHub, '$off').and.callThrough();
instance.removeCachedModel(f);
expect(eventHub.$off).toHaveBeenCalledWith(`editor.update.model.dispose.${f.path}`, jasmine.anything());
});
});
describe('dispose', () => { describe('dispose', () => {
it('clears cached models', () => { it('clears cached models', () => {
instance.addModel(file()); instance.addModel(file());
......
/* global monaco */ /* global monaco */
import eventHub from 'ee/ide/eventhub';
import monacoLoader from 'ee/ide/monaco_loader'; import monacoLoader from 'ee/ide/monaco_loader';
import Model from 'ee/ide/lib/common/model'; import Model from 'ee/ide/lib/common/model';
import { file } from '../../helpers'; import { file } from '../../helpers';
...@@ -7,6 +8,8 @@ describe('Multi-file editor library model', () => { ...@@ -7,6 +8,8 @@ describe('Multi-file editor library model', () => {
let model; let model;
beforeEach((done) => { beforeEach((done) => {
spyOn(eventHub, '$on').and.callThrough();
monacoLoader(['vs/editor/editor.main'], () => { monacoLoader(['vs/editor/editor.main'], () => {
model = new Model(monaco, file('path')); model = new Model(monaco, file('path'));
...@@ -23,6 +26,10 @@ describe('Multi-file editor library model', () => { ...@@ -23,6 +26,10 @@ describe('Multi-file editor library model', () => {
expect(model.model).not.toBeNull(); expect(model.model).not.toBeNull();
}); });
it('adds eventHub listener', () => {
expect(eventHub.$on).toHaveBeenCalledWith(`editor.update.model.dispose.${model.file.path}`, jasmine.anything());
});
describe('path', () => { describe('path', () => {
it('returns file path', () => { it('returns file path', () => {
expect(model.path).toBe('path'); expect(model.path).toBe('path');
...@@ -88,5 +95,13 @@ describe('Multi-file editor library model', () => { ...@@ -88,5 +95,13 @@ describe('Multi-file editor library model', () => {
expect(model.events.size).toBe(0); expect(model.events.size).toBe(0);
}); });
it('removes eventHub listener', () => {
spyOn(eventHub, '$off').and.callThrough();
model.dispose();
expect(eventHub.$off).toHaveBeenCalledWith(`editor.update.model.dispose.${model.file.path}`, jasmine.anything());
});
}); });
}); });
...@@ -5,8 +5,16 @@ import { file } from '../helpers'; ...@@ -5,8 +5,16 @@ import { file } from '../helpers';
describe('Multi-file editor library', () => { describe('Multi-file editor library', () => {
let instance; let instance;
let el;
let holder;
beforeEach(done => {
el = document.createElement('div');
holder = document.createElement('div');
el.appendChild(holder);
document.body.appendChild(el);
beforeEach((done) => {
monacoLoader(['vs/editor/editor.main'], () => { monacoLoader(['vs/editor/editor.main'], () => {
instance = editor.create(monaco); instance = editor.create(monaco);
...@@ -16,6 +24,8 @@ describe('Multi-file editor library', () => { ...@@ -16,6 +24,8 @@ describe('Multi-file editor library', () => {
afterEach(() => { afterEach(() => {
instance.dispose(); instance.dispose();
el.remove();
}); });
it('creates instance of editor', () => { it('creates instance of editor', () => {
...@@ -27,33 +37,48 @@ describe('Multi-file editor library', () => { ...@@ -27,33 +37,48 @@ describe('Multi-file editor library', () => {
}); });
describe('createInstance', () => { describe('createInstance', () => {
let el;
beforeEach(() => {
el = document.createElement('div');
});
it('creates editor instance', () => { it('creates editor instance', () => {
spyOn(instance.monaco.editor, 'create').and.callThrough(); spyOn(instance.monaco.editor, 'create').and.callThrough();
instance.createInstance(el); instance.createInstance(holder);
expect(instance.monaco.editor.create).toHaveBeenCalled(); expect(instance.monaco.editor.create).toHaveBeenCalled();
}); });
it('creates dirty diff controller', () => { it('creates dirty diff controller', () => {
instance.createInstance(el); instance.createInstance(holder);
expect(instance.dirtyDiffController).not.toBeNull(); expect(instance.dirtyDiffController).not.toBeNull();
}); });
it('creates model manager', () => { it('creates model manager', () => {
instance.createInstance(el); instance.createInstance(holder);
expect(instance.modelManager).not.toBeNull(); expect(instance.modelManager).not.toBeNull();
}); });
}); });
describe('createDiffInstance', () => {
it('creates editor instance', () => {
spyOn(instance.monaco.editor, 'createDiffEditor').and.callThrough();
instance.createDiffInstance(holder);
expect(instance.monaco.editor.createDiffEditor).toHaveBeenCalledWith(
holder,
{
model: null,
contextmenu: true,
minimap: {
enabled: false,
},
readOnly: true,
scrollBeyondLastLine: false,
},
);
});
});
describe('createModel', () => { describe('createModel', () => {
it('calls model manager addModel', () => { it('calls model manager addModel', () => {
spyOn(instance.modelManager, 'addModel'); spyOn(instance.modelManager, 'addModel');
...@@ -87,12 +112,28 @@ describe('Multi-file editor library', () => { ...@@ -87,12 +112,28 @@ describe('Multi-file editor library', () => {
expect(instance.instance.setModel).toHaveBeenCalledWith(model.getModel()); expect(instance.instance.setModel).toHaveBeenCalledWith(model.getModel());
}); });
it('sets original & modified when diff editor', () => {
spyOn(instance.instance, 'getEditorType').and.returnValue(
'vs.editor.IDiffEditor',
);
spyOn(instance.instance, 'setModel');
instance.attachModel(model);
expect(instance.instance.setModel).toHaveBeenCalledWith({
original: model.getOriginalModel(),
modified: model.getModel(),
});
});
it('attaches the model to the dirty diff controller', () => { it('attaches the model to the dirty diff controller', () => {
spyOn(instance.dirtyDiffController, 'attachModel'); spyOn(instance.dirtyDiffController, 'attachModel');
instance.attachModel(model); instance.attachModel(model);
expect(instance.dirtyDiffController.attachModel).toHaveBeenCalledWith(model); expect(instance.dirtyDiffController.attachModel).toHaveBeenCalledWith(
model,
);
}); });
it('re-decorates with the dirty diff controller', () => { it('re-decorates with the dirty diff controller', () => {
...@@ -100,7 +141,9 @@ describe('Multi-file editor library', () => { ...@@ -100,7 +141,9 @@ describe('Multi-file editor library', () => {
instance.attachModel(model); instance.attachModel(model);
expect(instance.dirtyDiffController.reDecorate).toHaveBeenCalledWith(model); expect(instance.dirtyDiffController.reDecorate).toHaveBeenCalledWith(
model,
);
}); });
}); });
......
...@@ -2,6 +2,7 @@ import Vue from 'vue'; ...@@ -2,6 +2,7 @@ import Vue from 'vue';
import store from 'ee/ide/stores'; import store from 'ee/ide/stores';
import service from 'ee/ide/services'; import service from 'ee/ide/services';
import router from 'ee/ide/ide_router'; import router from 'ee/ide/ide_router';
import eventHub from 'ee/ide/eventhub';
import { file, resetStore } from '../../helpers'; import { file, resetStore } from '../../helpers';
describe('Multi-file store file actions', () => { describe('Multi-file store file actions', () => {
...@@ -457,6 +458,8 @@ describe('Multi-file store file actions', () => { ...@@ -457,6 +458,8 @@ describe('Multi-file store file actions', () => {
let tmpFile; let tmpFile;
beforeEach(() => { beforeEach(() => {
spyOn(eventHub, '$on');
tmpFile = file(); tmpFile = file();
tmpFile.content = 'testing'; tmpFile.content = 'testing';
......
...@@ -210,4 +210,15 @@ describe('Multi-file store actions', () => { ...@@ -210,4 +210,15 @@ describe('Multi-file store actions', () => {
.catch(done.fail); .catch(done.fail);
}); });
}); });
describe('updateViewer', () => {
it('updates viewer state', (done) => {
store.dispatch('updateViewer', 'diff')
.then(() => {
expect(store.state.viewer).toBe('diff');
})
.then(done)
.catch(done.fail);
});
});
}); });
...@@ -401,6 +401,8 @@ describe('IDE commit module actions', () => { ...@@ -401,6 +401,8 @@ describe('IDE commit module actions', () => {
}); });
it('redirects to new merge request page', (done) => { it('redirects to new merge request page', (done) => {
spyOn(eventHub, '$on');
store.state.commit.commitAction = '3'; store.state.commit.commitAction = '3';
store.dispatch('commit/commitChanges') store.dispatch('commit/commitChanges')
......
...@@ -106,4 +106,12 @@ describe('Multi-file store mutations', () => { ...@@ -106,4 +106,12 @@ describe('Multi-file store mutations', () => {
expect(localState.rightPanelCollapsed).toBeFalsy(); expect(localState.rightPanelCollapsed).toBeFalsy();
}); });
}); });
describe('UPDATE_VIEWER', () => {
it('sets viewer state', () => {
mutations.UPDATE_VIEWER(localState, 'diff');
expect(localState.viewer).toBe('diff');
});
});
}); });
...@@ -298,6 +298,18 @@ describe Gitlab::Database do ...@@ -298,6 +298,18 @@ describe Gitlab::Database do
end end
end end
describe '.cached_table_exists?' do
it 'only retrieves data once per table' do
expect(ActiveRecord::Base.connection).to receive(:table_exists?).with(:projects).once.and_call_original
expect(ActiveRecord::Base.connection).to receive(:table_exists?).with(:bogus_table_name).once.and_call_original
2.times do
expect(described_class.cached_table_exists?(:projects)).to be_truthy
expect(described_class.cached_table_exists?(:bogus_table_name)).to be_falsey
end
end
end
describe '#true_value' do describe '#true_value' do
it 'returns correct value for PostgreSQL' do it 'returns correct value for PostgreSQL' do
expect(described_class).to receive(:postgresql?).and_return(true) expect(described_class).to receive(:postgresql?).and_return(true)
......
...@@ -1846,46 +1846,6 @@ describe Project do ...@@ -1846,46 +1846,6 @@ describe Project do
expect(housekeeping_service).not_to have_received(:execute) expect(housekeeping_service).not_to have_received(:execute)
end end
context 'elasticsearch indexing disabled' do
before do
stub_ee_application_setting(elasticsearch_indexing: false)
end
it 'does not index the repository' do
project = create(:project, :import_started, import_type: :github)
expect(ElasticCommitIndexerWorker).not_to receive(:perform_async)
project.import_finish
end
end
context 'elasticsearch indexing enabled' do
let(:project) { create(:project, :import_started, import_type: :github) }
before do
stub_ee_application_setting(elasticsearch_indexing: true)
end
context 'no index status' do
it 'schedules a full index of the repository' do
expect(ElasticCommitIndexerWorker).to receive(:perform_async).with(project.id, nil)
project.import_finish
end
end
context 'with index status' do
let!(:index_status) { project.create_index_status!(indexed_at: Time.now, last_commit: 'foo') }
it 'schedules a progressive index of the repository' do
expect(ElasticCommitIndexerWorker).to receive(:perform_async).with(project.id, index_status.last_commit)
project.import_finish
end
end
end
end end
end end
......
...@@ -1813,48 +1813,6 @@ describe Repository do ...@@ -1813,48 +1813,6 @@ describe Repository do
end end
end end
describe "Elastic search", :elastic do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
describe "class method find_commits_by_message_with_elastic" do
it "returns commits" do
project = create :project, :repository
project1 = create :project, :repository
project.repository.index_commits
project1.repository.index_commits
Gitlab::Elastic::Helper.refresh_index
expect(described_class.find_commits_by_message_with_elastic('initial').first).to be_a(Commit)
expect(described_class.find_commits_by_message_with_elastic('initial').count).to eq(2)
expect(described_class.find_commits_by_message_with_elastic('initial').total_count).to eq(2)
end
end
describe "find_commits_by_message_with_elastic" do
it "returns commits" do
project = create :project, :repository
project.repository.index_commits
Gitlab::Elastic::Helper.refresh_index
expect(project.repository.find_commits_by_message_with_elastic('initial').first).to be_a(Commit)
expect(project.repository.find_commits_by_message_with_elastic('initial').count).to eq(1)
expect(project.repository.find_commits_by_message_with_elastic('initial').total_count).to eq(1)
end
end
end
describe '#after_create' do describe '#after_create' do
it 'flushes the exists cache' do it 'flushes the exists cache' do
expect(repository).to receive(:expire_exists_cache) expect(repository).to receive(:expire_exists_cache)
......
...@@ -3,8 +3,8 @@ require 'spec_helper' ...@@ -3,8 +3,8 @@ require 'spec_helper'
describe API::Search do describe API::Search do
set(:user) { create(:user) } set(:user) { create(:user) }
set(:group) { create(:group) } set(:group) { create(:group) }
let(:project) { create(:project, :public, name: 'awesome project', group: group) } set(:project) { create(:project, :public, name: 'awesome project', group: group) }
let(:repo_project) { create(:project, :public, :repository, group: group) } set(:repo_project) { create(:project, :public, :repository, group: group) }
shared_examples 'response is correct' do |schema:, size: 1| shared_examples 'response is correct' do |schema:, size: 1|
it { expect(response).to have_gitlab_http_status(200) } it { expect(response).to have_gitlab_http_status(200) }
...@@ -13,74 +13,6 @@ describe API::Search do ...@@ -13,74 +13,6 @@ describe API::Search do
it { expect(json_response.size).to eq(size) } it { expect(json_response.size).to eq(size) }
end end
shared_examples 'elasticsearch disabled' do
it 'returns 400 error for wiki_blobs scope' do
get api(endpoint, user), scope: 'wiki_blobs', search: 'awesome'
expect(response).to have_gitlab_http_status(400)
end
it 'returns 400 error for blobs scope' do
get api(endpoint, user), scope: 'blobs', search: 'monitors'
expect(response).to have_gitlab_http_status(400)
end
it 'returns 400 error for commits scope' do
get api(endpoint, user), scope: 'commits', search: 'folder'
expect(response).to have_gitlab_http_status(400)
end
end
shared_examples 'elasticsearch enabled' do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
end
after do
Gitlab::Elastic::Helper.delete_index
stub_ee_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
context 'for wiki_blobs scope' do
before do
wiki = create(:project_wiki, project: project)
create(:wiki_page, wiki: wiki, attrs: { title: 'home', content: "Awesome page" })
project.wiki.index_blobs
Gitlab::Elastic::Helper.refresh_index
get api(endpoint, user), scope: 'wiki_blobs', search: 'awesome'
end
it_behaves_like 'response is correct', schema: 'public_api/v4/blobs'
end
context 'for commits scope' do
before do
repo_project.repository.index_commits
Gitlab::Elastic::Helper.refresh_index
get api(endpoint, user), scope: 'commits', search: 'folder'
end
it_behaves_like 'response is correct', schema: 'public_api/v4/commits_details', size: 2
end
context 'for blobs scope' do
before do
repo_project.repository.index_blobs
Gitlab::Elastic::Helper.refresh_index
get api(endpoint, user), scope: 'blobs', search: 'monitors'
end
it_behaves_like 'response is correct', schema: 'public_api/v4/blobs'
end
end
describe 'GET /search' do describe 'GET /search' do
context 'when user is not authenticated' do context 'when user is not authenticated' do
it 'returns 401 error' do it 'returns 401 error' do
...@@ -109,8 +41,6 @@ describe API::Search do ...@@ -109,8 +41,6 @@ describe API::Search do
context 'with correct params' do context 'with correct params' do
context 'for projects scope' do context 'for projects scope' do
before do before do
project
get api('/search', user), scope: 'projects', search: 'awesome' get api('/search', user), scope: 'projects', search: 'awesome'
end end
...@@ -166,18 +96,6 @@ describe API::Search do ...@@ -166,18 +96,6 @@ describe API::Search do
it_behaves_like 'response is correct', schema: 'public_api/v4/snippets' it_behaves_like 'response is correct', schema: 'public_api/v4/snippets'
end end
context 'when elasticsearch is disabled' do
it_behaves_like 'elasticsearch disabled' do
let(:endpoint) { '/search' }
end
end
context 'when elasticsearch is enabled' do
it_behaves_like 'elasticsearch enabled' do
let(:endpoint) { '/search' }
end
end
end end
end end
...@@ -227,8 +145,6 @@ describe API::Search do ...@@ -227,8 +145,6 @@ describe API::Search do
context 'with correct params' do context 'with correct params' do
context 'for projects scope' do context 'for projects scope' do
before do before do
project
get api("/groups/#{group.id}/-/search", user), scope: 'projects', search: 'awesome' get api("/groups/#{group.id}/-/search", user), scope: 'projects', search: 'awesome'
end end
...@@ -276,18 +192,6 @@ describe API::Search do ...@@ -276,18 +192,6 @@ describe API::Search do
it_behaves_like 'response is correct', schema: 'public_api/v4/milestones' it_behaves_like 'response is correct', schema: 'public_api/v4/milestones'
end end
context 'when elasticsearch is disabled' do
it_behaves_like 'elasticsearch disabled' do
let(:endpoint) { "/groups/#{group.id}/-/search" }
end
end
context 'when elasticsearch is enabled' do
it_behaves_like 'elasticsearch enabled' do
let(:endpoint) { "/groups/#{group.id}/-/search" }
end
end
end end
end end
......
...@@ -43,7 +43,7 @@ describe Files::CreateService do ...@@ -43,7 +43,7 @@ describe Files::CreateService do
blob = repository.blob_at('lfs', file_path) blob = repository.blob_at('lfs', file_path)
expect(blob.data).not_to start_with('version https://git-lfs.github.com/spec/v1') expect(blob.data).not_to start_with(Gitlab::Git::LfsPointerFile::VERSION_LINE)
expect(blob.data).to eq(file_content) expect(blob.data).to eq(file_content)
end end
end end
...@@ -58,7 +58,7 @@ describe Files::CreateService do ...@@ -58,7 +58,7 @@ describe Files::CreateService do
blob = repository.blob_at('lfs', file_path) blob = repository.blob_at('lfs', file_path)
expect(blob.data).to start_with('version https://git-lfs.github.com/spec/v1') expect(blob.data).to start_with(Gitlab::Git::LfsPointerFile::VERSION_LINE)
end end
it "creates an LfsObject with the file's content" do it "creates an LfsObject with the file's content" do
......
...@@ -4,28 +4,30 @@ describe Files::MultiService do ...@@ -4,28 +4,30 @@ describe Files::MultiService do
subject { described_class.new(project, user, commit_params) } subject { described_class.new(project, user, commit_params) }
let(:project) { create(:project, :repository) } let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
let(:user) { create(:user) } let(:user) { create(:user) }
let(:branch_name) { project.default_branch } let(:branch_name) { project.default_branch }
let(:original_file_path) { 'files/ruby/popen.rb' } let(:original_file_path) { 'files/ruby/popen.rb' }
let(:new_file_path) { 'files/ruby/popen.rb' } let(:new_file_path) { 'files/ruby/popen.rb' }
let(:file_content) { 'New content' }
let(:action) { 'update' } let(:action) { 'update' }
let!(:original_commit_id) do let!(:original_commit_id) do
Gitlab::Git::Commit.last_for_path(project.repository, branch_name, original_file_path).sha Gitlab::Git::Commit.last_for_path(project.repository, branch_name, original_file_path).sha
end end
let(:actions) do let(:default_action) do
[ {
{ action: action,
action: action, file_path: new_file_path,
file_path: new_file_path, previous_path: original_file_path,
previous_path: original_file_path, content: file_content,
content: 'New content', last_commit_id: original_commit_id
last_commit_id: original_commit_id }
}
]
end end
let(:actions) { [default_action] }
let(:commit_params) do let(:commit_params) do
{ {
commit_message: "Update File", commit_message: "Update File",
...@@ -110,6 +112,56 @@ describe Files::MultiService do ...@@ -110,6 +112,56 @@ describe Files::MultiService do
end end
end end
context 'when creating a file matching an LFS filter' do
let(:action) { 'create' }
let(:branch_name) { 'lfs' }
let(:new_file_path) { 'test_file.lfs' }
before do
allow(project).to receive(:lfs_enabled?).and_return(true)
end
it 'creates an LFS pointer' do
subject.execute
blob = repository.blob_at('lfs', new_file_path)
expect(blob.data).to start_with(Gitlab::Git::LfsPointerFile::VERSION_LINE)
end
it "creates an LfsObject with the file's content" do
subject.execute
expect(LfsObject.last.file.read).to eq file_content
end
context 'with base64 encoded content' do
let(:raw_file_content) { 'Raw content' }
let(:file_content) { Base64.encode64(raw_file_content) }
let(:actions) { [default_action.merge(encoding: 'base64')] }
it 'creates an LFS pointer' do
subject.execute
blob = repository.blob_at('lfs', new_file_path)
expect(blob.data).to start_with(Gitlab::Git::LfsPointerFile::VERSION_LINE)
end
it "creates an LfsObject with the file's content" do
subject.execute
expect(LfsObject.last.file.read).to eq raw_file_content
end
end
it 'links the LfsObject to the project' do
expect do
subject.execute
end.to change { project.lfs_objects.count }.by(1)
end
end
context 'when file status validation is skipped' do context 'when file status validation is skipped' do
let(:action) { 'create' } let(:action) { 'create' }
let(:new_file_path) { 'files/ruby/new_file.rb' } let(:new_file_path) { 'files/ruby/new_file.rb' }
......
require "spec_helper"
describe Lfs::FileTransformer do
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
let(:file_content) { 'Test file content' }
let(:branch_name) { 'lfs' }
let(:file_path) { 'test_file.lfs' }
subject { described_class.new(project, branch_name) }
describe '#new_file' do
context 'with lfs disabled' do
it 'skips gitattributes check' do
expect(repository.raw).not_to receive(:blob_at)
subject.new_file(file_path, file_content)
end
it 'returns untransformed content' do
result = subject.new_file(file_path, file_content)
expect(result.content).to eq(file_content)
end
it 'returns untransformed encoding' do
result = subject.new_file(file_path, file_content, encoding: 'base64')
expect(result.encoding).to eq('base64')
end
end
context 'with lfs enabled' do
before do
allow(project).to receive(:lfs_enabled?).and_return(true)
end
it 'reuses cached gitattributes' do
second_file = 'another_file.lfs'
expect(repository.raw).to receive(:blob_at).with(branch_name, '.gitattributes').once
subject.new_file(file_path, file_content)
subject.new_file(second_file, file_content)
end
it "creates an LfsObject with the file's content" do
subject.new_file(file_path, file_content)
expect(LfsObject.last.file.read).to eq file_content
end
it 'returns an LFS pointer' do
result = subject.new_file(file_path, file_content)
expect(result.content).to start_with(Gitlab::Git::LfsPointerFile::VERSION_LINE)
end
it 'returns LFS pointer encoding as text' do
result = subject.new_file(file_path, file_content, encoding: 'base64')
expect(result.encoding).to eq('text')
end
context "when doesn't use LFS" do
let(:file_path) { 'other.filetype' }
it "doesn't create LFS pointers" do
new_content = subject.new_file(file_path, file_content).content
expect(new_content).not_to start_with(Gitlab::Git::LfsPointerFile::VERSION_LINE)
expect(new_content).to eq(file_content)
end
end
it 'links LfsObjects to project' do
expect do
subject.new_file(file_path, file_content)
end.to change { project.lfs_objects.count }.by(1)
end
context 'when LfsObject already exists' do
let(:lfs_pointer) { Gitlab::Git::LfsPointerFile.new(file_content) }
before do
create(:lfs_object, oid: lfs_pointer.sha256, size: lfs_pointer.size)
end
it 'links LfsObjects to project' do
expect do
subject.new_file(file_path, file_content)
end.to change { project.lfs_objects.count }.by(1)
end
end
end
end
end
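A minimal usage sketch of the new service, based only on the calls exercised in the spec above; the caller context is assumed:

# Assumed caller (e.g. a commit-building service) on a project with LFS enabled.
transformer = Lfs::FileTransformer.new(project, 'lfs')

result = transformer.new_file('test_file.lfs', 'Test file content')
result.content  # starts with Gitlab::Git::LfsPointerFile::VERSION_LINE when the path matches an LFS rule
result.encoding # 'text' for a generated pointer; the original encoding otherwise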
...@@ -37,25 +37,4 @@ describe Search::GroupService do ...@@ -37,25 +37,4 @@ describe Search::GroupService do
describe 'basic search' do describe 'basic search' do
include_examples 'group search' include_examples 'group search'
end end
describe 'elasticsearch' do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
# Ensure these are present when the index is refreshed
_ = [
outside_project, private_project, other_project,
project1, project2, project3
]
Gitlab::Elastic::Helper.refresh_index
end
after do
Gitlab::Elastic::Helper.delete_index
end
include_examples 'group search'
end
end end