Commit 00fa950a authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent c36152ff
......@@ -5,6 +5,7 @@ class SearchController < ApplicationController
include SearchHelper
include RendersCommits
before_action :override_snippet_scope, only: :show
around_action :allow_gitaly_ref_name_caching
skip_before_action :authenticate_user!
......@@ -103,4 +104,14 @@ class SearchController < ApplicationController
Gitlab::UsageDataCounters::SearchCounter.increment_navbar_searches_count
end
# Disallow web snippet_blobs search as we migrate snippets
# from database-backed storage to Git repository-based storage,
# since searching across multiple Git repositories is not feasible.
#
# TODO: after 13.0 refactor this into Search::SnippetService
# See https://gitlab.com/gitlab-org/gitlab/issues/208882
def override_snippet_scope
params[:scope] = 'snippet_titles' if params[:snippets] == 'true'
end
end
......@@ -24,7 +24,6 @@
= users
- elsif @show_snippets
= search_filter_link 'snippet_blobs', _("Snippet Contents"), search: { snippets: true, group_id: nil, project_id: nil }
= search_filter_link 'snippet_titles', _("Titles and Filenames"), search: { snippets: true, group_id: nil, project_id: nil }
- else
= search_filter_link 'projects', _("Projects"), data: { qa_selector: 'projects_tab' }
......
---
title: Remove and deprecate snippet content search
merge_request: 26359
author:
type: removed
---
title: Optimize Project related count service desk enabled
merge_request: 27115
author:
type: performance
---
title: Enable Workhorse upload acceleration for Project Import uploads via API
merge_request: 26914
author:
type: performance
# frozen_string_literal: true
class AddIndexOnIdAndServiceDeskEnabledToProjects < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INDEX_NAME = 'index_projects_on_id_service_desk_enabled'
disable_ddl_transaction!
def up
add_concurrent_index :projects, :id, where: 'service_desk_enabled = true', name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :projects, INDEX_NAME
end
end
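
For context, the changelog entry "Optimize Project related count service desk enabled" pairs with this migration: a partial index lets PostgreSQL answer the count from a small index rather than the full `projects` table. A minimal sketch of the query it serves, from a Rails console:

```ruby
# Sketch of the query this partial index is built for: counting projects
# with service desk enabled. With the index in place, PostgreSQL can satisfy
# the WHERE clause from the small partial index instead of scanning the table.
Project.where(service_desk_enabled: true).count
# SELECT COUNT(*) FROM "projects" WHERE "projects"."service_desk_enabled" = TRUE
```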
......@@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2020_03_11_165635) do
ActiveRecord::Schema.define(version: 2020_03_12_163407) do
# These are extensions that must be enabled in order to support this database
enable_extension "pg_trgm"
......@@ -3491,6 +3491,7 @@ ActiveRecord::Schema.define(version: 2020_03_11_165635) do
t.index ["id", "repository_storage", "last_repository_updated_at"], name: "idx_projects_on_repository_storage_last_repository_updated_at"
t.index ["id"], name: "index_on_id_partial_with_legacy_storage", where: "((storage_version < 2) OR (storage_version IS NULL))"
t.index ["id"], name: "index_projects_on_id_partial_for_visibility", unique: true, where: "(visibility_level = ANY (ARRAY[10, 20]))"
t.index ["id"], name: "index_projects_on_id_service_desk_enabled", where: "(service_desk_enabled = true)"
t.index ["id"], name: "index_projects_on_mirror_and_mirror_trigger_builds_both_true", where: "((mirror IS TRUE) AND (mirror_trigger_builds IS TRUE))"
t.index ["last_activity_at", "id"], name: "index_projects_api_last_activity_at_id_desc", order: { id: :desc }
t.index ["last_activity_at", "id"], name: "index_projects_api_vis20_last_activity_at", where: "(visibility_level = 20)"
......
......@@ -163,17 +163,21 @@ Git operations in GitLab will result in an API error.
unicorn['enable'] = false
sidekiq['enable'] = false
gitlab_workhorse['enable'] = false
grafana['enable'] = false
# If you run a separate monitoring node you can disable these services
alertmanager['enable'] = false
prometheus['enable'] = false
# If you don't run a separate monitoring node you can
# enable Prometheus access and disable these extra services
# This makes Prometheus listen on all interfaces. You must use firewalls to restrict access to this address/port.
# prometheus['listen_address'] = '0.0.0.0:9090'
# prometheus['monitor_kubernetes'] = false
# If you don't want to run monitoring services, uncomment the following (not recommended)
# alertmanager['enable'] = false
# gitlab_exporter['enable'] = false
# grafana['enable'] = false
# node_exporter['enable'] = false
# prometheus['enable'] = false
# Enable Prometheus monitoring - comment out if you disable monitoring services above.
# This makes Prometheus listen on all interfaces. You must use firewalls to restrict access to this address/port.
prometheus['listen_address'] = '0.0.0.0:9090'
# Prevent database connections during 'gitlab-ctl reconfigure'
gitlab_rails['rake_cache_clear'] = false
......@@ -861,7 +865,7 @@ default level is `WARN`.
You can run a gRPC trace with:
```shell
GRPC_TRACE=all GRPC_VERBOSITY=DEBUG sudo gitlab-rake gitlab:gitaly:check
sudo GRPC_TRACE=all GRPC_VERBOSITY=DEBUG gitlab-rake gitlab:gitaly:check
```
### Observing `gitaly-ruby` traffic
......
......@@ -255,6 +255,8 @@ Example response:
### Scope: snippet_blobs
This scope will be disabled after GitLab 13.0.
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/search?scope=snippet_blobs&search=test"
```
......
......@@ -46,6 +46,9 @@ GET /projects/:id/vulnerability_findings?confidence=unknown,experimental
GET /projects/:id/vulnerability_findings?pipeline_id=42
```
CAUTION: **Deprecation:**
Beginning with GitLab 12.9, the `undefined` severity level is deprecated and the `undefined` confidence level isn't reported for new vulnerabilities.
| Attribute | Type | Required | Description |
| ------------- | -------------- | -------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) which the authenticated user is a member of. |
......
......@@ -81,7 +81,9 @@ There are some high level differences between the products worth mentioning:
container images to set up your build environment. For example, set up one pipeline that builds your build environment
itself and publishes it to the container registry. Then, have your pipelines use this instead of each building their
own environment, which will be slower and may be less consistent. We have extensive docs on [how to use the Container Registry](../../user/packages/container_registry/index.md).
- Totally stuck and not sure where to turn for advice? The [GitLab community forum](https://forum.gitlab.com/) can be a great resource.
- A central utilities repository can be a great place to put assorted scheduled jobs
or other manual jobs that function like utilities. Jenkins installations tend to
have a few of these.
## Agents vs. Runners
......
......@@ -9,7 +9,7 @@ importer and a parallel importer. The Rake task `import:github` uses the
sequential importer, while everything else uses the parallel importer. The
difference between these two importers is quite simple: the sequential importer
does all work in a single thread, making it more useful for debugging purposes
or Rake tasks. The parallel importer on the other hand uses Sidekiq.
or Rake tasks. The parallel importer, on the other hand, uses Sidekiq.
## Requirements
......@@ -31,9 +31,9 @@ The importer's codebase is broken up into the following directories:
## Architecture overview
When a GitHub project is imported we schedule and execute a job for the
`RepositoryImportworker` worker as all other importers. However, unlike other
importers we don't immediately perform the work necessary. Instead work is
When a GitHub project is imported, we schedule and execute a job for the
`RepositoryImportWorker` worker, like all other importers. However, unlike other
importers, we don't immediately perform the work necessary. Instead, work is
divided into separate stages, with each stage consisting of a set of Sidekiq
jobs that are executed. Between every stage, a job is scheduled that periodically
checks if all work of the current stage is completed, advancing the import
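
Purely as an illustration of this staged pattern (not GitLab's actual implementation; every name below is hypothetical), a coordinator job could poll a counter of outstanding jobs and either reschedule itself or kick off the next stage:

```ruby
require 'sidekiq'

# Hypothetical sketch of a stage coordinator: while jobs from the current
# stage are still outstanding, re-enqueue ourselves; once the counter hits
# zero, schedule the worker that starts the next stage.
class AdvanceStageSketchWorker
  include Sidekiq::Worker

  # Hypothetical mapping from stage name to the worker class that starts it.
  NEXT_STAGE_WORKERS = {
    'pull_requests' => 'ImportPullRequestsSketchWorker',
    'issues'        => 'ImportIssuesSketchWorker'
  }.freeze

  def perform(project_id, counter_key, next_stage)
    remaining = Sidekiq.redis { |redis| redis.get(counter_key).to_i }

    if remaining > 0
      # Current stage still running: check again in 30 seconds.
      self.class.perform_in(30, project_id, counter_key, next_stage)
    else
      # Current stage finished: advance the import to the next stage.
      Object.const_get(NEXT_STAGE_WORKERS.fetch(next_stage)).perform_async(project_id)
    end
  end
end
```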
......@@ -65,9 +65,9 @@ This worker will import all pull requests. For every pull request a job for the
### 5. Stage::ImportIssuesAndDiffNotesWorker
This worker will import all issues and pull request comments. For every issue we
This worker will import all issues and pull request comments. For every issue, we
schedule a job for the `Gitlab::GithubImport::ImportIssueWorker` worker. For
pull request comments we instead schedule jobs for the
pull request comments, we instead schedule jobs for the
`Gitlab::GithubImport::DiffNoteImporter` worker.
This worker processes both issues and diff notes in parallel so we don't need to
......@@ -82,7 +82,7 @@ project.
### 6. Stage::ImportNotesWorker
This worker imports regular comments for both issues and pull requests. For
every comment we schedule a job for the
every comment, we schedule a job for the
`Gitlab::GithubImport::ImportNoteWorker` worker.
Regular comments have to be imported at the end since the GitHub API used
......@@ -116,14 +116,14 @@ schedule the worker of the next stage.
To reduce the number of `AdvanceStageWorker` jobs scheduled, this worker will
briefly wait for jobs to complete before deciding what the next action should
be. For small projects this may slow down the import process a bit, but it will
be. For small projects, this may slow down the import process a bit, but it will
also reduce pressure on the system as a whole.
## Refreshing import JIDs
GitLab includes a worker called `StuckImportJobsWorker` that will periodically
run and mark project imports as failed if they have been running for more than
15 hours. For GitHub projects this poses a bit of a problem: importing large
15 hours. For GitHub projects, this poses a bit of a problem: importing large
projects could take several hours depending on how often we hit the GitHub rate
limit (more on this below), but we don't want `StuckImportJobsWorker` to mark
our import as failed because of this.
......@@ -137,7 +137,7 @@ long we're still performing work.
## GitHub rate limit
GitHub has a rate limit of 5 000 API calls per hour. The number of requests
GitHub has a rate limit of 5,000 API calls per hour. The number of requests
necessary to import a project is largely dominated by the number of unique users
involved in a project (e.g. issue authors). Other data such as issue pages
and comments typically only requires a few dozen requests to import. This is
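
As a hedged aside, here is what rate-limit-aware requests can look like in a standalone script, assuming the `octokit` gem (GitLab's importer instead reschedules Sidekiq jobs when it hits the limit, as described below):

```ruby
require 'octokit'

# Standalone sketch: before a batch of requests, check how many calls remain
# in the current window and sleep until the limit resets if none are left.
client = Octokit::Client.new(access_token: ENV.fetch('GITHUB_TOKEN'))

def with_rate_limit(client)
  # resets_in is the number of seconds until the rate limit window resets.
  sleep(client.rate_limit.resets_in + 1) if client.rate_limit.remaining.zero?

  yield
end

issues = with_rate_limit(client) { client.issues('octocat/Hello-World', state: 'all') }
puts issues.size
```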
......@@ -176,11 +176,11 @@ There are two types of lookups we cache:
in our GitLab database.
The expiration time of these keys is 24 hours. When retrieving the cache of a
positive lookups we refresh the TTL automatically. The TTL of false lookups is
positive lookup, we refresh the TTL automatically. The TTL of false lookups is
never refreshed.
Because of this caching layer it's possible newly registered GitLab accounts
won't be linked to their corresponding GitHub accounts. This however will sort
Because of this caching layer, it's possible newly registered GitLab accounts
won't be linked to their corresponding GitHub accounts. This, however, will sort
itself out once the cached keys expire.
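
A minimal sketch of this TTL policy, assuming a plain `redis` client and hypothetical key names (the real implementation differs): positive lookups get their 24-hour TTL refreshed on read, while `false` lookups are left to expire on schedule.

```ruby
require 'redis'

TTL = 24 * 60 * 60 # 24 hours in seconds
redis = Redis.new

def read_user_mapping(redis, key)
  value = redis.get(key)
  return unless value

  # Only refresh the TTL for positive lookups; negative lookups always expire.
  redis.expire(key, TTL) unless value == 'false'
  value
end

redis.set('github:user:1234', '42', ex: TTL)      # positive lookup (GitLab user ID)
redis.set('github:user:5678', 'false', ex: TTL)   # negative lookup
puts read_user_mapping(redis, 'github:user:1234') # => "42" (TTL refreshed)
```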
The user cache lookup is shared across projects. This means that the more
......@@ -194,12 +194,12 @@ The code for this resides in:
## Mapping labels and milestones
To reduce pressure on the database, we do not query it when setting labels and
milestones on issues and merge requests. Instead we cache this data when we
milestones on issues and merge requests. Instead, we cache this data when we
import labels and milestones, then we reuse this cache when assigning them to
issues/merge requests. Similar to the user lookups, these cache keys are expired
automatically after 24 hours of not being used.
Unlike the user lookup caches these label and milestone caches are scoped to the
Unlike the user lookup caches, these label and milestone caches are scoped to the
project that is being imported.
The code for this resides in:
......
......@@ -57,6 +57,7 @@ Libraries with the following licenses are acceptable for use:
- [Creative Commons Zero (CC0)][CC0]: A public domain dedication, recommended as a way to disclaim copyright on your work to the maximum extent possible.
- [Unlicense][UNLICENSE]: Another public domain dedication.
- [OWFa 1.0][OWFa1]: An open-source license and patent grant designed for specifications.
- [JSON License](https://www.json.org/license.html): Equivalent to the MIT license plus the statement, "The Software shall be used for Good, not Evil."
## Unacceptable Licenses
......
......@@ -448,9 +448,12 @@ SOME_CONSTANT = 'bar'
You might want millions of project rows in your local database, for example,
in order to compare relative query performance, or to reproduce a bug. You could
do this by hand with SQL commands, but since you have ActiveRecord models, you
might find using these gems more convenient:
do this by hand with SQL commands or using [Mass Inserting Rails
Models](mass_insert.md) functionality.
Assuming you are working with ActiveRecord models, you might also find these links helpful:
- [Insert records in batches](insert_into_tables_in_batches.md)
- [BulkInsert gem](https://github.com/jamis/bulk_insert)
- [ActiveRecord::PgGenerateSeries gem](https://github.com/ryu39/active_record-pg_generate_series)
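
For a flavor of what batched inserts look like, here is a minimal sketch using Rails 6's built-in `insert_all`, which writes directly to the table without validations or callbacks; the attributes shown are illustrative, and a real GitLab `Project` row needs more columns than this:

```ruby
# Illustrative only: generate a million rows in batches of 10,000.
# insert_all skips validations and callbacks, so supply timestamps yourself.
now = Time.current

(1..1_000_000).each_slice(10_000) do |batch|
  Project.insert_all(
    batch.map do |i|
      { name: "project-#{i}", path: "project-#{i}", created_at: now, updated_at: now }
    end
  )
end
```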
......
......@@ -14,16 +14,6 @@ need to ensure your own [Runners are configured](../../ci/runners/README.md) and
**Note**: GitLab's Web Application Firewall is deployed with [Ingress](../../user/clusters/applications.md#Ingress),
so it will be available to your applications no matter how you deploy them to Kubernetes.
## Enable or disable ModSecurity
ModSecurity is enabled by default on GitLab.com. You can toggle the feature flag to false by running the following command in the Rails console:
```ruby
Feature.disable(:ingress_modsecurity)
```
Once disabled, you must uninstall and reinstall your Ingress application for the changes to take effect. See the [Feature Flag](../../user/project/operations/feature_flags.md) documentation for more information.
## Configuring your Google account
Before creating and connecting your Kubernetes cluster to your GitLab project,
......@@ -112,10 +102,9 @@ Once it is installed, the other applications that rely on it will each have thei
For this guide, we need to install Ingress. Ingress provides load balancing,
SSL termination, and name-based virtual hosting, using NGINX behind
the scenes. Make sure that the **Enable Web Application Firewall** button is checked
before installing.
the scenes. Make sure to switch the toggle to the enabled position before installing.
![Cluster applications](./img/guide_waf_ingress_installation.png)
![Cluster applications](./img/guide_waf_ingress_installation_v12_9.png)
After Ingress is installed, wait a few seconds and copy the IP address that
is displayed in order to add in your base **Domain** at the top of the page. For
......
......@@ -347,6 +347,9 @@ it highlighted:
}
```
CAUTION: **Deprecation:**
Beginning with GitLab 12.9, container scanning no longer reports `undefined` severity and confidence levels.
Here is the description of the report file structure nodes and their meaning. All fields must be present in
the report JSON unless stated otherwise. Presence of optional fields depends on the underlying analyzers being used.
......
......@@ -355,6 +355,9 @@ it highlighted:
}
```
CAUTION: **Deprecation:**
Beginning with GitLab 12.9, dependency scanning no longer reports `undefined` severity and confidence levels.
Here is the description of the report file structure nodes and their meaning. All fields must be present in
the report JSON unless stated otherwise. Presence of optional fields depends on the underlying analyzers being used.
......
......@@ -413,6 +413,9 @@ it highlighted:
}
```
CAUTION: **Deprecation:**
Beginning with GitLab 12.9, SAST no longer reports `undefined` severity and confidence levels.
Here is the description of the report file structure nodes and their meaning. All fields are mandatory in
the report JSON unless stated otherwise. Presence of optional fields depends on the underlying analyzers being used.
......
......@@ -279,21 +279,23 @@ This feature:
kubectl logs -n gitlab-managed-apps $(kubectl get pod -n gitlab-managed-apps -l app=nginx-ingress,component=controller --no-headers=true -o custom-columns=:metadata.name) modsecurity-log -f
```
To enable ModSecurity, check the **Enable Web Application Firewall** checkbox
when installing your [Ingress application](#ingress).
To enable WAF, switch its respective toggle to the enabled position when installing or updating the [Ingress application](#ingress).
If this is your first time using GitLab's WAF, we recommend you follow the
[quick start guide](../../topics/web_application_firewall/quick_start_guide.md).
There is a small performance overhead when enabling ModSecurity. If this is
considered significant for your application, you can disable ModSecurity's
rule engine for your deployed application by setting
[the deployment variable](../../topics/autodevops/index.md)
rule engine for your deployed application in any of the following ways:
1. Setting [the deployment variable](../../topics/autodevops/index.md)
`AUTO_DEVOPS_MODSECURITY_SEC_RULE_ENGINE` to `Off`. This will prevent ModSecurity
from processing any requests for the given application or environment.
To permanently disable it, you must [uninstall](#uninstalling-applications) and
reinstall your Ingress application for the changes to take effect.
1. Switching its respective toggle to the disabled position and applying changes through the **Save changes** button. This will reinstall
Ingress with the recent changes.
![Disabling WAF](../../topics/web_application_firewall/img/guide_waf_ingress_save_changes_v12_9.png)
### JupyterHub
......
......@@ -585,7 +585,7 @@ Service discovery:
- [`gitlab-cookbooks` / `gitlab_consul` · GitLab](https://gitlab.com/gitlab-cookbooks/gitlab_consul)
### Haproxy
### HAProxy
High Performance TCP/HTTP Load Balancer:
......
......@@ -172,6 +172,14 @@ There are 2 methods to specify a variable in a query or dashboard:
1. Variables can be specified using the [Liquid template format](https://help.shopify.com/en/themes/liquid/basics), for example `{{ci_environment_slug}}` ([added](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/20793) in GitLab 12.6).
1. You can also enclose it in quotation marks with curly braces and a leading percent, for example `"%{ci_environment_slug}"`. However, this method is deprecated, and support will be [removed in the next major release](https://gitlab.com/gitlab-org/gitlab/issues/37990).
#### Editing additional metrics from the dashboard
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/208976) in GitLab 12.9.
You can edit existing additional custom metrics by clicking the **{ellipsis_v}** **More actions** dropdown and selecting **Edit metric**.
![Edit metric](img/prometheus_dashboard_edit_metric_link_v_12_9.png)
### Defining custom dashboards per project
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/59974) in GitLab 12.1.
......
......@@ -4,11 +4,12 @@ module API
module Helpers
module FileUploadHelpers
def file_is_valid?
params[:file] && params[:file]['tempfile'].respond_to?(:read)
filename = params[:file]&.original_filename
filename && ImportExportUploader::EXTENSION_WHITELIST.include?(File.extname(filename).delete('.'))
end
def validate_file!
render_api_error!('Uploaded file is invalid', 400) unless file_is_valid?
render_api_error!({ error: _('You need to upload a GitLab project export archive (ending in .gz).') }, 422) unless file_is_valid?
end
end
end
......
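
To make the new `file_is_valid?` check concrete, here is a standalone sketch of the same extension-whitelist logic; the constant contents and method name are assumptions for the example, not GitLab's actual values:

```ruby
# Standalone illustration: for 'project_export.tar.gz', File.extname returns
# '.gz' and delete('.') strips the dot before the whitelist lookup.
EXTENSION_WHITELIST = %w[gz].freeze

def valid_export_file?(filename)
  filename && EXTENSION_WHITELIST.include?(File.extname(filename).delete('.'))
end

valid_export_file?('project_export.tar.gz') # => true
valid_export_file?('README.md')             # => false
valid_export_file?(nil)                     # => nil (falsy: no file uploaded)
```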
......@@ -21,10 +21,6 @@ module API
def rate_limiter
::Gitlab::ApplicationRateLimiter
end
def with_workhorse_upload_acceleration?
request.headers[Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER].present?
end
end
before do
......@@ -46,11 +42,7 @@ module API
params do
requires :path, type: String, desc: 'The new project path and name'
# TODO: remove rubocop disable - https://gitlab.com/gitlab-org/gitlab/issues/14960
# and mark WH fields as required (instead of optional) after the WH version including
# https://gitlab.com/gitlab-org/gitlab-workhorse/-/merge_requests/459
# is deployed and GITLAB_WORKHORSE_VERSION is updated accordingly.
requires :file, types: [::API::Validations::Types::WorkhorseFile, File], desc: 'The project export file to be imported' # rubocop:disable Scalability/FileUploads
requires :file, type: ::API::Validations::Types::WorkhorseFile, desc: 'The project export file to be imported'
optional :name, type: String, desc: 'The name of the project to be imported. Defaults to the path of the project if not provided.'
optional :namespace, type: String, desc: "The ID or name of the namespace that the project will be imported into. Defaults to the current user's namespace."
optional :overwrite, type: Boolean, default: false, desc: 'If there is a project in the same namespace and with the same name overwrite it'
......@@ -75,7 +67,7 @@ module API
success Entities::ProjectImportStatus
end
post 'import' do
require_gitlab_workhorse! if with_workhorse_upload_acceleration?
require_gitlab_workhorse!
key = "project_import".to_sym
......@@ -87,27 +79,19 @@ module API
Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-foss/issues/42437')
validate_file!
namespace = if import_params[:namespace]
find_namespace!(import_params[:namespace])
else
current_user.namespace
end
# TODO: remove the condition after the WH version including
# https://gitlab.com/gitlab-org/gitlab-workhorse/-/merge_requests/459
# is deployed and GITLAB_WORKHORSE_VERSION is updated accordingly.
file = if with_workhorse_upload_acceleration?
import_params[:file] || bad_request!('Unable to process project import file')
else
validate_file!
import_params[:file]['tempfile']
end
project_params = {
path: import_params[:path],
namespace_id: namespace.id,
name: import_params[:name],
file: file,
file: import_params[:file],
overwrite: import_params[:overwrite]
}
......
......@@ -18284,9 +18284,6 @@ msgstr ""
msgid "Smartcard authentication failed: client certificate header is missing."
msgstr ""
msgid "Snippet Contents"
msgstr ""
msgid "Snippets"
msgstr ""
......
......@@ -140,6 +140,14 @@ describe SearchController do
end
end
context 'snippet search' do
it 'forces title search' do
get :show, params: { scope: 'snippet_blobs', snippets: 'true', search: 'foo' }
expect(assigns[:scope]).to eq('snippet_titles')
end
end
it 'finds issue comments' do
project = create(:project, :public)
note = create(:note_on_issue, project: project)
......
......@@ -32,6 +32,8 @@ shared_examples_for 'snippet editor' do
visit project_snippets_path(project)
# Wait for the SVG to ensure the button location doesn't shift
within('.empty-state') { find('img.js-lazy-loaded') }
click_on('New snippet')
wait_for_requests
end
......
......@@ -16,45 +16,4 @@ describe 'Search Snippets' do
expect(page).to have_link(public_snippet.title)
expect(page).to have_link(private_snippet.title)
end
it 'User searches for snippet contents' do
create(:personal_snippet,
:public,
title: 'Many lined snippet',
content: <<-CONTENT.strip_heredoc
|line one
|line two
|line three
|line four
|line five
|line six
|line seven
|line eight
|line nine
|line ten
|line eleven
|line twelve
|line thirteen
|line fourteen
CONTENT
)
sign_in create(:user)
visit dashboard_snippets_path
submit_search('line seven')
expect(page).to have_content('line seven')
# 3 lines before the matched line should be visible
expect(page).to have_content('line six')
expect(page).to have_content('line five')
expect(page).to have_content('line four')
expect(page).not_to have_content('line three')
# 3 lines after the matched line should be visible
expect(page).to have_content('line eight')
expect(page).to have_content('line nine')
expect(page).to have_content('line ten')
expect(page).not_to have_content('line eleven')
end
end
......@@ -5,13 +5,16 @@ require 'spec_helper'
describe BulkInsertSafe do
class BulkInsertItem < ApplicationRecord
include BulkInsertSafe
include ShaAttribute
validates :name, :enum_value, :secret_value, presence: true
validates :name, :enum_value, :secret_value, :sha_value, presence: true
ENUM_VALUES = {
case_1: 1
}.freeze
sha_attribute :sha_value
enum enum_value: ENUM_VALUES
attr_encrypted :secret_value,
......@@ -44,6 +47,7 @@ describe BulkInsertSafe do
t.integer :enum_value, null: false
t.text :encrypted_secret_value, null: false
t.string :encrypted_secret_value_iv, null: false
t.binary :sha_value, null: false, limit: 20
end
end
......@@ -61,7 +65,8 @@ describe BulkInsertSafe do
BulkInsertItem.new(
name: "item-#{n}",
enum_value: 'case_1',
secret_value: "my-secret"
secret_value: 'my-secret',
sha_value: '2fd4e1c67a2d28fced849ee1bb76e7391b93eb12'
)
end
end
......@@ -71,7 +76,8 @@ describe BulkInsertSafe do
BulkInsertItem.new(
name: nil, # requires `name` to be set
enum_value: 'case_1',
secret_value: "my-secret"
secret_value: 'my-secret',
sha_value: '2fd4e1c67a2d28fced849ee1bb76e7391b93eb12'
)
end
end
......@@ -112,6 +118,16 @@ describe BulkInsertSafe do
BulkInsertItem.bulk_insert!(items, batch_size: 5)
end
it 'items can be properly fetched from database' do
items = build_valid_items_for_bulk_insertion
BulkInsertItem.bulk_insert!(items)
attribute_names = BulkInsertItem.attribute_names - %w[id]
expect(BulkInsertItem.last(items.size).pluck(*attribute_names)).to eq(
items.pluck(*attribute_names))
end
it 'rolls back the transaction when any item is invalid' do
# second batch is bad
all_items = build_valid_items_for_bulk_insertion + build_invalid_items_for_bulk_insertion
......
......@@ -5,7 +5,6 @@ require 'spec_helper'
describe API::ProjectImport do
include WorkhorseHelpers
let(:export_path) { "#{Dir.tmpdir}/project_export_spec" }
let(:user) { create(:user) }
let(:file) { File.join('spec', 'features', 'projects', 'import_export', 'test_project_export.tar.gz') }
let(:namespace) { create(:group) }
......@@ -14,29 +13,39 @@ describe API::ProjectImport do
let(:workhorse_headers) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => workhorse_token } }
before do
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
stub_uploads_object_storage(FileUploader)
namespace.add_owner(user)
end
after do
FileUtils.rm_rf(export_path, secure: true)
describe 'POST /projects/import' do
subject { upload_archive(file_upload, workhorse_headers, params) }
let(:file_upload) { fixture_file_upload(file) }
let(:params) do
{
path: 'test-import',
'file.size' => file_upload.size
}
end
before do
allow(ImportExportUploader).to receive(:workhorse_upload_path).and_return('/')
end
describe 'POST /projects/import' do
it 'schedules an import using a namespace' do
stub_import(namespace)
params[:namespace] = namespace.id
post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.id }
subject
expect(response).to have_gitlab_http_status(:created)
end
it 'schedules an import using the namespace path' do
stub_import(namespace)
params[:namespace] = namespace.full_path
post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.full_path }
subject
expect(response).to have_gitlab_http_status(:created)
end
......@@ -46,24 +55,30 @@ describe API::ProjectImport do
it 'schedules an import using a namespace and a different name' do
stub_import(namespace)
params[:name] = expected_name
params[:namespace] = namespace.id
post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.id, name: expected_name }
subject
expect(response).to have_gitlab_http_status(:created)
end
it 'schedules an import using the namespace path and a different name' do
stub_import(namespace)
params[:name] = expected_name
params[:namespace] = namespace.full_path
post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.full_path, name: expected_name }
subject
expect(response).to have_gitlab_http_status(:created)
end
it 'sets name correctly' do
stub_import(namespace)
params[:name] = expected_name
params[:namespace] = namespace.full_path
post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.full_path, name: expected_name }
subject
project = Project.find(json_response['id'])
expect(project.name).to eq(expected_name)
......@@ -71,8 +86,11 @@ describe API::ProjectImport do
it 'sets name correctly with an overwrite' do
stub_import(namespace)
params[:name] = 'new project name'
params[:namespace] = namespace.full_path
params[:overwrite] = true
post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.full_path, name: 'new project name', overwrite: true }
subject
project = Project.find(json_response['id'])
expect(project.name).to eq('new project name')
......@@ -80,8 +98,10 @@ describe API::ProjectImport do
it 'schedules an import using the path and name explicitly set to nil' do
stub_import(namespace)
params[:name] = nil
params[:namespace] = namespace.full_path
post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.full_path, name: nil }
subject
project = Project.find(json_response['id'])
expect(project.name).to eq('test-import')
......@@ -90,8 +110,9 @@ describe API::ProjectImport do
it 'schedules an import at the user namespace level' do
stub_import(user.namespace)
params[:path] = 'test-import2'
post api('/projects/import', user), params: { path: 'test-import2', file: fixture_file_upload(file) }
subject
expect(response).to have_gitlab_http_status(:created)
end
......@@ -100,7 +121,10 @@ describe API::ProjectImport do
expect_any_instance_of(ProjectImportState).not_to receive(:schedule)
expect(::Projects::CreateService).not_to receive(:new)
post api('/projects/import', user), params: { namespace: 'nonexistent', path: 'test-import2', file: fixture_file_upload(file) }
params[:namespace] = 'nonexistent'
params[:path] = 'test-import2'
subject
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Namespace Not Found')
......@@ -109,37 +133,40 @@ describe API::ProjectImport do
it 'does not schedule an import if the user has no permission to the namespace' do
expect_any_instance_of(ProjectImportState).not_to receive(:schedule)
post(api('/projects/import', create(:user)),
params: {
path: 'test-import3',
file: fixture_file_upload(file),
namespace: namespace.full_path
})
new_namespace = create(:group)
params[:path] = 'test-import3'
params[:namespace] = new_namespace.full_path
subject
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Namespace Not Found')
end
context 'if user uploads no valid file' do
let(:file) { 'README.md' }
it 'does not schedule an import if the user uploads no valid file' do
expect_any_instance_of(ProjectImportState).not_to receive(:schedule)
post api('/projects/import', user), params: { path: 'test-import3', file: './random/test' }
params[:path] = 'test-import3'
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('file is invalid')
subject
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response['message']['error']).to eq('You need to upload a GitLab project export archive (ending in .gz).')
end
end
it 'stores params that can be overridden' do
stub_import(namespace)
override_params = { 'description' => 'Hello world' }
post api('/projects/import', user),
params: {
path: 'test-import',
file: fixture_file_upload(file),
namespace: namespace.id,
override_params: override_params
}
params[:namespace] = namespace.id
params[:override_params] = override_params
subject
import_project = Project.find(json_response['id'])
expect(import_project.import_data.data['override_params']).to eq(override_params)
......@@ -149,33 +176,14 @@ describe API::ProjectImport do
stub_import(namespace)
override_params = { 'not_allowed' => 'Hello world' }
post api('/projects/import', user),
params: {
path: 'test-import',
file: fixture_file_upload(file),
namespace: namespace.id,
override_params: override_params
}
import_project = Project.find(json_response['id'])
params[:namespace] = namespace.id
params[:override_params] = override_params
expect(import_project.import_data.data['override_params']).to be_empty
end
it 'correctly overrides params during the import', :sidekiq_might_not_need_inline do
override_params = { 'description' => 'Hello world' }
subject
perform_enqueued_jobs do
post api('/projects/import', user),
params: {
path: 'test-import',
file: fixture_file_upload(file),
namespace: namespace.id,
override_params: override_params
}
end
import_project = Project.find(json_response['id'])
expect(import_project.description).to eq('Hello world')
expect(import_project.import_data.data['override_params']).to be_empty
end
context 'when target path already exists in namespace' do
......@@ -184,7 +192,9 @@ describe API::ProjectImport do
it 'does not schedule an import' do
expect_any_instance_of(ProjectImportState).not_to receive(:schedule)
post api('/projects/import', user), params: { path: existing_project.path, file: fixture_file_upload(file) }
params[:path] = existing_project.path
subject
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('Name has already been taken')
......@@ -194,7 +204,10 @@ describe API::ProjectImport do
it 'schedules an import' do
stub_import(user.namespace)
post api('/projects/import', user), params: { path: existing_project.path, file: fixture_file_upload(file), overwrite: true }
params[:path] = existing_project.path
params[:overwrite] = true
subject
expect(response).to have_gitlab_http_status(:created)
end
......@@ -207,16 +220,16 @@ describe API::ProjectImport do
end
it 'prevents users from importing projects' do
post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.id }
params[:namespace] = namespace.id
subject
expect(response).to have_gitlab_http_status(:too_many_requests)
expect(json_response['message']['error']).to eq('This endpoint has been requested too many times. Try again later.')
end
end
context 'with direct upload enabled' do
subject { upload_archive(file_upload, workhorse_headers, params) }
context 'when using remote storage' do
let(:file_name) { 'project_export.tar.gz' }
let!(:fog_connection) do
......@@ -232,21 +245,11 @@ describe API::ProjectImport do
let(:file_upload) { fog_to_uploaded_file(tmp_object) }
let(:params) do
{
path: 'test-import-project',
namespace: namespace.id,
'file.remote_id' => file_name,
'file.size' => file_upload.size
}
end
before do
allow(ImportExportUploader).to receive(:workhorse_upload_path).and_return('/')
end
it 'schedules an import' do
stub_import(namespace)
params[:namespace] = namespace.id
it 'accepts the request and stores the file' do
expect { subject }.to change { Project.count }.by(1)
subject
expect(response).to have_gitlab_http_status(:created)
end
......@@ -257,7 +260,7 @@ describe API::ProjectImport do
api("/projects/import", user),
method: :post,
file_key: :file,
params: params.merge(file: file_upload),
params: params.merge(file: file),
headers: headers,
send_rewritten_field: true
)
......@@ -301,6 +304,7 @@ describe API::ProjectImport do
expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response['TempPath']).to eq(ImportExportUploader.workhorse_local_upload_path)
end
it 'rejects requests that bypassed gitlab-workhorse' do
......