Commit 20450649 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 3f0f13c6
......@@ -24,6 +24,7 @@ When adding migrations:
- [ ] Added a `down` method so the migration can be reverted
- [ ] Added the output of the migration(s) to the MR body
- [ ] Added tests for the migration in `spec/migrations` if necessary (e.g. when migrating data)
- [ ] Added a rollback procedure: include either a rollback procedure or a description of how to roll back the changes (see the sketch below)
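As an illustration of the `down` and rollback items above, a minimal sketch of a reversible migration (the table and column names are hypothetical, not taken from this commit):

```ruby
# Hypothetical reversible migration: `up` applies the change and `down`
# undoes it, so `rake db:rollback` can revert the step cleanly.
class AddExampleFlagToProjects < ActiveRecord::Migration[5.2]
  DOWNTIME = false

  def up
    add_column :projects, :example_flag, :boolean
  end

  def down
    remove_column :projects, :example_flag
  end
end
```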
When adding or modifying queries to improve performance:
......
......@@ -5,102 +5,103 @@ import Api from './api';
import ProjectSelectComboButton from './project_select_combo_button';
import { s__ } from './locale';
export default function projectSelect() {
import(/* webpackChunkName: 'select2' */ 'select2/select2')
.then(() => {
$('.ajax-project-select').each(function(i, select) {
var placeholder;
const simpleFilter = $(select).data('simpleFilter') || false;
this.groupId = $(select).data('groupId');
this.includeGroups = $(select).data('includeGroups');
this.allProjects = $(select).data('allProjects') || false;
this.orderBy = $(select).data('orderBy') || 'id';
this.withIssuesEnabled = $(select).data('withIssuesEnabled');
this.withMergeRequestsEnabled = $(select).data('withMergeRequestsEnabled');
this.withShared =
$(select).data('withShared') === undefined ? true : $(select).data('withShared');
this.includeProjectsInSubgroups = $(select).data('includeProjectsInSubgroups') || false;
this.allowClear = $(select).data('allowClear') || false;
const projectSelect = () => {
$('.ajax-project-select').each(function(i, select) {
var placeholder;
const simpleFilter = $(select).data('simpleFilter') || false;
this.groupId = $(select).data('groupId');
this.includeGroups = $(select).data('includeGroups');
this.allProjects = $(select).data('allProjects') || false;
this.orderBy = $(select).data('orderBy') || 'id';
this.withIssuesEnabled = $(select).data('withIssuesEnabled');
this.withMergeRequestsEnabled = $(select).data('withMergeRequestsEnabled');
this.withShared =
$(select).data('withShared') === undefined ? true : $(select).data('withShared');
this.includeProjectsInSubgroups = $(select).data('includeProjectsInSubgroups') || false;
this.allowClear = $(select).data('allowClear') || false;
placeholder = s__('ProjectSelect|Search for project');
if (this.includeGroups) {
placeholder += s__('ProjectSelect| or group');
}
placeholder = s__('ProjectSelect|Search for project');
if (this.includeGroups) {
placeholder += s__('ProjectSelect| or group');
}
$(select).select2({
placeholder,
minimumInputLength: 0,
query: (function(_this) {
return function(query) {
var finalCallback, projectsCallback;
finalCallback = function(projects) {
$(select).select2({
placeholder,
minimumInputLength: 0,
query: (function(_this) {
return function(query) {
var finalCallback, projectsCallback;
finalCallback = function(projects) {
var data;
data = {
results: projects,
};
return query.callback(data);
};
if (_this.includeGroups) {
projectsCallback = function(projects) {
var groupsCallback;
groupsCallback = function(groups) {
var data;
data = {
results: projects,
};
return query.callback(data);
data = groups.concat(projects);
return finalCallback(data);
};
if (_this.includeGroups) {
projectsCallback = function(projects) {
var groupsCallback;
groupsCallback = function(groups) {
var data;
data = groups.concat(projects);
return finalCallback(data);
};
return Api.groups(query.term, {}, groupsCallback);
};
} else {
projectsCallback = finalCallback;
}
if (_this.groupId) {
return Api.groupProjects(
_this.groupId,
query.term,
{
with_issues_enabled: _this.withIssuesEnabled,
with_merge_requests_enabled: _this.withMergeRequestsEnabled,
with_shared: _this.withShared,
include_subgroups: _this.includeProjectsInSubgroups,
},
projectsCallback,
);
} else {
return Api.projects(
query.term,
{
order_by: _this.orderBy,
with_issues_enabled: _this.withIssuesEnabled,
with_merge_requests_enabled: _this.withMergeRequestsEnabled,
membership: !_this.allProjects,
},
projectsCallback,
);
}
return Api.groups(query.term, {}, groupsCallback);
};
})(this),
id(project) {
if (simpleFilter) return project.id;
return JSON.stringify({
name: project.name,
url: project.web_url,
});
},
text(project) {
return project.name_with_namespace || project.name;
},
} else {
projectsCallback = finalCallback;
}
if (_this.groupId) {
return Api.groupProjects(
_this.groupId,
query.term,
{
with_issues_enabled: _this.withIssuesEnabled,
with_merge_requests_enabled: _this.withMergeRequestsEnabled,
with_shared: _this.withShared,
include_subgroups: _this.includeProjectsInSubgroups,
},
projectsCallback,
);
} else {
return Api.projects(
query.term,
{
order_by: _this.orderBy,
with_issues_enabled: _this.withIssuesEnabled,
with_merge_requests_enabled: _this.withMergeRequestsEnabled,
membership: !_this.allProjects,
},
projectsCallback,
);
}
};
})(this),
id(project) {
if (simpleFilter) return project.id;
return JSON.stringify({
name: project.name,
url: project.web_url,
});
},
text(project) {
return project.name_with_namespace || project.name;
},
initSelection(el, callback) {
return Api.project(el.val()).then(({ data }) => callback(data));
},
initSelection(el, callback) {
return Api.project(el.val()).then(({ data }) => callback(data));
},
allowClear: this.allowClear,
allowClear: this.allowClear,
dropdownCssClass: 'ajax-project-dropdown',
});
if (simpleFilter) return select;
return new ProjectSelectComboButton(select);
});
})
dropdownCssClass: 'ajax-project-dropdown',
});
if (simpleFilter) return select;
return new ProjectSelectComboButton(select);
});
};
export default () =>
import(/* webpackChunkName: 'select2' */ 'select2/select2')
.then(projectSelect)
.catch(() => {});
}
# frozen_string_literal: true
module ExportHelper
# An EE-overridable list of descriptions
def project_export_descriptions
[
_('Project and wiki repositories'),
_('Project uploads'),
_('Project configuration, including services'),
_('Issues with comments, merge requests with diffs and comments, labels, milestones, snippets, and other project entities'),
_('LFS objects'),
_('Issue Boards')
]
end
end
ExportHelper.prepend_if_ee('EE::ExportHelper')
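Since `prepend_if_ee` lets an EE module reopen this helper, an override can call `super` and extend the CE list. A hypothetical sketch (the extra entry mirrors the documentation change later in this commit, but the module body is illustrative, not the actual EE code):

```ruby
# Hypothetical EE-side override: `prepend` means `super` returns the CE
# descriptions, which the EE module then extends.
module EE
  module ExportHelper
    def project_export_descriptions
      super + [_('Design Management files and data')]
    end
  end
end
```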
......@@ -1098,6 +1098,8 @@ class Repository
raw.create_repository
after_create
true
end
def blobs_metadata(paths, ref = 'HEAD')
......
......@@ -12,6 +12,8 @@ module Projects
private
attr_accessor :shared
def execute_after_export_action(after_export_strategy)
return unless after_export_strategy
......@@ -21,50 +23,54 @@ module Projects
end
def save_all!
if save_services
Gitlab::ImportExport::Saver.save(project: project, shared: @shared)
if save_exporters
Gitlab::ImportExport::Saver.save(project: project, shared: shared)
notify_success
else
cleanup_and_notify_error!
end
end
def save_services
[version_saver, avatar_saver, project_tree_saver, uploads_saver, repo_saver, wiki_repo_saver, lfs_saver].all?(&:save)
def save_exporters
exporters.all?(&:save)
end
def exporters
[version_saver, avatar_saver, project_tree_saver, uploads_saver, repo_saver, wiki_repo_saver, lfs_saver]
end
def version_saver
Gitlab::ImportExport::VersionSaver.new(shared: @shared)
Gitlab::ImportExport::VersionSaver.new(shared: shared)
end
def avatar_saver
Gitlab::ImportExport::AvatarSaver.new(project: project, shared: @shared)
Gitlab::ImportExport::AvatarSaver.new(project: project, shared: shared)
end
def project_tree_saver
Gitlab::ImportExport::ProjectTreeSaver.new(project: project, current_user: @current_user, shared: @shared, params: @params)
Gitlab::ImportExport::ProjectTreeSaver.new(project: project, current_user: current_user, shared: shared, params: params)
end
def uploads_saver
Gitlab::ImportExport::UploadsSaver.new(project: project, shared: @shared)
Gitlab::ImportExport::UploadsSaver.new(project: project, shared: shared)
end
def repo_saver
Gitlab::ImportExport::RepoSaver.new(project: project, shared: @shared)
Gitlab::ImportExport::RepoSaver.new(project: project, shared: shared)
end
def wiki_repo_saver
Gitlab::ImportExport::WikiRepoSaver.new(project: project, shared: @shared)
Gitlab::ImportExport::WikiRepoSaver.new(project: project, shared: shared)
end
def lfs_saver
Gitlab::ImportExport::LfsSaver.new(project: project, shared: @shared)
Gitlab::ImportExport::LfsSaver.new(project: project, shared: shared)
end
def cleanup_and_notify_error
Rails.logger.error("Import/Export - Project #{project.name} with ID: #{project.id} export error - #{@shared.errors.join(', ')}") # rubocop:disable Gitlab/RailsLogger
Rails.logger.error("Import/Export - Project #{project.name} with ID: #{project.id} export error - #{shared.errors.join(', ')}") # rubocop:disable Gitlab/RailsLogger
FileUtils.rm_rf(@shared.export_path)
FileUtils.rm_rf(shared.export_path)
notify_error
end
......@@ -72,7 +78,7 @@ module Projects
def cleanup_and_notify_error!
cleanup_and_notify_error
raise Gitlab::ImportExport::Error.new(@shared.errors.join(', '))
raise Gitlab::ImportExport::Error.new(shared.errors.to_sentence)
end
def notify_success
......@@ -80,8 +86,10 @@ module Projects
end
def notify_error
notification_service.project_not_exported(@project, @current_user, @shared.errors)
notification_service.project_not_exported(project, current_user, shared.errors)
end
end
end
end
Projects::ImportExport::ExportService.prepend_if_ee('EE::Projects::ImportExport::ExportService')
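The refactor above routes every saver through the single `exporters` list, so each one only needs to honor a small contract: respond to `#save`, return a truthy value on success, and record failures on `shared`. A minimal sketch of such an exporter (class name and body are hypothetical):

```ruby
# Hypothetical exporter compatible with `save_exporters` above: #save returns
# true on success and reports any failure through the shared error state.
class ExampleSaver
  def initialize(project:, shared:)
    @project = project
    @shared = shared
  end

  def save
    # ... write this exporter's files under @shared.export_path ...
    true
  rescue => e
    @shared.error(e)
    false
  end
end
```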
......@@ -10,12 +10,8 @@
%p.append-bottom-0
%p= _('The following items will be exported:')
%ul
%li= _('Project and wiki repositories')
%li= _('Project uploads')
%li= _('Project configuration, including services')
%li= _('Issues with comments, merge requests with diffs and comments, labels, milestones, snippets, and other project entities')
%li= _('LFS objects')
%li= _('Issue Boards')
- project_export_descriptions.each do |desc|
%li= desc
%p= _('The following items will NOT be exported:')
%ul
%li= _('Job traces and artifacts')
......
# frozen_string_literal: true
class DesignIssueIdNullable < ActiveRecord::Migration[5.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
change_column_null :design_management_designs, :issue_id, true
end
end
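Because `change_column_null` is recorded as reversible, the single `change` method above is enough for Rails to infer the rollback. The explicit equivalent would be the following sketch (note that rolling back would fail if `NULL` values had been inserted in the meantime):

```ruby
# Explicit up/down equivalent of the `change` method above.
def up
  change_column_null :design_management_designs, :issue_id, true
end

def down
  change_column_null :design_management_designs, :issue_id, false
end
```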
......@@ -1217,7 +1217,7 @@ ActiveRecord::Schema.define(version: 2019_09_26_041216) do
create_table "design_management_designs", force: :cascade do |t|
t.integer "project_id", null: false
t.integer "issue_id", null: false
t.integer "issue_id"
t.string "filename", null: false
t.index ["issue_id", "filename"], name: "index_design_management_designs_on_issue_id_and_filename", unique: true
t.index ["project_id"], name: "index_design_management_designs_on_project_id"
......
......@@ -31,8 +31,8 @@ including (but not limited to):
- System statistics such as the process' memory usage and open file descriptors.
- Ruby garbage collection statistics.
Metrics data is written to [InfluxDB][influxdb] over [UDP][influxdb-udp]. Stored
data can be visualized using [Grafana][grafana] or any other application that
Metrics data is written to [InfluxDB](https://www.influxdata.com/products/influxdb-overview/)
over [UDP][influxdb-udp]. Stored data can be visualized using [Grafana](https://grafana.com) or any other application that
supports reading data from InfluxDB. Alternatively data can be queried using the
InfluxDB CLI.
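For illustration, a single point written over UDP in the InfluxDB line protocol could look like the sketch below; the host, port, and measurement name are assumptions, not GitLab's actual sampler code:

```ruby
require 'socket'

# One line-protocol point: measurement,tag=value field=value timestamp(ns).
line = format(
  'gitlab_example_timings,action=projects#show duration_ms=215.4 %d',
  Time.now.to_i * 1_000_000_000
)

socket = UDPSocket.new
socket.send(line, 0, 'localhost', 8089) # UDP port as configured in influxdb.conf
socket.close
```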
......@@ -67,6 +67,4 @@ the actual interval can be anywhere between 7.5 and 22.5. The interval is
re-generated for every sampling run instead of being generated once and re-used
for the duration of the process' lifetime.
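A minimal sketch of how such a per-run jittered interval could be derived (illustrative only, not the actual sampler implementation):

```ruby
# With a base interval of 15 seconds this returns a value between 7.5 and
# 22.5 seconds, recomputed on every call (i.e. for every sampling run).
def jittered_interval(base = 15.0)
  base * (0.5 + rand) # rand yields a float in [0.0, 1.0)
end
```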
[influxdb]: https://influxdata.com/time-series-platform/influxdb/
[influxdb-udp]: https://docs.influxdata.com/influxdb/v0.9/write_protocols/udp/
[grafana]: http://grafana.org/
......@@ -38,8 +38,8 @@ InfluxDB needs to be restarted.
### Storage Engine
InfluxDB comes with different storage engines and as of InfluxDB 0.9.5 a new
storage engine is available, called [TSM Tree]. All users **must** use the new
`tsm1` storage engine as this [will be the default engine][tsm1-commit] in
storage engine is available, called [TSM Tree](https://www.influxdata.com/blog/new-storage-engine-time-structured-merge-tree/).
All users **must** use the new `tsm1` storage engine as this [will be the default engine][tsm1-commit] in
upcoming InfluxDB releases.
Make sure you have the following in your configuration file:
......@@ -188,6 +188,5 @@ Read more on:
[influxdb cli]: https://docs.influxdata.com/influxdb/v0.9/tools/shell/
[udp]: https://docs.influxdata.com/influxdb/v0.9/write_protocols/udp/
[influxdb]: https://www.influxdata.com/products/influxdb-overview/
[tsm tree]: https://influxdata.com/blog/new-storage-engine-time-structured-merge-tree/
[tsm1-commit]: https://github.com/influxdata/influxdb/commit/15d723dc77651bac83e09e2b1c94be480966cb0d
[influx-admin]: https://docs.influxdata.com/influxdb/v0.9/administration/authentication_and_authorization/#create-a-new-admin-user
......@@ -21,7 +21,7 @@ Prometheus works by periodically connecting to data sources and collecting their
performance metrics via the [various exporters](#bundled-software-metrics). To view
and work with the monitoring data, you can either
[connect directly to Prometheus](#viewing-performance-metrics) or utilize a
dashboard tool like [Grafana].
dashboard tool like [Grafana](https://grafana.com).
## Configuring Prometheus
......@@ -199,8 +199,8 @@ having [NGINX proxy it][nginx-custom-config].
The performance data collected by Prometheus can be viewed directly in the
Prometheus console or through a compatible dashboard tool.
The Prometheus interface provides a [flexible query language][prom-query] to work
with the collected data where you can visualize their output.
The Prometheus interface provides a [flexible query language](https://prometheus.io/docs/prometheus/latest/querying/basics/)
to work with the collected data where you can visualize their output.
For a more fully featured dashboard, Grafana can be used and has
[official support for Prometheus][prom-grafana].
......@@ -274,7 +274,7 @@ The GitLab exporter allows you to measure various GitLab metrics, pulled from Re
> Introduced in GitLab 9.0.
> Pod monitoring introduced in GitLab 9.4.
If your GitLab server is running within Kubernetes, Prometheus will collect metrics from the Nodes and [annotated Pods](https://prometheus.io/docs/operating/configuration/#kubernetes_sd_config) in the cluster, including performance data on each container. This is particularly helpful if your CI/CD environments run in the same cluster, as you can use the [Prometheus project integration][prometheus integration] to monitor them.
If your GitLab server is running within Kubernetes, Prometheus will collect metrics from the Nodes and [annotated Pods](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#kubernetes_sd_config) in the cluster, including performance data on each container. This is particularly helpful if your CI/CD environments run in the same cluster, as you can use the [Prometheus project integration][prometheus integration] to monitor them.
To disable the monitoring of Kubernetes:
......@@ -288,16 +288,11 @@ To disable the monitoring of Kubernetes:
1. Save the file and [reconfigure GitLab][reconfigure] for the changes to
take effect.
[grafana]: https://grafana.net
[hsts]: https://en.wikipedia.org/wiki/HTTP_Strict_Transport_Security
[multi-user-prometheus]: https://gitlab.com/gitlab-org/multi-user-prometheus
[nginx-custom-config]: https://docs.gitlab.com/omnibus/settings/nginx.html#inserting-custom-nginx-settings-into-the-gitlab-server-block
[prometheus]: https://prometheus.io
[prom-exporters]: https://prometheus.io/docs/instrumenting/exporters/
[prom-query]: https://prometheus.io/docs/querying/basics
[prom-grafana]: https://prometheus.io/docs/visualization/grafana/
[scrape-config]: https://prometheus.io/docs/operating/configuration/#%3Cscrape_config%3E
[reconfigure]: ../../restart_gitlab.md#omnibus-gitlab-reconfigure
[1261]: https://gitlab.com/gitlab-org/omnibus-gitlab/merge_requests/1261
[prometheus integration]: ../../../user/project/integrations/prometheus.md
[prometheus-cadvisor-metrics]: https://github.com/google/cadvisor/blob/master/docs/storage/prometheus.md
......@@ -877,6 +877,6 @@ The above image shows:
- The HEAD request to the AWS bucket reported a 403 Unauthorized.
What does this mean? This strongly suggests that the S3 user does not have the right
[permissions to perform a HEAD request](http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectHEAD.html).
[permissions to perform a HEAD request](https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectHEAD.html).
The solution: check the [IAM permissions again](https://docs.docker.com/registry/storage-drivers/s3/).
Once the right permissions were set, the error will go away.
......@@ -94,17 +94,18 @@ and details for a database reviewer:
- Check query timing (if any): queries executed in a migration
need to fit comfortably within `15s` - preferably much less than that - on GitLab.com.
- Check [background migrations](background_migrations.md):
- For data migrations, establish a time estimate for execution
- Establish a time estimate for execution
- They should only be used when migrating data in larger tables.
- If a single `update` is below `1s` the query can be placed
directly in a regular migration (inside `db/migrate`); for anything larger,
batch the update (see the sketch after this list).
- Review queries (for example, make sure batch sizes are fine)
- Establish a time estimate for execution
- Because execution time can be longer than for a regular migration,
it's suggested to treat background migrations as post migrations:
place them in `db/post_migrate` instead of `db/migrate`. Keep in mind
that post migrations are executed post-deployment in production.
- Check [timing guidelines for migrations](#timing-guidelines-for-migrations)
- Check migrations are reversible and implement a `#down` method
- Data migrations should also be reversible or come with a description of how to reverse them, when possible. This applies to all types of migrations (regular, post-deploy, background).
- Query performance
- Check for any obviously complex queries and queries the author specifically
points out for review (if any)
......
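A minimal sketch of the batching idea from the list above, written as a post-deployment data migration; the table, column, and batch size are hypothetical, and it uses plain Rails batching rather than any GitLab-specific helper:

```ruby
# Hypothetical post-deployment data migration (db/post_migrate): each UPDATE
# touches at most 1,000 rows so individual statements stay fast.
class BackfillExampleFlag < ActiveRecord::Migration[5.2]
  DOWNTIME = false

  class Issue < ActiveRecord::Base
    self.table_name = 'issues'
  end

  def up
    Issue.where(example_flag: nil).in_batches(of: 1_000) do |batch|
      batch.update_all(example_flag: false)
    end
  end

  def down
    # The backfilled value is safe to keep, so no data rollback is required.
  end
end
```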
......@@ -83,7 +83,7 @@ the above methods, provided the cloud provider supports it.
- [Install GitLab on Google Cloud Platform](google_cloud_platform/index.md): Install Omnibus GitLab on a VM in GCP.
- [Install GitLab on Azure](azure/index.md): Install Omnibus GitLab from Azure Marketplace.
- [Install GitLab on OpenShift](https://docs.gitlab.com/charts/installation/cloud/openshift.html): Install GitLab on OpenShift by using GitLab's Helm charts.
- [Install GitLab on DC/OS](https://mesosphere.com/blog/gitlab-dcos/): Install GitLab on Mesosphere DC/OS via the [GitLab-Mesosphere integration](https://about.gitlab.com/2016/09/16/announcing-gitlab-and-mesosphere/).
- [Install GitLab on DC/OS](https://d2iq.com/blog/gitlab-dcos): Install GitLab on Mesosphere DC/OS via the [GitLab-Mesosphere integration](https://about.gitlab.com/2016/09/16/announcing-gitlab-and-mesosphere/).
- [Install GitLab on DigitalOcean](https://about.gitlab.com/2016/04/27/getting-started-with-gitlab-and-digitalocean/): Install Omnibus GitLab on DigitalOcean.
- _Testing only!_ [DigitalOcean and Docker Machine](digitaloceandocker.md):
Quickly test any version of GitLab on DigitalOcean using Docker Machine.
......@@ -43,7 +43,7 @@ Below is a diagram of the recommended architecture.
Here's a list of the AWS services we will use, with links to pricing information:
- **EC2**: GitLab will deployed on shared hardware which means
[on-demand pricing](https://aws.amazon.com/ec2/pricing/on-demand)
[on-demand pricing](https://aws.amazon.com/ec2/pricing/on-demand/)
will apply. If you want to run it on a dedicated or reserved instance,
consult the [EC2 pricing page](https://aws.amazon.com/ec2/pricing/) for more
information on the cost.
......@@ -222,10 +222,10 @@ Now, it's time to create the database:
1. For the size, let's select a `t2.medium` instance.
1. Multi-AZ-deployment is recommended as redundancy, so choose "Create
replica in different zone". Read more at
[High Availability (Multi-AZ)](http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.MultiAZ.html).
[High Availability (Multi-AZ)](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.MultiAZ.html).
1. A Provisioned IOPS (SSD) storage type is best suited for HA (though you can
choose a General Purpose (SSD) to reduce the costs). Read more about it at
[Storage for Amazon RDS](http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Storage.html).
[Storage for Amazon RDS](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Storage.html).
1. The rest of the settings on this page request a DB instance identifier, username
and a master password. We've chosen to use `gitlab-db-ha`, `gitlab` and a
......@@ -668,7 +668,7 @@ to request additional material:
about administering your GitLab instance.
- [Upload a license](../../user/admin_area/license.md):
Activate all GitLab Enterprise Edition functionality with a license.
- [Pricing](https://about.gitlab.com/pricing): Pricing for the different tiers.
- [Pricing](https://about.gitlab.com/pricing/): Pricing for the different tiers.
<!-- ## Troubleshooting
......
......@@ -38,7 +38,7 @@ create SQL Databases, author websites, and perform lots of other cloud tasks.
## Create New VM
The [Azure Marketplace][Azure-Marketplace] is an online store for pre-configured applications and
The [Azure Marketplace](https://azuremarketplace.microsoft.com/en-us/marketplace/) is an online store for pre-configured applications and
services which have been optimized for the cloud by software vendors like GitLab,
available on the Azure Marketplace as pre-configured solutions. In this tutorial
we will install GitLab Community Edition, but for GitLab Enterprise Edition you
......@@ -108,7 +108,7 @@ ahead and select this one, but please choose the size which best meets your own
> **Note:** be aware that whilst your VM is active (known as "allocated"), it will incur
"compute charges" which, ultimately, you will be billed for. So, even if you're using the
free trial credits, you'll likely want to learn
[how to properly shutdown an Azure VM to save money][Azure-Properly-Shutdown-VM].
[how to properly shutdown an Azure VM to save money](https://buildazure.com/properly-shutdown-azure-vm-to-save-money/).
Go ahead and click your chosen size, then click **"Select"** when you're ready to proceed to the
next step.
......@@ -329,7 +329,7 @@ To perform an update, we need to connect directly to our Azure VM instance and r
from the terminal. Our Azure VM is actually a server running Linux (Ubuntu), so we'll need to
connect to it using SSH ([Secure Shell][SSH]).
If you're running Windows, you'll need to connect using [PuTTY] or an equivalent Windows SSH client.
If you're running Windows, you'll need to connect using [PuTTY](https://www.putty.org) or an equivalent Windows SSH client.
If you're running Linux or macOS, then you already have an SSH client installed.
> **Note:**
......@@ -337,7 +337,7 @@ If you're running Linux or macOS, then you already have an SSH client installed.
> - Remember that you will need to login with the username and password you specified
> [when you created](#basics) your Azure VM
> - If you need to reset your VM password, read
> [how to reset SSH credentials for a user on an Azure VM][Azure-Troubleshoot-SSH-Connection].
> [how to reset SSH credentials for a user on an Azure VM](https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshoot-ssh-connection).
#### SSH from the command-line
......@@ -356,7 +356,7 @@ Provide your password at the prompt to authenticate.
#### SSH from Windows (PuTTY)
If you're using [PuTTY] in Windows as your [SSH] client, then you might want to take a quick
If you're using [PuTTY](https://www.putty.org) in Windows as your [SSH] client, then you might want to take a quick
read on [using PuTTY in Windows][Using-SSH-In-Putty].
### Updating GitLab
......@@ -416,12 +416,12 @@ Check out our other [Technical Articles](../../articles/index.md) or browse the
- [GitLab Enterprise Edition][EE]
- [Microsoft Azure][Azure]
- [Azure - Free Account FAQ][Azure-Free-Account-FAQ]
- [Azure - Marketplace][Azure-Marketplace]
- [Azure - Marketplace](https://azuremarketplace.microsoft.com/en-us/marketplace/)
- [Azure Portal][Azure-Portal]
- [Azure - Pricing Calculator][Azure-Pricing-Calculator]
- [Azure - Troubleshoot SSH Connections to an Azure Linux VM][Azure-Troubleshoot-SSH-Connection]
- [Azure - Properly Shutdown an Azure VM][Azure-Properly-Shutdown-VM]
- [SSH], [PuTTY] and [Using SSH in PuTTY][Using-SSH-In-Putty]
- [Azure - Troubleshoot SSH Connections to an Azure Linux VM](https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshoot-ssh-connection)
- [Azure - Properly Shutdown an Azure VM](https://buildazure.com/properly-shutdown-azure-vm-to-save-money/)
- [SSH], [PuTTY](https://www.putty.org) and [Using SSH in PuTTY][Using-SSH-In-Putty]
[Original-Blog-Post]: https://about.gitlab.com/2016/07/13/how-to-setup-a-gitlab-instance-on-microsoft-azure/ "How to Set up a GitLab Instance on Microsoft Azure"
[CE]: https://about.gitlab.com/features/
......@@ -430,16 +430,12 @@ Check out our other [Technical Articles](../../articles/index.md) or browse the
[Azure-Troubleshoot-Linux-VM]: https://docs.microsoft.com/en-us/azure/virtual-machines/linux/troubleshoot-app-connection "Troubleshoot application connectivity issues on a Linux virtual machine in Azure"
[Azure-IP-Address-Types]: https://docs.microsoft.com/en-us/azure/virtual-network/virtual-network-ip-addresses-overview-arm "IP address types and allocation methods in Azure"
[Azure-How-To-Open-Ports]: https://docs.microsoft.com/en-us/azure/virtual-machines/windows/nsg-quickstart-portal "How to open ports to a virtual machine with the Azure portal"
[Azure-Troubleshoot-SSH-Connection]: https://docs.microsoft.com/en-us/azure/virtual-machines/linux/troubleshoot-ssh-connection "Troubleshoot SSH connections to an Azure Linux VM"
[Azure]: https://azure.microsoft.com/en-us/
[Azure-Free-Account-FAQ]: https://azure.microsoft.com/en-us/free/free-account-faq/
[Azure-Marketplace]: https://azure.microsoft.com/en-us/marketplace/
[Azure-Portal]: https://portal.azure.com
[Azure-Pricing-Calculator]: https://azure.microsoft.com/en-us/pricing/calculator/
[Azure-Properly-Shutdown-VM]: https://buildazure.com/2017/03/16/properly-shutdown-azure-vm-to-save-money/ "Properly Shutdown an Azure VM to Save Money"
[SSH]: https://en.wikipedia.org/wiki/Secure_Shell
[PuTTY]: http://www.putty.org/
[Using-SSH-In-Putty]: https://mediatemple.net/community/products/dv/204404604/using-ssh-in-putty-
<!-- ## Troubleshooting
......
......@@ -14,9 +14,9 @@ locally on either macOS or Linux.
### On macOS
#### Install Docker Toolbox
#### Install Docker Desktop
- <https://www.docker.com/products/docker-toolbox>
- <https://www.docker.com/products/docker-desktop>
### On Linux
......@@ -115,7 +115,7 @@ docker-machine ip gitlab-test-env-do
# example output: 192.168.151.134
```
Browse to: <http://192.168.151.134:8888/>.
Browse to: `http://192.168.151.134:8888/`.
#### Execute interactive shell/edit configuration
......
......@@ -72,7 +72,7 @@ By default, Google assigns an ephemeral IP to your instance. It is strongly
recommended to assign a static IP if you are going to use GitLab in production
and use a domain name as we'll see below.
Read Google's documentation on how to [promote an ephemeral IP address][ip].
Read Google's documentation on how to [promote an ephemeral IP address](https://cloud.google.com/compute/docs/ip-addresses/reserve-static-external-ip-address#promote_ephemeral_ip).
### Using a domain name
......@@ -133,9 +133,7 @@ Kerberos, etc. Here are some documents you might be interested in reading:
- [GitLab Container Registry configuration](../../administration/packages/container_registry.md)
[freetrial]: https://console.cloud.google.com/freetrial "GCP free trial"
[ip]: https://cloud.google.com/compute/docs/configure-instance-ip-addresses#promote_ephemeral_ip "Configuring an Instance's IP Addresses"
[gcp]: https://cloud.google.com/ "Google Cloud Platform"
[launcher]: https://cloud.google.com/launcher/ "Google Cloud Launcher home page"
[req]: ../requirements.md "GitLab hardware and software requirements"
[ssh]: https://cloud.google.com/compute/docs/instances/connecting-to-instance "Connecting to Linux Instances"
[omni-smtp]: https://docs.gitlab.com/omnibus/settings/smtp.html#smtp-settings "Omnibus GitLab SMTP settings"
......
......@@ -10,7 +10,7 @@ other installation options, see the [main installation page](README.md).
It was created for and tested on **Debian/Ubuntu** operating systems.
Read [requirements.md](requirements.md) for hardware and operating system requirements.
If you want to install on RHEL/CentOS, we recommend using the
[Omnibus packages](https://about.gitlab.com/downloads/).
[Omnibus packages](https://about.gitlab.com/install/).
This guide is long because it covers many cases and includes all commands you
need; it is [one of the few installation scripts that actually works out of the box](https://twitter.com/robinvdvleuten/status/424163226532986880).
......@@ -25,7 +25,7 @@ following the
## Consider the Omnibus package installation
Since an installation from source is a lot of work and error prone we strongly recommend the fast and reliable [Omnibus package installation](https://about.gitlab.com/downloads/) (deb/rpm).
Since an installation from source is a lot of work and error prone we strongly recommend the fast and reliable [Omnibus package installation](https://about.gitlab.com/install/) (deb/rpm).
One reason the Omnibus package is more reliable is its use of Runit to restart any of the GitLab processes in case one crashes.
On heavily used GitLab instances the memory usage of the Sidekiq background worker will grow over time.
......@@ -205,7 +205,7 @@ The Ruby interpreter is required to run GitLab.
**Note:** The currently supported Ruby (MRI) version is 2.6.x. GitLab 12.2
dropped support for Ruby 2.5.x.
The use of Ruby version managers such as [RVM], [rbenv] or [chruby] with GitLab
The use of Ruby version managers such as [RVM], [rbenv](https://github.com/rbenv/rbenv) or [chruby] with GitLab
in production, frequently leads to hard to diagnose problems. For example,
GitLab Shell is called from OpenSSH, and having a version manager can prevent
pushing and pulling over SSH. Version managers are not supported and we strongly
......@@ -532,7 +532,7 @@ sudo -u git -H chmod o-rwx config/database.yml
### Install Gems
NOTE: **Note:**
As of Bundler 1.5.2, you can invoke `bundle install -jN` (where `N` is the number of your processor cores) and enjoy parallel gems installation with measurable difference in completion time (~60% faster). Check the number of your cores with `nproc`. For more information, see this [post](https://robots.thoughtbot.com/parallel-gem-installing-using-bundler).
As of Bundler 1.5.2, you can invoke `bundle install -jN` (where `N` is the number of your processor cores) and enjoy parallel gems installation with measurable difference in completion time (~60% faster). Check the number of your cores with `nproc`. For more information, see this [post](https://thoughtbot.com/blog/parallel-gem-installing-using-bundler).
Make sure you have `bundle` (run `bundle -v`):
......@@ -1025,5 +1025,4 @@ sudo yum groupinstall 'Development Tools'
```
[RVM]: https://rvm.io/ "RVM Homepage"
[rbenv]: https://github.com/sstephenson/rbenv "rbenv on GitHub"
[chruby]: https://github.com/postmodern/chruby "chruby on GitHub"
......@@ -38,7 +38,7 @@ test OpenShift easily:
- [VirtualBox]
- [Vagrant]
- [OpenShift Client][oc] (`oc` for short)
- [OpenShift Client](https://docs.okd.io/latest/cli_reference/get_started_cli.html) (`oc` for short)
It is also important to mention that for the purposes of this tutorial, the
latest Origin release is used:
......@@ -92,7 +92,7 @@ Now that OpenShift is set up, let's see how the web console looks like.
Once Vagrant finishes its thing with the VM, you will be presented with a
message which has some important information. One of them is the IP address
of the deployed OpenShift platform and in particular <https://10.2.2.2:8443/console/>.
of the deployed OpenShift platform and in particular `https://10.2.2.2:8443/console/`.
Open this link with your browser and accept the self-signed certificate in
order to proceed.
......@@ -101,7 +101,7 @@ landing page looks like:
![openshift web console](img/web-console.png)
You can see that a number of [projects] are already created for testing purposes.
You can see that a number of [projects](https://docs.okd.io/latest/dev_guide/projects.html) are already created for testing purposes.
If you head over the `openshift-infra` project, a number of services with their
respective pods are there to explore.
......@@ -109,15 +109,15 @@ respective pods are there to explore.
![openshift web console](img/openshift-infra-project.png)
We are not going to explore the whole interface, but if you want to learn about
the key concepts of OpenShift, read the [core concepts reference][core] in the
official documentation.
the key concepts of OpenShift, read the [core concepts reference](https://docs.okd.io/latest/architecture/core_concepts/index.html)
in the official documentation.
### Explore the OpenShift CLI
OpenShift Client (`oc`), is a powerful CLI tool that talks to the OpenShift API
and performs pretty much everything you can do from the web UI and much more.
Assuming you have [installed][oc] it, let's explore some of its main
Assuming you have [installed](https://docs.okd.io/latest/cli_reference/get_started_cli.html) it, let's explore some of its main
functionalities.
Let's first see the version of `oc`:
......@@ -174,7 +174,7 @@ The last command should spit a bunch of information about the statuses of the
pods and the services, which if you look closely is what we encountered in the
second image when we explored the web console.
You can always read more about `oc` in the [OpenShift CLI documentation][oc].
You can always read more about `oc` in the [OpenShift CLI documentation](https://docs.okd.io/latest/cli_reference/get_started_cli.html).
### Troubleshooting the all-in-one VM
......@@ -250,7 +250,7 @@ The next step is to import the OpenShift template for GitLab.
### Import the template
The [template][templates] is basically a JSON file which describes a set of
The [template](https://docs.okd.io/latest/architecture/core_concepts/templates.html) is basically a JSON file which describes a set of
related object definitions to be created together, as well as a set of
parameters for those objects.
......@@ -318,7 +318,7 @@ password for PostgreSQL, it will be created automatically.
The `gitlab.apps.10.2.2.2.nip.io` hostname that is used by default will
resolve to the host with IP `10.2.2.2` which is the IP our VM uses. It is a
trick to have distinct FQDNs pointing to services that are on our local network.
Read more on how this works in <http://nip.io>.
Read more on how this works in <https://nip.io>.
Now that we configured this, let's see how to manage and scale GitLab.
......@@ -355,7 +355,7 @@ Navigate back to the **Overview** and hopefully all pods will be up and running.
![GitLab running](img/gitlab-running.png)
Congratulations! You can now navigate to your new shiny GitLab instance by
visiting <http://gitlab.apps.10.2.2.2.nip.io> where you will be asked to
visiting `http://gitlab.apps.10.2.2.2.nip.io` where you will be asked to
change the root user password. Login using `root` as username and providing the
password you just set, and start using GitLab!
......@@ -366,7 +366,7 @@ of resources, you'd be happy to know that you can scale up with the push of a
button.
In the **Overview** page just click the up arrow button in the pod where
GitLab is. The change is instant and you can see the number of [replicas] now
GitLab is. The change is instant and you can see the number of [replicas](https://docs.okd.io/latest/architecture/core_concepts/deployments.html#replication-controllers) now
running scaled to 2.
![GitLab scale](img/gitlab-scale.png)
......@@ -384,7 +384,7 @@ scale up. If a pod is in pending state for too long, you can navigate to
### Scale GitLab using the `oc` CLI
Using `oc` is super easy to scale up the replicas of a pod. You may want to
skim through the [basic CLI operations][basic-cli] to get a taste how the CLI
skim through the [basic CLI operations](https://docs.okd.io/latest/cli_reference/basic_cli_operations.html) to get a taste of how the CLI
commands are used. Pay extra attention to the object types as we will use some
of them and their abbreviated versions below.
......@@ -457,7 +457,7 @@ In case you were wondering whether there is an option to autoscale a pod based
on the resources of your server, the answer is yes, of course there is.
We will not expand on this matter, but feel free to read the documentation on
OpenShift's website about [autoscaling].
OpenShift's website about [autoscaling](https://docs.okd.io/latest/dev_guide/pod_autoscaling.html).
## Current limitations
......@@ -472,7 +472,7 @@ bother us. In any case, it is something to keep in mind when deploying GitLab
on a production cluster.
In order to deploy GitLab on a production cluster, you will need to assign the
GitLab service account to the `anyuid` [Security Context Constraints][scc].
GitLab service account to the `anyuid` [Security Context Constraints](https://docs.okd.io/latest/admin_guide/manage_scc.html).
For OpenShift v3.0, you will need to do this manually:
......@@ -505,25 +505,16 @@ application and you are done. You are ready to login to your new GitLab instance
And remember that in this tutorial we just scratched the surface of what Origin
is capable of. As always, you can refer to the detailed
[documentation][openshift-docs] to learn more about deploying your own OpenShift
[documentation](https://docs.okd.io) to learn more about deploying your own OpenShift
PaaS and managing your applications with the ease of containers.
[RedHat]: https://www.redhat.com/en "RedHat website"
[vm-new]: https://app.vagrantup.com/openshift/boxes/origin-all-in-one "Official OpenShift Vagrant box on Vagrant Cloud"
[template]: https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/docker/openshift-template.json "OpenShift template for GitLab"
[Docker]: https://www.docker.com "Docker website"
[oc]: https://docs.openshift.org/latest/cli_reference/get_started_cli.html "Documentation - oc CLI documentation"
[VirtualBox]: https://www.virtualbox.org/wiki/Downloads "VirtualBox downloads"
[Vagrant]: https://www.vagrantup.com/downloads.html "Vagrant downloads"
[projects]: https://docs.openshift.org/latest/dev_guide/projects.html "Documentation - Projects overview"
[core]: https://docs.openshift.org/latest/architecture/core_concepts/index.html "Documentation - Core concepts of OpenShift Origin"
[templates]: https://docs.openshift.org/latest/architecture/core_concepts/templates.html "Documentation - OpenShift templates"
[old-post]: https://blog.openshift.com/deploy-gitlab-openshift/ "Old post - Deploy GitLab on OpenShift"
[line]: https://gitlab.com/gitlab-org/omnibus-gitlab/blob/658c065c8d022ce858dd63eaeeadb0b2ddc8deea/docker/openshift-template.json#L239 "GitLab - OpenShift template"
[oc-gh]: https://github.com/openshift/origin/releases/tag/v1.3.0 "OpenShift Origin 1.3.0 release on GitHub"
[ha]: ../../administration/high_availability/gitlab.html "Documentation - GitLab High Availability"
[replicas]: https://docs.openshift.org/latest/architecture/core_concepts/deployments.html#replication-controllers "Documentation - Replication controller"
[autoscaling]: https://docs.openshift.org/latest/dev_guide/pod_autoscaling.html "Documentation - Autoscale"
[basic-cli]: https://docs.openshift.org/latest/cli_reference/basic_cli_operations.html "Documentation - Basic CLI operations"
[openshift-docs]: https://docs.openshift.org "OpenShift documentation"
[scc]: https://docs.openshift.org/latest/admin_guide/manage_scc.html "Documentation - Managing Security Context Constraints"
[ha]: ../../administration/high_availability/gitlab.md "Documentation - GitLab High Availability"
......@@ -69,7 +69,7 @@ This is the recommended minimum hardware for a handful of example GitLab user ba
- 4 cores supports up to 500 users
- 8 cores supports up to 1,000 users
- 32 cores supports up to 5,000 users
- More users? Run it high-availability on [multiple application servers](https://about.gitlab.com/high-availability/)
- More users? Run it high-availability on [multiple application servers](https://about.gitlab.com/solutions/high-availability/)
### Memory
......@@ -86,7 +86,7 @@ errors during usage.
- 16GB RAM supports up to 500 users
- 32GB RAM supports up to 1,000 users
- 128GB RAM supports up to 5,000 users
- More users? Run it high-availability on [multiple application servers](https://about.gitlab.com/high-availability/)
- More users? Run it high-availability on [multiple application servers](https://about.gitlab.com/solutions/high-availability/)
We recommend having at least [2GB of swap on your server](https://askubuntu.com/a/505344/310789), even if you currently have
enough available RAM. Having swap will help reduce the chance of errors occurring
......@@ -139,7 +139,7 @@ If you are using [GitLab Geo](../development/geo.md):
- The
[tracking database](../development/geo.md#using-the-tracking-database)
requires the
[postgres_fdw](https://www.postgresql.org/docs/9.6/static/postgres-fdw.html)
[postgres_fdw](https://www.postgresql.org/docs/9.6/postgres-fdw.html)
extension.
```
......
......@@ -55,7 +55,7 @@ The following languages and dependency managers are supported.
|----------------------------- | --------- | ------------ |
| Java ([Gradle](https://gradle.org/)) | not currently ([issue](https://gitlab.com/gitlab-org/gitlab/issues/13075 "Dependency Scanning for Gradle" )) | not available |
| Java ([Maven](https://maven.apache.org/)) | yes | [gemnasium](https://gitlab.com/gitlab-org/security-products/gemnasium) |
| JavaScript ([npm](https://www.npmjs.com/), [yarn](https://yarnpkg.com/en/)) | yes | [gemnasium](https://gitlab.com/gitlab-org/security-products/gemnasium), [Retire.js](https://retirejs.github.io/retire.js) |
| JavaScript ([npm](https://www.npmjs.com/), [yarn](https://yarnpkg.com/en/)) | yes | [gemnasium](https://gitlab.com/gitlab-org/security-products/gemnasium), [Retire.js](https://retirejs.github.io/retire.js/) |
| Go ([Golang](https://golang.org/)) | not currently ([issue](https://gitlab.com/gitlab-org/gitlab/issues/7132 "Dependency Scanning for Go")) | not available |
| PHP ([Composer](https://getcomposer.org/)) | yes | [gemnasium](https://gitlab.com/gitlab-org/security-products/gemnasium) |
| Python ([pip](https://pip.pypa.io/en/stable/)) | yes | [gemnasium](https://gitlab.com/gitlab-org/security-products/gemnasium) |
......
......@@ -57,7 +57,7 @@ This workflow comes with some drawbacks and there's a
## Interacting with the vulnerabilities
> Introduced in [GitLab Ultimate](https://about.gitlab.com/pricing) 10.8.
> Introduced in [GitLab Ultimate](https://about.gitlab.com/pricing/) 10.8.
CAUTION: **Warning:**
This feature is currently [Alpha](https://about.gitlab.com/handbook/product/#alpha-beta-ga) and while you can start using it, it may receive important changes in the future.
......@@ -84,7 +84,7 @@ If you wish to undo this dismissal, you can click the **Undo dismiss** button.
#### Adding a dismissal reason
> Introduced in [GitLab Ultimate](https://about.gitlab.com/pricing) 12.0.
> Introduced in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.0.
When dismissing a vulnerability, it's often helpful to provide a reason for doing so.
If you press the comment button next to **Dismiss vulnerability** in the modal, a text box will appear, allowing you to add a comment with your dismissal.
......@@ -110,7 +110,7 @@ the vulnerability will now have an associated issue next to the name.
### Solutions for vulnerabilities (auto-remediation)
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/5656) in [GitLab Ultimate](https://about.gitlab.com/pricing) 11.7.
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/5656) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.7.
Some vulnerabilities can be fixed by applying the solution that GitLab
automatically generates. The following scanners are supported:
......@@ -134,7 +134,7 @@ generated by GitLab. To apply the fix:
#### Creating a merge request from a vulnerability
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/9224) in [GitLab Ultimate](https://about.gitlab.com/pricing) 11.9.
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/9224) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.9.
In certain cases, GitLab will allow you to create a merge request that will
automatically remediate the vulnerability. Any vulnerability that has a
......@@ -148,7 +148,7 @@ Clicking on this button will create a merge request to apply the solution onto t
## Security approvals in merge requests **(ULTIMATE)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/9928) in [GitLab Ultimate](https://about.gitlab.com/pricing) 12.2.
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/9928) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.2.
Merge Request Approvals can be configured to require approval from a member
of your security team when a vulnerability, or a software license compliance violation would be introduced by a merge request.
......
......@@ -60,7 +60,7 @@ The following languages and package managers are supported.
| Elixir | [mix](https://elixir-lang.org/getting-started/mix-otp/introduction-to-mix.html) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)) |[License Finder](https://github.com/pivotal/LicenseFinder)|
| C++/C | [conan](https://conan.io/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)|
| Scala | [sbt](https://www.scala-sbt.org/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)|
| Rust | [cargo](https://crates.io/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)|
| Rust | [cargo](https://crates.io) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)|
| PHP | [composer](https://getcomposer.org/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)|
## Requirements
......
......@@ -67,7 +67,7 @@ The following table shows which languages, package managers and frameworks are s
| .NET | [Security Code Scan](https://security-code-scan.github.io) | 11.0 |
| Any | [Gitleaks](https://github.com/zricethezav/gitleaks) and [TruffleHog](https://github.com/dxa4481/truffleHog) | 11.9 |
| Apex (Salesforce) | [pmd](https://pmd.github.io/pmd/index.html) | 12.1 |
| C/C++ | [Flawfinder](https://www.dwheeler.com/flawfinder/) | 10.7 |
| C/C++ | [Flawfinder](https://dwheeler.com/flawfinder/) | 10.7 |
| Elixir (Phoenix) | [Sobelow](https://github.com/nccgroup/sobelow) | 11.10 |
| Go | [Gosec](https://github.com/securego/gosec) | 10.7 |
| Groovy ([Ant](https://ant.apache.org/), [Gradle](https://gradle.org/), [Maven](https://maven.apache.org/) and [SBT](https://www.scala-sbt.org/)) | [SpotBugs](https://spotbugs.github.io/) with the [find-sec-bugs](https://find-sec-bugs.github.io/) plugin | 11.3 (Gradle) & 11.9 (Ant, Maven, SBT) |
......
......@@ -36,7 +36,7 @@ To use the group, project or pipeline security dashboard:
## Pipeline Security Dashboard
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/13496) in [GitLab Ultimate](https://about.gitlab.com/pricing) 12.3.
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/13496) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.3.
At the pipeline level, the Security Dashboard displays the vulnerabilities present in the branch of the project the pipeline was run against.
......@@ -46,7 +46,7 @@ Visit the page for any pipeline which has run any of the [supported reports](#su
## Project Security Dashboard
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/6165) in [GitLab Ultimate](https://about.gitlab.com/pricing) 11.1.
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/6165) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.1.
At the project level, the Security Dashboard displays the latest security reports
for your project. Use it to find and fix vulnerabilities affecting the
......@@ -56,7 +56,7 @@ for your project. Use it to find and fix vulnerabilities affecting the
## Group Security Dashboard
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/6709) in [GitLab Ultimate](https://about.gitlab.com/pricing) 11.5.
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/6709) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.5.
The group Security Dashboard gives an overview of the vulnerabilities of all the
projects in a group and its subgroups.
......
......@@ -38,7 +38,6 @@ to be enabled:
- Files uploaded must have a file extension of either `png`, `jpg`, `jpeg`, `gif`, `bmp`, `tiff` or `ico`.
The [`svg` extension is not yet supported](https://gitlab.com/gitlab-org/gitlab/issues/12771).
- Design uploads are limited to 10 files at a time.
- [Designs cannot yet be deleted](https://gitlab.com/gitlab-org/gitlab/issues/11089).
- Design Management is
[not yet supported in the project export](https://gitlab.com/gitlab-org/gitlab/issues/11090).
- Design Management data
......@@ -64,13 +63,13 @@ of the design, and will replace the previous version.
## Viewing designs
Images on the Design Management page can be enlarged by clicking on them.
Images on the Design Management page can be enlarged by clicking on them.
The number of comments on a design — if any — is listed to the right
of the design filename. Clicking on this number enlarges the design
just like clicking anywhere else on the design.
When a design is added or modified, an icon is displayed on the item
to help summarize changes between versions.
to help summarize changes between versions.
| Indicator | Example |
| --------- | ------- |
......
......@@ -65,6 +65,7 @@ The following items will be exported:
- Project configuration, including services
- Issues with comments, merge requests with diffs and comments, labels, milestones, snippets,
and other project entities
- Design Management files and data **(PREMIUM)**
- LFS objects
- Issue boards
......
......@@ -38,6 +38,10 @@ module Gitlab
"lfs-objects"
end
def wiki_repo_bundle_filename
"project.wiki.bundle"
end
def config_file
Rails.root.join('lib/gitlab/import_export/import_export.yml')
end
......@@ -61,3 +65,5 @@ module Gitlab
end
end
end
Gitlab::ImportExport.prepend_if_ee('EE::Gitlab::ImportExport')
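The new `wiki_repo_bundle_filename` helper centralizes the bundle name so callers stop hard-coding the string; a small usage sketch (the export path is hypothetical):

```ruby
# Illustrative: resolving the wiki bundle path through the shared helper.
export_path = '/tmp/gitlab_export' # hypothetical shared.export_path value

wiki_bundle = File.join(export_path, Gitlab::ImportExport.wiki_repo_bundle_filename)
# => "/tmp/gitlab_export/project.wiki.bundle"
```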
......@@ -26,30 +26,60 @@ module Gitlab
end
def find
find_object || @klass.create(project_attributes)
find_object || klass.create(project_attributes)
end
private
attr_reader :klass, :attributes, :group, :project
def find_object
@klass.where(where_clause).first
klass.where(where_clause).first
end
def where_clause
@attributes.slice('title').map do |key, value|
scope_clause = table[:project_id].eq(@project.id)
scope_clause = scope_clause.or(table[:group_id].eq(@group.id)) if @group
where_clauses.reduce(:and)
end
def where_clauses
[
where_clause_base,
where_clause_for_title,
where_clause_for_klass
].compact
end
# Returns Arel clause `"{table_name}"."project_id" = {project.id}`
# or, if group is present:
# `"{table_name}"."project_id" = {project.id} OR "{table_name}"."group_id" = {group.id}`
def where_clause_base
clause = table[:project_id].eq(project.id)
clause = clause.or(table[:group_id].eq(group.id)) if group
clause
end
table[key].eq(value).and(scope_clause)
end.reduce(:or)
# Returns Arel clause `"{table_name}"."title" = '{attributes['title']}'`
# if `attributes` has a 'title' key, otherwise `nil`.
def where_clause_for_title
attrs_to_arel(attributes.slice('title'))
end
# Returns Arel clause:
# `"{table_name}"."{attrs.keys[0]}" = '{attrs.values[0]} AND {table_name}"."{attrs.keys[1]}" = '{attrs.values[1]}"`
# from the given Hash of attributes.
def attrs_to_arel(attrs)
attrs.map do |key, value|
table[key].eq(value)
end.reduce(:and)
end
def table
@table ||= @klass.arel_table
@table ||= klass.arel_table
end
def project_attributes
@attributes.except('group').tap do |atts|
attributes.except('group').tap do |atts|
if label?
atts['type'] = 'ProjectLabel' # Always create project labels
elsif milestone?
......@@ -60,15 +90,17 @@ module Gitlab
claim_iid
end
end
atts['importing'] = true if klass.ancestors.include?(Importable)
end
end
def label?
@klass == Label
klass == Label
end
def milestone?
@klass == Milestone
klass == Milestone
end
# If an existing group milestone used the IID
......@@ -79,7 +111,7 @@ module Gitlab
def claim_iid
# The milestone has to be a group milestone, as it's the only case where
# we set the IID as the maximum. The rest of them are fixed.
milestone = @project.milestones.find_by(iid: @attributes['iid'])
milestone = project.milestones.find_by(iid: attributes['iid'])
return unless milestone
......@@ -87,6 +119,15 @@ module Gitlab
milestone.ensure_project_iid!
milestone.save!
end
protected
# Returns Arel clause for a particular model or `nil`.
def where_clause_for_klass
# no-op
end
end
end
end
Gitlab::ImportExport::GroupProjectObjectBuilder.prepend_if_ee('EE::Gitlab::ImportExport::GroupProjectObjectBuilder')
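For reference, the predicate composition described in the comments above can be reproduced standalone; in this sketch `project`, `group`, and the title value are assumed to be in scope, and `Label` stands in for the builder's `klass`:

```ruby
# Illustrative composition mirroring where_clause_base / where_clause_for_title.
table = Label.arel_table

base  = table[:project_id].eq(project.id)
base  = base.or(table[:group_id].eq(group.id)) if group
title = table[:title].eq('bug')

# Roughly: WHERE ("labels"."project_id" = ? OR "labels"."group_id" = ?)
#            AND "labels"."title" = 'bug'
Label.where([base, title].compact.reduce(:and)).first
```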
......@@ -248,7 +248,16 @@ preloads:
ee:
tree:
project:
protected_branches:
- issues:
- designs:
- notes:
- :author
- events:
- :push_event_payload
- design_versions:
- actions:
- :design # Duplicate export of issues.designs in order to link the record to both Issue and DesignVersion
- protected_branches:
- :unprotect_access_levels
protected_environments:
- protected_environments:
- :deploy_access_levels
......@@ -21,7 +21,7 @@ module Gitlab
if import_file && check_version! && restorers.all?(&:restore) && overwrite_project
project_tree.restored_project
else
raise Projects::ImportService::Error.new(@shared.errors.join(', '))
raise Projects::ImportService::Error.new(shared.errors.to_sentence)
end
rescue => e
raise Projects::ImportService::Error.new(e.message)
......@@ -31,70 +31,72 @@ module Gitlab
private
attr_accessor :archive_file, :current_user, :project, :shared
def restorers
[repo_restorer, wiki_restorer, project_tree, avatar_restorer,
uploads_restorer, lfs_restorer, statistics_restorer]
end
def import_file
Gitlab::ImportExport::FileImporter.import(project: @project,
archive_file: @archive_file,
shared: @shared)
Gitlab::ImportExport::FileImporter.import(project: project,
archive_file: archive_file,
shared: shared)
end
def check_version!
Gitlab::ImportExport::VersionChecker.check!(shared: @shared)
Gitlab::ImportExport::VersionChecker.check!(shared: shared)
end
def project_tree
@project_tree ||= Gitlab::ImportExport::ProjectTreeRestorer.new(user: @current_user,
shared: @shared,
project: @project)
@project_tree ||= Gitlab::ImportExport::ProjectTreeRestorer.new(user: current_user,
shared: shared,
project: project)
end
def avatar_restorer
Gitlab::ImportExport::AvatarRestorer.new(project: project_tree.restored_project, shared: @shared)
Gitlab::ImportExport::AvatarRestorer.new(project: project_tree.restored_project, shared: shared)
end
def repo_restorer
Gitlab::ImportExport::RepoRestorer.new(path_to_bundle: repo_path,
shared: @shared,
shared: shared,
project: project_tree.restored_project)
end
def wiki_restorer
Gitlab::ImportExport::WikiRestorer.new(path_to_bundle: wiki_repo_path,
shared: @shared,
shared: shared,
project: ProjectWiki.new(project_tree.restored_project),
wiki_enabled: @project.wiki_enabled?)
wiki_enabled: project.wiki_enabled?)
end
def uploads_restorer
Gitlab::ImportExport::UploadsRestorer.new(project: project_tree.restored_project, shared: @shared)
Gitlab::ImportExport::UploadsRestorer.new(project: project_tree.restored_project, shared: shared)
end
def lfs_restorer
Gitlab::ImportExport::LfsRestorer.new(project: project_tree.restored_project, shared: @shared)
Gitlab::ImportExport::LfsRestorer.new(project: project_tree.restored_project, shared: shared)
end
def statistics_restorer
Gitlab::ImportExport::StatisticsRestorer.new(project: project_tree.restored_project, shared: @shared)
Gitlab::ImportExport::StatisticsRestorer.new(project: project_tree.restored_project, shared: shared)
end
def path_with_namespace
File.join(@project.namespace.full_path, @project.path)
File.join(project.namespace.full_path, project.path)
end
def repo_path
File.join(@shared.export_path, 'project.bundle')
File.join(shared.export_path, Gitlab::ImportExport.project_bundle_filename)
end
def wiki_repo_path
File.join(@shared.export_path, 'project.wiki.bundle')
File.join(shared.export_path, Gitlab::ImportExport.wiki_repo_bundle_filename)
end
def remove_import_file
upload = @project.import_export_upload
upload = project.import_export_upload
return unless upload&.import_file&.file
......@@ -105,10 +107,10 @@ module Gitlab
def overwrite_project
project = project_tree.restored_project
return unless can?(@current_user, :admin_namespace, project.namespace)
return unless can?(current_user, :admin_namespace, project.namespace)
if overwrite_project?
::Projects::OverwriteProjectService.new(project, @current_user)
::Projects::OverwriteProjectService.new(project, current_user)
.execute(project_to_overwrite)
end
......@@ -116,7 +118,7 @@ module Gitlab
end
def original_path
@project.import_data&.data&.fetch('original_path', nil)
project.import_data&.data&.fetch('original_path', nil)
end
def overwrite_project?
......@@ -125,9 +127,11 @@ module Gitlab
def project_to_overwrite
strong_memoize(:project_to_overwrite) do
Project.find_by_full_path("#{@project.namespace.full_path}/#{original_path}")
Project.find_by_full_path("#{project.namespace.full_path}/#{original_path}")
end
end
end
end
end
Gitlab::ImportExport::Importer.prepend_if_ee('EE::Gitlab::ImportExport::Importer')
......@@ -93,6 +93,10 @@ module Gitlab
end
end
def remove_feature_dependent_sub_relations(_relation_item)
# no-op
end
def project_relations_without_project_members
# We remove `project_members` as they are deserialized separately
project_relations.except(:project_members)
......@@ -171,6 +175,8 @@ module Gitlab
next
end
remove_feature_dependent_sub_relations(relation_item)
# A transaction at this level is slower than one single transaction,
# but we can't move it to the upper level or GC won't get rid of the AR objects
# after we save each batch.
......@@ -238,3 +244,5 @@ module Gitlab
end
end
end
Gitlab::ImportExport::ProjectTreeRestorer.prepend_if_ee('::EE::Gitlab::ImportExport::ProjectTreeRestorer')
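The transaction comment above captures the core trade-off of the batched restore: wrapping each batch in its own transaction is slower than one big transaction, but it lets the ActiveRecord objects of a finished batch be garbage-collected instead of staying referenced until the end of the import. A rough sketch of the idea (names and batch size are illustrative, not the restorer's actual code):

relation_items.each_slice(100) do |batch|
  ActiveRecord::Base.transaction do
    batch.each(&:save!)
  end
  # The saved batch goes out of scope here, so its AR objects can be GC'd.
  # A single outer transaction around the whole loop would keep them all
  # referenced until the very end of the import.
end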
......@@ -34,13 +34,13 @@ module Gitlab
PROJECT_REFERENCES = %w[project_id source_project_id target_project_id].freeze
BUILD_MODELS = %w[Ci::Build commit_status].freeze
BUILD_MODELS = %i[Ci::Build commit_status].freeze
IMPORTED_OBJECT_MAX_RETRIES = 5.freeze
EXISTING_OBJECT_CHECK = %i[milestone milestones label labels project_label project_labels group_label group_labels project_feature].freeze
TOKEN_RESET_MODELS = %w[Project Namespace Ci::Trigger Ci::Build Ci::Runner ProjectHook].freeze
TOKEN_RESET_MODELS = %i[Project Namespace Ci::Trigger Ci::Build Ci::Runner ProjectHook].freeze
def self.create(*args)
new(*args).create
......@@ -56,7 +56,7 @@ module Gitlab
end
def initialize(relation_sym:, relation_hash:, members_mapper:, user:, project:, excluded_keys: [])
@relation_name = self.class.overrides[relation_sym] || relation_sym
@relation_name = self.class.overrides[relation_sym]&.to_sym || relation_sym
@relation_hash = relation_hash.except('noteable_id')
@members_mapper = members_mapper
@user = user
......@@ -92,6 +92,10 @@ module Gitlab
OVERRIDES
end
def self.existing_object_check
EXISTING_OBJECT_CHECK
end
private
def setup_models
......@@ -105,7 +109,7 @@ module Gitlab
update_group_references
remove_duplicate_assignees
setup_pipeline if @relation_name == 'Ci::Pipeline'
setup_pipeline if @relation_name == :'Ci::Pipeline'
reset_tokens!
remove_encrypted_attributes!
......@@ -184,14 +188,14 @@ module Gitlab
end
def update_group_references
return unless EXISTING_OBJECT_CHECK.include?(@relation_name)
return unless self.class.existing_object_check.include?(@relation_name)
return unless @relation_hash['group_id']
@relation_hash['group_id'] = @project.namespace_id
end
def reset_tokens!
return unless Gitlab::ImportExport.reset_tokens? && TOKEN_RESET_MODELS.include?(@relation_name.to_s)
return unless Gitlab::ImportExport.reset_tokens? && TOKEN_RESET_MODELS.include?(@relation_name)
# If we import/export a project to the same instance, tokens will have to be reset.
# We also have to reset them to avoid issues when the gitlab secrets file cannot be copied across.
......@@ -255,7 +259,7 @@ module Gitlab
# Only find existing records to avoid mapping tables such as milestones
# Otherwise always create the record, skipping the extra SELECT clause.
@existing_or_new_object ||= begin
if EXISTING_OBJECT_CHECK.include?(@relation_name)
if self.class.existing_object_check.include?(@relation_name)
attribute_hash = attribute_hash_for(['events'])
existing_object.assign_attributes(attribute_hash) if attribute_hash.any?
......@@ -284,7 +288,7 @@ module Gitlab
end
def legacy_trigger?
@relation_name == 'Ci::Trigger' && @relation_hash['owner_id'].nil?
@relation_name == :'Ci::Trigger' && @relation_hash['owner_id'].nil?
end
def find_or_create_object!
......@@ -293,7 +297,7 @@ module Gitlab
# Can't use IDs as validation exists calling `group` or `project` attributes
finder_hash = parsed_relation_hash.tap do |hash|
hash['group'] = @project.group if relation_class.attribute_method?('group_id')
hash['project'] = @project
hash['project'] = @project if relation_class.reflect_on_association(:project)
hash.delete('project_id')
end
......
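A side effect of switching these constants from %w to %i is that every relation-name comparison in the class now happens on symbols, which is why the override lookup in `initialize` normalises with `&.to_sym` and the checks use quoted symbol literals such as `:'Ci::Pipeline'`. A tiny illustration (the overrides hash content is hypothetical):

overrides = { snippets: 'project_snippets' }         # values may still be strings
overrides[:snippets]&.to_sym || :snippets            # => :project_snippets

BUILD_MODELS = %i[Ci::Build commit_status].freeze    # => [:"Ci::Build", :commit_status]
BUILD_MODELS.include?(:'Ci::Build')                  # => true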
......@@ -6,19 +6,23 @@ module Gitlab
include Gitlab::ImportExport::CommandLineUtil
def initialize(project:, shared:, path_to_bundle:)
@project = project
@repository = project.repository
@path_to_bundle = path_to_bundle
@shared = shared
end
def restore
return true unless File.exist?(@path_to_bundle)
return true unless File.exist?(path_to_bundle)
@project.repository.create_from_bundle(@path_to_bundle)
repository.create_from_bundle(path_to_bundle)
rescue => e
@shared.error(e)
shared.error(e)
false
end
private
attr_accessor :repository, :path_to_bundle, :shared
end
end
end
......@@ -5,27 +5,35 @@ module Gitlab
class RepoSaver
include Gitlab::ImportExport::CommandLineUtil
attr_reader :full_path
attr_reader :project, :repository, :shared
def initialize(project:, shared:)
@project = project
@shared = shared
@repository = @project.repository
end
def save
return true if @project.empty_repo? # it's ok to have no repo
return true unless repository_exists? # it's ok to have no repo
@full_path = File.join(@shared.export_path, ImportExport.project_bundle_filename)
bundle_to_disk
end
private
def repository_exists?
repository.exists? && !repository.empty?
end
def bundle_full_path
File.join(shared.export_path, ImportExport.project_bundle_filename)
end
def bundle_to_disk
mkdir_p(@shared.export_path)
@project.repository.bundle_to_disk(@full_path)
mkdir_p(shared.export_path)
repository.bundle_to_disk(bundle_full_path)
rescue => e
@shared.error(e)
shared.error(e)
false
end
end
......
......@@ -4,28 +4,16 @@ module Gitlab
module ImportExport
class WikiRepoSaver < RepoSaver
def save
@wiki = ProjectWiki.new(@project)
return true unless wiki_repository_exists? # it's okay to have no Wiki
wiki = ProjectWiki.new(project)
@repository = wiki.repository
bundle_to_disk(File.join(@shared.export_path, project_filename))
end
def bundle_to_disk(full_path)
mkdir_p(@shared.export_path)
@wiki.repository.bundle_to_disk(full_path)
rescue => e
@shared.error(e)
false
super
end
private
def project_filename
"project.wiki.bundle"
end
def wiki_repository_exists?
@wiki.repository.exists? && !@wiki.repository.empty?
def bundle_full_path
File.join(shared.export_path, ImportExport.wiki_repo_bundle_filename)
end
end
end
......
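After this refactor the wiki saver only swaps in the wiki repository and its bundle filename; the existence check and the `bundle_to_disk` flow live entirely in `RepoSaver`. Invoking either saver looks the same, mirroring the specs further down in this commit:

Gitlab::ImportExport::RepoSaver.new(project: project, shared: shared).save
Gitlab::ImportExport::WikiRepoSaver.new(project: project, shared: shared).save

Both return true when there is no repository to bundle, and false (after recording the error on `shared`) when bundling raises.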
......@@ -6,19 +6,22 @@ module Gitlab
def initialize(project:, shared:, path_to_bundle:, wiki_enabled:)
super(project: project, shared: shared, path_to_bundle: path_to_bundle)
@project = project
@wiki_enabled = wiki_enabled
end
def restore
@project.wiki if create_empty_wiki?
project.wiki if create_empty_wiki?
super
end
private
attr_accessor :project, :wiki_enabled
def create_empty_wiki?
!File.exist?(@path_to_bundle) && @wiki_enabled
!File.exist?(path_to_bundle) && wiki_enabled
end
end
end
......
......@@ -5185,6 +5185,9 @@ msgstr ""
msgid "Deselect all"
msgstr ""
msgid "Design Management files and data"
msgstr ""
msgid "DesignManagement|%{current_design} of %{designs_count}"
msgstr ""
......
......@@ -502,3 +502,17 @@ lists:
milestone_releases:
- milestone
- release
design: &design
- issue
- actions
- versions
- notes
designs: *design
actions:
- design
- version
versions: &version
- issue
- designs
- actions
design_versions: *version
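`&design` / `*design` and `&version` / `*version` are standard YAML anchors and aliases, so `designs:` and `design_versions:` reuse the sub-relation lists without repeating them. A quick, illustrative way to see the expansion (not GitLab code):

require 'yaml'

config = YAML.safe_load(<<~YAML, aliases: true)
  design: &design
    - issue
    - actions
    - versions
    - notes
  designs: *design
YAML

config['designs'] # => ["issue", "actions", "versions", "notes"]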
......@@ -2,6 +2,8 @@ require 'spec_helper'
include ImportExport::CommonUtil
describe Gitlab::ImportExport::ProjectTreeRestorer do
let(:shared) { project.import_export_shared }
describe 'restore project tree' do
before(:context) do
# Using an admin for import, so we can check assignment of existing members
......@@ -14,7 +16,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
RSpec::Mocks.with_temporary_scope do
@project = create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project')
@shared = @project.import_export_shared
allow(@shared).to receive(:export_path).and_return('spec/lib/gitlab/import_export/')
allow(@shared).to receive(:export_path).and_return('spec/fixtures/lib/gitlab/import_export/')
allow_any_instance_of(Repository).to receive(:fetch_source_branch!).and_return(true)
allow_any_instance_of(Gitlab::Git::Repository).to receive(:branch_exists?).and_return(false)
......@@ -274,36 +276,6 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
end
shared_examples 'restores project successfully' do
it 'correctly restores project' do
expect(shared.errors).to be_empty
expect(restored_project_json).to be_truthy
end
end
shared_examples 'restores project correctly' do |**results|
it 'has labels' do
expect(project.labels.size).to eq(results.fetch(:labels, 0))
end
it 'has label priorities' do
expect(project.labels.find_by(title: 'A project label').priorities).not_to be_empty
end
it 'has milestones' do
expect(project.milestones.size).to eq(results.fetch(:milestones, 0))
end
it 'has issues' do
expect(project.issues.size).to eq(results.fetch(:issues, 0))
end
it 'does not set params that are excluded from import_export settings' do
expect(project.import_type).to be_nil
expect(project.creator_id).not_to eq 123
end
end
shared_examples 'restores group correctly' do |**results|
it 'has group label' do
expect(project.group.labels.size).to eq(results.fetch(:labels, 0))
......@@ -322,18 +294,17 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
context 'Light JSON' do
let(:user) { create(:user) }
let(:shared) { project.import_export_shared }
let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') }
let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) }
let(:restored_project_json) { project_tree_restorer.restore }
before do
allow(shared).to receive(:export_path).and_return('spec/lib/gitlab/import_export/')
allow(shared).to receive(:export_path).and_return('spec/fixtures/lib/gitlab/import_export/')
end
context 'with a simple project' do
before do
project_tree_restorer.instance_variable_set(:@path, "spec/lib/gitlab/import_export/project.light.json")
project_tree_restorer.instance_variable_set(:@path, "spec/fixtures/lib/gitlab/import_export/project.light.json")
restored_project_json
end
......@@ -341,6 +312,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
it_behaves_like 'restores project correctly',
issues: 1,
labels: 2,
label_with_priorities: 'A project label',
milestones: 1,
first_issue_labels: 1,
services: 1
......@@ -363,7 +335,12 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
create(:ci_build, token: 'abcd')
end
it_behaves_like 'restores project successfully'
it_behaves_like 'restores project correctly',
issues: 1,
labels: 2,
label_with_priorities: 'A project label',
milestones: 1,
first_issue_labels: 1
end
end
......@@ -430,15 +407,15 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
before do
project_tree_restorer.instance_variable_set(:@path, "spec/lib/gitlab/import_export/project.group.json")
project_tree_restorer.instance_variable_set(:@path, "spec/fixtures/lib/gitlab/import_export/project.group.json")
restored_project_json
end
it_behaves_like 'restores project successfully'
it_behaves_like 'restores project correctly',
issues: 2,
labels: 2,
label_with_priorities: 'A project label',
milestones: 2,
first_issue_labels: 1
......@@ -459,7 +436,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
before do
project_tree_restorer.instance_variable_set(:@path, "spec/lib/gitlab/import_export/project.light.json")
project_tree_restorer.instance_variable_set(:@path, "spec/fixtures/lib/gitlab/import_export/project.light.json")
end
it 'does not import any templated services' do
......@@ -501,7 +478,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
it 'preserves the project milestone IID' do
project_tree_restorer.instance_variable_set(:@path, "spec/lib/gitlab/import_export/project.milestone-iid.json")
project_tree_restorer.instance_variable_set(:@path, "spec/fixtures/lib/gitlab/import_export/project.milestone-iid.json")
expect_any_instance_of(Gitlab::ImportExport::Shared).not_to receive(:error)
......@@ -534,7 +511,6 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
describe '#restored_project' do
let(:project) { create(:project) }
let(:shared) { project.import_export_shared }
let(:tree_hash) { { 'visibility_level' => visibility } }
let(:restorer) { described_class.new(user: nil, shared: shared, project: project) }
......
......@@ -21,7 +21,7 @@ describe Gitlab::ImportExport::RelationRenameService do
context 'when importing' do
let(:project_tree_restorer) { Gitlab::ImportExport::ProjectTreeRestorer.new(user: user, shared: shared, project: project) }
let(:import_path) { 'spec/lib/gitlab/import_export' }
let(:import_path) { 'spec/fixtures/lib/gitlab/import_export' }
let(:file_content) { IO.read("#{import_path}/project.json") }
let!(:json_file) { ActiveSupport::JSON.decode(file_content) }
......
......@@ -2,8 +2,8 @@ require 'spec_helper'
describe Gitlab::ImportExport::RepoSaver do
describe 'bundle a project Git repo' do
let(:user) { create(:user) }
let!(:project) { create(:project, :public, name: 'searchable_project') }
set(:user) { create(:user) }
let!(:project) { create(:project, :repository) }
let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
let(:shared) { project.import_export_shared }
let(:bundler) { described_class.new(project: project, shared: shared) }
......@@ -20,5 +20,13 @@ describe Gitlab::ImportExport::RepoSaver do
it 'bundles the repo successfully' do
expect(bundler.save).to be true
end
context 'when the repo is empty' do
let!(:project) { create(:project) }
it 'bundles the repo successfully' do
expect(bundler.save).to be true
end
end
end
end
......@@ -731,3 +731,18 @@ ExternalPullRequest:
- target_repository
- source_sha
- target_sha
DesignManagement::Design:
- id
- project_id
- issue_id
- filename
DesignManagement::Action:
- design_id
- event
- version_id
DesignManagement::Version:
- id
- created_at
- sha
- issue_id
- user_id
......@@ -2,8 +2,8 @@ require 'spec_helper'
describe Gitlab::ImportExport::WikiRepoSaver do
describe 'bundle a wiki Git repo' do
let(:user) { create(:user) }
let!(:project) { create(:project, :public, :wiki_repo, name: 'searchable_project') }
set(:user) { create(:user) }
let!(:project) { create(:project, :wiki_repo) }
let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
let(:shared) { project.import_export_shared }
let(:wiki_bundler) { described_class.new(project: project, shared: shared) }
......@@ -23,5 +23,13 @@ describe Gitlab::ImportExport::WikiRepoSaver do
it 'bundles the repo successfully' do
expect(wiki_bundler.save).to be true
end
context 'when the repo is empty' do
let!(:project) { create(:project) }
it 'bundles the repo successfully' do
expect(wiki_bundler.save).to be true
end
end
end
end
......@@ -1075,7 +1075,7 @@ describe Repository do
let(:ref) { 'refs/heads/master' }
it 'returns nil' do
is_expected.to eq(nil)
is_expected.to be_nil
end
end
......@@ -2002,7 +2002,7 @@ describe Repository do
it 'returns nil if repo does not exist' do
allow(repository).to receive(:root_ref).and_raise(Gitlab::Git::Repository::NoRepository)
expect(repository.avatar).to eq(nil)
expect(repository.avatar).to be_nil
end
it 'returns the first avatar file found in the repository' do
......@@ -2604,6 +2604,10 @@ describe Repository do
expect { repository.create_if_not_exists }.to change { repository.exists? }.from(false).to(true)
end
it 'returns true' do
expect(repository.create_if_not_exists).to eq(true)
end
it 'calls out to the repository client to create a repo' do
expect(repository.raw.gitaly_repository_client).to receive(:create_repository)
......@@ -2618,6 +2622,10 @@ describe Repository do
repository.create_if_not_exists
end
it 'returns nil' do
expect(repository.create_if_not_exists).to be_nil
end
end
context 'when the repository exists but the cache is not up to date' do
......@@ -2629,6 +2637,10 @@ describe Repository do
expect { repository.create_if_not_exists }.not_to raise_error
end
it 'returns nil' do
expect(repository.create_if_not_exists).to be_nil
end
end
end
......
......@@ -35,20 +35,27 @@ describe Projects::ImportExport::ExportService do
end
it 'saves the repo' do
# This spec errors when run against the EE codebase as there will be a third repository
# saved (the EE-specific design repository).
#
# Instead, skip this test when run within EE. There is a spec for the EE-specific design repo
# in the corresponding EE spec.
skip if Gitlab.ee?
# once for the normal repo, once for the wiki
expect(Gitlab::ImportExport::RepoSaver).to receive(:new).twice.and_call_original
service.execute
end
it 'saves the lfs objects' do
expect(Gitlab::ImportExport::LfsSaver).to receive(:new).and_call_original
it 'saves the wiki repo' do
expect(Gitlab::ImportExport::WikiRepoSaver).to receive(:new).and_call_original
service.execute
end
it 'saves the wiki repo' do
expect(Gitlab::ImportExport::WikiRepoSaver).to receive(:new).and_call_original
it 'saves the lfs objects' do
expect(Gitlab::ImportExport::LfsSaver).to receive(:new).and_call_original
service.execute
end
......@@ -98,9 +105,9 @@ describe Projects::ImportExport::ExportService do
end
end
context 'when saver services fail' do
context 'when saving services fail' do
before do
allow(service).to receive(:save_services).and_return(false)
allow(service).to receive(:save_exporters).and_return(false)
end
after do
......@@ -122,7 +129,7 @@ describe Projects::ImportExport::ExportService do
expect(Rails.logger).to receive(:error)
end
it 'the after export strategy is not called' do
it 'does not call the export strategy' do
expect(service).not_to receive(:execute_after_export_action)
end
end
......
# frozen_string_literal: true
# Shared examples for ProjectTreeRestorer (shared to allow the testing
# of EE-specific features)
RSpec.shared_examples 'restores project correctly' do |**results|
it 'restores the project' do
expect(shared.errors).to be_empty
expect(restored_project_json).to be_truthy
end
it 'has labels' do
labels_size = results.fetch(:labels, 0)
expect(project.labels.size).to eq(labels_size)
end
it 'has label priorities' do
label_with_priorities = results[:label_with_priorities]
if label_with_priorities
expect(project.labels.find_by(title: label_with_priorities).priorities).not_to be_empty
end
end
it 'has milestones' do
expect(project.milestones.size).to eq(results.fetch(:milestones, 0))
end
it 'has issues' do
expect(project.issues.size).to eq(results.fetch(:issues, 0))
end
it 'does not set params that are excluded from import_export settings' do
expect(project.import_type).to be_nil
expect(project.creator_id).not_to eq 123
end
end
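This shared example group replaces the spec-local versions removed above and is consumed with `it_behaves_like` plus a hash of expected counts, as in the spec hunks earlier in this commit:

it_behaves_like 'restores project correctly',
                issues: 1,
                labels: 2,
                label_with_priorities: 'A project label',
                milestones: 1,
                first_issue_labels: 1

Counts that are omitted fall back to 0 via `results.fetch(:key, 0)`, and the label-priority check only runs when `label_with_priorities` is given.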