Commit e43077ab authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 211a8c33
<script>
import { GlButton, GlButtonGroup, GlIcon, GlTooltipDirective } from '@gitlab/ui';
import { BTN_COPY_CONTENTS_TITLE, BTN_DOWNLOAD_TITLE, BTN_RAW_TITLE } from './constants';
export default {
components: {
GlIcon,
GlButtonGroup,
GlButton,
},
directives: {
GlTooltip: GlTooltipDirective,
},
props: {
blob: {
type: Object,
required: true,
},
},
computed: {
rawUrl() {
return this.blob.rawPath;
},
downloadUrl() {
return `${this.blob.rawPath}?inline=false`;
},
},
methods: {
requestCopyContents() {
this.$emit('copy');
},
},
BTN_COPY_CONTENTS_TITLE,
BTN_DOWNLOAD_TITLE,
BTN_RAW_TITLE,
};
</script>
<template>
<gl-button-group>
<gl-button
v-gl-tooltip.hover
:aria-label="$options.BTN_COPY_CONTENTS_TITLE"
:title="$options.BTN_COPY_CONTENTS_TITLE"
@click="requestCopyContents"
>
<gl-icon name="copy-to-clipboard" :size="14" />
</gl-button>
<gl-button
v-gl-tooltip.hover
:aria-label="$options.BTN_RAW_TITLE"
:title="$options.BTN_RAW_TITLE"
:href="rawUrl"
target="_blank"
>
<gl-icon name="doc-code" :size="14" />
</gl-button>
<gl-button
v-gl-tooltip.hover
:aria-label="$options.BTN_DOWNLOAD_TITLE"
:title="$options.BTN_DOWNLOAD_TITLE"
:href="downloadUrl"
target="_blank"
>
<gl-icon name="download" :size="14" />
</gl-button>
</gl-button-group>
</template>
import { __ } from '~/locale';
export const BTN_COPY_CONTENTS_TITLE = __('Copy file contents');
export const BTN_RAW_TITLE = __('Open raw');
export const BTN_DOWNLOAD_TITLE = __('Download');
@@ -75,9 +75,9 @@ export default {
 * This field needs a lot of verification, because of different possible cases:
 *
 * 1. person who is an author of a commit might be a GitLab user
-* 2. if person who is an author of a commit is a GitLab user he/she can have a GitLab avatar
-* 3. If GitLab user does not have avatar he/she might have a Gravatar
-* 4. If committer is not a GitLab User he/she can have a Gravatar
+* 2. if person who is an author of a commit is a GitLab user, they can have a GitLab avatar
+* 3. If GitLab user does not have avatar they might have a Gravatar
+* 4. If committer is not a GitLab User they can have a Gravatar
 * 5. We do not have consistent API object in this case
 * 6. We should improve API and the code
 *
@@ -93,17 +93,17 @@ export default {
 // 1. person who is an author of a commit might be a GitLab user
 if (this.pipeline.commit.author) {
   // 2. if person who is an author of a commit is a GitLab user
-  // he/she can have a GitLab avatar
+  // they can have a GitLab avatar
   if (this.pipeline.commit.author.avatar_url) {
     commitAuthorInformation = this.pipeline.commit.author;
-  // 3. If GitLab user does not have avatar he/she might have a Gravatar
+  // 3. If GitLab user does not have avatar, they might have a Gravatar
   } else if (this.pipeline.commit.author_gravatar_url) {
     commitAuthorInformation = Object.assign({}, this.pipeline.commit.author, {
       avatar_url: this.pipeline.commit.author_gravatar_url,
     });
   }
-// 4. If committer is not a GitLab User he/she can have a Gravatar
+// 4. If committer is not a GitLab User, they can have a Gravatar
 } else {
   commitAuthorInformation = {
     avatar_url: this.pipeline.commit.author_gravatar_url,
...
@@ -52,7 +52,7 @@ class RootController < Dashboard::ProjectsController
  end

  def redirect_to_home_page_url?
-    # If user is not signed-in and tries to access root_path - redirect him to landing page
+    # If user is not signed-in and tries to access root_path - redirect them to landing page
    # Don't redirect to the default URL to prevent endless redirections
    return false unless Gitlab::CurrentSettings.home_page_url.present?
...
@@ -11,7 +11,10 @@ class Board < ApplicationRecord
  validates :group, presence: true, unless: :project
  scope :with_associations, -> { preload(:destroyable_lists) }

-  scope :order_by_name_asc, -> { order(arel_table[:name].lower.asc) }
+  # Sort by case-insensitive name, then ascending ids. This ensures that we will always
+  # get the same list/first board no matter how many other boards are named the same
+  scope :order_by_name_asc, -> { order(arel_table[:name].lower.asc).order(id: :asc) }
  scope :first_board, -> { where(id: self.order_by_name_asc.limit(1).select(:id)) }

  def project_needed?
...
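For illustration, a minimal sketch of what the updated scope yields (assuming the `Board` model above; the board names and the resulting order are illustrative, and case-insensitive ties resolve by creation order):

```ruby
# Boards named "a", "A", "B", "C" now come back in a stable order:
# case-insensitive name first, then id as the tie-breaker.
Board.order_by_name_asc.pluck(:name)
# => ["a", "A", "B", "C"] when "a" was created before "A"

# first_board therefore always resolves to the same single record:
Board.first_board # => relation containing the board named "a"
```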
@@ -370,7 +370,7 @@ class ProjectPolicy < BasePolicy
  # There's two separate cases when builds_disabled is true:
  # 1. When internal CI is disabled - builds_disabled && internal_builds_disabled
-  #    - We do not prevent the user from accessing Pipelines to allow him to access external CI
+  #    - We do not prevent the user from accessing Pipelines to allow them to access external CI
  # 2. When the user is not allowed to access CI - builds_disabled && ~internal_builds_disabled
  #    - We prevent the user from accessing Pipelines
  rule { (builds_disabled & ~internal_builds_disabled) | repository_disabled }.policy do
...
@@ -61,7 +61,7 @@ module Projects
  def add_source_project_to_fork_network(source_project)
    return unless @project.fork_network

-    # Because he have moved all references in the fork network from the source_project
+    # Because we have moved all references in the fork network from the source_project
    # we won't be able to query the database (only through its cached data),
    # for its former relationships. That's why we're adding it to the network
    # as a fork of the target project
...
---
title: Conan packages are validated based on full recipe instead of name/version alone
merge_request: 23467
author:
type: changed
---
title: Ensure board lists are sorted consistently
merge_request: 24637
author:
type: fixed
---
title: Clean background_migration queue from ActivatePrometheusServicesForSharedCluster jobs.
merge_request: 24135
author:
type: fixed
---
title: Sanitize request parameters in exceptions_json.log
merge_request: 24625
author:
type: fixed
@@ -111,7 +111,7 @@ production: &base
    # Email server smtp settings are in config/initializers/smtp_settings.rb.sample
    # default_can_create_group: false  # default: true
-    # username_changing_enabled: false # default: true - User can change her username/namespace
+    # username_changing_enabled: false # default: true - User can change their username/namespace
    ## Default theme ID
    ##   1 - Indigo
    ##   2 - Dark
...
# frozen_string_literal: true
class DropActivatePrometheusServicesForSharedClusterApplicationsBackgroundMigration < ActiveRecord::Migration[5.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
DROPPED_JOB_CLASS = 'ActivatePrometheusServicesForSharedClusterApplications'
QUEUE = 'background_migration'
def up
Sidekiq::Queue.new(QUEUE).each do |job|
klass, project_id, *should_be_empty = job.args
next unless klass == DROPPED_JOB_CLASS && project_id.is_a?(Integer) && should_be_empty.empty?
job.delete
end
end
end
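For context, a minimal sketch (using the same Sidekiq API the migration relies on) of how the affected jobs could be inspected from a Rails console before it runs; the queue name and job class come from the constants above:

```ruby
require 'sidekiq/api'

queue = Sidekiq::Queue.new('background_migration')
# Each enqueued BackgroundMigrationWorker job carries the background migration
# class name as its first argument, followed by that migration's own arguments.
stale = queue.select { |job| job.args.first == 'ActivatePrometheusServicesForSharedClusterApplications' }
puts "#{stale.size} job(s) would be removed by this migration"
```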
# frozen_string_literal: true
class ReplaceConanMetadataIndex < ActiveRecord::Migration[5.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
OLD_INDEX = 'index_packages_conan_metadata_on_package_id'
NEW_INDEX = 'index_packages_conan_metadata_on_package_id_username_channel'
disable_ddl_transaction!
def up
add_concurrent_index :packages_conan_metadata,
[:package_id, :package_username, :package_channel],
unique: true, name: NEW_INDEX
remove_concurrent_index_by_name :packages_conan_metadata, OLD_INDEX
end
def down
add_concurrent_index :packages_conan_metadata, :package_id, name: OLD_INDEX
remove_concurrent_index_by_name :packages_conan_metadata, NEW_INDEX
end
end
# frozen_string_literal: true
class PatchPrometheusServicesForSharedClusterApplications < ActiveRecord::Migration[5.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
MIGRATION = 'ActivatePrometheusServicesForSharedClusterApplications'.freeze
BATCH_SIZE = 500
DELAY = 2.minutes
disable_ddl_transaction!
module Migratable
module Applications
class Prometheus < ActiveRecord::Base
self.table_name = 'clusters_applications_prometheus'
enum status: {
errored: -1,
installed: 3,
updated: 5
}
end
end
class Project < ActiveRecord::Base
self.table_name = 'projects'
include ::EachBatch
scope :with_application_on_group_clusters, -> {
joins("INNER JOIN namespaces ON namespaces.id = projects.namespace_id")
.joins("INNER JOIN cluster_groups ON cluster_groups.group_id = namespaces.id")
.joins("INNER JOIN clusters ON clusters.id = cluster_groups.cluster_id AND clusters.cluster_type = #{Cluster.cluster_types['group_type']}")
.joins("INNER JOIN clusters_applications_prometheus ON clusters_applications_prometheus.cluster_id = clusters.id
AND clusters_applications_prometheus.status IN (#{Applications::Prometheus.statuses[:installed]}, #{Applications::Prometheus.statuses[:updated]})")
}
scope :without_active_prometheus_services, -> {
joins("LEFT JOIN services ON services.project_id = projects.id AND services.type = 'PrometheusService'")
.where("services.id IS NULL OR (services.active = FALSE AND services.properties = '{}')")
}
end
class Cluster < ActiveRecord::Base
self.table_name = 'clusters'
enum cluster_type: {
instance_type: 1,
group_type: 2
}
def self.has_prometheus_application?
joins("INNER JOIN clusters_applications_prometheus ON clusters_applications_prometheus.cluster_id = clusters.id
AND clusters_applications_prometheus.status IN (#{Applications::Prometheus.statuses[:installed]}, #{Applications::Prometheus.statuses[:updated]})").exists?
end
end
end
  def up
-    projects_without_active_prometheus_service.group('projects.id').each_batch(of: BATCH_SIZE) do |batch, index|
-      bg_migrations_batch = batch.select('projects.id').map { |project| [MIGRATION, project.id] }
-      delay = index * DELAY
-      BackgroundMigrationWorker.bulk_perform_in(delay.seconds, bg_migrations_batch)
-    end
+    # no-op
  end

  def down
    # no-op
  end
private
def projects_without_active_prometheus_service
scope = Migratable::Project.without_active_prometheus_services
return scope if migrate_instance_cluster?
scope.with_application_on_group_clusters
end
def migrate_instance_cluster?
if instance_variable_defined?('@migrate_instance_cluster')
@migrate_instance_cluster
else
@migrate_instance_cluster = Migratable::Cluster.instance_type.has_prometheus_application?
end
end
end
@@ -2945,7 +2945,7 @@ ActiveRecord::Schema.define(version: 2020_02_06_111847) do
    t.datetime_with_timezone "updated_at", null: false
    t.string "package_username", limit: 255, null: false
    t.string "package_channel", limit: 255, null: false
-    t.index ["package_id"], name: "index_packages_conan_metadata_on_package_id", unique: true
+    t.index ["package_id", "package_username", "package_channel"], name: "index_packages_conan_metadata_on_package_id_username_channel", unique: true
  end
  create_table "packages_dependencies", force: :cascade do |t|
...
@@ -48,3 +48,10 @@ to the Prometheus config in order for GitLab to receive notifications of any ale
Once the webhook is setup, you can
[take action on incoming alerts](../../../user/project/integrations/prometheus.md#taking-action-on-incidents-ultimate).
## Adding custom metrics to the self monitoring project
You can add custom metrics in the self monitoring project by:
1. [Duplicating](../../../user/project/integrations/prometheus.md#duplicating-a-gitlab-defined-dashboard) the default dashboard.
1. [Editing](../../../user/project/integrations/prometheus.md#view-and-edit-the-source-file-of-a-custom-dashboard) the newly created dashboard file and configuring it with [dashboard YAML properties](../../../user/project/integrations/prometheus.md#dashboard-yaml-properties).
@@ -31,5 +31,5 @@ gitlab_rails['gitlab_username_changing_enabled'] = false
For source installations, uncomment the following line in `config/gitlab.yml`:

```yaml
-# username_changing_enabled: false # default: true - User can change her username/namespace
+# username_changing_enabled: false # default: true - User can change their username/namespace
```
@@ -64,7 +64,7 @@ Example response:
Gets a list of group or project members viewable by the authenticated user, including inherited members through ancestor groups.
When a user is a member of the project/group and of one or more ancestor groups the user is returned only once with the project `access_level` (if exists)
-or the `access_level` for the user in the first group which he belongs to in the project groups ancestors chain.
+or the `access_level` for the user in the first group which they belong to in the project groups ancestors chain.

```
GET /groups/:id/members/all
...
@@ -96,8 +96,10 @@ Link each one to an appropriate place for more information.
This is the part of the document where you can include one or more sets of instructions, each to accomplish a specific task.
Headers should describe the task the reader will achieve by following the instructions within, typically starting with a verb.
Larger instruction sets may have subsections covering specific phases of the process.
+Where appropriate, provide examples of code or configuration files to better clarify intended usage.

- Write a step-by-step guide, with no gaps between the steps.
+- Include example code or configurations as part of the relevant step. Use appropriate markdown to [wrap code blocks with syntax highlighting](../../user/markdown.html#colored-code-and-syntax-highlighting).
- Start with an h2 (`##`), break complex steps into small steps using
  subheadings h3 > h4 > h5 > h6. _Never skip a hierarchy level, such
  as h2 > h4_, as it will break the TOC and may affect the breadcrumbs.
...
@@ -168,7 +168,7 @@ user_input = '../other-repo.git/other-file'
repo_path = 'repositories/user-repo.git'

# The intention of the code below is to open a file under repo_path, but
-# because the user used '..' she can 'break out' into
+# because the user used '..' they can 'break out' into
# 'repositories/other-repo.git'
full_path = File.join(repo_path, user_input)
File.open(full_path) do # Oops!
...
@@ -72,6 +72,24 @@ When using spring and guard together, use `SPRING=1 bundle exec guard` instead t
- Use [`:aggregate_failures`](https://relishapp.com/rspec/rspec-core/docs/expectation-framework-integration/aggregating-failures) when there is more than one expectation in a test.
- For [empty test description blocks](https://github.com/rubocop-hq/rspec-style-guide#it-and-specify), use `specify` rather than `it do` if the test is self-explanatory.
### Coverage
[`simplecov`](https://github.com/colszowka/simplecov) is used to generate code test coverage reports.
These are generated automatically on the CI, but not when running tests locally. To generate partial reports
when you run a spec file on your machine, set the `SIMPLECOV` environment variable:
```shell
SIMPLECOV=1 bundle exec rspec spec/models/repository_spec.rb
```
Coverage reports are generated into the `coverage` folder in the app root, and you can open these in your browser, for example:
```shell
firefox coverage/index.html
```
Use the coverage reports to ensure your tests cover 100% of your code.
### System / Feature tests

NOTE: **Note:** Before writing a new system test, [please consider **not**
...
@@ -57,7 +57,7 @@ Support for historical data is coming [in a future release](https://gitlab.com/g
### Full text search

-> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/21656) in GitLab 12.7.
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/21656) in GitLab 12.8.

When you enable [Elastic Stack](../../clusters/applications.md#elastic-stack) on your cluster,
you can search the content of your logs via a search bar.
...
@@ -45,7 +45,8 @@ Once set, Code Owners are displayed in merge requests widgets:
Files can be specified using the same kind of patterns you would use
in the `.gitignore` file followed by the `@username` or email of one
or more users or by the `@name` of one or more groups that should
-be owners of the file.
+be owners of the file. Groups must be added as [members of the project](members/index.md),
+or they will be ignored.

The order in which the paths are defined is significant: the last
pattern that matches a given path will be used to find the code
...
@@ -49,7 +49,7 @@ module Gitlab
    return true
  end

-  # Block user in GitLab if he/she was blocked in AD
+  # Block user in GitLab if they were blocked in AD
  if Gitlab::Auth::LDAP::Person.disabled_via_active_directory?(ldap_identity.extern_uid, adapter)
    block_user(user, 'is disabled in Active Directory')
    false
...
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
    # Creates missing PrometheusService records, or sets the active attribute to true,
    # for all projects that belong to a cluster with the Prometheus application installed.
class ActivatePrometheusServicesForSharedClusterApplications
module Migratable
# Migration model namespace isolated from application code.
class PrometheusService < ActiveRecord::Base
self.inheritance_column = :_type_disabled
self.table_name = 'services'
default_scope { where("services.type = 'PrometheusService'") }
def self.for_project(project_id)
new(
project_id: project_id,
active: true,
properties: '{}',
type: 'PrometheusService',
template: false,
push_events: true,
issues_events: true,
merge_requests_events: true,
tag_push_events: true,
note_events: true,
category: 'monitoring',
default: false,
wiki_page_events: true,
pipeline_events: true,
confidential_issues_events: true,
commit_events: true,
job_events: true,
confidential_note_events: true,
deployment_events: false
)
end
def managed?
properties == '{}'
end
end
end
def perform(project_id)
service = Migratable::PrometheusService.find_by(project_id: project_id) || Migratable::PrometheusService.for_project(project_id)
service.update!(active: true) if service.managed?
end
end
end
end
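A short sketch of how this class is exercised, mirroring `subject.perform(project.id)` in its spec; `project` here stands for any project record and is purely illustrative:

```ruby
# Creates a managed PrometheusService row for the project if none exists,
# or re-activates an existing managed (empty-properties) one.
migration = Gitlab::BackgroundMigration::ActivatePrometheusServicesForSharedClusterApplications.new
migration.perform(project.id)
```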
@@ -97,6 +97,8 @@ module Gitlab
        extra = extra.merge(data) if data.is_a?(Hash)
      end

+      extra = sanitize_request_parameters(extra)

      if sentry && Raven.configuration.server
        Raven.capture_exception(exception, tags: default_tags, extra: extra)
      end
@@ -117,6 +119,11 @@ module Gitlab
        end
      end

+      def sanitize_request_parameters(parameters)
+        filter = ActiveSupport::ParameterFilter.new(::Rails.application.config.filter_parameters)
+        filter.filter(parameters)
+      end

      def sentry_dsn
        return unless Rails.env.production? || Rails.env.development?
        return unless Gitlab.config.sentry.enabled
...
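The new `sanitize_request_parameters` helper leans on Rails' parameter filtering; a minimal sketch of that behaviour (assuming a Rails 6 setup where `filter_parameters` includes `:token` and `:password`):

```ruby
require 'active_support/parameter_filter'

filter = ActiveSupport::ParameterFilter.new([:token, :password])
filter.filter(test: 1, my_token: 'secret', password: 'hunter2')
# => { :test => 1, :my_token => "[FILTERED]", :password => "[FILTERED]" }
# Partial key matches are filtered too, which is why the spec further down
# expects `extra.my_token` to come through as "[FILTERED]".
```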
@@ -57,7 +57,7 @@ gitaly_log="$app_root/log/gitaly.log"
# Read configuration variable file if it is present
test -f /etc/default/gitlab && . /etc/default/gitlab

-# Switch to the app_user if it is not he/she who is running the script.
+# Switch to the app_user if it is not they who are running the script.
if [ `whoami` != "$app_user" ]; then
  eval su - "$app_user" -c $(echo \")$shell_path -l -c \'$0 "$@"\'$(echo \"); exit;
fi
...
@@ -21291,6 +21291,9 @@ msgstr ""
msgid "Vulnerabilities over time"
msgstr ""

+msgid "Vulnerability List"
+msgstr ""

msgid "Vulnerability-Check"
msgstr ""
@@ -21336,6 +21339,9 @@ msgstr ""
msgid "Vulnerability|Severity"
msgstr ""

+msgid "Vulnerability|Status"
+msgstr ""

msgid "WIP"
msgstr ""
...
@@ -8,10 +8,9 @@ module QA
  user = Resource::User.fabricate_or_use(Runtime::Env.gitlab_qa_username_1, Runtime::Env.gitlab_qa_password_1)

-  project = Resource::Project.fabricate_via_api! do |resource|
-    resource.name = 'add-member-project'
-  end
-  project.visit!
+  Resource::Project.fabricate_via_api! do |project|
+    project.name = 'add-member-project'
+  end.visit!

  Page::Project::Menu.perform(&:go_to_members_settings)
  Page::Project::Settings::Members.perform do |members|
...
@@ -10,8 +10,8 @@ module QA
  @runner_name = "qa-runner-#{Time.now.to_i}"

-  @project = Resource::Project.fabricate_via_api! do |resource|
-    resource.name = 'deploy-key-clone-project'
+  @project = Resource::Project.fabricate_via_api! do |project|
+    project.name = 'deploy-key-clone-project'
  end

  @repository_location = @project.repository_ssh_location
...
@@ -37,7 +37,7 @@ describe Projects::GroupLinksController do
    end
  end

-  context 'when user has access to group he want to link project to' do
+  context 'when user has access to group they want to link project to' do
    before do
      group.add_developer(user)
    end
@@ -55,7 +55,7 @@ describe Projects::GroupLinksController do
    end
  end

-  context 'when user doers not have access to group he want to link to' do
+  context 'when user does not have access to group they want to link to' do
    include_context 'link project to group'

    it 'renders 404' do
...
@@ -287,7 +287,7 @@ describe 'User creates branch and merge request on issue page', :js do
    expect(source_message).to have_text('Source is not available')

    # JavaScript gets refs started with `mas` (entered above) and places the first match.
-    # User sees `mas` in black color (the part he entered) and the `ter` in gray color (a hint).
+    # User sees `mas` in black color (the part they entered) and the `ter` in gray color (a hint).
    # Since hinting is implemented via text selection and rspec/capybara doesn't have matchers for it,
    # we're just checking the whole source name.
    expect(input_source.value).to eq(project.default_branch)
...
import { mount } from '@vue/test-utils';
import BlobHeaderActions from '~/blob/components/blob_header_default_actions.vue';
import {
BTN_COPY_CONTENTS_TITLE,
BTN_DOWNLOAD_TITLE,
BTN_RAW_TITLE,
} from '~/blob/components/constants';
import { GlButtonGroup, GlButton } from '@gitlab/ui';
import { Blob } from './mock_data';
describe('Blob Header Default Actions', () => {
let wrapper;
let btnGroup;
let buttons;
const hrefPrefix = 'http://localhost';
function createComponent(props = {}) {
wrapper = mount(BlobHeaderActions, {
propsData: {
blob: Object.assign({}, Blob, props),
},
});
}
beforeEach(() => {
createComponent();
btnGroup = wrapper.find(GlButtonGroup);
buttons = wrapper.findAll(GlButton);
});
afterEach(() => {
wrapper.destroy();
});
describe('renders', () => {
it('gl-button-group component', () => {
expect(btnGroup.exists()).toBe(true);
});
it('exactly 3 buttons with predefined actions', () => {
expect(buttons.length).toBe(3);
[BTN_COPY_CONTENTS_TITLE, BTN_RAW_TITLE, BTN_DOWNLOAD_TITLE].forEach((title, i) => {
expect(buttons.at(i).vm.$el.title).toBe(title);
});
});
it('correct href attribute on RAW button', () => {
expect(buttons.at(1).vm.$el.href).toBe(`${hrefPrefix}${Blob.rawPath}`);
});
it('correct href attribute on Download button', () => {
expect(buttons.at(2).vm.$el.href).toBe(`${hrefPrefix}${Blob.rawPath}?inline=false`);
});
});
describe('functionally', () => {
it('emits an event when a Copy Contents button is clicked', () => {
jest.spyOn(wrapper.vm, '$emit');
buttons.at(0).vm.$emit('click');
expect(wrapper.vm.$emit).toHaveBeenCalledWith('copy');
});
});
});
@@ -83,7 +83,7 @@ describe NavHelper, :do_not_mock_admin_mode do
    expect(helper.header_links).not_to include(:issues, :merge_requests, :todos, :search)
  end

-  it 'shows the search box when the user cannot read cross project and he is visiting a project' do
+  it 'shows the search box when the user cannot read cross project and they are visiting a project' do
    helper.instance_variable_set(:@project, create(:project))

    expect(helper.header_links).to include(:search)
...
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::BackgroundMigration::ActivatePrometheusServicesForSharedClusterApplications, :migration, schema: 2020_01_14_113341 do
include MigrationHelpers::PrometheusServiceHelpers
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:services) { table(:services) }
let(:namespace) { namespaces.create(name: 'user', path: 'user') }
let(:project) { projects.create(namespace_id: namespace.id) }
let(:columns) do
%w(project_id active properties type template push_events
issues_events merge_requests_events tag_push_events
note_events category default wiki_page_events pipeline_events
confidential_issues_events commit_events job_events
confidential_note_events deployment_events)
end
describe '#perform' do
it 'is idempotent' do
expect { subject.perform(project.id) }.to change { services.order(:id).map { |row| row.attributes } }
expect { subject.perform(project.id) }.not_to change { services.order(:id).map { |row| row.attributes } }
end
context 'non prometheus services' do
it 'does not change them' do
other_type = 'SomeOtherService'
services.create(service_params_for(project.id, active: true, type: other_type))
expect { subject.perform(project.id) }.not_to change { services.where(type: other_type).order(:id).map { |row| row.attributes } }
end
end
context 'prometheus services are configured manually ' do
it 'does not change them' do
properties = '{"api_url":"http://test.dev","manual_configuration":"1"}'
services.create(service_params_for(project.id, properties: properties, active: false))
expect { subject.perform(project.id) }.not_to change { services.order(:id).map { |row| row.attributes } }
end
end
context 'prometheus integration services do not exist' do
it 'creates missing services entries' do
subject.perform(project.id)
rows = services.order(:id).map { |row| row.attributes.slice(*columns).symbolize_keys }
expect([service_params_for(project.id, active: true)]).to eq rows
end
end
context 'prometheus integration services exist' do
context 'in active state' do
it 'does not change them' do
services.create(service_params_for(project.id, active: true))
expect { subject.perform(project.id) }.not_to change { services.order(:id).map { |row| row.attributes } }
end
end
context 'not in active state' do
it 'sets active attribute to true' do
service = services.create(service_params_for(project.id))
expect { subject.perform(project.id) }.to change { service.reload.active? }.from(false).to(true)
end
end
end
end
end
@@ -145,6 +145,17 @@ describe Gitlab::ErrorTracking do
    )
  end
context 'with filterable parameters' do
let(:extra) { { test: 1, my_token: 'test' } }
it 'filters parameters' do
expect(Gitlab::ErrorTracking::Logger).to receive(:error).with(
hash_including({ 'extra.test' => 1, 'extra.my_token' => '[FILTERED]' }))
described_class.track_exception(exception, extra)
end
end
  context 'the exception implements :sentry_extra_data' do
    let(:extra_info) { { event: 'explosion', size: :massive } }
    let(:exception) { double(message: 'bang!', sentry_extra_data: extra_info, backtrace: caller) }
...
@@ -160,7 +160,7 @@ describe Gitlab::UserAccess do
    expect(access.can_push_to_branch?('master')).to be_falsey
  end

-  it 'does not allow the user to push if he does not have push access to the canonical project' do
+  it 'does not allow the user to push if they do not have push access to the canonical project' do
    canonical_project.add_guest(user)

    expect(access.can_push_to_branch?('awesome-feature')).to be_falsey
...
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20200116051619_drop_activate_prometheus_services_for_shared_cluster_applications_background_migration.rb')
describe DropActivatePrometheusServicesForSharedClusterApplicationsBackgroundMigration, :sidekiq, :redis, :migration, schema: 2020_01_16_051619 do
subject(:migration) { described_class.new }
describe '#up' do
context 'there are only affected jobs on the queue' do
it 'removes enqueued ActivatePrometheusServicesForSharedClusterApplications background jobs' do
Sidekiq::Testing.disable! do # https://github.com/mperham/sidekiq/wiki/testing#api Sidekiq's API does not have a testing mode
Sidekiq::Client.push('queue' => described_class::QUEUE, 'class' => ::BackgroundMigrationWorker, 'args' => [described_class::DROPPED_JOB_CLASS, 1])
expect { migration.up }.to change { Sidekiq::Queue.new(described_class::QUEUE).size }.from(1).to(0)
end
end
end
context "there aren't any affected jobs on the queue" do
it 'skips other enqueued jobs' do
Sidekiq::Testing.disable! do
Sidekiq::Client.push('queue' => described_class::QUEUE, 'class' => ::BackgroundMigrationWorker, 'args' => ['SomeOtherClass', 1])
expect { migration.up }.not_to change { Sidekiq::Queue.new(described_class::QUEUE).size }
end
end
end
context "there are multiple types of jobs on the queue" do
it 'skips other enqueued jobs' do
Sidekiq::Testing.disable! do
queue = Sidekiq::Queue.new(described_class::QUEUE)
# this job will be deleted
Sidekiq::Client.push('queue' => described_class::QUEUE, 'class' => ::BackgroundMigrationWorker, 'args' => [described_class::DROPPED_JOB_CLASS, 1])
# these jobs will be skipped
skipped_jobs_args = [['SomeOtherClass', 1], [described_class::DROPPED_JOB_CLASS, 'wrong id type'], [described_class::DROPPED_JOB_CLASS, 1, 'some weird argument']]
skipped_jobs_args.each do |args|
Sidekiq::Client.push('queue' => described_class::QUEUE, 'class' => ::BackgroundMigrationWorker, 'args' => args)
end
migration.up
expect(queue.size).to be 3
expect(queue.map(&:args)).to match_array skipped_jobs_args
end
end
end
context "other queues" do
it 'does not modify them' do
Sidekiq::Testing.disable! do
Sidekiq::Client.push('queue' => 'other', 'class' => ::BackgroundMigrationWorker, 'args' => ['SomeOtherClass', 1])
Sidekiq::Client.push('queue' => 'other', 'class' => ::BackgroundMigrationWorker, 'args' => [described_class::DROPPED_JOB_CLASS, 1])
expect { migration.up }.not_to change { Sidekiq::Queue.new('other').size }
end
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200114113341_patch_prometheus_services_for_shared_cluster_applications.rb')
describe PatchPrometheusServicesForSharedClusterApplications, :migration do
include MigrationHelpers::PrometheusServiceHelpers
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:services) { table(:services) }
let(:clusters) { table(:clusters) }
let(:cluster_groups) { table(:cluster_groups) }
let(:clusters_applications_prometheus) { table(:clusters_applications_prometheus) }
let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
let(:application_statuses) do
{
errored: -1,
installed: 3,
updated: 5
}
end
let(:cluster_types) do
{
instance_type: 1,
group_type: 2
}
end
describe '#up' do
let!(:project_with_missing_service) { projects.create!(name: 'gitlab', path: 'gitlab-ce', namespace_id: namespace.id) }
let(:project_with_inactive_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
let(:project_with_active_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
let(:project_with_manual_active_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
let(:project_with_manual_inactive_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
let(:project_with_active_not_prometheus_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
let(:project_with_inactive_not_prometheus_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
before do
services.create(service_params_for(project_with_inactive_service.id, active: false))
services.create(service_params_for(project_with_active_service.id, active: true))
services.create(service_params_for(project_with_active_not_prometheus_service.id, active: true, type: 'other'))
services.create(service_params_for(project_with_inactive_not_prometheus_service.id, active: false, type: 'other'))
services.create(service_params_for(project_with_manual_inactive_service.id, active: false, properties: { some: 'data' }.to_json))
services.create(service_params_for(project_with_manual_active_service.id, active: true, properties: { some: 'data' }.to_json))
end
shared_examples 'patch prometheus services post migration' do
context 'prometheus application is installed on the cluster' do
it 'schedules a background migration' do
clusters_applications_prometheus.create(cluster_id: cluster.id, status: application_statuses[:installed], version: '123')
Sidekiq::Testing.fake! do
Timecop.freeze do
background_migrations = [["ActivatePrometheusServicesForSharedClusterApplications", project_with_missing_service.id],
["ActivatePrometheusServicesForSharedClusterApplications", project_with_inactive_service.id],
["ActivatePrometheusServicesForSharedClusterApplications", project_with_active_not_prometheus_service.id],
["ActivatePrometheusServicesForSharedClusterApplications", project_with_inactive_not_prometheus_service.id]]
migrate!
enqueued_migrations = BackgroundMigrationWorker.jobs.map { |job| job['args'] }
expect(enqueued_migrations).to match_array(background_migrations)
end
end
end
end
context 'prometheus application was recently updated on the cluster' do
it 'schedules a background migration' do
clusters_applications_prometheus.create(cluster_id: cluster.id, status: application_statuses[:updated], version: '123')
Sidekiq::Testing.fake! do
Timecop.freeze do
background_migrations = [["ActivatePrometheusServicesForSharedClusterApplications", project_with_missing_service.id],
["ActivatePrometheusServicesForSharedClusterApplications", project_with_inactive_service.id],
["ActivatePrometheusServicesForSharedClusterApplications", project_with_active_not_prometheus_service.id],
["ActivatePrometheusServicesForSharedClusterApplications", project_with_inactive_not_prometheus_service.id]]
migrate!
enqueued_migrations = BackgroundMigrationWorker.jobs.map { |job| job['args'] }
expect(enqueued_migrations).to match_array(background_migrations)
end
end
end
end
context 'prometheus application failed to install on the cluster' do
it 'does not schedule a background migration' do
clusters_applications_prometheus.create(cluster_id: cluster.id, status: application_statuses[:errored], version: '123')
Sidekiq::Testing.fake! do
Timecop.freeze do
migrate!
expect(BackgroundMigrationWorker.jobs.size).to eq 0
end
end
end
end
context 'prometheus application is NOT installed on the cluster' do
it 'does not schedule a background migration' do
Sidekiq::Testing.fake! do
Timecop.freeze do
migrate!
expect(BackgroundMigrationWorker.jobs.size).to eq 0
end
end
end
end
end
context 'Cluster is group_type' do
let(:cluster) { clusters.create(name: 'cluster', cluster_type: cluster_types[:group_type]) }
before do
cluster_groups.create(group_id: namespace.id, cluster_id: cluster.id)
end
it_behaves_like 'patch prometheus services post migration'
end
context 'Cluster is instance_type' do
let(:cluster) { clusters.create(name: 'cluster', cluster_type: cluster_types[:instance_type]) }
it_behaves_like 'patch prometheus services post migration'
end
end
end
@@ -16,26 +16,29 @@ describe Board do
  end

  describe '#order_by_name_asc' do
-    let!(:second_board) { create(:board, name: 'Secondary board', project: project) }
-    let!(:first_board) { create(:board, name: 'First board', project: project) }
+    let!(:board_B) { create(:board, project: project, name: 'B') }
+    let!(:board_C) { create(:board, project: project, name: 'C') }
+    let!(:board_a) { create(:board, project: project, name: 'a') }
+    let!(:board_A) { create(:board, project: project, name: 'A') }

-    it 'returns in alphabetical order' do
-      expect(project.boards.order_by_name_asc).to eq [first_board, second_board]
+    it 'returns in case-insensitive alphabetical order and then by ascending id' do
+      expect(project.boards.order_by_name_asc).to eq [board_a, board_A, board_B, board_C]
    end
  end

  describe '#first_board' do
-    let!(:other_board) { create(:board, name: 'Other board', project: other_project) }
-    let!(:second_board) { create(:board, name: 'Secondary board', project: project) }
-    let!(:first_board) { create(:board, name: 'First board', project: project) }
+    let!(:board_B) { create(:board, project: project, name: 'B') }
+    let!(:board_C) { create(:board, project: project, name: 'C') }
+    let!(:board_a) { create(:board, project: project, name: 'a') }
+    let!(:board_A) { create(:board, project: project, name: 'A') }

-    it 'return the first alphabetical board as a relation' do
-      expect(project.boards.first_board).to eq [first_board]
+    it 'returns the first case-insensitive alphabetical board as a relation' do
+      expect(project.boards.first_board).to eq [board_a]
    end

    # BoardsActions#board expects this behavior
    it 'raises an error when find is done on a non-existent record' do
-      expect { project.boards.first_board.find(second_board.id) }.to raise_error(ActiveRecord::RecordNotFound)
+      expect { project.boards.first_board.find(board_A.id) }.to raise_error(ActiveRecord::RecordNotFound)
    end
  end
end
@@ -215,7 +215,7 @@ describe API::ProjectHooks, 'ProjectHooks' do
    expect(response).to have_gitlab_http_status(404)
  end

-  it "returns a 404 if a user attempts to delete project hooks he/she does not own" do
+  it "returns a 404 if a user attempts to delete project hooks they do not own" do
    test_user = create(:user)
    other_project = create(:project)
    other_project.add_maintainer(test_user)
...
@@ -988,7 +988,7 @@ describe NotificationService, :mailer do
    expect(email).to have_header('X-GitLab-NotificationReason', NotificationReason::ASSIGNED)
  end

-  it 'emails previous assignee even if he has the "on mention" notif level' do
+  it 'emails previous assignee even if they have the "on mention" notif level' do
    issue.assignees = [@u_mentioned]
    notification.reassigned_issue(issue, @u_disabled, [@u_watcher])
@@ -1005,7 +1005,7 @@ describe NotificationService, :mailer do
    should_not_email(@u_lazy_participant)
  end

-  it 'emails new assignee even if he has the "on mention" notif level' do
+  it 'emails new assignee even if they have the "on mention" notif level' do
    issue.assignees = [@u_mentioned]
    notification.reassigned_issue(issue, @u_disabled, [@u_mentioned])
...
# frozen_string_literal: true
module MigrationHelpers
module PrometheusServiceHelpers
def service_params_for(project_id, params = {})
{
project_id: project_id,
active: false,
properties: '{}',
type: 'PrometheusService',
template: false,
push_events: true,
issues_events: true,
merge_requests_events: true,
tag_push_events: true,
note_events: true,
category: 'monitoring',
default: false,
wiki_page_events: true,
pipeline_events: true,
confidential_issues_events: true,
commit_events: true,
job_events: true,
confidential_note_events: true,
deployment_events: false
}.merge(params)
end
def row_attributes(entity)
entity.attributes.with_indifferent_access.tap do |hash|
hash.merge!(hash.slice(:created_at, :updated_at).transform_values { |v| v.to_s(:db) })
end
end
end
end