Commit a8de96bf authored by GitLab Bot's avatar GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent afe2b984
......@@ -254,4 +254,4 @@ danger-review:
- git version
- node --version
- yarn install --frozen-lockfile --cache-folder .yarn-cache --prefer-offline
- danger --fail-on-errors=true --new-comment --remove-previous-comments --verbose
- danger --fail-on-errors=true --verbose
......@@ -12,7 +12,7 @@ See [the general developer security release guidelines](https://gitlab.com/gitla
## Developer checklist
- [ ] **Make sure this merge request mentions the [GitLab Security] issue it belongs to (i.e. `Related to <issue_id>`).**
- [ ] **On "Related issues" section, write down the [GitLab Security] issue it belongs to (i.e. `Related to <issue_id>`).**
- [ ] Merge request targets `master`, or `X-Y-stable` for backports.
- [ ] Milestone is set for the version this merge request applies to. A closed milestone can be assigned via [quick actions].
- [ ] Title of this merge request is the same as for all backports.
......
<script>
import { GlFormInput } from '@gitlab/ui';

// Input for naming a snippet file. v-model compatible: the current file
// name arrives through the `value` prop and edits are reported back to
// the parent via an `input` event.
export default {
  components: {
    GlFormInput,
  },
  props: {
    // Current file name supplied by the parent (the v-model value).
    value: {
      type: String,
      required: true,
    },
  },
  data() {
    return {
      // Local copy of `value` so typing does not mutate the prop directly.
      name: this.value,
    };
  },
};
</script>
<template>
  <div class="js-file-title file-title-flex-parent">
    <!--
      `input` is emitted on the native change event (i.e. on blur/enter),
      not on every keystroke, so the parent's v-model updates only once
      editing is done.
    -->
    <gl-form-input
      id="snippet_file_name"
      v-model="name"
      :placeholder="
        s__('Snippets|Give your file a name to add code highlighting, e.g. example.rb for Ruby')
      "
      name="snippet_file_name"
      class="form-control js-snippet-file-name qa-snippet-file-name"
      type="text"
      @change="$emit('input', name)"
    />
  </div>
</template>
......@@ -63,7 +63,9 @@ export default {
methods: {
toggleForm() {
this.mediator.store.isLockDialogOpen = !this.mediator.store.isLockDialogOpen;
if (this.isEditable) {
this.mediator.store.isLockDialogOpen = !this.mediator.store.isLockDialogOpen;
}
},
updateLockedAttribute(locked) {
this.mediator.service
......
# Sidebar details query (CE base): fetches only the issue iid.
# Imported via `ee_else_ce`, so EE presumably substitutes a variant that
# selects additional fields — confirm against the EE query file.
query ($fullPath: ID!, $iid: String!) {
  project (fullPath: $fullPath) {
    issue (iid: $iid) {
      iid
    }
  }
}
# Sidebar details query used when the save_issuable_health_status feature
# flag is enabled (CE base): fetches only the issue iid. Imported via
# `ee_else_ce`, so EE presumably substitutes a richer variant — confirm
# against the EE query file.
query ($fullPath: ID!, $iid: String!) {
  project (fullPath: $fullPath) {
    issue (iid: $iid) {
      iid
    }
  }
}
import axios from '~/lib/utils/axios_utils';
import createGqClient, { fetchPolicies } from '~/lib/graphql';
import sidebarDetailsQuery from 'ee_else_ce/sidebar/queries/sidebarDetails.query.graphql';
import sidebarDetailsForHealthStatusFeatureFlagQuery from 'ee_else_ce/sidebar/queries/sidebarDetailsForHealthStatusFeatureFlag.query.graphql';
export const gqClient = createGqClient(
{},
{
fetchPolicy: fetchPolicies.NO_CACHE,
},
);
export default class SidebarService {
constructor(endpointMap) {
......@@ -7,6 +17,8 @@ export default class SidebarService {
this.toggleSubscriptionEndpoint = endpointMap.toggleSubscriptionEndpoint;
this.moveIssueEndpoint = endpointMap.moveIssueEndpoint;
this.projectsAutocompleteEndpoint = endpointMap.projectsAutocompleteEndpoint;
this.fullPath = endpointMap.fullPath;
this.id = endpointMap.id;
SidebarService.singleton = this;
}
......@@ -15,7 +27,20 @@ export default class SidebarService {
}
get() {
return axios.get(this.endpoint);
const hasHealthStatusFeatureFlag = gon.features && gon.features.saveIssuableHealthStatus;
return Promise.all([
axios.get(this.endpoint),
gqClient.query({
query: hasHealthStatusFeatureFlag
? sidebarDetailsForHealthStatusFeatureFlagQuery
: sidebarDetailsQuery,
variables: {
fullPath: this.fullPath,
iid: this.id.toString(),
},
}),
]);
}
update(key, data) {
......
......@@ -19,6 +19,8 @@ export default class SidebarMediator {
toggleSubscriptionEndpoint: options.toggleSubscriptionEndpoint,
moveIssueEndpoint: options.moveIssueEndpoint,
projectsAutocompleteEndpoint: options.projectsAutocompleteEndpoint,
fullPath: options.fullPath,
id: options.id,
});
SidebarMediator.singleton = this;
}
......@@ -45,8 +47,8 @@ export default class SidebarMediator {
fetch() {
return this.service
.get()
.then(({ data }) => {
this.processFetchedData(data);
.then(([restResponse, graphQlResponse]) => {
this.processFetchedData(restResponse.data, graphQlResponse.data);
})
.catch(() => new Flash(__('Error occurred when fetching sidebar data')));
}
......
......@@ -17,7 +17,7 @@ const initAce = () => {
const initMonaco = () => {
const editorEl = document.getElementById('editor');
const contentEl = document.querySelector('.snippet-file-content');
const fileNameEl = document.querySelector('.snippet-file-name');
const fileNameEl = document.querySelector('.js-snippet-file-name');
const form = document.querySelector('.snippet-form-holder form');
editor = new Editor();
......
......@@ -77,3 +77,5 @@
.gl-text-red-700 { @include gl-text-red-700; }
.gl-text-orange-700 { @include gl-text-orange-700; }
.gl-text-green-700 { @include gl-text-green-700; }
.gl-align-items-center { @include gl-align-items-center; }
......@@ -44,6 +44,7 @@ class Projects::IssuesController < Projects::ApplicationController
before_action do
push_frontend_feature_flag(:vue_issuable_sidebar, project.group)
push_frontend_feature_flag(:save_issuable_health_status, project.group)
end
around_action :allow_gitaly_ref_name_caching, only: [:discussions]
......
......@@ -35,7 +35,7 @@ module AnalyticsNavbarHelper
return unless project_nav_tab?(:cycle_analytics)
navbar_sub_item(
title: _('Value Stream Analytics'),
title: _('Value Stream'),
path: 'cycle_analytics#show',
link: project_cycle_analytics_path(project),
link_to_options: { class: 'shortcuts-project-cycle-analytics' }
......@@ -47,7 +47,7 @@ module AnalyticsNavbarHelper
return if project.empty_repo?
navbar_sub_item(
title: _('Repository Analytics'),
title: _('Repository'),
path: 'graphs#charts',
link: charts_project_graph_path(project, current_ref),
link_to_options: { class: 'shortcuts-repository-charts' }
......@@ -60,7 +60,7 @@ module AnalyticsNavbarHelper
return unless project.feature_available?(:builds, current_user) || !project.empty_repo?
navbar_sub_item(
title: _('CI / CD Analytics'),
title: _('CI / CD'),
path: 'pipelines#charts',
link: charts_project_pipelines_path(project)
)
......
......@@ -463,6 +463,7 @@ module IssuablesHelper
currentUser: issuable[:current_user],
rootPath: root_path,
fullPath: issuable[:project_full_path],
id: issuable[:id],
timeTrackingLimitToHours: Gitlab::CurrentSettings.time_tracking_limit_to_hours
}
end
......
......@@ -52,7 +52,9 @@ class BroadcastMessage < ApplicationRecord
end
def cache
Gitlab::JsonCache.new(cache_key_with_version: false)
::Gitlab::SafeRequestStore.fetch(:broadcast_message_json_cache) do
Gitlab::JsonCache.new(cache_key_with_version: false)
end
end
def cache_expires_in
......@@ -68,9 +70,9 @@ class BroadcastMessage < ApplicationRecord
now_or_future = messages.select(&:now_or_future?)
# If there are cached entries but none are to be displayed we'll purge the
# cache so we don't keep running this code all the time.
cache.expire(cache_key) if now_or_future.empty?
# If there are cached entries but they don't match the ones we are
# displaying we'll refresh the cache so we don't need to keep filtering.
cache.expire(cache_key) if now_or_future != messages
now_or_future.select(&:now?).select { |message| message.matches_current_path(current_path) }
end
......
......@@ -21,7 +21,7 @@ class InternalId < ApplicationRecord
belongs_to :project
belongs_to :namespace
enum usage: { issues: 0, merge_requests: 1, deployments: 2, milestones: 3, epics: 4, ci_pipelines: 5, operations_feature_flags: 6 }
enum usage: ::InternalIdEnums.usage_resources
validates :usage, presence: true
......
# frozen_string_literal: true

# Standalone home for the InternalId `usage` enum values, so that EE can
# layer additional resources on top via `prepend_if_ee` without touching
# the CE model.
module InternalIdEnums
  def self.usage_resources
    # When adding a new resource, make sure it doesn't conflict with EE usage_resources.
    {
      issues: 0,
      merge_requests: 1,
      deployments: 2,
      milestones: 3,
      epics: 4,
      ci_pipelines: 5,
      operations_feature_flags: 6
    }
  end
end

InternalIdEnums.prepend_if_ee('EE::InternalIdEnums')
......@@ -11,11 +11,7 @@ module Groups
end
def execute
unless @current_user.can?(:admin_group, @group)
raise ::Gitlab::ImportExport::Error.new(
"User with ID: %s does not have permission to Group %s with ID: %s." %
[@current_user.id, @group.name, @group.id])
end
validate_user_permissions
save!
ensure
......@@ -26,6 +22,14 @@ module Groups
attr_accessor :shared
def validate_user_permissions
unless @current_user.can?(:admin_group, @group)
@shared.error(::Gitlab::ImportExport::Error.permission_error(@current_user, @group))
notify_error!
end
end
def save!
if savers.all?(&:save)
notify_success
......
......@@ -12,15 +12,14 @@ module Groups
end
def execute
validate_user_permissions
if valid_user_permissions? && import_file && restorer.restore
notify_success
if import_file && restorer.restore
@group
else
raise StandardError.new(@shared.errors.to_sentence)
notify_error!
end
rescue => e
raise StandardError.new(e.message)
ensure
remove_import_file
end
......@@ -49,13 +48,37 @@ module Groups
upload.save!
end
def validate_user_permissions
unless current_user.can?(:admin_group, group)
raise ::Gitlab::ImportExport::Error.new(
"User with ID: %s does not have permission to Group %s with ID: %s." %
[current_user.id, group.name, group.id])
def valid_user_permissions?
if current_user.can?(:admin_group, group)
true
else
@shared.error(::Gitlab::ImportExport::Error.permission_error(current_user, group))
false
end
end
def notify_success
@shared.logger.info(
group_id: @group.id,
group_name: @group.name,
message: 'Group Import/Export: Import succeeded'
)
end
def notify_error
@shared.logger.error(
group_id: @group.id,
group_name: @group.name,
message: "Group Import/Export: Errors occurred, see '#{Gitlab::ErrorTracking::Logger.file_name}' for details"
)
end
def notify_error!
notify_error
raise Gitlab::ImportExport::Error.new(@shared.errors.to_sentence)
end
end
end
end
......@@ -60,7 +60,7 @@ module MergeRequests
def commit
repository.merge_to_ref(current_user, source, merge_request, target_ref, commit_message, first_parent_ref)
rescue Gitlab::Git::PreReceiveError => error
rescue Gitlab::Git::PreReceiveError, Gitlab::Git::CommandError => error
raise MergeError, error.message
end
end
......
......@@ -24,7 +24,7 @@ module Metrics
def execute
catch(:error) do
throw(:error, error(_(%q(You can't commit to this project)), :forbidden)) unless push_authorized?
throw(:error, error(_(%q(You are not allowed to push into this branch. Create another branch or open a merge request.)), :forbidden)) unless push_authorized?
result = ::Files::CreateService.new(project, current_user, dashboard_attrs).execute
throw(:error, wrap_error(result)) unless result[:status] == :success
......
......@@ -9,7 +9,7 @@ module Metrics
def execute
catch(:error) do
throw(:error, error(_(%q(You can't commit to this project)), :forbidden)) unless push_authorized?
throw(:error, error(_(%q(You are not allowed to push into this branch. Create another branch or open a merge request.)), :forbidden)) unless push_authorized?
result = ::Files::UpdateService.new(project, current_user, dashboard_attrs).execute
throw(:error, result.merge(http_status: :bad_request)) unless result[:status] == :success
......
......@@ -5,9 +5,7 @@ module Projects
class ExportService < BaseService
def execute(after_export_strategy = nil, options = {})
unless project.template_source? || can?(current_user, :admin_project, project)
raise ::Gitlab::ImportExport::Error.new(
"User with ID: %s does not have permission to Project %s with ID: %s." %
[current_user.id, project.name, project.id])
raise ::Gitlab::ImportExport::Error.permission_error(current_user, project)
end
@shared = project.import_export_shared
......
......@@ -48,9 +48,9 @@
- unless should_display_analytics_pages_in_sidebar
- if group_sidebar_link?(:contribution_analytics)
= nav_link(path: 'contribution_analytics#show') do
= link_to group_contribution_analytics_path(@group), title: _('Contribution Analytics'), data: { placement: 'right', qa_selector: 'contribution_analytics_link' } do
= link_to group_contribution_analytics_path(@group), title: _('Contribution'), data: { placement: 'right', qa_selector: 'contribution_analytics_link' } do
%span
= _('Contribution Analytics')
= _('Contribution')
= render_if_exists 'layouts/nav/group_insights_link'
......
......@@ -42,8 +42,8 @@
- unless should_display_analytics_pages_in_sidebar
- if can?(current_user, :read_cycle_analytics, @project)
= nav_link(path: 'cycle_analytics#show') do
= link_to project_cycle_analytics_path(@project), title: _('Value Stream Analytics'), class: 'shortcuts-project-cycle-analytics' do
%span= _('Value Stream Analytics')
= link_to project_cycle_analytics_path(@project), title: _('Value Stream'), class: 'shortcuts-project-cycle-analytics' do
%span= _('Value Stream')
= render_if_exists 'layouts/nav/project_insights_link'
......
......@@ -129,6 +129,9 @@
= render_if_exists 'shared/issuable/sidebar_weight', issuable_sidebar: issuable_sidebar
- if Feature.enabled?(:save_issuable_health_status, @project.group) && issuable_sidebar[:type] == "issue"
.js-sidebar-status-entry-point
- if issuable_sidebar.has_key?(:confidential)
-# haml-lint:disable InlineJavaScript
%script#js-confidential-issue-data{ type: "application/json" }= { is_confidential: issuable_sidebar[:confidential], is_editable: can_edit_issuable }.to_json.html_safe
......
......@@ -26,7 +26,7 @@
= f.label :file_name, s_('Snippets|File')
.file-holder.snippet
.js-file-title.file-title-flex-parent
= f.text_field :file_name, placeholder: s_("Snippets|Give your file a name to add code highlighting, e.g. example.rb for Ruby"), class: 'form-control snippet-file-name qa-snippet-file-name'
= f.text_field :file_name, placeholder: s_("Snippets|Give your file a name to add code highlighting, e.g. example.rb for Ruby"), class: 'form-control js-snippet-file-name qa-snippet-file-name'
.file-content.code
%pre#editor{ data: { 'editor-loading': true } }= @snippet.content
= f.hidden_field :content, class: 'snippet-file-content'
......
......@@ -865,7 +865,7 @@
:weight: 2
:idempotent:
- :name: create_evidence
:feature_category: :release_governance
:feature_category: :release_evidence
:has_external_dependencies:
:urgency: :default
:resource_boundary: :unknown
......
......@@ -3,7 +3,7 @@
class CreateEvidenceWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :release_governance
feature_category :release_evidence
weight 2
def perform(release_id)
......
---
title: Prevent unauthorized users from locking an issue via the collapsed sidebar.
merge_request: 26324
author: Gilang Gumilar
type: fixed
---
title: Remove "Analytics" suffix from the sidebar menu items
merge_request: 26415
author:
type: removed
---
title: Fix avg_cycle_analytics uncaught error and optimize query
merge_request: 26381
author:
type: fixed
---
title: Backfill LfsObjectsProject records of forks
merge_request: 25343
author:
type: other
---
title: Create approval todos on update
merge_request: 26077
author:
type: fixed
---
title: Fix MergeToRefService raises Gitlab::Git::CommandError
merge_request: 26465
author:
type: fixed
---
title: Ensure all errors are logged in Group Import
merge_request: 25619
author:
type: changed
---
title: Fix error messages for the dashboard cloning process.
merge_request: 26290
author:
type: fixed
---
title: Add migration for Requirement model
merge_request: 26097
author:
type: added
---
title: Remove unnecessary Redis deletes for broadcast messages
merge_request: 26541
author:
type: performance
......@@ -25,8 +25,7 @@
- code_quality
- code_review
- collection
- compliance_controls
- compliance_frameworks
- compliance_management
- container_network_security
- container_registry
- container_scanning
......@@ -37,7 +36,7 @@
- dependency_proxy
- dependency_scanning
- design_management
- devops_score
- devops_reports
- digital_experience_management
- disaster_recovery
- dynamic_application_security_testing
......@@ -52,6 +51,7 @@
- gitaly
- gitlab_handbook
- gitter
- global_search
- helm_chart_registry
- importers
- incident_management
......@@ -61,6 +61,8 @@
- interactive_application_security_testing
- internationalization
- issue_tracking
- jenkins_importer
- jira_importer
- jupyter_notebooks
- kanban_boards
- kubernetes_management
......@@ -70,13 +72,14 @@
- load_testing
- logging
- malware_scanning
- merge_trains
- metrics
- omnibus_package
- package_registry
- pages
- pki_management
- planning_analytics
- quality_management
- release_governance
- release_evidence
- release_orchestration
- requirements_management
- responsible_disclosure
......@@ -86,7 +89,6 @@
- runner
- runtime_application_self_protection
- sdk
- search
- secret_detection
- secrets_management
- serverless
......@@ -97,8 +99,6 @@
- static_site_editor
- status_page
- subgroups
- system_testing
- teams
- templates
- threat_detection
- time_tracking
......@@ -113,4 +113,3 @@
- web_ide
- web_performance
- wiki
- workspaces
# frozen_string_literal: true

# Creates the `requirements` table, including a trigram (pg_trgm) index
# on title and a unique per-project iid index.
class CreateRequirements < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  # New table: nothing reads it yet, so no downtime is required.
  DOWNTIME = false

  def change
    create_table :requirements do |t|
      t.timestamps_with_timezone null: false
      t.integer :project_id, null: false
      t.integer :author_id
      t.integer :iid, null: false
      t.integer :cached_markdown_version
      t.integer :state, limit: 2, default: 1, null: false
      t.string :title, limit: 255, null: false
      t.text :title_html
      t.index :project_id
      t.index :author_id
      # GIN index with gin_trgm_ops to support partial/fuzzy title matching.
      t.index :title, name: "index_requirements_on_title_trigram", using: :gin, opclass: :gin_trgm_ops
      t.index :state
      t.index :created_at
      t.index :updated_at
      # iid must be unique within a project; partial unique index scoped to
      # rows where project_id is present.
      t.index %w(project_id iid), name: 'index_requirements_on_project_id_and_iid', where: 'project_id IS NOT NULL', unique: true, using: :btree
    end
  end
end
# frozen_string_literal: true

# Adds the requirements -> projects foreign key with cascade delete, so
# requirements are removed together with their project.
class RequirementsAddProjectFk < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  def up
    # with_lock_retries retries acquisition of the exclusive ALTER TABLE
    # lock, which is why the AddConcurrentForeignKey cop is disabled.
    with_lock_retries do
      add_foreign_key(:requirements, :projects, column: :project_id, on_delete: :cascade) # rubocop: disable Migration/AddConcurrentForeignKey
    end
  end

  def down
    with_lock_retries do
      remove_foreign_key(:requirements, column: :project_id)
    end
  end
end
# frozen_string_literal: true

# Adds the requirements -> users (author_id) foreign key; author_id is
# nulled out (on_delete: :nullify) when the user is deleted, keeping the
# requirement itself.
class RequirementsAddAuthorFk < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  def up
    # with_lock_retries retries acquisition of the exclusive ALTER TABLE
    # lock, which is why the AddConcurrentForeignKey cop is disabled.
    with_lock_retries do
      add_foreign_key(:requirements, :users, column: :author_id, on_delete: :nullify) # rubocop: disable Migration/AddConcurrentForeignKey
    end
  end

  def down
    with_lock_retries do
      remove_foreign_key(:requirements, column: :author_id)
    end
  end
end
# frozen_string_literal: true

# Re-enqueues the LinkLfsObjects background migration for fork projects
# that are missing lfs_objects_projects records, in batches of 1,000.
class RescheduleLinkLfsObjects < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  MIGRATION = 'LinkLfsObjects'
  BATCH_SIZE = 1_000

  # Background jobs are scheduled outside a DDL transaction.
  disable_ddl_transaction!

  def up
    # Only forks whose LFS object links were never copied need rescheduling.
    forks = Gitlab::BackgroundMigration::LinkLfsObjects::Project.with_non_existing_lfs_objects

    queue_background_migration_jobs_by_range_at_intervals(
      forks,
      MIGRATION,
      BackgroundMigrationWorker.minimum_interval,
      batch_size: BATCH_SIZE
    )
  end

  def down
    # No-op. No need to make this reversible. In case the enqueued jobs run
    # and fail at some point, some records will be created. When rescheduled,
    # those records won't be re-created. It's also hard to track which
    # records to clean up, if ever.
  end
end
......@@ -3723,6 +3723,25 @@ ActiveRecord::Schema.define(version: 2020_03_04_160823) do
t.index ["project_id", "programming_language_id"], name: "index_repository_languages_on_project_and_languages_id", unique: true
end
create_table "requirements", force: :cascade do |t|
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
t.integer "project_id", null: false
t.integer "author_id"
t.integer "iid", null: false
t.integer "cached_markdown_version"
t.integer "state", limit: 2, default: 1, null: false
t.string "title", limit: 255, null: false
t.text "title_html"
t.index ["author_id"], name: "index_requirements_on_author_id"
t.index ["created_at"], name: "index_requirements_on_created_at"
t.index ["project_id", "iid"], name: "index_requirements_on_project_id_and_iid", unique: true, where: "(project_id IS NOT NULL)"
t.index ["project_id"], name: "index_requirements_on_project_id"
t.index ["state"], name: "index_requirements_on_state"
t.index ["title"], name: "index_requirements_on_title_trigram", opclass: :gin_trgm_ops, using: :gin
t.index ["updated_at"], name: "index_requirements_on_updated_at"
end
create_table "resource_label_events", force: :cascade do |t|
t.integer "action", null: false
t.integer "issue_id"
......@@ -5001,6 +5020,8 @@ ActiveRecord::Schema.define(version: 2020_03_04_160823) do
add_foreign_key "releases", "users", column: "author_id", name: "fk_8e4456f90f", on_delete: :nullify
add_foreign_key "remote_mirrors", "projects", name: "fk_43a9aa4ca8", on_delete: :cascade
add_foreign_key "repository_languages", "projects", on_delete: :cascade
add_foreign_key "requirements", "projects", on_delete: :cascade
add_foreign_key "requirements", "users", column: "author_id", on_delete: :nullify
add_foreign_key "resource_label_events", "epics", on_delete: :cascade
add_foreign_key "resource_label_events", "issues", on_delete: :cascade
add_foreign_key "resource_label_events", "labels", on_delete: :nullify
......
......@@ -54,8 +54,10 @@ Add the following to your `sshd_config` file. This is usually located at
Omnibus Docker:
```plaintext
AuthorizedKeysCommand /opt/gitlab/embedded/service/gitlab-shell/bin/gitlab-shell-authorized-keys-check git %u %k
AuthorizedKeysCommandUser git
Match User git # Apply the AuthorizedKeysCommands to the git user only
AuthorizedKeysCommand /opt/gitlab/embedded/service/gitlab-shell/bin/gitlab-shell-authorized-keys-check git %u %k
AuthorizedKeysCommandUser git
Match all # End match, settings apply to all users again
```
Reload OpenSSH:
......
......@@ -104,7 +104,8 @@ Review Apps are automatically stopped 2 days after the last deployment thanks to
the [Environment auto-stop](../../ci/environments.html#environments-auto-stop) feature.
If you need your Review App to stay up for a longer time, you can
[pin its environment](../../ci/environments.html#auto-stop-example).
[pin its environment](../../ci/environments.html#auto-stop-example) or retry the
`review-deploy` job to update the "latest deployed at" time.
The `review-cleanup` job that automatically runs in scheduled
pipelines (and is manual in merge request) stops stale Review Apps after 5 days,
......
......@@ -6,8 +6,6 @@ module Gitlab
class LinkLfsObjects
# Model definition used for migration
class ForkNetworkMember < ActiveRecord::Base
include EachBatch
self.table_name = 'fork_network_members'
def self.with_non_existing_lfs_objects
......@@ -25,62 +23,8 @@ module Gitlab
end
end
# Model definition used for migration
class Project < ActiveRecord::Base
include EachBatch
self.table_name = 'projects'
has_one :fork_network_member, class_name: 'LinkLfsObjects::ForkNetworkMember'
def self.with_non_existing_lfs_objects
fork_network_members =
ForkNetworkMember.with_non_existing_lfs_objects
.select(1)
.where('fork_network_members.project_id = projects.id')
where('EXISTS (?)', fork_network_members)
end
end
# Model definition used for migration
class LfsObjectsProject < ActiveRecord::Base
include EachBatch
self.table_name = 'lfs_objects_projects'
end
BATCH_SIZE = 1000
def perform(start_id, end_id)
forks =
Project
.with_non_existing_lfs_objects
.where(id: start_id..end_id)
forks.includes(:fork_network_member).find_each do |project|
LfsObjectsProject
.select("lfs_objects_projects.lfs_object_id, #{project.id}, NOW(), NOW()")
.where(project_id: project.fork_network_member.forked_from_project_id)
.each_batch(of: BATCH_SIZE) do |batch|
execute <<~SQL
INSERT INTO lfs_objects_projects (lfs_object_id, project_id, created_at, updated_at)
#{batch.to_sql}
SQL
end
end
logger.info(message: "LinkLfsObjects: created missing LfsObjectsProject for Projects #{forks.map(&:id).join(', ')}")
end
private
def execute(sql)
::ActiveRecord::Base.connection.execute(sql)
end
def logger
@logger ||= Gitlab::BackgroundMigration::Logger.build
# no-op as some queries time out
end
end
end
......
......@@ -3,15 +3,32 @@
module Gitlab
module CycleAnalytics
class UsageData
include Gitlab::Utils::StrongMemoize
PROJECTS_LIMIT = 10
attr_reader :projects, :options
attr_reader :options
def initialize
@projects = Project.sorted_by_activity.limit(PROJECTS_LIMIT)
@options = { from: 7.days.ago }
end
def projects
strong_memoize(:projects) do
projects = Project.where.not(last_activity_at: nil).order(last_activity_at: :desc).limit(10) +
Project.where.not(last_repository_updated_at: nil).order(last_repository_updated_at: :desc).limit(10)
projects = projects.uniq.sort_by do |project|
[project.last_activity_at, project.last_repository_updated_at].min
end
if projects.size < 10
projects.concat(Project.where(last_activity_at: nil, last_repository_updated_at: nil).limit(10))
end
projects.uniq.first(10)
end
end
def to_json(*)
total = 0
......
......@@ -2,6 +2,13 @@
module Gitlab
module ImportExport
Error = Class.new(StandardError)
class Error < StandardError
def self.permission_error(user, importable)
self.new(
"User with ID: %s does not have required permissions for %s: %s with ID: %s" %
[user.id, importable.class.name, importable.name, importable.id]
)
end
end
end
end
......@@ -49,11 +49,7 @@ module Gitlab
json = IO.read(@path)
ActiveSupport::JSON.decode(json)
rescue => e
@shared.logger.error(
group_id: @group.id,
group_name: @group.name,
message: "Import/Export error: #{e.message}"
)
@shared.error(e)
raise Gitlab::ImportExport::Error.new('Incorrect JSON format')
end
......
# frozen_string_literal: true

module Gitlab
  module ImportExport
    module Project
      # Shared plumbing for the project import/export rake tasks.
      # Resolves the namespace and user from CLI options, optionally wraps
      # work in timing measurement, and funnels success/error messages
      # through the injected logger.
      class BaseTask
        include Gitlab::WithRequestStore

        # opts - hash with :project_path, :file_path, :namespace_path,
        #        :username and :measurement_enabled (all required via fetch).
        # logger: destination for progress/error output; defaults to stdout.
        def initialize(opts, logger: Logger.new($stdout))
          @project_path = opts.fetch(:project_path)
          @file_path = opts.fetch(:file_path)
          @namespace = Namespace.find_by_full_path(opts.fetch(:namespace_path))
          @current_user = User.find_by_username(opts.fetch(:username))
          @measurement_enabled = opts.fetch(:measurement_enabled)
          # Measuring object is only built when measurement was requested.
          @measurement = Gitlab::Utils::Measuring.new(logger: logger) if @measurement_enabled
          @logger = logger
        end

        private

        attr_reader :measurement, :project, :namespace, :current_user, :file_path, :project_path, :logger

        def measurement_enabled?
          @measurement_enabled
        end

        # Logs +message+ and returns true so callers can `return success(...)`.
        def success(message)
          logger.info(message)

          true
        end

        # Logs +message+ and returns false so callers can `return error(...)`.
        def error(message)
          logger.error(message)

          false
        end
      end
    end
  end
end
# frozen_string_literal: true

module Gitlab
  module ImportExport
    module Project
      # Rake-task entry point for exporting a project to an archive file.
      class ExportTask < BaseTask
        def initialize(*)
          super

          @project = namespace.projects.find_by_path(@project_path)
        end

        # Runs the export. Returns true on success, false (after logging)
        # when the project or the target directory cannot be found.
        def export
          return error("Project with path: #{project_path} was not found. Please provide correct project path") unless project
          return error("Invalid file path: #{file_path}. Please provide correct file path") unless file_path_exists?

          with_export do
            ::Projects::ImportExport::ExportService.new(project, current_user)
              .execute(Gitlab::ImportExport::AfterExportStrategies::MoveFileStrategy.new(archive_path: file_path))
          end

          success('Done!')
        end

        private

        # The archive is moved into file_path's directory, so the directory
        # must already exist.
        def file_path_exists?
          directory = File.dirname(file_path)

          Dir.exist?(directory)
        end

        # Wraps the export in a request store and relaxes Gitaly's N+1 call
        # guard; measures duration when measurement is enabled.
        def with_export
          with_request_store do
            ::Gitlab::GitalyClient.allow_n_plus_1_calls do
              measurement_enabled? ? measurement.with_measuring { yield } : yield
            end
          end
        end
      end
    end
  end
end
# frozen_string_literal: true

module Gitlab
  module ImportExport
    module Project
      # Rake-task entry point for importing a project from an export archive.
      class ImportTask < BaseTask
        # Runs the import. Returns true on success; returns false (after
        # logging) when the import state or the project record carries errors.
        def import
          show_import_start_message

          run_isolated_sidekiq_job

          show_import_failures_count

          return error(project.import_state.last_error) if project.import_state&.last_error
          return error(project.errors.full_messages.to_sentence) if project.errors.any?

          success('Done!')
        end

        private

        # We want to ensure that all Sidekiq jobs are executed
        # synchronously as part of this process.
        # This ensures that all expensive operations do not escape
        # to general Sidekiq clusters/nodes.
        def with_isolated_sidekiq_job
          Sidekiq::Testing.fake! do
            with_request_store do
              # If you are attempting to import a large project into a development environment,
              # you may see Gitaly throw an error about too many calls or invocations.
              # This is due to a n+1 calls limit being set for development setups (not enforced in production)
              # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/24475#note_283090635
              # For development setups, this code-path will be excluded from n+1 detection.
              ::Gitlab::GitalyClient.allow_n_plus_1_calls do
                measurement_enabled? ? measurement.with_measuring { yield } : yield
              end
            end

            true
          end
        end

        def run_isolated_sidekiq_job
          with_isolated_sidekiq_job do
            @project = create_project

            execute_sidekiq_job
          end
        end

        def create_project
          # We are disabling ObjectStorage for `import`
          # as it is too slow to handle big archives:
          # 1. DB transaction timeouts on upload
          # 2. Download of archive before unpacking
          disable_upload_object_storage do
            service = Projects::GitlabProjectsImportService.new(
              current_user,
              {
                namespace_id: namespace.id,
                path: project_path,
                file: File.open(file_path)
              }
            )

            service.execute
          end
        end

        # Drains every queued (faked) Sidekiq job inline.
        def execute_sidekiq_job
          Sidekiq::Worker.drain_all
        end

        def disable_upload_object_storage
          overwrite_uploads_setting('background_upload', false) do
            overwrite_uploads_setting('direct_upload', false) do
              yield
            end
          end
        end

        # Temporarily overrides one object-store upload setting for the
        # duration of the block, restoring the previous value afterwards.
        def overwrite_uploads_setting(key, value)
          old_value = Settings.uploads.object_store[key]
          Settings.uploads.object_store[key] = value

          yield

        ensure
          Settings.uploads.object_store[key] = old_value
        end

        def full_path
          "#{namespace.full_path}/#{project_path}"
        end

        def show_import_start_message
          logger.info "Importing GitLab export: #{file_path} into GitLab" \
            " #{full_path}" \
            " as #{current_user.name}"
        end

        def show_import_failures_count
          return unless project.import_failures.exists?

          logger.info "Total number of not imported relations: #{project.import_failures.count}"
        end
      end
    end
  end
end
......@@ -94,14 +94,6 @@ module Gitlab
end
end
def log_error(details)
@logger.error(log_base_data.merge(details))
end
def log_debug(details)
@logger.debug(log_base_data.merge(details))
end
def log_base_data
log = {
importer: 'Import/Export',
......
......@@ -122,6 +122,8 @@ module Gitlab
def cycle_analytics_usage_data
Gitlab::CycleAnalytics::UsageData.new.to_json
rescue ActiveRecord::StatementInvalid
{ avg_cycle_analytics: {} }
end
def features_usage_data
......@@ -232,7 +234,7 @@ module Gitlab
end
def count(relation, column = nil, fallback: -1, batch: true)
if batch && Feature.enabled?(:usage_ping_batch_counter)
if batch && Feature.enabled?(:usage_ping_batch_counter, default_enabled: true)
Gitlab::Database::BatchCount.batch_count(relation, column)
else
relation.count
......@@ -242,7 +244,7 @@ module Gitlab
end
def distinct_count(relation, column = nil, fallback: -1, batch: true)
if batch && Feature.enabled?(:usage_ping_batch_counter)
if batch && Feature.enabled?(:usage_ping_batch_counter, default_enabled: true)
Gitlab::Database::BatchCount.batch_distinct_count(relation, column)
else
relation.distinct_count_by(column)
......
......@@ -59,14 +59,15 @@ module Gitlab
end
def duration_in_numbers(duration_in_seconds)
milliseconds = duration_in_seconds.in_milliseconds % 1.second.in_milliseconds
seconds = duration_in_seconds % 1.minute
minutes = (duration_in_seconds / 1.minute) % (1.hour / 1.minute)
hours = duration_in_seconds / 1.hour
if hours == 0
"%02d:%02d" % [minutes, seconds]
"%02d:%02d:%03d" % [minutes, seconds, milliseconds]
else
"%02d:%02d:%02d" % [hours, minutes, seconds]
"%02d:%02d:%02d:%03d" % [hours, minutes, seconds, milliseconds]
end
end
end
......
# frozen_string_literal: true
require 'gitlab/with_request_store'
# Export project to archive
#
# @example
......@@ -14,81 +12,36 @@ namespace :gitlab do
# Load it here to avoid polluting Rake tasks with Sidekiq test warnings
require 'sidekiq/testing'
warn_user_is_not_gitlab
if ENV['IMPORT_DEBUG'].present?
ActiveRecord::Base.logger = Logger.new(STDOUT)
Gitlab::Metrics::Exporter::SidekiqExporter.instance.start
end
GitlabProjectExport.new(
namespace_path: args.namespace_path,
project_path: args.project_path,
username: args.username,
file_path: args.archive_path,
measurement_enabled: Gitlab::Utils.to_boolean(args.measurement_enabled)
).export
end
end
end
class GitlabProjectExport
include Gitlab::WithRequestStore
def initialize(opts)
@project_path = opts.fetch(:project_path)
@file_path = opts.fetch(:file_path)
@current_user = User.find_by_username(opts.fetch(:username))
namespace = Namespace.find_by_full_path(opts.fetch(:namespace_path))
@project = namespace.projects.find_by_path(@project_path)
@measurement_enabled = opts.fetch(:measurement_enabled)
@measurable = Gitlab::Utils::Measuring.new if @measurement_enabled
end
def export
validate_project
validate_file_path
with_export do
::Projects::ImportExport::ExportService.new(project, current_user)
.execute(Gitlab::ImportExport::AfterExportStrategies::MoveFileStrategy.new(archive_path: file_path))
end
puts 'Done!'
rescue StandardError => e
puts "Exception: #{e.message}"
puts e.backtrace
exit 1
end
private
attr_reader :measurable, :project, :current_user, :file_path, :project_path
def validate_project
unless project
puts "Error: Project with path: #{project_path} was not found. Please provide correct project path"
exit 1
end
end
def validate_file_path
directory = File.dirname(file_path)
unless Dir.exist?(directory)
puts "Error: Invalid file path: #{file_path}. Please provide correct file path"
exit 1
end
end
def with_export
with_request_store do
::Gitlab::GitalyClient.allow_n_plus_1_calls do
measurement_enabled? ? measurable.with_measuring { yield } : yield
logger = Logger.new($stdout)
begin
warn_user_is_not_gitlab
if ENV['EXPORT_DEBUG'].present?
ActiveRecord::Base.logger = logger
Gitlab::Metrics::Exporter::SidekiqExporter.instance.start
logger.level = Logger::DEBUG
else
logger.level = Logger::INFO
end
task = Gitlab::ImportExport::Project::ExportTask.new(
namespace_path: args.namespace_path,
project_path: args.project_path,
username: args.username,
file_path: args.archive_path,
measurement_enabled: Gitlab::Utils.to_boolean(args.measurement_enabled),
logger: logger
)
success = task.export
exit(success)
rescue StandardError => e
logger.error "Exception: #{e.message}"
logger.debug e.backtrace
exit 1
end
end
end
def measurement_enabled?
@measurement_enabled
end
end
# frozen_string_literal: true
require 'gitlab/with_request_store'
# Import large project archives
#
# This task:
......@@ -18,148 +16,36 @@ namespace :gitlab do
# Load it here to avoid polluting Rake tasks with Sidekiq test warnings
require 'sidekiq/testing'
warn_user_is_not_gitlab
if ENV['IMPORT_DEBUG'].present?
ActiveRecord::Base.logger = Logger.new(STDOUT)
end
GitlabProjectImport.new(
namespace_path: args.namespace_path,
project_path: args.project_path,
username: args.username,
file_path: args.archive_path,
measurement_enabled: Gitlab::Utils.to_boolean(args.measurement_enabled)
).import
end
end
end
class GitlabProjectImport
include Gitlab::WithRequestStore
def initialize(opts)
@project_path = opts.fetch(:project_path)
@file_path = opts.fetch(:file_path)
@namespace = Namespace.find_by_full_path(opts.fetch(:namespace_path))
@current_user = User.find_by_username(opts.fetch(:username))
@measurement_enabled = opts.fetch(:measurement_enabled)
@measurement = Gitlab::Utils::Measuring.new if @measurement_enabled
end
def import
show_import_start_message
logger = Logger.new($stdout)
run_isolated_sidekiq_job
begin
warn_user_is_not_gitlab
show_import_failures_count
if project&.import_state&.last_error
puts "ERROR: #{project.import_state.last_error}"
exit 1
elsif project.errors.any?
puts "ERROR: #{project.errors.full_messages.join(', ')}"
exit 1
else
puts 'Done!'
end
rescue StandardError => e
puts "Exception: #{e.message}"
puts e.backtrace
exit 1
end
private
attr_reader :measurement, :project, :namespace, :current_user, :file_path, :project_path
def measurement_enabled?
@measurement_enabled
end
# We want to ensure that all Sidekiq jobs are executed
# synchronously as part of that process.
# This ensures that all expensive operations do not escape
# to general Sidekiq clusters/nodes.
def with_isolated_sidekiq_job
Sidekiq::Testing.fake! do
with_request_store do
# If you are attempting to import a large project into a development environment,
# you may see Gitaly throw an error about too many calls or invocations.
# This is due to a n+1 calls limit being set for development setups (not enforced in production)
# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/24475#note_283090635
# For development setups, this code-path will be excluded from n+1 detection.
::Gitlab::GitalyClient.allow_n_plus_1_calls do
measurement_enabled? ? measurement.with_measuring { yield } : yield
if ENV['IMPORT_DEBUG'].present?
ActiveRecord::Base.logger = logger
Gitlab::Metrics::Exporter::SidekiqExporter.instance.start
logger.level = Logger::DEBUG
else
logger.level = Logger::INFO
end
end
true
end
end
def run_isolated_sidekiq_job
with_isolated_sidekiq_job do
@project = create_project
execute_sidekiq_job
end
end
def create_project
# We are disabling ObjectStorage for `import`
# as it is too slow to handle big archives:
# 1. DB transaction timeouts on upload
# 2. Download of archive before unpacking
disable_upload_object_storage do
service = Projects::GitlabProjectsImportService.new(
current_user,
{
namespace_id: namespace.id,
path: project_path,
file: File.open(file_path)
}
)
service.execute
end
end
def execute_sidekiq_job
Sidekiq::Worker.drain_all
end
def disable_upload_object_storage
overwrite_uploads_setting('background_upload', false) do
overwrite_uploads_setting('direct_upload', false) do
yield
task = Gitlab::ImportExport::Project::ImportTask.new(
namespace_path: args.namespace_path,
project_path: args.project_path,
username: args.username,
file_path: args.archive_path,
measurement_enabled: Gitlab::Utils.to_boolean(args.measurement_enabled),
logger: logger
)
success = task.import
exit(success)
rescue StandardError => e
logger.error "Exception: #{e.message}"
logger.debug e.backtrace
exit 1
end
end
end
def overwrite_uploads_setting(key, value)
old_value = Settings.uploads.object_store[key]
Settings.uploads.object_store[key] = value
yield
ensure
Settings.uploads.object_store[key] = old_value
end
def full_path
"#{namespace.full_path}/#{project_path}"
end
def show_import_start_message
puts "Importing GitLab export: #{file_path} into GitLab" \
" #{full_path}" \
" as #{current_user.name}"
end
def show_import_failures_count
return unless project.import_failures.exists?
puts "Total number of not imported relations: #{project.import_failures.count}"
end
end
......@@ -1142,6 +1142,9 @@ msgstr ""
msgid "Add email address"
msgstr ""
msgid "Add environment"
msgstr ""
msgid "Add header and footer to emails. Please note that color settings will only be applied within the application interface"
msgstr ""
......@@ -2453,6 +2456,9 @@ msgstr ""
msgid "At least one of group_id or project_id must be specified"
msgstr ""
msgid "At risk"
msgstr ""
msgid "Attach a file"
msgstr ""
......@@ -3133,9 +3139,6 @@ msgstr ""
msgid "CI / CD"
msgstr ""
msgid "CI / CD Analytics"
msgstr ""
msgid "CI / CD Charts"
msgstr ""
......@@ -5612,6 +5615,9 @@ msgstr ""
msgid "Create"
msgstr ""
msgid "Create %{environment}"
msgstr ""
msgid "Create %{type} token"
msgstr ""
......@@ -7432,6 +7438,9 @@ msgstr ""
msgid "Enter a number"
msgstr ""
msgid "Enter a whole number between 0 and 100"
msgstr ""
msgid "Enter at least three characters to search"
msgstr ""
......@@ -7456,6 +7465,9 @@ msgstr ""
msgid "Enter number of issues"
msgstr ""
msgid "Enter one or more user ID separated by commas"
msgstr ""
msgid "Enter the issue description"
msgstr ""
......@@ -8554,6 +8566,18 @@ msgstr ""
msgid "FeatureFlags|User IDs"
msgstr ""
msgid "FeatureFlag|Delete strategy"
msgstr ""
msgid "FeatureFlag|Percentage"
msgstr ""
msgid "FeatureFlag|Type"
msgstr ""
msgid "FeatureFlag|User IDs"
msgstr ""
msgid "Feb"
msgstr ""
......@@ -12790,6 +12814,9 @@ msgstr ""
msgid "Need help?"
msgstr ""
msgid "Needs attention"
msgstr ""
msgid "Network"
msgstr ""
......@@ -13398,6 +13425,9 @@ msgstr ""
msgid "Omnibus Protected Paths throttle is active. From 12.4, Omnibus throttle is deprecated and will be removed in a future release. Please read the %{relative_url_link_start}Migrating Protected Paths documentation%{relative_url_link_end}."
msgstr ""
msgid "On track"
msgstr ""
msgid "Onboarding"
msgstr ""
......@@ -13901,6 +13931,9 @@ msgstr ""
msgid "People without permission will never get a notification."
msgstr ""
msgid "Percent rollout (logged in users)"
msgstr ""
msgid "Percentage"
msgstr ""
......@@ -14552,6 +14585,9 @@ msgstr ""
msgid "Proceed"
msgstr ""
msgid "Productivity"
msgstr ""
msgid "Productivity Analytics"
msgstr ""
......@@ -16431,9 +16467,6 @@ msgstr ""
msgid "Repository"
msgstr ""
msgid "Repository Analytics"
msgstr ""
msgid "Repository Graph"
msgstr ""
......@@ -17425,6 +17458,9 @@ msgstr ""
msgid "Select source branch"
msgstr ""
msgid "Select strategy activation method"
msgstr ""
msgid "Select target branch"
msgstr ""
......@@ -17913,6 +17949,9 @@ msgstr ""
msgid "Sidebar|Only numeral characters allowed"
msgstr ""
msgid "Sidebar|Status"
msgstr ""
msgid "Sidebar|Weight"
msgstr ""
......@@ -21383,6 +21422,9 @@ msgstr ""
msgid "User Cohorts are only shown when the %{usage_ping_link_start}usage ping%{usage_ping_link_end} is enabled."
msgstr ""
msgid "User IDs"
msgstr ""
msgid "User OAuth applications"
msgstr ""
......@@ -21740,6 +21782,9 @@ msgstr ""
msgid "Value"
msgstr ""
msgid "Value Stream"
msgstr ""
msgid "Value Stream Analytics"
msgstr ""
......@@ -22449,6 +22494,9 @@ msgstr ""
msgid "You are going to transfer %{project_full_name} to another owner. Are you ABSOLUTELY sure?"
msgstr ""
msgid "You are not allowed to push into this branch. Create another branch or open a merge request."
msgstr ""
msgid "You are not allowed to unlink your primary login account"
msgstr ""
......@@ -22569,9 +22617,6 @@ msgstr ""
msgid "You can try again using %{begin_link}basic search%{end_link}"
msgstr ""
msgid "You can't commit to this project"
msgstr ""
msgid "You cannot access the raw file. Please wait a minute."
msgstr ""
......@@ -23512,6 +23557,9 @@ msgstr ""
msgid "is not an email you own"
msgstr ""
msgid "is not in the group enforcing Group Managed Account"
msgstr ""
msgid "is too long (%{current_value}). The maximum size is %{max_size}."
msgstr ""
......
......@@ -47,7 +47,7 @@ describe 'The group page' do
expect(page).to have_link('Group overview')
expect(page).to have_link('Details')
expect(page).not_to have_link('Activity')
expect(page).not_to have_link('Contribution Analytics')
expect(page).not_to have_link('Contribution')
expect(page).not_to have_link('Issues')
expect(page).not_to have_link('Merge Requests')
......
......@@ -10,7 +10,7 @@ describe 'Group navbar' do
{
nav_item: _('Analytics'),
nav_sub_items: [
_('Contribution Analytics')
_('Contribution')
]
}
end
......@@ -63,7 +63,7 @@ describe 'Group navbar' do
before do
stub_licensed_features(productivity_analytics: true)
analytics_nav_item[:nav_sub_items] << _('Productivity Analytics')
analytics_nav_item[:nav_sub_items] << _('Productivity')
group.add_maintainer(user)
sign_in(user)
......@@ -78,7 +78,7 @@ describe 'Group navbar' do
before do
stub_licensed_features(cycle_analytics_for_groups: true)
analytics_nav_item[:nav_sub_items] << _('Value Stream Analytics')
analytics_nav_item[:nav_sub_items] << _('Value Stream')
group.add_maintainer(user)
sign_in(user)
......
......@@ -225,6 +225,29 @@ describe 'Issue Sidebar' do
it 'does not have a option to edit labels' do
expect(page).not_to have_selector('.block.labels .edit-link')
end
context 'interacting with collapsed sidebar', :js do
collapsed_sidebar_selector = 'aside.right-sidebar.right-sidebar-collapsed'
expanded_sidebar_selector = 'aside.right-sidebar.right-sidebar-expanded'
lock_sidebar_block = '.block.lock'
lock_button = '.block.lock .btn-close'
collapsed_sidebar_block_icon = '.sidebar-collapsed-icon'
before do
resize_screen_sm
end
it 'expands then does not show the lock dialog form' do
expect(page).to have_css(collapsed_sidebar_selector)
page.within(lock_sidebar_block) do
find(collapsed_sidebar_block_icon).click
end
expect(page).to have_css(expanded_sidebar_selector)
expect(page).not_to have_selector(lock_button)
end
end
end
def visit_issue(project, issue)
......
......@@ -136,16 +136,16 @@ describe 'Project active tab' do
context 'on project Analytics/Repository Analytics' do
it_behaves_like 'page has active tab', _('Analytics')
it_behaves_like 'page has active sub tab', _('Repository Analytics')
it_behaves_like 'page has active sub tab', _('Repository')
end
context 'on project Analytics/Cycle Analytics' do
before do
click_tab(_('CI / CD Analytics'))
click_tab(_('CI / CD'))
end
it_behaves_like 'page has active tab', _('Analytics')
it_behaves_like 'page has active sub tab', _('CI / CD Analytics')
it_behaves_like 'page has active sub tab', _('CI / CD')
end
end
end
......
......@@ -10,10 +10,10 @@ describe 'Project navbar' do
{
nav_item: _('Analytics'),
nav_sub_items: [
_('CI / CD Analytics'),
_('CI / CD'),
(_('Code Review') if Gitlab.ee?),
_('Repository Analytics'),
_('Value Stream Analytics')
_('Repository'),
_('Value Stream')
]
}
end
......@@ -114,7 +114,7 @@ describe 'Project navbar' do
before do
stub_licensed_features(issues_analytics: true)
analytics_nav_item[:nav_sub_items] << _('Issues Analytics')
analytics_nav_item[:nav_sub_items] << _('Issues')
analytics_nav_item[:nav_sub_items].sort!
visit project_path(project)
......
......@@ -222,7 +222,7 @@ describe 'User uses shortcuts', :js do
find('body').native.send_key('d')
expect(page).to have_active_navigation(_('Analytics'))
expect(page).to have_active_sub_navigation(_('Repository Analytics'))
expect(page).to have_active_sub_navigation(_('Repository'))
end
end
end
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Blob Header Editing rendering matches the snapshot 1`] = `
<div
class="js-file-title file-title-flex-parent"
>
<gl-form-input-stub
class="form-control js-snippet-file-name qa-snippet-file-name"
id="snippet_file_name"
name="snippet_file_name"
placeholder="Give your file a name to add code highlighting, e.g. example.rb for Ruby"
type="text"
value="foo.md"
/>
</div>
`;
// Unit tests for the BlobEditHeader component, which renders the snippet
// file-name input and re-emits the edited name as an `input` event.
import { shallowMount } from '@vue/test-utils';
import BlobEditHeader from '~/blob/components/blob_edit_header.vue';
import { GlFormInput } from '@gitlab/ui';

describe('Blob Header Editing', () => {
  let wrapper;
  const value = 'foo.md';

  // Shallow-mounts BlobEditHeader with the shared `value` prop so each
  // test starts from a freshly rendered component.
  function createComponent() {
    wrapper = shallowMount(BlobEditHeader, {
      propsData: {
        value,
      },
    });
  }

  beforeEach(() => {
    createComponent();
  });

  afterEach(() => {
    // Destroy between tests so no mounted instance leaks across specs.
    wrapper.destroy();
  });

  describe('rendering', () => {
    it('matches the snapshot', () => {
      expect(wrapper.element).toMatchSnapshot();
    });

    it('contains a form input field', () => {
      expect(wrapper.contains(GlFormInput)).toBe(true);
    });
  });

  describe('functionality', () => {
    it('emits input event when the blob name is changed', () => {
      const inputComponent = wrapper.find(GlFormInput);
      const newValue = 'bar.txt';

      // Simulate the user typing a new name, then firing the input's
      // `change` event; the component forwards the name via `input`.
      wrapper.setData({
        name: newValue,
      });
      inputComponent.vm.$emit('change');

      return wrapper.vm.$nextTick().then(() => {
        expect(wrapper.emitted().input[0]).toEqual([newValue]);
      });
    });
  });
});
......@@ -178,8 +178,17 @@ const RESPONSE_MAP = {
},
};
const graphQlResponseData = {
project: {
issue: {
healthStatus: 'onTrack',
},
},
};
const mockData = {
responseMap: RESPONSE_MAP,
graphQlResponseData,
mediator: {
endpoint: '/gitlab-org/gitlab-shell/issues/5.json?serializer=sidebar_extras',
toggleSubscriptionEndpoint: '/gitlab-org/gitlab-shell/issues/5/toggle_subscription',
......@@ -195,6 +204,7 @@ const mockData = {
},
rootPath: '/',
fullPath: '/gitlab-org/gitlab-shell',
id: 1,
},
time: {
time_estimate: 3600,
......
......@@ -27,7 +27,7 @@ describe('Snippet editor', () => {
setHTMLFixture(`
<div class="snippet-form-holder">
<form>
<input class="snippet-file-name" type="text" value="${name}">
<input class="js-snippet-file-name" type="text" value="${name}">
<input class="snippet-file-content" type="hidden" value="${content}">
<pre id="editor"></pre>
</form>
......@@ -39,7 +39,7 @@ describe('Snippet editor', () => {
setUpFixture(name, content);
editorEl = document.getElementById('editor');
contentEl = document.querySelector('.snippet-file-content');
fileNameEl = document.querySelector('.snippet-file-name');
fileNameEl = document.querySelector('.js-snippet-file-name');
form = document.querySelector('.snippet-form-holder form');
initEditor();
......
......@@ -83,4 +83,17 @@ describe('LockIssueSidebar', () => {
done();
});
});
// Covers the isEditable guard in toggleForm(): clicking the collapsed
// sidebar icon on a non-editable issue must not open the lock dialog.
it('does not display the edit form when opened from collapsed state if not editable', done => {
  expect(vm2.isLockDialogOpen).toBe(false);

  vm2.$el.querySelector('.sidebar-collapsed-icon').click();

  Vue.nextTick()
    .then(() => {
      // Still closed: toggleForm() is a no-op when not editable.
      expect(vm2.isLockDialogOpen).toBe(false);
    })
    .then(done)
    .catch(done.fail);
});
});
......@@ -2,7 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import SidebarMediator from '~/sidebar/sidebar_mediator';
import SidebarStore from '~/sidebar/stores/sidebar_store';
import SidebarService from '~/sidebar/services/sidebar_service';
import SidebarService, { gqClient } from '~/sidebar/services/sidebar_service';
import Mock from './mock_data';
const { mediator: mediatorMockData } = Mock;
......@@ -44,12 +44,18 @@ describe('Sidebar mediator', function() {
it('fetches the data', done => {
const mockData = Mock.responseMap.GET[mediatorMockData.endpoint];
mock.onGet(mediatorMockData.endpoint).reply(200, mockData);
const mockGraphQlData = Mock.graphQlResponseData;
spyOn(gqClient, 'query').and.returnValue({
data: mockGraphQlData,
});
spyOn(this.mediator, 'processFetchedData').and.callThrough();
this.mediator
.fetch()
.then(() => {
expect(this.mediator.processFetchedData).toHaveBeenCalledWith(mockData);
expect(this.mediator.processFetchedData).toHaveBeenCalledWith(mockData, mockGraphQlData);
})
.then(done)
.catch(done.fail);
......
# frozen_string_literal: true

require 'spec_helper'

# Spec for the LinkLfsObjects background migration: forks within the
# given project-ID range should gain the same lfs_objects_projects rows
# as their fork network's root (source) project.
describe Gitlab::BackgroundMigration::LinkLfsObjects, :migration, schema: 2020_02_10_062432 do
  # Raw table helpers — migration specs avoid application models on purpose.
  let(:namespaces) { table(:namespaces) }
  let(:projects) { table(:projects) }
  let(:fork_networks) { table(:fork_networks) }
  let(:fork_network_members) { table(:fork_network_members) }
  let(:lfs_objects) { table(:lfs_objects) }
  let(:lfs_objects_projects) { table(:lfs_objects_projects) }
  let(:namespace) { namespaces.create(name: 'GitLab', path: 'gitlab') }
  let!(:source_project) { projects.create(namespace_id: namespace.id) }
  let!(:another_source_project) { projects.create(namespace_id: namespace.id) }
  let!(:project) { projects.create(namespace_id: namespace.id) }
  let!(:another_project) { projects.create(namespace_id: namespace.id) }
  let!(:other_project) { projects.create(namespace_id: namespace.id) }
  let!(:linked_project) { projects.create(namespace_id: namespace.id) }
  let(:fork_network) { fork_networks.create(root_project_id: source_project.id) }
  let(:another_fork_network) { fork_networks.create(root_project_id: another_source_project.id) }
  let(:lfs_object) { lfs_objects.create(oid: 'abc123', size: 100) }
  let(:another_lfs_object) { lfs_objects.create(oid: 'def456', size: 200) }

  before do
    # Small batch size so the spec exercises batching with only a few rows.
    stub_const("#{described_class}::BATCH_SIZE", 2)

    # Create links between projects
    fork_network_members.create(fork_network_id: fork_network.id, project_id: source_project.id, forked_from_project_id: nil)

    [project, another_project, linked_project].each do |p|
      fork_network_members.create(
        fork_network_id: fork_network.id,
        project_id: p.id,
        forked_from_project_id: fork_network.root_project_id
      )
    end

    fork_network_members.create(fork_network_id: another_fork_network.id, project_id: another_source_project.id, forked_from_project_id: nil)
    fork_network_members.create(fork_network_id: another_fork_network.id, project_id: other_project.id, forked_from_project_id: another_fork_network.root_project_id)

    # Links LFS objects to some projects
    [source_project, another_source_project, linked_project].each do |p|
      lfs_objects_projects.create(lfs_object_id: lfs_object.id, project_id: p.id)
      lfs_objects_projects.create(lfs_object_id: another_lfs_object.id, project_id: p.id)
    end
  end

  it 'creates LfsObjectsProject records for forks within the specified range of project IDs' do
    expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |logger|
      expect(logger).to receive(:info).twice
    end

    # Three unlinked forks in range x two LFS objects each = 6 new rows.
    expect { subject.perform(project.id, other_project.id) }.to change { lfs_objects_projects.count }.by(6)

    expect(lfs_object_ids_for(project)).to match_array(lfs_object_ids_for(source_project))
    expect(lfs_object_ids_for(another_project)).to match_array(lfs_object_ids_for(source_project))
    expect(lfs_object_ids_for(other_project)).to match_array(lfs_object_ids_for(another_source_project))

    # Re-running must be idempotent: no duplicate links are created.
    expect { subject.perform(project.id, other_project.id) }.not_to change { lfs_objects_projects.count }
  end

  context 'when it is not necessary to create LfsObjectProject records' do
    it 'does not create LfsObjectProject records' do
      # linked_project already has both LFS objects linked in the `before` hook.
      expect { subject.perform(linked_project.id, linked_project.id) }
        .not_to change { lfs_objects_projects.count }
    end
  end

  # Returns the IDs of the LFS objects linked to the given project row.
  def lfs_object_ids_for(project)
    lfs_objects_projects.where(project_id: project.id).pluck(:lfs_object_id)
  end
end
......@@ -467,6 +467,7 @@ project:
- resource_groups
- autoclose_referenced_issues
- status_page_setting
- requirements
award_emoji:
- awardable
- user
......
# frozen_string_literal: true

require 'spec_helper'

# Spec for Gitlab::ImportExport::Error.permission_error, which builds a
# message naming the user and the importable (Project or Group) involved.
describe Gitlab::ImportExport::Error do
  describe '.permission_error' do
    subject(:error) do
      described_class.permission_error(user, importable)
    end

    let(:user) { build(:user, id: 1) }

    context 'when supplied a project' do
      let(:importable) { build(:project, id: 1, name: 'project1') }

      it 'returns an error with the correct message' do
        # Message must include the importable's class name ("Project").
        expect(error.message)
          .to eq 'User with ID: 1 does not have required permissions for Project: project1 with ID: 1'
      end
    end

    context 'when supplied a group' do
      let(:importable) { build(:group, id: 1, name: 'group1') }

      it 'returns an error with the correct message' do
        # Same format as above, with "Group" as the importable type.
        expect(error.message)
          .to eq 'User with ID: 1 does not have required permissions for Group: group1 with ID: 1'
      end
    end
  end
end
# frozen_string_literal: true

require 'rake_helper'

# Spec for the project export task: a successful run writes the archive
# to file_path, prints "Done!" and returns true; failure cases print an
# error message and return false instead of raising.
describe Gitlab::ImportExport::Project::ExportTask do
  let(:username) { 'root' }
  let(:namespace_path) { username }
  let!(:user) { create(:user, username: username) }
  let(:measurement_enabled) { false }
  let(:file_path) { 'spec/fixtures/gitlab/import_export/test_project_export.tar.gz' }
  let(:project) { create(:project, creator: user, namespace: user.namespace) }
  let(:project_name) { project.name }

  let(:task_params) do
    {
      username: username,
      namespace_path: namespace_path,
      project_path: project_name,
      file_path: file_path,
      measurement_enabled: measurement_enabled
    }
  end

  subject { described_class.new(task_params).export }

  context 'when project is found' do
    let(:project) { create(:project, creator: user, namespace: user.namespace) }

    around do |example|
      example.run
    ensure
      # Always remove the produced archive, even if the example failed.
      File.delete(file_path)
    end

    it 'performs project export successfully' do
      expect { subject }.to output(/Done!/).to_stdout

      expect(subject).to eq(true)
      expect(File).to exist(file_path)
    end

    it_behaves_like 'measurable'
  end

  context 'when project is not found' do
    let(:project_name) { 'invalid project name' }

    it 'logs an error' do
      expect { subject }.to output(/Project with path: #{project_name} was not found. Please provide correct project path/).to_stdout
    end

    it 'returns false' do
      expect(subject).to eq(false)
    end
  end

  context 'when file path is invalid' do
    let(:file_path) { '/invalid_file_path/test_project_export.tar.gz' }

    it 'logs an error' do
      expect { subject }.to output(/Invalid file path: #{file_path}. Please provide correct file path/ ).to_stdout
    end

    it 'returns false' do
      expect(subject).to eq(false)
    end
  end
end
......@@ -2,19 +2,25 @@
require 'rake_helper'
describe 'gitlab:import_export:import rake task' do
describe Gitlab::ImportExport::Project::ImportTask do
let(:username) { 'root' }
let(:namespace_path) { username }
let!(:user) { create(:user, username: username) }
let(:measurement_enabled) { false }
let(:task_params) { [username, namespace_path, project_name, archive_path, measurement_enabled] }
let(:project) { Project.find_by_full_path("#{namespace_path}/#{project_name}") }
let(:import_task) { described_class.new(task_params) }
let(:task_params) do
{
username: username,
namespace_path: namespace_path,
project_path: project_name,
file_path: file_path,
measurement_enabled: measurement_enabled
}
end
before do
Rake.application.rake_require('tasks/gitlab/import_export/import')
allow(Settings.uploads.object_store).to receive(:[]=).and_call_original
allow_any_instance_of(GitlabProjectImport).to receive(:exit)
.and_raise(RuntimeError, 'exit not handled')
end
around do |example|
......@@ -30,15 +36,16 @@ describe 'gitlab:import_export:import rake task' do
Settings.uploads.object_store['background_upload'] = old_background_upload_setting
end
subject { run_rake_task('gitlab:import_export:import', task_params) }
subject { import_task.import }
context 'when project import is valid' do
let(:project_name) { 'import_rake_test_project' }
let(:archive_path) { 'spec/fixtures/gitlab/import_export/lightweight_project_export.tar.gz' }
let(:file_path) { 'spec/fixtures/gitlab/import_export/lightweight_project_export.tar.gz' }
it 'performs project import successfully' do
expect { subject }.to output(/Done!/).to_stdout
expect { subject }.not_to raise_error
expect(subject).to eq(true)
expect(project.merge_requests.count).to be > 0
expect(project.issues.count).to be > 0
......@@ -56,15 +63,13 @@ describe 'gitlab:import_export:import rake task' do
end
end
expect_next_instance_of(GitlabProjectImport) do |importer|
expect(importer).to receive(:execute_sidekiq_job).and_wrap_original do |m|
expect(Settings.uploads.object_store['background_upload']).to eq(true)
expect(Settings.uploads.object_store['direct_upload']).to eq(true)
expect(Settings.uploads.object_store).not_to receive(:[]=).with('backgroud_upload', false)
expect(Settings.uploads.object_store).not_to receive(:[]=).with('direct_upload', false)
expect(import_task).to receive(:execute_sidekiq_job).and_wrap_original do |m|
expect(Settings.uploads.object_store['background_upload']).to eq(true)
expect(Settings.uploads.object_store['direct_upload']).to eq(true)
expect(Settings.uploads.object_store).not_to receive(:[]=).with('backgroud_upload', false)
expect(Settings.uploads.object_store).not_to receive(:[]=).with('direct_upload', false)
m.call
end
m.call
end
subject
......@@ -75,13 +80,13 @@ describe 'gitlab:import_export:import rake task' do
context 'when project import is invalid' do
let(:project_name) { 'import_rake_invalid_test_project' }
let(:archive_path) { 'spec/fixtures/gitlab/import_export/corrupted_project_export.tar.gz' }
let(:file_path) { 'spec/fixtures/gitlab/import_export/corrupted_project_export.tar.gz' }
let(:not_imported_message) { /Total number of not imported relations: 1/ }
let(:error) { /Validation failed: Notes is invalid/ }
it 'performs project import successfully' do
expect { subject }.to output(not_imported_message).to_stdout
expect { subject }.not_to raise_error
expect(subject).to eq(true)
expect(project.merge_requests).to be_empty
expect(project.import_state.last_error).to be_nil
......
......@@ -324,6 +324,24 @@ describe Gitlab::UsageData do
end
end
describe '#cycle_analytics_usage_data' do
  subject { described_class.cycle_analytics_usage_data }

  # DB statement timeouts during usage-ping collection must not bubble
  # up: the implementation rescues ActiveRecord::StatementInvalid and
  # falls back to an empty payload (see lib code in this commit).
  it 'works when queries time out in new' do
    allow(Gitlab::CycleAnalytics::UsageData)
      .to receive(:new).and_raise(ActiveRecord::StatementInvalid.new(''))

    expect { subject }.not_to raise_error
  end

  it 'works when queries time out in to_json' do
    allow_any_instance_of(Gitlab::CycleAnalytics::UsageData)
      .to receive(:to_json).and_raise(ActiveRecord::StatementInvalid.new(''))

    expect { subject }.not_to raise_error
  end
end
describe '#ingress_modsecurity_usage' do
subject { described_class.ingress_modsecurity_usage }
......
# frozen_string_literal: true

require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200217091401_reschedule_link_lfs_objects.rb')

# Spec for the post-deploy migration that re-schedules the LinkLfsObjects
# background migration for forks that are still missing LFS object links.
describe RescheduleLinkLfsObjects, :migration, :sidekiq do
  # Raw table helpers — migration specs avoid application models on purpose.
  let(:namespaces) { table(:namespaces) }
  let(:projects) { table(:projects) }
  let(:fork_networks) { table(:fork_networks) }
  let(:fork_network_members) { table(:fork_network_members) }
  let(:lfs_objects) { table(:lfs_objects) }
  let(:lfs_objects_projects) { table(:lfs_objects_projects) }
  let(:namespace) { namespaces.create(name: 'GitLab', path: 'gitlab') }
  let(:fork_network) { fork_networks.create(root_project_id: source_project.id) }
  let(:another_fork_network) { fork_networks.create(root_project_id: another_source_project.id) }
  let!(:source_project) { projects.create(namespace_id: namespace.id) }
  let!(:another_source_project) { projects.create(namespace_id: namespace.id) }
  let!(:project) { projects.create(namespace_id: namespace.id) }
  let!(:another_project) { projects.create(namespace_id: namespace.id) }
  let!(:other_project) { projects.create(namespace_id: namespace.id) }
  let!(:linked_project) { projects.create(namespace_id: namespace.id) }
  let(:lfs_object) { lfs_objects.create(oid: 'abc123', size: 100) }
  let(:another_lfs_object) { lfs_objects.create(oid: 'def456', size: 200) }

  before do
    # Create links between projects
    fork_network_members.create(fork_network_id: fork_network.id, project_id: source_project.id, forked_from_project_id: nil)

    [project, another_project, linked_project].each do |p|
      fork_network_members.create(
        fork_network_id: fork_network.id,
        project_id: p.id,
        forked_from_project_id: fork_network.root_project_id
      )
    end

    fork_network_members.create(fork_network_id: another_fork_network.id, project_id: another_source_project.id, forked_from_project_id: nil)
    fork_network_members.create(fork_network_id: another_fork_network.id, project_id: other_project.id, forked_from_project_id: another_fork_network.root_project_id)
  end

  context 'when there are forks to be backfilled' do
    before do
      # Batch size of 2 forces the forks in range to span two jobs.
      stub_const("#{described_class.name}::BATCH_SIZE", 2)

      # Links LFS objects to some projects
      [source_project, another_source_project, linked_project].each do |p|
        lfs_objects_projects.create(lfs_object_id: lfs_object.id, project_id: p.id)
        lfs_objects_projects.create(lfs_object_id: another_lfs_object.id, project_id: p.id)
      end
    end

    it 'schedules background migration to link LFS objects' do
      Sidekiq::Testing.fake! do
        migrate!

        # Two delayed jobs, staggered 2 minutes apart, covering the
        # unlinked forks in ID order.
        expect(BackgroundMigrationWorker.jobs.size).to eq(2)
        expect(described_class::MIGRATION)
          .to be_scheduled_delayed_migration(2.minutes, project.id, another_project.id)
        expect(described_class::MIGRATION)
          .to be_scheduled_delayed_migration(4.minutes, other_project.id, other_project.id)
      end
    end
  end

  context 'when there are no forks to be backfilled' do
    before do
      # Links LFS objects to all projects
      projects.all.each do |p|
        lfs_objects_projects.create(lfs_object_id: lfs_object.id, project_id: p.id)
        lfs_objects_projects.create(lfs_object_id: another_lfs_object.id, project_id: p.id)
      end
    end

    it 'does not schedule any job' do
      Sidekiq::Testing.fake! do
        migrate!

        # Every fork is already linked, so no backfill jobs are enqueued.
        expect(BackgroundMigrationWorker.jobs.size).to eq(0)
      end
    end
  end
end
......@@ -65,6 +65,17 @@ describe BroadcastMessage do
end
end
it 'expires the value if a broadcast message has ended', :request_store do
message = create(:broadcast_message, broadcast_type: broadcast_type, ends_at: Time.now.utc + 1.day)
expect(subject.call).to match_array([message])
expect(described_class.cache).to receive(:expire).and_call_original
Timecop.travel(1.week) do
2.times { expect(subject.call).to be_empty }
end
end
it 'does not create new records' do
create(:broadcast_message, broadcast_type: broadcast_type)
......
......@@ -38,12 +38,31 @@ describe Groups::ImportExport::ExportService do
let!(:another_user) { create(:user) }
let(:service) { described_class.new(group: group, user: another_user, params: { shared: shared }) }
let(:expected_message) do
"User with ID: %s does not have required permissions for Group: %s with ID: %s" %
[another_user.id, group.name, group.id]
end
# Raises when the user lacks permission; the message comes from the shared
# `expected_message` let. The previous local re-assignment shadowed that let
# with a stale copy of the message text ("does not have permission to Group"),
# contradicting the logging/tracking examples in this context.
it 'fails' do
expect { service.execute }.to raise_error(Gitlab::ImportExport::Error).with_message(expected_message)
end
# The failure is logged with structured fields before the exception
# propagates to the caller.
it 'logs the error' do
expect(shared.logger).to receive(:error).with(
group_id: group.id,
group_name: group.name,
error: expected_message,
message: 'Group Import/Export: Export failed'
)
expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end
# The shared import/export state object also records the error for reporting.
it 'tracks the error' do
expect(shared).to receive(:error) { |param| expect(param.message).to eq expected_message }
expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end
end
context 'when export fails' do
......
......@@ -9,6 +9,8 @@ describe Groups::ImportExport::ImportService do
let(:service) { described_class.new(group: group, user: user) }
# Valid export archive fixture used by the happy-path examples.
let(:import_file) { fixture_file_upload('spec/fixtures/group_export.tar.gz') }
# Doubled logger so examples can assert on structured log calls.
let(:import_logger) { instance_double(Gitlab::Import::Logger) }
subject { service.execute }
before do
......@@ -25,13 +27,82 @@ describe Groups::ImportExport::ImportService do
expect(group.import_export_upload.import_file.file).to be_nil
end
# A successful import emits a structured info log entry.
it 'logs the import success' do
allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
expect(import_logger).to receive(:info).with(
group_id: group.id,
group_name: group.name,
message: 'Group Import/Export: Import succeeded'
)
subject
end
end
# Import must be rejected for a user without the required group permissions.
# (The superseded, half-deleted `it 'raises exception'` stub left an unclosed
# block here; it is removed — its assertion is covered by the examples below.)
context 'when user does not have correct permissions' do
  let(:user) { create(:user) }

  # The failure is logged with structured fields before raising.
  it 'logs the error and raises an exception' do
    allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)

    expect(import_logger).to receive(:error).with(
      group_id: group.id,
      group_name: group.name,
      message: a_string_including('Errors occurred')
    )

    expect { subject }.to raise_error(Gitlab::ImportExport::Error)
  end

  # The permission failure is also recorded on the shared error tracker.
  it 'tracks the error' do
    shared = Gitlab::ImportExport::Shared.new(group)
    allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)

    expect(shared).to receive(:error) do |param|
      expect(param.message).to include 'does not have required permissions for'
    end

    expect { subject }.to raise_error(Gitlab::ImportExport::Error)
  end
end
# An archive containing a symlink is rejected; the error is logged and
# surfaced as an ImportExport::Error.
context 'when there are errors with the import file' do
let(:import_file) { fixture_file_upload('spec/fixtures/symlink_export.tar.gz') }
before do
allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
end
it 'logs the error and raises an exception' do
expect(import_logger).to receive(:error).with(
group_id: group.id,
group_name: group.name,
message: a_string_including('Errors occurred')
)
expect { subject }.to raise_error(Gitlab::ImportExport::Error)
end
end
# Invalid sub-relations do not fail the whole import: the group itself
# still imports successfully and success is logged.
context 'when there are errors with the sub-relations' do
let(:import_file) { fixture_file_upload('spec/fixtures/group_export_invalid_subrelations.tar.gz') }
it 'successfully imports the group' do
expect(subject).to be_truthy
end
it 'logs the import success' do
allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
expect(import_logger).to receive(:info).with(
group_id: group.id,
group_name: group.name,
message: 'Group Import/Export: Import succeeded'
)
subject
end
end
end
......
......@@ -91,6 +91,17 @@ describe MergeRequests::MergeToRefService do
it_behaves_like 'successfully evaluates pre-condition checks'
it 'returns an error when Gitlab::Git::CommandError is raised during merge' do
  # Simulate a low-level merge failure coming out of the repository layer.
  allow(project.repository)
    .to receive(:merge_to_ref)
    .and_raise(Gitlab::Git::CommandError, 'Failed to create merge commit')

  result = service.execute(merge_request)

  # The service converts the exception into an error result payload.
  expect(result).to include(status: :error, message: 'Failed to create merge commit')
end
context 'commit history comparison with regular MergeService' do
before do
# The merge service needs an authorized user while merge-to-ref
......
......@@ -29,7 +29,7 @@ describe Metrics::Dashboard::CloneDashboardService, :use_clean_rails_memory_stor
end
# Cloning a dashboard requires push access; without it the service responds
# with the standard push-permission error. (The stale duplicate shared-example
# line asserting the old "You can't commit to this project" message is removed.)
context 'user does not have push right to repository' do
  it_behaves_like 'misconfigured dashboard service response', :forbidden, %q(You are not allowed to push into this branch. Create another branch or open a merge request.)
end
context 'with rights to push to the repository' do
......
......@@ -27,7 +27,7 @@ describe Metrics::Dashboard::UpdateDashboardService, :use_clean_rails_memory_sto
end
# Updating a dashboard requires push access; without it the service responds
# with the standard push-permission error. (The stale duplicate shared-example
# line asserting the old "You can't commit to this project" message is removed.)
context 'user does not have push right to repository' do
  it_behaves_like 'misconfigured dashboard service response', :forbidden, "You are not allowed to push into this branch. Create another branch or open a merge request."
end
context 'with rights to push to the repository' do
......
......@@ -164,7 +164,7 @@ describe Projects::ImportExport::ExportService do
# Raises when the user lacks permission on the project. The superseded
# message string ("does not have permission to Project … .") was left chained
# with `%` onto the new one, producing a nonsense format result; only the
# updated message — matching the group export service wording — is kept.
it 'fails' do
  expected_message =
    "User with ID: %s does not have required permissions for Project: %s with ID: %s" %
    [another_user.id, project.name, project.id]

  expect { service.execute }.to raise_error(Gitlab::ImportExport::Error).with_message(expected_message)
end
......
......@@ -18,7 +18,7 @@ RSpec.shared_examples 'measurable' do
end
context 'when measurement is not provided' do
let(:task_params) { [username, namespace_path, project_name, archive_path] }
let(:measurement_enabled) { nil }
it 'does not output measurement results' do
expect { subject }.not_to output(/Measuring enabled.../).to_stdout
......
# frozen_string_literal: true
require 'rake_helper'
# Exercises the `gitlab:import_export:export` rake task end to end:
# it should export the named project to the given archive path.
describe 'gitlab:import_export:export rake task' do
# Task arguments: user, namespace, project, destination archive, measurement flag.
let(:username) { 'root' }
let(:namespace_path) { username }
let!(:user) { create(:user, username: username) }
let(:measurement_enabled) { false }
let(:task_params) { [username, namespace_path, project_name, archive_path, measurement_enabled] }
before do
Rake.application.rake_require('tasks/gitlab/import_export/export')
end
subject { run_rake_task('gitlab:import_export:export', task_params) }
context 'when project is found' do
let(:project) { create(:project, creator: user, namespace: user.namespace) }
let(:project_name) { project.name }
let(:archive_path) { 'spec/fixtures/gitlab/import_export/test_project_export.tar.gz' }
# Clean up the produced archive even if the example fails.
around do |example|
example.run
ensure
File.delete(archive_path)
end
it 'performs project export successfully' do
expect { subject }.to output(/Done!/).to_stdout
expect(File).to exist(archive_path)
end
# Shared examples covering the optional measurement output.
it_behaves_like 'measurable'
end
end
......@@ -173,7 +173,7 @@ describe 'layouts/nav/sidebar/_project' do
it 'shows the value stream analytics entry' do
  render

  # Sidebar entry was renamed from 'Value Stream Analytics' to 'Value Stream';
  # the stale assertion on the old link text is removed.
  expect(rendered).to have_link('Value Stream', href: project_cycle_analytics_path(project))
end
end
......@@ -183,7 +183,7 @@ describe 'layouts/nav/sidebar/_project' do
it 'does not show the value stream analytics entry' do
  render

  # Sidebar entry was renamed from 'Value Stream Analytics' to 'Value Stream';
  # the stale assertion on the old link text is removed.
  expect(rendered).not_to have_link('Value Stream', href: project_cycle_analytics_path(project))
end
end
end
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment