Commit 1f53cf7c authored by Shinya Maeda's avatar Shinya Maeda

Merge branch 'master-ce' into artifact-format-v2-with-parser

parents d867081d 5f664759
......@@ -745,7 +745,7 @@ GEM
retriable (3.1.1)
rinku (2.0.0)
rotp (2.1.2)
rouge (3.1.1)
rouge (3.2.0)
rqrcode (0.7.0)
chunky_png
rqrcode-rails3 (0.1.7)
......
......@@ -35,7 +35,9 @@ export default {
watch: {
dropdownOpen() {
this.$nextTick(() => {
this.$refs.dropdownMenu.scrollIntoView();
this.$refs.dropdownMenu.scrollIntoView({
block: 'nearest',
});
});
},
mouseOver() {
......
......@@ -137,7 +137,11 @@ export default class SearchAutocomplete {
if (!term) {
const contents = this.getCategoryContents();
if (contents) {
this.searchInput.data('glDropdown').filter.options.callback(contents);
const glDropdownInstance = this.searchInput.data('glDropdown');
if (glDropdownInstance) {
glDropdownInstance.filter.options.callback(contents);
}
this.enableAutocomplete();
}
return;
......
# frozen_string_literal: true
class Admin::ServicesController < Admin::ApplicationController
include ServiceParams
......@@ -30,7 +32,7 @@ class Admin::ServicesController < Admin::ApplicationController
def services_templates
Service.available_services_names.map do |service_name|
service_template = service_name.concat("_service").camelize.constantize
service_template = "#{service_name}_service".camelize.constantize
service_template.where(template: true).first_or_create
end
end
......
class Projects::PipelinesController < Projects::ApplicationController
before_action :whitelist_query_limiting, only: [:create, :retry]
before_action :pipeline, except: [:index, :new, :create, :charts]
before_action :commit, only: [:show, :builds, :failures]
before_action :authorize_read_pipeline!
before_action :authorize_create_pipeline!, only: [:new, :create]
before_action :authorize_update_pipeline!, only: [:retry, :cancel]
......@@ -168,10 +167,6 @@ class Projects::PipelinesController < Projects::ApplicationController
.present(current_user: current_user)
end
def commit
@commit ||= @pipeline.commit
end
def whitelist_query_limiting
# Also see https://gitlab.com/gitlab-org/gitlab-ce/issues/42343
Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42339')
......
# frozen_string_literal: true
require_dependency 'declarative_policy'
class Ability
......
# frozen_string_literal: true
class AbuseReport < ActiveRecord::Base
include CacheMarkdownField
......
# frozen_string_literal: true
class ActiveSession
include ActiveModel::Model
......
# frozen_string_literal: true
class Appearance < ActiveRecord::Base
include CacheableAttributes
include CacheMarkdownField
......
# frozen_string_literal: true
class ApplicationSetting < ActiveRecord::Base
include CacheableAttributes
include CacheMarkdownField
......
# frozen_string_literal: true
class AuditEvent < ActiveRecord::Base
serialize :details, Hash # rubocop:disable Cop/ActiveRecordSerialize
......
# frozen_string_literal: true
class AwardEmoji < ActiveRecord::Base
DOWNVOTE_NAME = "thumbsdown".freeze
UPVOTE_NAME = "thumbsup".freeze
......
# frozen_string_literal: true
class Badge < ActiveRecord::Base
# This structure sets the placeholders that the urls
# can have. This hash also sets which action to ask when
......
# frozen_string_literal: true
# Blob is a Rails-specific wrapper around Gitlab::Git::Blob objects
class Blob < SimpleDelegator
CACHE_TIME = 60 # Cache raw blobs referred to by a (mutable) ref for 1 minute
......
# frozen_string_literal: true
class Board < ActiveRecord::Base
belongs_to :group
belongs_to :project
......
# frozen_string_literal: true
class BroadcastMessage < ActiveRecord::Base
include CacheMarkdownField
include Sortable
......
# frozen_string_literal: true
class ChatName < ActiveRecord::Base
LAST_USED_AT_INTERVAL = 1.hour
......
# frozen_string_literal: true
class ChatTeam < ActiveRecord::Base
validates :team_id, presence: true
validates :namespace, uniqueness: true
......
module Clusters
module Applications
class Ingress < ActiveRecord::Base
VERSION = '0.23.0'.freeze
self.table_name = 'clusters_applications_ingress'
include ::Clusters::Concerns::ApplicationCore
include ::Clusters::Concerns::ApplicationStatus
include ::Clusters::Concerns::ApplicationVersion
include ::Clusters::Concerns::ApplicationData
include AfterCommitQueue
default_value_for :ingress_type, :nginx
default_value_for :version, :nginx
default_value_for :version, VERSION
enum ingress_type: {
nginx: 1
......@@ -33,6 +36,7 @@ module Clusters
def install_command
Gitlab::Kubernetes::Helm::InstallCommand.new(
name,
version: VERSION,
chart: chart,
values: values
)
......
module Clusters
module Applications
class Jupyter < ActiveRecord::Base
VERSION = '0.0.1'.freeze
VERSION = 'v0.6'.freeze
self.table_name = 'clusters_applications_jupyter'
include ::Clusters::Concerns::ApplicationCore
include ::Clusters::Concerns::ApplicationStatus
include ::Clusters::Concerns::ApplicationVersion
include ::Clusters::Concerns::ApplicationData
belongs_to :oauth_application, class_name: 'Doorkeeper::Application'
......@@ -36,6 +37,7 @@ module Clusters
def install_command
Gitlab::Kubernetes::Helm::InstallCommand.new(
name,
version: VERSION,
chart: chart,
values: values,
repository: repository
......
......@@ -9,6 +9,7 @@ module Clusters
include ::Clusters::Concerns::ApplicationCore
include ::Clusters::Concerns::ApplicationStatus
include ::Clusters::Concerns::ApplicationVersion
include ::Clusters::Concerns::ApplicationData
default_value_for :version, VERSION
......@@ -44,8 +45,8 @@ module Clusters
def install_command
Gitlab::Kubernetes::Helm::InstallCommand.new(
name,
version: VERSION,
chart: chart,
version: version,
values: values
)
end
......
module Clusters
module Applications
class Runner < ActiveRecord::Base
VERSION = '0.1.13'.freeze
VERSION = '0.1.31'.freeze
self.table_name = 'clusters_applications_runners'
include ::Clusters::Concerns::ApplicationCore
include ::Clusters::Concerns::ApplicationStatus
include ::Clusters::Concerns::ApplicationVersion
include ::Clusters::Concerns::ApplicationData
belongs_to :runner, class_name: 'Ci::Runner', foreign_key: :runner_id
......@@ -29,6 +30,7 @@ module Clusters
def install_command
Gitlab::Kubernetes::Helm::InstallCommand.new(
name,
version: VERSION,
chart: chart,
values: values,
repository: repository
......
# frozen_string_literal: true
module Clusters
module Concerns
module ApplicationVersion
extend ActiveSupport::Concern
included do
state_machine :status do
after_transition any => [:installing] do |application|
application.update(version: application.class.const_get(:VERSION))
end
end
end
end
end
end
# coding: utf-8
# frozen_string_literal: true
class Commit
extend ActiveModel::Naming
extend Gitlab::Cache::RequestCache
......@@ -339,21 +341,21 @@ class Commit
end
def cherry_pick_description(user)
message_body = "(cherry picked from commit #{sha})"
message_body = ["(cherry picked from commit #{sha})"]
if merged_merge_request?(user)
commits_in_merge_request = merged_merge_request(user).commits
if commits_in_merge_request.present?
message_body << "\n"
message_body << ""
commits_in_merge_request.reverse.each do |commit_in_merge|
message_body << "\n#{commit_in_merge.short_id} #{commit_in_merge.title}"
message_body << "#{commit_in_merge.short_id} #{commit_in_merge.title}"
end
end
end
message_body
message_body.join("\n")
end
def cherry_pick_message(user)
......
# frozen_string_literal: true
# CommitRange makes it easier to work with commit ranges
#
# Examples:
......
# frozen_string_literal: true
class CommitStatus < ActiveRecord::Base
include HasStatus
include Importable
......
# frozen_string_literal: true
class Compare
include Gitlab::Utils::StrongMemoize
......
# frozen_string_literal: true
class ContainerRepository < ActiveRecord::Base
belongs_to :project
......
# frozen_string_literal: true
class CycleAnalytics
STAGES = %i[issue plan code test review staging production].freeze
......
# frozen_string_literal: true
class DashboardMilestone < GlobalMilestone
def issues_finder_params
{ authorized_only: true }
......
# frozen_string_literal: true
class DeployKey < Key
include IgnorableColumn
......
# frozen_string_literal: true
class DeployKeysProject < ActiveRecord::Base
belongs_to :project
belongs_to :deploy_key, inverse_of: :deploy_keys_projects
......
# frozen_string_literal: true
class DeployToken < ActiveRecord::Base
include Expirable
include TokenAuthenticatable
......@@ -28,7 +30,7 @@ class DeployToken < ActiveRecord::Base
end
def active?
!revoked && expires_at > Date.today
!revoked && !expired?
end
def scopes
......@@ -61,6 +63,12 @@ class DeployToken < ActiveRecord::Base
private
def expired?
return false unless expires_at
expires_at < Date.today
end
def ensure_at_least_one_scope
errors.add(:base, "Scopes can't be blank") unless read_repository || read_registry
end
......
# frozen_string_literal: true
class Deployment < ActiveRecord::Base
include AtomicInternalId
include IidRoutes
......
# frozen_string_literal: true
# A discussion on merge request or commit diffs consisting of `DiffNote` notes.
#
# A discussion of this type can be resolvable.
......
# frozen_string_literal: true
# A note on merge request or commit diffs
#
# A note of this type can be resolvable.
......
# frozen_string_literal: true
class DirectlyAddressedUser
class << self
def reference_pattern
......
# frozen_string_literal: true
# A non-diff discussion on an issue, merge request, commit, or snippet, consisting of `DiscussionNote` notes.
#
# A discussion of this type can be resolvable.
......
# frozen_string_literal: true
# A note in a non-diff discussion on an issue, merge request, commit, or snippet.
#
# A note of this type can be resolvable.
......
# frozen_string_literal: true
class Email < ActiveRecord::Base
include Sortable
include Gitlab::SQL::Pattern
......
# frozen_string_literal: true
class Environment < ActiveRecord::Base
# Used to generate random suffixes for the slug
LETTERS = 'a'..'z'
......@@ -173,7 +175,7 @@ class Environment < ActiveRecord::Base
# * cannot end with `-`
def generate_slug
# Lowercase letters and numbers only
slugified = name.to_s.downcase.gsub(/[^a-z0-9]/, '-')
slugified = +name.to_s.downcase.gsub(/[^a-z0-9]/, '-')
# Must start with a letter
slugified = 'env-' + slugified unless LETTERS.cover?(slugified[0])
......
# frozen_string_literal: true
# Placeholder class for model that is implemented in EE
# It reserves '&' as a reference prefix, but the table does not exist in CE
class Epic < ActiveRecord::Base
......
# frozen_string_literal: true
class Event < ActiveRecord::Base
include Sortable
include IgnorableColumn
......
# frozen_string_literal: true
# A collection of events to display in an event list.
#
# An EventCollection is meant to be used for displaying events to a user (e.g.
......
# frozen_string_literal: true
class ExternalIssue
include Referable
......
# frozen_string_literal: true
class ForkNetwork < ActiveRecord::Base
belongs_to :root_project, class_name: 'Project'
has_many :fork_network_members
......
# frozen_string_literal: true
class ForkNetworkMember < ActiveRecord::Base
belongs_to :fork_network
belongs_to :project
......
# frozen_string_literal: true
class ForkedProjectLink < ActiveRecord::Base
belongs_to :forked_to_project, -> { where.not(pending_delete: true) }, class_name: 'Project'
belongs_to :forked_from_project, -> { where.not(pending_delete: true) }, class_name: 'Project'
......
# frozen_string_literal: true
class GenericCommitStatus < CommitStatus
before_validation :set_default_values
......
# frozen_string_literal: true
class GlobalLabel
attr_accessor :title, :labels
alias_attribute :name, :title
......
# frozen_string_literal: true
class GlobalMilestone
include Milestoneish
......
# frozen_string_literal: true
class GpgKey < ActiveRecord::Base
KEY_PREFIX = '-----BEGIN PGP PUBLIC KEY BLOCK-----'.freeze
KEY_SUFFIX = '-----END PGP PUBLIC KEY BLOCK-----'.freeze
......
# frozen_string_literal: true
class GpgKeySubkey < ActiveRecord::Base
include ShaAttribute
......
# frozen_string_literal: true
class GpgSignature < ActiveRecord::Base
include ShaAttribute
......
# frozen_string_literal: true
require 'carrierwave/orm/activerecord'
class Group < Namespace
......
# frozen_string_literal: true
class GroupCustomAttribute < ActiveRecord::Base
belongs_to :group
......
# frozen_string_literal: true
class GroupLabel < Label
belongs_to :group
......
# frozen_string_literal: true
class GroupMilestone < GlobalMilestone
attr_accessor :group
......
# frozen_string_literal: true
class Guest
class << self
def can?(action, subject = :global)
......
# frozen_string_literal: true
class Identity < ActiveRecord::Base
def self.uniqueness_scope
:provider
......
# frozen_string_literal: true
class ImportExportUpload < ActiveRecord::Base
include WithUploads
include ObjectStorage::BackgroundMove
belongs_to :project
# These hold the project Import/Export archives (.tar.gz files)
mount_uploader :import_file, ImportExportUploader
mount_uploader :export_file, ImportExportUploader
......
# frozen_string_literal: true
# A discussion to wrap a single `Note` note on the root of an issue, merge request,
# commit, or snippet, that is not displayed as a discussion.
#
......
# frozen_string_literal: true
require 'resolv'
class InstanceConfiguration
......
# frozen_string_literal: true
# An InternalId is a strictly monotone sequence of integers
# generated for a given scope and usage.
#
......
# frozen_string_literal: true
require 'carrierwave/orm/activerecord'
class Issue < ActiveRecord::Base
......
# frozen_string_literal: true
class IssueAssignee < ActiveRecord::Base
belongs_to :issue
belongs_to :assignee, class_name: "User", foreign_key: :user_id
......
# frozen_string_literal: true
# IssueCollection can be used to reduce a list of issues down to a subset.
#
# IssueCollection is not meant to be some sort of Enumerable, instead it's meant
......
# frozen_string_literal: true
require 'digest/md5'
class Key < ActiveRecord::Base
......
# frozen_string_literal: true
class Label < ActiveRecord::Base
include CacheMarkdownField
include Referable
......
# frozen_string_literal: true
class LabelLink < ActiveRecord::Base
include Importable
......
# frozen_string_literal: true
class LabelPriority < ActiveRecord::Base
belongs_to :project
belongs_to :label
......
# frozen_string_literal: true
# A discussion on merge request or commit diffs consisting of `LegacyDiffNote` notes.
#
# All new diff discussions are of the type `DiffDiscussion`, but any diff discussions created
......
# frozen_string_literal: true
# A note on merge request or commit diffs, using the legacy implementation.
#
# All new diff notes are of the type `DiffNote`, but any diff notes created
......
# frozen_string_literal: true
class LfsFileLock < ActiveRecord::Base
belongs_to :project
belongs_to :user
......
# frozen_string_literal: true
class LfsObject < ActiveRecord::Base
include AfterCommitQueue
include ObjectStorage::BackgroundMove
......
# frozen_string_literal: true
class LfsObjectsProject < ActiveRecord::Base
belongs_to :project
belongs_to :lfs_object
......
# frozen_string_literal: true
class List < ActiveRecord::Base
belongs_to :board
belongs_to :label
......
# frozen_string_literal: true
class Member < ActiveRecord::Base
include AfterCommitQueue
include Sortable
......
# frozen_string_literal: true
class MergeRequest < ActiveRecord::Base
include AtomicInternalId
include IidRoutes
......
# frozen_string_literal: true
class MergeRequestDiff < ActiveRecord::Base
include Sortable
include Importable
......@@ -249,15 +251,13 @@ class MergeRequestDiff < ActiveRecord::Base
end
def load_diffs(options)
raw = merge_request_diff_files.map(&:to_hash)
collection = merge_request_diff_files
if paths = options[:paths]
raw = raw.select do |diff|
paths.include?(diff[:old_path]) || paths.include?(diff[:new_path])
end
collection = collection.where('old_path IN (?) OR new_path IN (?)', paths, paths)
end
Gitlab::Git::DiffCollection.new(raw, options)
Gitlab::Git::DiffCollection.new(collection.map(&:to_hash), options)
end
def load_commits
......
# frozen_string_literal: true
class MergeRequestDiffCommit < ActiveRecord::Base
include ShaAttribute
......
# frozen_string_literal: true
class MergeRequestDiffFile < ActiveRecord::Base
include Gitlab::EncodingHelper
include DiffFile
......
# frozen_string_literal: true
class MergeRequestsClosingIssues < ActiveRecord::Base
belongs_to :merge_request
belongs_to :issue
......
# frozen_string_literal: true
class Milestone < ActiveRecord::Base
# Represents a "No Milestone" state used for filtering Issues and Merge
# Requests that have no milestone assigned.
......
# frozen_string_literal: true
class Namespace < ActiveRecord::Base
include CacheMarkdownField
include Sortable
......
# frozen_string_literal: true
# A note on the root of an issue, merge request, commit, or snippet.
#
# A note of this type is never resolvable.
......
# frozen_string_literal: true
class NoteDiffFile < ActiveRecord::Base
include DiffFile
......
# frozen_string_literal: true
# Holds reasons for a notification to have been sent as well as a priority list to select which reason to use
# above the rest
class NotificationReason
......
# frozen_string_literal: true
class NotificationRecipient
include Gitlab::Utils::StrongMemoize
......
# frozen_string_literal: true
class NotificationSetting < ActiveRecord::Base
include IgnorableColumn
......
# frozen_string_literal: true
class OauthAccessGrant < Doorkeeper::AccessGrant
belongs_to :resource_owner, class_name: 'User'
belongs_to :application, class_name: 'Doorkeeper::Application'
......
# frozen_string_literal: true
class OauthAccessToken < Doorkeeper::AccessToken
belongs_to :resource_owner, class_name: 'User'
belongs_to :application, class_name: 'Doorkeeper::Application'
......
# frozen_string_literal: true
# When notes on a commit are displayed in the context of a merge request that
# contains that commit, they are displayed as if they were a discussion.
#
......
# frozen_string_literal: true
class PagesDomain < ActiveRecord::Base
VERIFICATION_KEY = 'gitlab-pages-verification-code'.freeze
VERIFICATION_THRESHOLD = 3.days.freeze
......
# frozen_string_literal: true
class PersonalAccessToken < ActiveRecord::Base
include Expirable
include TokenAuthenticatable
......
# frozen_string_literal: true
class PersonalSnippet < Snippet
include WithUploads
end
# frozen_string_literal: true
require 'carrierwave/orm/activerecord'
class Project < ActiveRecord::Base
......
# frozen_string_literal: true
class ProjectAuthorization < ActiveRecord::Base
belongs_to :user
belongs_to :project
......
# frozen_string_literal: true
class ProjectAutoDevops < ActiveRecord::Base
belongs_to :project
......
# frozen_string_literal: true
class ProjectCiCdSetting < ActiveRecord::Base
belongs_to :project, inverse_of: :ci_cd_settings
......
# frozen_string_literal: true
class ProjectCustomAttribute < ActiveRecord::Base
belongs_to :project
......
# frozen_string_literal: true
class ProjectDeployToken < ActiveRecord::Base
belongs_to :project
belongs_to :deploy_token, inverse_of: :project_deploy_tokens
......
# frozen_string_literal: true
class ProjectFeature < ActiveRecord::Base
# == Project features permissions
#
......
# frozen_string_literal: true
class ProjectGroupLink < ActiveRecord::Base
include Expirable
......
# frozen_string_literal: true
require 'carrierwave/orm/activerecord'
class ProjectImportData < ActiveRecord::Base
......
# frozen_string_literal: true
class ProjectImportState < ActiveRecord::Base
include AfterCommitQueue
......
# frozen_string_literal: true
class ProjectLabel < Label
MAX_NUMBER_OF_PRIORITIES = 1
......
# frozen_string_literal: true
class ProjectSnippet < Snippet
belongs_to :project
belongs_to :author, class_name: "User"
......
# frozen_string_literal: true
class ProjectStatistics < ActiveRecord::Base
belongs_to :project
belongs_to :namespace
......
# frozen_string_literal: true
class ProjectTeam
include BulkMemberAccessLoad
......
# frozen_string_literal: true
class ProtectableDropdown
REF_TYPES = %i[branches tags].freeze
......
# frozen_string_literal: true
class ProtectedBranch < ActiveRecord::Base
include Gitlab::ShellAdapter
include ProtectedRef
......
# frozen_string_literal: true
class ProtectedRefMatcher
def initialize(protected_ref)
@protected_ref = protected_ref
......
# frozen_string_literal: true
class ProtectedTag < ActiveRecord::Base
include Gitlab::ShellAdapter
include ProtectedRef
......
# frozen_string_literal: true
class PushEvent < Event
# This validation exists so we can't accidentally use PushEvent with a
# different "action" value.
......
# frozen_string_literal: true
class PushEventPayload < ActiveRecord::Base
include ShaAttribute
......
# frozen_string_literal: true
class ReadmeBlob < SimpleDelegator
attr_reader :repository
......
# frozen_string_literal: true
class RedirectRoute < ActiveRecord::Base
belongs_to :source, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
......
# frozen_string_literal: true
class Release < ActiveRecord::Base
include CacheMarkdownField
......
# frozen_string_literal: true
class RemoteMirror < ActiveRecord::Base
include AfterCommitQueue
......
# frozen_string_literal: true
require 'securerandom'
class Repository
......
# frozen_string_literal: true
class Route < ActiveRecord::Base
include CaseSensitivity
......
# frozen_string_literal: true
class SecurityEvent < AuditEvent
end
# frozen_string_literal: true
class SentNotification < ActiveRecord::Base
serialize :position, Gitlab::Diff::Position # rubocop:disable Cop/ActiveRecordSerialize
......
# frozen_string_literal: true
# To add new service you should build a class inherited from Service
# and implement a set of methods
class Service < ActiveRecord::Base
......
# frozen_string_literal: true
class Snippet < ActiveRecord::Base
include Gitlab::VisibilityLevel
include CacheMarkdownField
......
# frozen_string_literal: true
class SnippetBlob
include BlobLike
......
# frozen_string_literal: true
class SpamLog < ActiveRecord::Base
belongs_to :user
......
# frozen_string_literal: true
class Subscription < ActiveRecord::Base
belongs_to :user
belongs_to :project
......
# frozen_string_literal: true
class SystemNoteMetadata < ActiveRecord::Base
# These notes' action text might contain a reference that is external.
# We should always force a deep validation upon references that are found
......
# frozen_string_literal: true
class TermAgreement < ActiveRecord::Base
belongs_to :term, class_name: 'ApplicationSetting::Term'
belongs_to :user
......
# frozen_string_literal: true
class Timelog < ActiveRecord::Base
validates :time_spent, :user, presence: true
validate :issuable_id_is_present
......
# frozen_string_literal: true
class Todo < ActiveRecord::Base
include Sortable
......
# frozen_string_literal: true
class Tree
include Gitlab::MarkupHelper
......
# frozen_string_literal: true
class TrendingProject < ActiveRecord::Base
belongs_to :project
......
# frozen_string_literal: true
# Registration information for U2F (universal 2nd factor) devices, like Yubikeys
class U2fRegistration < ActiveRecord::Base
......
# frozen_string_literal: true
class Upload < ActiveRecord::Base
# Upper limit for foreground checksum processing
CHECKSUM_THRESHOLD = 100.megabytes
......
# frozen_string_literal: true
require 'carrierwave/orm/activerecord'
class User < ActiveRecord::Base
......@@ -128,7 +130,7 @@ class User < ActiveRecord::Base
has_many :builds, dependent: :nullify, class_name: 'Ci::Build' # rubocop:disable Cop/ActiveRecordDependent
has_many :pipelines, dependent: :nullify, class_name: 'Ci::Pipeline' # rubocop:disable Cop/ActiveRecordDependent
has_many :todos
has_many :notification_settings, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :notification_settings
has_many :award_emoji, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :triggers, dependent: :destroy, class_name: 'Ci::Trigger', foreign_key: :owner_id # rubocop:disable Cop/ActiveRecordDependent
......
# frozen_string_literal: true
class UserAgentDetail < ActiveRecord::Base
belongs_to :subject, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
......
# frozen_string_literal: true
class UserCallout < ActiveRecord::Base
belongs_to :user
......
# frozen_string_literal: true
class UserCustomAttribute < ActiveRecord::Base
belongs_to :user
......
# frozen_string_literal: true
class UserInteractedProject < ActiveRecord::Base
belongs_to :user
belongs_to :project
......
# frozen_string_literal: true
class UserSyncedAttributesMetadata < ActiveRecord::Base
belongs_to :user
......
# frozen_string_literal: true
class UsersStarProject < ActiveRecord::Base
belongs_to :project, counter_cache: :star_count, touch: true
belongs_to :user
......
# frozen_string_literal: true
class WikiDirectory
include ActiveModel::Validations
......
# frozen_string_literal: true
# rubocop:disable Rails/ActiveRecordAliases
class WikiPage
PageChangedError = Class.new(StandardError)
......
......@@ -15,7 +15,7 @@ module Projects
end
def execute
prepare_template_environment(template_file&.path)
prepare_template_environment(template_file)
prepare_import_params
......@@ -61,7 +61,6 @@ module Projects
if template_file
params[:import_type] = 'gitlab_project'
params[:import_source] = import_upload_path
end
params[:import_data] = { data: data } if data.present?
......
......@@ -11,6 +11,7 @@ module Users
author.user
end
@user = nil unless @user.is_a?(User)
@activity = activity
end
......
# frozen_string_literal: true
class ImportExportUploader < AttachmentUploader
EXTENSION_WHITELIST = %w[tar.gz].freeze
EXTENSION_WHITELIST = %w[tar.gz gz].freeze
def extension_whitelist
EXTENSION_WHITELIST
......
......@@ -22,7 +22,7 @@
%br
Go to
= link_to project_runners_path(@build.project) do
= link_to project_runners_path(@build.project, anchor: 'js-runners-settings') do
Runners page
- if @build.starts_environment?
......
#js-pipeline-header-vue.pipeline-header-container
- if @commit.present?
.commit-box
.commit-box
%h3.commit-title
= markdown(@commit.title, pipeline: :single_line)
- if @commit.description.present?
= markdown(commit.title, pipeline: :single_line)
- if commit.description.present?
.commit-description<
= preserve(markdown(@commit.description, pipeline: :single_line))
= preserve(markdown(commit.description, pipeline: :single_line))
.info-well
- if @commit.status
.info-well
- if commit.status
.well-segment.pipeline-info
.icon-container
= icon('clock-o')
......@@ -26,7 +23,7 @@
.well-segment.branch-info
.icon-container.commit-icon
= custom_icon("icon_commit")
= link_to @commit.short_id, project_commit_path(@project, @pipeline.sha), class: "commit-sha js-details-short"
= link_to commit.short_id, project_commit_path(@project, @pipeline.sha), class: "commit-sha js-details-short"
= link_to("#", class: "js-details-expand d-none d-sm-none d-md-inline") do
%span.text-expander
= sprite_icon('ellipsis_h', size: 12)
......
......@@ -4,8 +4,10 @@
- page_title "Pipeline"
.js-pipeline-container{ class: container_class, data: { controller_action: "#{controller.action_name}" } }
- if @commit
= render "projects/pipelines/info"
#js-pipeline-header-vue.pipeline-header-container
- if @pipeline.commit.present?
= render "projects/pipelines/info", commit: @pipeline.commit
= render "projects/pipelines/with_tabs", pipeline: @pipeline
......
......@@ -28,7 +28,7 @@
.settings-content
= render 'autodevops_form'
%section.qa-runners-settings.settings.no-animate{ class: ('expanded' if expanded) }
%section.qa-runners-settings.settings.no-animate#js-runners-settings{ class: ('expanded' if expanded) }
.settings-header
%h4
= _("Runners")
......
......@@ -4,6 +4,10 @@ class CreateGpgSignatureWorker
include ApplicationWorker
def perform(commit_shas, project_id)
# Older versions of GitPushService may push a single commit ID on the stack.
# We need this to be backwards compatible.
commit_shas = Array(commit_shas)
return if commit_shas.empty?
project = Project.find_by(id: project_id)
......
---
title: Adds foreign key to notification_settings.user_id
merge_request: 20567
author: Jacopo Beschi @jacopo-beschi
type: added
---
title: Improve performance when fetching collapsed diffs and commenting in merge requests
merge_request: 20940
author:
type: performance
---
title: Add object storage logic to project import
merge_request: 20773
author:
type: added
---
title: Chart versions for applications installed by one click install buttons should
be version locked
merge_request: 20765
author:
type: fixed
---
title: Use Helm 2.7.2 for GitLab Managed Apps
merge_request: 20956
author:
type: changed
---
title: Automatically expand runner's settings block when linking to the runner's settings
page
merge_request:
author:
type: other
---
title: fix error caused when using the search bar while unauthenticated
merge_request: 20970
author:
type: fixed
---
title: Enable frozen string in app/models/*.rb
merge_request: 20851
author: gfyoung
type: performance
---
title: Update to Rouge 3.2.0, including Terraform and Crystal lexer and bug fixes
merge_request: 20991
author:
type: changed
class AddForeignKeyFromNotificationSettingsToUsers < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
class NotificationSetting < ActiveRecord::Base
self.table_name = 'notification_settings'
include EachBatch
end
class User < ActiveRecord::Base
self.table_name = 'users'
end
DOWNTIME = false
disable_ddl_transaction!
def up
NotificationSetting.each_batch(of: 1000) do |batch|
batch.where('NOT EXISTS (?)', User.select(1).where('users.id = notification_settings.user_id'))
.delete_all
end
add_concurrent_foreign_key(:notification_settings, :users, column: :user_id, on_delete: :cascade)
end
def down
remove_foreign_key(:notification_settings, column: :user_id)
end
end
......@@ -2350,6 +2350,7 @@ ActiveRecord::Schema.define(version: 20180726172057) do
add_foreign_key "milestones", "projects", name: "fk_9bd0a0c791", on_delete: :cascade
add_foreign_key "note_diff_files", "notes", column: "diff_note_id", on_delete: :cascade
add_foreign_key "notes", "projects", name: "fk_99e097b079", on_delete: :cascade
add_foreign_key "notification_settings", "users", name: "fk_0c95e91db7", on_delete: :cascade
add_foreign_key "oauth_openid_requests", "oauth_access_grants", column: "access_grant_id", name: "fk_oauth_openid_requests_oauth_access_grants_access_grant_id"
add_foreign_key "pages_domains", "projects", name: "fk_ea2f6dfc6f", on_delete: :cascade
add_foreign_key "personal_access_tokens", "users"
......
......@@ -347,13 +347,7 @@ def expire_first_branch_cache
end
```
## Anti-Patterns
This is a collection of [anti-patterns][anti-pattern] that should be avoided
unless these changes have a measurable, significant and positive impact on
production environments.
### String Freezing
## String Freezing
In recent Ruby versions calling `freeze` on a String leads to it being allocated
only once and re-used. For example, on Ruby 2.3 this will only allocate the
......@@ -365,17 +359,38 @@ only once and re-used. For example, on Ruby 2.3 this will only allocate the
end
```
Blindly adding a `.freeze` call to every String is an anti-pattern that should
be avoided unless one can prove (using production data) the call actually has a
positive impact on performance.
Depending on the size of the String and how frequently it would be allocated
(before the `.freeze` call was added), this _may_ make things faster, but
there's no guarantee it will.
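
One way to check instead of guessing is to count allocations directly. The following is a minimal, hypothetical sketch (MRI only; the `string_allocations` helper name and the 10,000-iteration count are arbitrary and not part of the code base) comparing a plain literal against a frozen one:

```ruby
# Minimal sketch (MRI only): count T_STRING allocations around a block to
# see whether adding `.freeze` actually reduces allocations for a given call.
def string_allocations(iterations = 10_000)
  GC.disable
  before = ObjectSpace.count_objects[:T_STRING]
  iterations.times { yield }
  after = ObjectSpace.count_objects[:T_STRING]
  GC.enable
  after - before
end

puts "plain literal:  #{string_allocations { 'foo' }}"
puts "frozen literal: #{string_allocations { 'foo'.freeze }}"
```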
Strings will be frozen by default in Ruby 3.0. To prepare our code base for
this eventuality, it's a good practice to add the following header to all
Ruby files:
```ruby
# frozen_string_literal: true
```
This may cause test failures in the code that expects to be able to manipulate
strings. Instead of using `dup`, use the unary plus to get an unfrozen string:
```ruby
test = +"hello"
test += " world"
```
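
As a small illustrative sketch (variable names are made up, not from the code base), this is what the magic comment changes and why the unary plus helps when a string genuinely needs in-place mutation:

```ruby
# frozen_string_literal: true

name = "hello"
name.frozen?        # => true, because of the magic comment
# name << " world"  # would raise FrozenError (RuntimeError before Ruby 2.5)

copy = +name        # unary plus returns an unfrozen copy (like `dup`)
copy << " world"    # safe to mutate
copy                # => "hello world"
```

The `+name.to_s.downcase.gsub(...)` change to `Environment#generate_slug` earlier in this merge appears to use the same operator so the resulting slug can be modified afterwards.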
## Anti-Patterns
This feature of Ruby wasn't really meant to make things faster directly, instead
it was meant to reduce the number of allocations. Depending on the size of the
String and how frequently it would be allocated (before the `.freeze` call was
added), this _may_ make things faster, but there's no guarantee it will.
This is a collection of [anti-patterns][anti-pattern] that should be avoided
unless these changes have a measurable, significant and positive impact on
production environments.
Another common flavour of this is to not only freeze a String, but also assign
it to a constant, for example:
### Moving Allocations to Constants
Storing an object as a constant so you only allocate it once _may_ improve
performance, but there's no guarantee this will. Looking up constants has an
impact on runtime performance, and as such, using a constant instead of
referencing an object directly may even slow code down. For example:
```ruby
SOME_CONSTANT = 'foo'.freeze
......@@ -393,13 +408,6 @@ there's nothing stopping somebody from doing this elsewhere in the code:
SOME_CONSTANT = 'bar'
```
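
Claims like this are worth measuring rather than assuming. A minimal sketch, assuming the `benchmark-ips` gem is available (it is not part of this diff), that compares a frozen constant against the equivalent frozen literal:

```ruby
# Minimal sketch: measure whether hoisting a frozen string into a constant
# is actually faster than referencing the frozen literal directly.
require 'benchmark/ips'

SOME_CONSTANT = 'foo'.freeze

Benchmark.ips do |x|
  x.report('constant lookup') { SOME_CONSTANT }
  x.report('frozen literal')  { 'foo'.freeze }
  x.compare!
end
```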
### Moving Allocations to Constants
Storing an object as a constant so you only allocate it once _may_ improve
performance, but there's no guarantee this will. Looking up constants has an
impact on runtime performance, and as such, using a constant instead of
referencing an object directly may even slow code down.
[#15607]: https://gitlab.com/gitlab-org/gitlab-ce/issues/15607
[yorickpeterse]: https://gitlab.com/yorickpeterse
[anti-pattern]: https://en.wikipedia.org/wiki/Anti-pattern
......@@ -88,7 +88,7 @@ storage in a safe place. **Each code can be used only once** to log in to your
account.
If you lose the recovery codes or just want to generate new ones, you can do so
from the **Profile settings ➔ Account** page where you first enabled 2FA.
from the **Profile settings ➔ Account** page where you first enabled 2FA, or by [using SSH](#generate-new-recovery-codes-using-ssh).
## Logging in with 2FA Enabled
......
require 'yaml'
require_relative 'helper'
module Backup
class Repository
include Backup::Helper
attr_reader :progress
def initialize(progress)
......@@ -42,131 +39,36 @@ module Backup
end
def prepare_directories
Gitlab.config.repositories.storages.each do |name, repository_storage|
delete_all_repositories(name, repository_storage)
Gitlab.config.repositories.storages.each do |name, _repository_storage|
Gitlab::GitalyClient::StorageService.new(name).delete_all_repositories
end
end
def backup_project(project)
gitaly_migrate(:repository_backup, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |is_enabled|
if is_enabled
backup_project_gitaly(project)
else
backup_project_local(project)
end
end
backup_custom_hooks(project)
rescue => e
progress_warn(project, e, 'Failed to backup repo')
end
def backup_project_gitaly(project)
path_to_project_bundle = path_to_bundle(project)
Gitlab::GitalyClient::RepositoryService.new(project.repository)
.create_bundle(path_to_project_bundle)
end
def backup_project_local(project)
path_to_project_repo = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
path_to_repo(project)
end
path_to_project_bundle = path_to_bundle(project)
cmd = %W(#{Gitlab.config.git.bin_path} --git-dir=#{path_to_project_repo} bundle create #{path_to_project_bundle} --all)
output, status = Gitlab::Popen.popen(cmd)
progress_warn(project, cmd.join(' '), output) unless status.zero?
end
def delete_all_repositories(name, repository_storage)
gitaly_migrate(:delete_all_repositories, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |is_enabled|
if is_enabled
Gitlab::GitalyClient::StorageService.new(name).delete_all_repositories
else
local_delete_all_repositories(name, repository_storage)
end
end
end
def local_delete_all_repositories(name, repository_storage)
path = repository_storage.legacy_disk_path
return unless File.exist?(path)
bk_repos_path = File.join(Gitlab.config.backup.path, "tmp", "#{name}-repositories.old." + Time.now.to_i.to_s)
FileUtils.mkdir_p(bk_repos_path, mode: 0700)
files = Dir.glob(File.join(path, "*"), File::FNM_DOTMATCH) - [File.join(path, "."), File.join(path, "..")]
begin
FileUtils.mv(files, bk_repos_path)
rescue Errno::EACCES
access_denied_error(path)
rescue Errno::EBUSY
resource_busy_error(path)
end
end
def local_restore_custom_hooks(project, dir)
path_to_project_repo = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
path_to_repo(project)
end
cmd = %W(tar -xf #{path_to_tars(project, dir)} -C #{path_to_project_repo} #{dir})
output, status = Gitlab::Popen.popen(cmd)
unless status.zero?
progress_warn(project, cmd.join(' '), output)
end
end
def gitaly_restore_custom_hooks(project, dir)
custom_hooks_path = path_to_tars(project, dir)
Gitlab::GitalyClient::RepositoryService.new(project.repository)
.restore_custom_hooks(custom_hooks_path)
end
def local_backup_custom_hooks(project)
in_path(path_to_tars(project)) do |dir|
path_to_project_repo = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
path_to_repo(project)
backup_custom_hooks(project)
rescue => e
progress_warn(project, e, 'Failed to backup repo')
end
break unless File.exist?(File.join(path_to_project_repo, dir))
FileUtils.mkdir_p(path_to_tars(project))
cmd = %W(tar -cf #{path_to_tars(project, dir)} -c #{path_to_project_repo} #{dir})
output, status = Gitlab::Popen.popen(cmd)
unless status.zero?
progress_warn(project, cmd.join(' '), output)
end
end
end
def backup_custom_hooks(project)
FileUtils.mkdir_p(project_backup_path(project))
def gitaly_backup_custom_hooks(project)
FileUtils.mkdir_p(path_to_tars(project))
custom_hooks_path = path_to_tars(project, 'custom_hooks')
custom_hooks_path = custom_hooks_tar(project)
Gitlab::GitalyClient::RepositoryService.new(project.repository)
.backup_custom_hooks(custom_hooks_path)
end
def backup_custom_hooks(project)
gitaly_migrate(:backup_custom_hooks, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |is_enabled|
if is_enabled
gitaly_backup_custom_hooks(project)
else
local_backup_custom_hooks(project)
end
end
end
def restore_custom_hooks(project)
in_path(path_to_tars(project)) do |dir|
gitaly_migrate(:restore_custom_hooks, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |is_enabled|
if is_enabled
gitaly_restore_custom_hooks(project, dir)
else
local_restore_custom_hooks(project, dir)
end
end
end
return unless Dir.exist?(project_backup_path(project))
return if Dir.glob("#{project_backup_path(project)}/custom_hooks*").none?
custom_hooks_path = custom_hooks_tar(project)
Gitlab::GitalyClient::RepositoryService.new(project.repository)
.restore_custom_hooks(custom_hooks_path)
end
def restore
......@@ -181,7 +83,8 @@ module Backup
restore_repo_success = nil
if File.exist?(path_to_project_bundle)
begin
project.repository.create_from_bundle path_to_project_bundle
project.repository.create_from_bundle(path_to_project_bundle)
restore_custom_hooks(project)
restore_repo_success = true
rescue => e
restore_repo_success = false
......@@ -197,8 +100,6 @@ module Backup
progress.puts "[Failed] restoring #{project.full_path} repository".color(:red)
end
restore_custom_hooks(project)
wiki = ProjectWiki.new(project)
path_to_wiki_bundle = path_to_bundle(wiki)
......@@ -219,48 +120,28 @@ module Backup
protected
def path_to_repo(project)
project.repository.path_to_repo
end
def path_to_bundle(project)
File.join(backup_repos_path, project.disk_path + '.bundle')
end
def path_to_tars(project, dir = nil)
path = File.join(backup_repos_path, project.disk_path)
if dir
File.join(path, "#{dir}.tar")
else
path
def project_backup_path(project)
File.join(backup_repos_path, project.disk_path)
end
def custom_hooks_tar(project)
File.join(project_backup_path(project), "custom_hooks.tar")
end
def backup_repos_path
File.join(Gitlab.config.backup.path, 'repositories')
end
def in_path(path)
return unless Dir.exist?(path)
dir_entries = Dir.entries(path)
if dir_entries.include?('custom_hooks') || dir_entries.include?('custom_hooks.tar')
yield('custom_hooks')
end
end
def prepare
FileUtils.rm_rf(backup_repos_path)
FileUtils.mkdir_p(Gitlab.config.backup.path)
FileUtils.mkdir(backup_repos_path, mode: 0700)
end
def silent
{ err: '/dev/null', out: '/dev/null' }
end
private
def progress_warn(project, cmd, output)
......@@ -273,18 +154,8 @@ module Backup
project_or_wiki.repository.empty?
end
def repository_storage_paths_args
Gitlab.config.repositories.storages.values.map { |rs| rs.legacy_disk_path }
end
def display_repo_path(project)
project.hashed_storage?(:repository) ? "#{project.full_path} (#{project.disk_path})" : project.full_path
end
def gitaly_migrate(method, status: Gitlab::GitalyClient::MigrationStatus::OPT_IN, &block)
Gitlab::GitalyClient.migrate(method, status: status, &block)
rescue GRPC::NotFound, GRPC::BadStatus => e
raise Error, e
end
end
end
......@@ -26,6 +26,12 @@ module Gitlab
end
end
# This is called from within a rake task only used by Admins, so allow writing
# to STDOUT
def self.log(message)
puts message # rubocop:disable Rails/Output
end
attr_reader :user, :project_name, :bare_repo
delegate :log, to: :class
......@@ -59,11 +65,10 @@ module Gitlab
import_type: 'bare_repository',
namespace_id: group&.id).execute
if project.persisted? && mv_repo(project)
if project.persisted? && mv_repositories(project)
log " * Created #{project.name} (#{project_full_path})".color(:green)
project.write_repository_config
Gitlab::Git::Repository.create_hooks(project.repository.path_to_repo, Gitlab.config.gitlab_shell.hooks_path)
ProjectCacheWorker.perform_async(project.id)
else
......@@ -74,12 +79,11 @@ module Gitlab
project
end
def mv_repo(project)
storage_path = storage_path_for_shard(project.repository_storage)
FileUtils.mv(repo_path, project.repository.path_to_repo)
def mv_repositories(project)
mv_repo(bare_repo.repo_path, project.repository)
if bare_repo.wiki_exists?
FileUtils.mv(wiki_path, File.join(storage_path, project.disk_path + '.wiki.git'))
mv_repo(bare_repo.wiki_path, project.wiki.repository)
end
true
......@@ -89,6 +93,11 @@ module Gitlab
false
end
def mv_repo(path, repository)
repository.create_from_bundle(bundle(path))
FileUtils.rm_rf(path)
end
def storage_path_for_shard(shard)
Gitlab.config.repositories.storages[shard].legacy_disk_path
end
......@@ -101,10 +110,17 @@ module Gitlab
Groups::NestedCreateService.new(user, group_path: group_path).execute
end
# This is called from within a rake task only used by Admins, so allow writing
# to STDOUT
def self.log(message)
puts message # rubocop:disable Rails/Output
def bundle(repo_path)
# TODO: we could save some time and disk space by using
# `git bundle create - --all` and streaming the bundle directly to
# Gitaly, rather than writing it on disk first
bundle_path = "#{repo_path}.bundle"
cmd = %W(#{Gitlab.config.git.bin_path} --git-dir=#{repo_path} bundle create #{bundle_path} --all)
output, status = Gitlab::Popen.popen(cmd)
raise output unless status.zero?
bundle_path
end
end
end
......
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/953
#
module Gitlab
module BareRepositoryImport
class Repository
......
......@@ -40,7 +40,7 @@ module Gitlab
# Accepts a path in the form of "#{hex_secret}/#{filename}"
def find_correct_path(upload_path)
upload = Upload.find_by(uploader: 'FileUploader', path: upload_path)
return unless upload && upload.local?
return unless upload && upload.local? && upload.model
upload.absolute_path
rescue => e
......
......@@ -39,19 +39,6 @@ module Gitlab
ChecksumError = Class.new(StandardError)
class << self
# Unlike `new`, `create` takes the repository path
def create(repo_path, bare: true, symlink_hooks_to: nil)
FileUtils.mkdir_p(repo_path, mode: 0770)
# Equivalent to `git --git-path=#{repo_path} init [--bare]`
repo = Rugged::Repository.init_at(repo_path, bare)
repo.close
create_hooks(repo_path, symlink_hooks_to) if symlink_hooks_to.present?
true
end
def create_hooks(repo_path, global_hooks_path)
local_hooks_path = File.join(repo_path, 'hooks')
real_local_hooks_path = :not_found
......
......@@ -18,6 +18,21 @@ module Gitlab
private
def download_or_copy_upload(uploader, upload_path)
if uploader.upload.local?
copy_files(uploader.path, upload_path)
else
download(uploader.url, upload_path)
end
end
def download(url, upload_path)
File.open(upload_path, 'w') do |file|
# Download (stream) file from the uploader's location
IO.copy_stream(URI.parse(url).open, file)
end
end
def tar_with_options(archive:, dir:, options:)
execute(%W(tar -#{options} #{archive} -C #{dir} .))
end
......
......@@ -10,15 +10,18 @@ module Gitlab
new(*args).import
end
def initialize(archive_file:, shared:)
def initialize(project:, archive_file:, shared:)
@project = project
@archive_file = archive_file
@shared = shared
end
def import
mkdir_p(@shared.export_path)
mkdir_p(@shared.archive_path)
remove_symlinks!
remove_symlinks
copy_archive
wait_for_archived_file do
decompress_archive
......@@ -27,7 +30,8 @@ module Gitlab
@shared.error(e)
false
ensure
remove_symlinks!
remove_import_file
remove_symlinks
end
private
......@@ -51,7 +55,15 @@ module Gitlab
result
end
def remove_symlinks!
def copy_archive
return if @archive_file
@archive_file = File.join(@shared.archive_path, Gitlab::ImportExport.export_filename(project: @project))
download_or_copy_upload(@project.import_export_upload.import_file, @archive_file)
end
def remove_symlinks
extracted_files.each do |path|
FileUtils.rm(path) if File.lstat(path).symlink?
end
......@@ -59,6 +71,10 @@ module Gitlab
true
end
def remove_import_file
FileUtils.rm_rf(@archive_file)
end
def extracted_files
Dir.glob("#{@shared.export_path}/**/*", File::FNM_DOTMATCH).reject { |f| IGNORED_FILENAMES.include?(File.basename(f)) }
end
......
......@@ -35,7 +35,8 @@ module Gitlab
end
def import_file
Gitlab::ImportExport::FileImporter.import(archive_file: @archive_file,
Gitlab::ImportExport::FileImporter.import(project: @project,
archive_file: @archive_file,
shared: @shared)
end
......@@ -91,7 +92,14 @@ module Gitlab
end
def remove_import_file
FileUtils.rm_rf(@archive_file)
return unless Gitlab::ImportExport.object_storage?
upload = @project.import_export_upload
return unless upload&.import_file&.file
upload.remove_import_file!
upload.save!
end
def overwrite_project
......
......@@ -91,10 +91,7 @@ module Gitlab
mkdir_p(File.join(uploads_export_path, secret))
File.open(upload_path, 'w') do |file|
# Download (stream) file from the uploader's location
IO.copy_stream(URI.parse(upload.file.url).open, file)
end
download_or_copy_upload(upload, upload_path)
end
end
end
......
module Gitlab
module Kubernetes
module Helm
HELM_VERSION = '2.7.0'.freeze
HELM_VERSION = '2.7.2'.freeze
NAMESPACE = 'gitlab-managed-apps'.freeze
end
end
......
......@@ -2,11 +2,17 @@ module Gitlab
module TemplateHelper
include Gitlab::Utils::StrongMemoize
def prepare_template_environment(file_path)
return unless file_path.present?
def prepare_template_environment(file)
return unless file&.path.present?
if Gitlab::ImportExport.object_storage?
params[:import_export_upload] = ImportExportUpload.new(import_file: file)
else
FileUtils.mkdir_p(File.dirname(import_upload_path))
FileUtils.copy_entry(file_path, import_upload_path)
FileUtils.copy_entry(file.path, import_upload_path)
params[:import_source] = import_upload_path
end
end
def import_upload_path
......
......@@ -13,7 +13,7 @@ describe Admin::ServicesController do
Service.available_services_names.each do |service_name|
context "#{service_name}" do
let!(:service) do
service_template = service_name.concat("_service").camelize.constantize
service_template = "#{service_name}_service".camelize.constantize
service_template.where(template: true).first_or_create
end
......
require 'spec_helper'
describe 'Import/Export - project import integration test', :js do
include Select2Helper
let(:user) { create(:user) }
let(:file) { File.join(Rails.root, 'spec', 'features', 'projects', 'import_export', 'test_project_export.tar.gz') }
let(:export_path) { "#{Dir.tmpdir}/import_file_spec" }
before do
stub_feature_flags(import_export_object_storage: true)
stub_uploads_object_storage(FileUploader)
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
gitlab_sign_in(user)
end
after do
FileUtils.rm_rf(export_path, secure: true)
end
context 'when selecting the namespace' do
let(:user) { create(:admin) }
let!(:namespace) { user.namespace }
let(:project_path) { 'test-project-path' + SecureRandom.hex }
context 'prefilled the path' do
it 'user imports an exported project successfully' do
visit new_project_path
select2(namespace.id, from: '#project_namespace_id')
fill_in :project_path, with: project_path, visible: true
click_import_project_tab
click_link 'GitLab export'
expect(page).to have_content('Import an exported GitLab project')
expect(URI.parse(current_url).query).to eq("namespace_id=#{namespace.id}&path=#{project_path}")
attach_file('file', file)
click_on 'Import project'
expect(Project.count).to eq(1)
project = Project.last
expect(project).not_to be_nil
expect(project.description).to eq("Foo Bar")
expect(project.issues).not_to be_empty
expect(project.merge_requests).not_to be_empty
expect(project_hook_exists?(project)).to be true
expect(wiki_exists?(project)).to be true
expect(project.import_state.status).to eq('finished')
end
end
context 'path is not prefilled' do
it 'user imports an exported project successfully' do
visit new_project_path
click_import_project_tab
click_link 'GitLab export'
fill_in :path, with: 'test-project-path', visible: true
attach_file('file', file)
expect { click_on 'Import project' }.to change { Project.count }.by(1)
project = Project.last
expect(project).not_to be_nil
expect(page).to have_content("Project 'test-project-path' is being imported")
end
end
end
it 'invalid project' do
project = create(:project, namespace: user.namespace)
visit new_project_path
select2(user.namespace.id, from: '#project_namespace_id')
fill_in :project_path, with: project.name, visible: true
click_import_project_tab
click_link 'GitLab export'
attach_file('file', file)
click_on 'Import project'
page.within('.flash-container') do
expect(page).to have_content('Project could not be imported')
end
end
def wiki_exists?(project)
wiki = ProjectWiki.new(project)
wiki.repository.exists? && !wiki.repository.empty?
end
def project_hook_exists?(project)
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
Gitlab::Git::Hook.new('post-receive', project.repository.raw_repository).exists?
end
end
def click_import_project_tab
find('#import-project-tab').click
end
end
......@@ -8,6 +8,7 @@ describe 'Import/Export - project import integration test', :js do
let(:export_path) { "#{Dir.tmpdir}/import_file_spec" }
before do
stub_feature_flags(import_export_object_storage: false)
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
gitlab_sign_in(user)
end
......
......@@ -62,7 +62,9 @@ describe('new dropdown component', () => {
vm.dropdownOpen = true;
setTimeout(() => {
expect(vm.$refs.dropdownMenu.scrollIntoView).toHaveBeenCalled();
expect(vm.$refs.dropdownMenu.scrollIntoView).toHaveBeenCalledWith({
block: 'nearest',
});
done();
});
......
......@@ -73,8 +73,7 @@ describe Backup::Repository do
end
end
describe '#delete_all_repositories', :seed_helper do
shared_examples('delete_all_repositories') do
describe '#prepare_directories', :seed_helper do
before do
allow(FileUtils).to receive(:mkdir_p).and_call_original
allow(FileUtils).to receive(:mv).and_call_original
......@@ -84,11 +83,11 @@ describe Backup::Repository do
ensure_seeds
end
it 'removes all repositories' do
it 'removes all repositories' do
# Sanity check: there should be something for us to delete
expect(list_repositories).to include(File.join(SEED_STORAGE_PATH, TEST_REPO_PATH))
subject.delete_all_repositories('default', Gitlab.config.repositories.storages['default'])
subject.prepare_directories
expect(list_repositories).to be_empty
end
......@@ -98,15 +97,6 @@ describe Backup::Repository do
end
end
context 'with gitaly' do
it_behaves_like 'delete_all_repositories'
end
context 'without gitaly', :skip_gitaly_mock do
it_behaves_like 'delete_all_repositories'
end
end
describe '#empty_repo?' do
context 'for a wiki' do
let(:wiki) { create(:project_wiki) }
......
require 'spec_helper'
describe Gitlab::BareRepositoryImport::Importer, repository: true do
describe Gitlab::BareRepositoryImport::Importer, :seed_helper do
let!(:admin) { create(:admin) }
let!(:base_dir) { Dir.mktmpdir + '/' }
let(:bare_repository) { Gitlab::BareRepositoryImport::Repository.new(base_dir, File.join(base_dir, "#{project_path}.git")) }
let(:gitlab_shell) { Gitlab::Shell.new }
let(:source_project) { TEST_REPO_PATH }
subject(:importer) { described_class.new(admin, bare_repository) }
......@@ -17,16 +18,11 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
after do
FileUtils.rm_rf(base_dir)
TestEnv.clean_test_path
ensure_seeds
Rainbow.enabled = @rainbow
end
around do |example|
# TODO migrate BareRepositoryImport https://gitlab.com/gitlab-org/gitaly/issues/953
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
example.run
end
end
shared_examples 'importing a repository' do
describe '.execute' do
it 'creates a project for a repository in storage' do
......@@ -86,8 +82,8 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
importer.create_project_if_needed
end
it 'creates the Git repo on disk with the proper symlink for hooks' do
create_bare_repository("#{project_path}.git")
it 'creates the Git repo on disk' do
prepare_repository("#{project_path}.git", source_project)
importer.create_project_if_needed
......@@ -97,9 +93,6 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
expect(gitlab_shell.exists?(project.repository_storage, repo_path)).to be(true)
expect(gitlab_shell.exists?(project.repository_storage, hook_path)).to be(true)
full_hook_path = File.join(project.repository.path_to_repo, 'hooks')
expect(File.readlink(full_hook_path)).to eq(Gitlab.config.gitlab_shell.hooks_path)
end
context 'hashed storage enabled' do
......@@ -148,7 +141,7 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
end
it 'creates the Git repo in disk' do
create_bare_repository("#{project_path}.git")
prepare_repository("#{project_path}.git", source_project)
importer.create_project_if_needed
......@@ -158,25 +151,25 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
expect(gitlab_shell.exists?(project.repository_storage, project.disk_path + '.wiki.git')).to be(true)
end
it 'moves an existing project to the correct path' do
context 'with a repository already on disk' do
let!(:base_dir) { TestEnv.repos_path }
# This is a quick way to get a valid repository instead of copying an
# existing one. Since it's not persisted, the importer will try to
# create the project.
project = build(:project, :legacy_storage, :repository)
original_commit_count = project.repository.commit_count
legacy_path = Gitlab.config.repositories.storages[project.repository_storage].legacy_disk_path
let(:project) { build(:project, :legacy_storage, :repository) }
let(:project_path) { project.full_path }
bare_repo = Gitlab::BareRepositoryImport::Repository.new(legacy_path, project.repository.path)
gitlab_importer = described_class.new(admin, bare_repo)
it 'moves an existing project to the correct path' do
original_commit_count = project.repository.commit_count
expect(gitlab_importer).to receive(:create_project).and_call_original
expect(importer).to receive(:create_project).and_call_original
new_project = gitlab_importer.create_project_if_needed
new_project = importer.create_project_if_needed
expect(new_project.repository.commit_count).to eq(original_commit_count)
end
end
end
context 'with Wiki' do
let(:project_path) { 'a-group/a-project' }
......@@ -185,8 +178,8 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
it_behaves_like 'importing a repository'
it 'creates the Wiki git repo in disk' do
create_bare_repository("#{project_path}.git")
create_bare_repository("#{project_path}.wiki.git")
prepare_repository("#{project_path}.git", source_project)
prepare_repository("#{project_path}.wiki.git", source_project)
expect(Projects::CreateService).to receive(:new).with(admin, hash_including(skip_wiki: true,
import_type: 'bare_repository')).and_call_original
......@@ -213,8 +206,13 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
end
end
def create_bare_repository(project_path)
def prepare_repository(project_path, source_project)
repo_path = File.join(base_dir, project_path)
Gitlab::Git::Repository.create(repo_path, bare: true)
return create_bare_repository(repo_path) unless source_project
cmd = %W(#{Gitlab.config.git.bin_path} clone --bare #{source_project} #{repo_path})
system(git_env, *cmd, chdir: SEED_STORAGE_PATH, out: '/dev/null', err: '/dev/null')
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Cleanup::ProjectUploads do
subject { described_class.new(logger: logger) }
let(:logger) { double(:logger) }
before do
allow(logger).to receive(:info).at_least(1).times
allow(logger).to receive(:debug).at_least(1).times
end
describe '#run!' do
shared_examples_for 'moves the file' do
shared_examples_for 'a real run' do
let(:args) { [dry_run: false] }
it 'moves the file to its proper location' do
subject.run!(*args)
expect(File.exist?(path)).to be_falsey
expect(File.exist?(new_path)).to be_truthy
end
it 'logs action as done' do
expect(logger).to receive(:info).with("Looking for orphaned project uploads to clean up...")
expect(logger).to receive(:info).with("Did #{action}")
subject.run!(*args)
end
end
shared_examples_for 'a dry run' do
it 'does not move the file' do
subject.run!(*args)
expect(File.exist?(path)).to be_truthy
expect(File.exist?(new_path)).to be_falsey
end
it 'logs action as able to be done' do
expect(logger).to receive(:info).with("Looking for orphaned project uploads to clean up. Dry run...")
expect(logger).to receive(:info).with("Can #{action}")
subject.run!(*args)
end
end
context 'when dry_run is false' do
let(:args) { [dry_run: false] }
it_behaves_like 'a real run'
end
context 'when dry_run is nil' do
let(:args) { [dry_run: nil] }
it_behaves_like 'a real run'
end
context 'when dry_run is true' do
let(:args) { [dry_run: true] }
it_behaves_like 'a dry run'
end
context 'with dry_run not specified' do
let(:args) { [] }
it_behaves_like 'a dry run'
end
end
shared_examples_for 'moves the file to lost and found' do
let(:action) { "move to lost and found #{path} -> #{new_path}" }
it_behaves_like 'moves the file'
end
shared_examples_for 'fixes the file' do
let(:action) { "fix #{path} -> #{new_path}" }
it_behaves_like 'moves the file'
end
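All four dry_run cases above fit a method whose dry_run keyword defaults to true and is checked for truthiness, so nil and false both trigger a real run. The following is a hedged sketch of that assumed contract, not the actual Gitlab::Cleanup::ProjectUploads implementation; only the two logger messages come from the spec.

# Hedged illustration only; the real class also finds and moves the files.
class DryRunContractSketch
  def initialize(logger:)
    @logger = logger
  end

  # Only an explicitly falsy dry_run (false or nil) would perform the moves.
  def run!(dry_run: true)
    if dry_run
      @logger.info('Looking for orphaned project uploads to clean up. Dry run...')
    else
      @logger.info('Looking for orphaned project uploads to clean up...')
    end
    !dry_run
  end
end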
context 'orphaned project upload file' do
context 'when an upload record matching the secret and filename is found' do
context 'when the project is still in legacy storage' do
let(:orphaned) { create(:upload, :issuable_upload, :with_file, model: create(:project, :legacy_storage)) }
let(:new_path) { orphaned.absolute_path }
let(:path) { File.join(FileUploader.root, 'some', 'wrong', 'location', orphaned.path) }
before do
FileUtils.mkdir_p(File.dirname(path))
FileUtils.mv(new_path, path)
end
it_behaves_like 'fixes the file'
end
context 'when the project was moved to hashed storage' do
let(:orphaned) { create(:upload, :issuable_upload, :with_file) }
let(:new_path) { orphaned.absolute_path }
let(:path) { File.join(FileUploader.root, 'some', 'wrong', 'location', orphaned.path) }
before do
FileUtils.mkdir_p(File.dirname(path))
FileUtils.mv(new_path, path)
end
it_behaves_like 'fixes the file'
end
context 'when the project is missing (the upload *record* is an orphan)' do
let(:orphaned) { create(:upload, :issuable_upload, :with_file, model: build(:project, :legacy_storage)) }
let!(:path) { orphaned.absolute_path }
let!(:new_path) { File.join(FileUploader.root, '-', 'project-lost-found', orphaned.model.full_path, orphaned.path) }
before do
orphaned.model.delete
end
it_behaves_like 'moves the file to lost and found'
end
# We will probably want to add logic (Reschedule background upload) to
# cover Case 2 in https://gitlab.com/gitlab-org/gitlab-ce/issues/46535#note_75355104
context 'when the file should be in object storage' do
context 'when the file otherwise has the correct local path' do
let!(:orphaned) { create(:upload, :issuable_upload, :object_storage, model: build(:project, :legacy_storage)) }
let!(:path) { File.join(FileUploader.root, orphaned.model.full_path, orphaned.path) }
before do
stub_feature_flags(import_export_object_storage: true)
stub_uploads_object_storage(FileUploader)
FileUtils.mkdir_p(File.dirname(path))
FileUtils.touch(path)
end
it 'does not move the file' do
expect(File.exist?(path)).to be_truthy
subject.run!(dry_run: false)
expect(File.exist?(path)).to be_truthy
end
end
# E.g. the upload file was orphaned, and then uploads were migrated to
# object storage
context 'when the file has the wrong local path' do
let!(:orphaned) { create(:upload, :issuable_upload, :object_storage, model: build(:project, :legacy_storage)) }
let!(:path) { File.join(FileUploader.root, 'wrong', orphaned.path) }
let!(:new_path) { File.join(FileUploader.root, '-', 'project-lost-found', 'wrong', orphaned.path) }
before do
stub_feature_flags(import_export_object_storage: true)
stub_uploads_object_storage(FileUploader)
FileUtils.mkdir_p(File.dirname(path))
FileUtils.touch(path)
end
it_behaves_like 'moves the file to lost and found'
end
end
end
context 'when a matching upload record can not be found' do
context 'when the file path fits the known pattern' do
let!(:orphaned) { create(:upload, :issuable_upload, :with_file, model: build(:project, :legacy_storage)) }
let!(:path) { orphaned.absolute_path }
let!(:new_path) { File.join(FileUploader.root, '-', 'project-lost-found', orphaned.model.full_path, orphaned.path) }
before do
orphaned.delete
end
it_behaves_like 'moves the file to lost and found'
end
context 'when the file path does not fit the known pattern' do
let!(:invalid_path) { File.join('group', 'file.jpg') }
let!(:path) { File.join(FileUploader.root, invalid_path) }
let!(:new_path) { File.join(FileUploader.root, '-', 'project-lost-found', invalid_path) }
before do
FileUtils.mkdir_p(File.dirname(path))
FileUtils.touch(path)
end
after do
File.delete(path) if File.exist?(path)
end
it_behaves_like 'moves the file to lost and found'
end
end
end
context 'non-orphaned project upload file' do
it 'does not move the file' do
tracked = create(:upload, :issuable_upload, :with_file, model: build(:project, :legacy_storage))
tracked_path = tracked.absolute_path
expect(logger).not_to receive(:info).with(/move|fix/i)
expect(File.exist?(tracked_path)).to be_truthy
subject.run!(dry_run: false)
expect(File.exist?(tracked_path)).to be_truthy
end
end
context 'ignorable cases' do
# Because we aren't concerned about these, and can save a lot of
# processing time by ignoring them. If we wish to cleanup hashed storage
# directories, it should simply require removing this test and modifying
# the find command.
context 'when the file is already in hashed storage' do
let(:project) { create(:project) }
before do
expect(logger).not_to receive(:info).with(/move|fix/i)
end
it 'does not move even an orphan file' do
orphaned = create(:upload, :issuable_upload, :with_file, model: project)
path = orphaned.absolute_path
orphaned.delete
expect(File.exist?(path)).to be_truthy
subject.run!(dry_run: false)
expect(File.exist?(path)).to be_truthy
end
end
it 'does not move any non-project (FileUploader) uploads' do
paths = []
orphaned1 = create(:upload, :personal_snippet_upload, :with_file)
orphaned2 = create(:upload, :namespace_upload, :with_file)
orphaned3 = create(:upload, :attachment_upload, :with_file)
paths << orphaned1.absolute_path
paths << orphaned2.absolute_path
paths << orphaned3.absolute_path
Upload.delete_all
expect(logger).not_to receive(:info).with(/move|fix/i)
paths.each do |path|
expect(File.exist?(path)).to be_truthy
end
subject.run!(dry_run: false)
paths.each do |path|
expect(File.exist?(path)).to be_truthy
end
end
it 'does not move any uploads in tmp (which would interfere with ongoing upload activity)' do
path = File.join(FileUploader.root, 'tmp', 'foo.jpg')
FileUtils.mkdir_p(File.dirname(path))
FileUtils.touch(path)
expect(logger).not_to receive(:info).with(/move|fix/i)
expect(File.exist?(path)).to be_truthy
subject.run!(dry_run: false)
expect(File.exist?(path)).to be_truthy
end
end
end
end
require 'spec_helper'
describe Gitlab::Git::AttributesAtRefParser, seed_helper: true do
describe Gitlab::Git::AttributesAtRefParser, :seed_helper do
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
......
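The change from seed_helper: true to :seed_helper, repeated across the Gitlab::Git specs below, relies on RSpec treating a bare symbol in example-group metadata as that key set to true. A minimal, runnable illustration:

RSpec.describe 'metadata shorthand', :seed_helper do
  it 'is equivalent to seed_helper: true' do |example|
    expect(example.metadata[:seed_helper]).to be(true)
  end
end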
require 'spec_helper'
describe Gitlab::Git::AttributesParser, seed_helper: true do
describe Gitlab::Git::AttributesParser, :seed_helper do
let(:attributes_path) { File.join(SEED_STORAGE_PATH, 'with-git-attributes.git', 'info', 'attributes') }
let(:data) { File.read(attributes_path) }
......
# coding: utf-8
require "spec_helper"
describe Gitlab::Git::Blame, seed_helper: true do
describe Gitlab::Git::Blame, :seed_helper do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') }
let(:blame) do
Gitlab::Git::Blame.new(repository, SeedRepo::Commit::ID, "CONTRIBUTING.md")
......
......@@ -2,7 +2,7 @@
require "spec_helper"
describe Gitlab::Git::BlobSnippet, seed_helper: true do
describe Gitlab::Git::BlobSnippet, :seed_helper do
describe '#data' do
context 'empty lines' do
let(:snippet) { Gitlab::Git::BlobSnippet.new('master', nil, nil, nil) }
......
......@@ -2,7 +2,7 @@
require "spec_helper"
describe Gitlab::Git::Blob, seed_helper: true do
describe Gitlab::Git::Blob, :seed_helper do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') }
describe 'initialize' do
......
require "spec_helper"
describe Gitlab::Git::Branch, seed_helper: true do
describe Gitlab::Git::Branch, :seed_helper do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') }
let(:rugged) do
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
......
require "spec_helper"
describe Gitlab::Git::Commit, seed_helper: true do
describe Gitlab::Git::Commit, :seed_helper do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') }
let(:commit) { described_class.find(repository, SeedRepo::Commit::ID) }
let(:rugged_commit) do
......
require 'spec_helper'
describe Gitlab::Git::CommitterWithHooks, seed_helper: true do
describe Gitlab::Git::CommitterWithHooks, :seed_helper do
# TODO https://gitlab.com/gitlab-org/gitaly/issues/1234
skip 'needs to be moved to gitaly-ruby test suite' do
shared_examples 'calling wiki hooks' do
......
require "spec_helper"
describe Gitlab::Git::Compare, seed_helper: true do
describe Gitlab::Git::Compare, :seed_helper do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') }
let(:compare) { Gitlab::Git::Compare.new(repository, SeedRepo::BigCommit::ID, SeedRepo::Commit::ID, straight: false) }
let(:compare_straight) { Gitlab::Git::Compare.new(repository, SeedRepo::BigCommit::ID, SeedRepo::Commit::ID, straight: true) }
......
require 'spec_helper'
describe Gitlab::Git::DiffCollection, seed_helper: true do
describe Gitlab::Git::DiffCollection, :seed_helper do
subject do
Gitlab::Git::DiffCollection.new(
iterator,
......
require "spec_helper"
describe Gitlab::Git::Diff, seed_helper: true do
describe Gitlab::Git::Diff, :seed_helper do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') }
before do
......
require 'spec_helper'
describe Gitlab::Git::HooksService, seed_helper: true do
describe Gitlab::Git::HooksService, :seed_helper do
let(:gl_id) { 'user-456' }
let(:gl_username) { 'janedoe' }
let(:user) { Gitlab::Git::User.new(gl_username, 'Jane Doe', 'janedoe@example.com', gl_id) }
......
require 'spec_helper'
describe Gitlab::Git::Index, seed_helper: true do
describe Gitlab::Git::Index, :seed_helper do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') }
let(:index) { described_class.new(repository) }
......
require 'spec_helper'
describe Gitlab::Git::RemoteRepository, seed_helper: true do
describe Gitlab::Git::RemoteRepository, :seed_helper do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') }
subject { described_class.new(repository) }
......
# coding: utf-8
require "spec_helper"
describe Gitlab::Git::Repository, seed_helper: true do
describe Gitlab::Git::Repository, :seed_helper do
include Gitlab::EncodingHelper
using RSpec::Parameterized::TableSyntax
......
require "spec_helper"
describe Gitlab::Git::Tag, seed_helper: true do
describe Gitlab::Git::Tag, :seed_helper do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') }
shared_examples 'Gitlab::Git::Repository#tags' do
......
require "spec_helper"
describe Gitlab::Git::Tree, seed_helper: true do
describe Gitlab::Git::Tree, :seed_helper do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') }
context :repo do
......
require 'spec_helper'
describe Gitlab::GitalyClient::StorageService do
describe '#delete_all_repositories' do
let!(:project) { create(:project, :repository) }
it 'removes all repositories' do
described_class.new(project.repository_storage).delete_all_repositories
expect(project.repository.exists?).to be(false)
end
end
end
require 'spec_helper'
describe Gitlab::ImportExport::FileImporter do
let(:shared) { Gitlab::ImportExport::Shared.new(nil) }
let(:storage_path) { "#{Dir.tmpdir}/file_importer_spec" }
let(:valid_file) { "#{shared.export_path}/valid.json" }
let(:symlink_file) { "#{shared.export_path}/invalid.json" }
let(:hidden_symlink_file) { "#{shared.export_path}/.hidden" }
let(:subfolder_symlink_file) { "#{shared.export_path}/subfolder/invalid.json" }
let(:evil_symlink_file) { "#{shared.export_path}/.\nevil" }
before do
stub_const('Gitlab::ImportExport::FileImporter::MAX_RETRIES', 0)
stub_feature_flags(import_export_object_storage: true)
stub_uploads_object_storage(FileUploader)
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(storage_path)
allow_any_instance_of(Gitlab::ImportExport::CommandLineUtil).to receive(:untar_zxf).and_return(true)
allow_any_instance_of(Gitlab::ImportExport::Shared).to receive(:relative_archive_path).and_return('test')
allow(SecureRandom).to receive(:hex).and_return('abcd')
setup_files
end
after do
FileUtils.rm_rf(storage_path)
end
context 'normal run' do
before do
described_class.import(project: build(:project), archive_file: '', shared: shared)
end
it 'removes symlinks in root folder' do
expect(File.exist?(symlink_file)).to be false
end
it 'removes hidden symlinks in root folder' do
expect(File.exist?(hidden_symlink_file)).to be false
end
it 'removes evil symlinks in root folder' do
expect(File.exist?(evil_symlink_file)).to be false
end
it 'removes symlinks in subfolders' do
expect(File.exist?(subfolder_symlink_file)).to be false
end
it 'does not remove a valid file' do
expect(File.exist?(valid_file)).to be true
end
it 'creates the file in the right subfolder' do
expect(shared.export_path).to include('test/abcd')
end
end
context 'error' do
before do
allow_any_instance_of(described_class).to receive(:wait_for_archived_file).and_raise(StandardError)
described_class.import(project: build(:project), archive_file: '', shared: shared)
end
it 'removes symlinks in root folder' do
expect(File.exist?(symlink_file)).to be false
end
it 'removes hidden symlinks in root folder' do
expect(File.exist?(hidden_symlink_file)).to be false
end
it 'removes symlinks in subfolders' do
expect(File.exist?(subfolder_symlink_file)).to be false
end
it 'does not remove a valid file' do
expect(File.exist?(valid_file)).to be true
end
end
def setup_files
FileUtils.mkdir_p("#{shared.export_path}/subfolder/")
FileUtils.touch(valid_file)
FileUtils.ln_s(valid_file, symlink_file)
FileUtils.ln_s(valid_file, subfolder_symlink_file)
FileUtils.ln_s(valid_file, hidden_symlink_file)
FileUtils.ln_s(valid_file, evil_symlink_file)
end
end
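The symlink assertions above are consistent with a cleanup pass that walks the extracted archive and deletes anything whose lstat reports a symlink, hidden and nested entries included. A hedged sketch of that idea, not the importer's actual code:

require 'find'

# Delete every symlink under export_path, leaving regular files in place.
def remove_symlinks(export_path)
  Find.find(export_path) do |path|
    File.delete(path) if File.lstat(path).symlink?
  end
end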
......@@ -24,7 +24,7 @@ describe Gitlab::ImportExport::FileImporter do
context 'normal run' do
before do
described_class.import(archive_file: '', shared: shared)
described_class.import(project: nil, archive_file: '', shared: shared)
end
it 'removes symlinks in root folder' do
......@@ -55,7 +55,7 @@ describe Gitlab::ImportExport::FileImporter do
context 'error' do
before do
allow_any_instance_of(described_class).to receive(:wait_for_archived_file).and_raise(StandardError)
described_class.import(archive_file: '', shared: shared)
described_class.import(project: nil, archive_file: '', shared: shared)
end
it 'removes symlinks in root folder' do
......
require 'spec_helper'
describe Gitlab::ImportExport::Importer do
let(:user) { create(:user) }
let(:test_path) { "#{Dir.tmpdir}/importer_spec" }
let(:shared) { project.import_export_shared }
let(:project) { create(:project) }
let(:import_file) { fixture_file_upload('spec/features/projects/import_export/test_project_export.tar.gz') }
subject(:importer) { described_class.new(project) }
before do
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(test_path)
allow_any_instance_of(Gitlab::ImportExport::FileImporter).to receive(:remove_import_file)
stub_feature_flags(import_export_object_storage: true)
stub_uploads_object_storage(FileUploader)
FileUtils.mkdir_p(shared.export_path)
ImportExportUpload.create(project: project, import_file: import_file)
end
after do
FileUtils.rm_rf(test_path)
end
describe '#execute' do
it 'succeeds' do
importer.execute
expect(shared.errors).to be_empty
end
it 'extracts the archive' do
expect(Gitlab::ImportExport::FileImporter).to receive(:import).and_call_original
importer.execute
end
it 'checks the version' do
expect(Gitlab::ImportExport::VersionChecker).to receive(:check!).and_call_original
importer.execute
end
context 'all restores are executed' do
[
Gitlab::ImportExport::AvatarRestorer,
Gitlab::ImportExport::RepoRestorer,
Gitlab::ImportExport::WikiRestorer,
Gitlab::ImportExport::UploadsRestorer,
Gitlab::ImportExport::LfsRestorer,
Gitlab::ImportExport::StatisticsRestorer
].each do |restorer|
it "calls the #{restorer}" do
fake_restorer = double(restorer.to_s)
expect(fake_restorer).to receive(:restore).and_return(true).at_least(1)
expect(restorer).to receive(:new).and_return(fake_restorer).at_least(1)
importer.execute
end
end
it 'restores the ProjectTree' do
expect(Gitlab::ImportExport::ProjectTreeRestorer).to receive(:new).and_call_original
importer.execute
end
it 'removes the import file' do
expect(importer).to receive(:remove_import_file).and_call_original
importer.execute
expect(project.import_export_upload.import_file&.file).to be_nil
end
end
context 'when project successfully restored' do
let!(:existing_project) { create(:project, namespace: user.namespace) }
let(:project) { create(:project, namespace: user.namespace, name: 'whatever', path: 'whatever') }
before do
restorers = double(:restorers, all?: true)
allow(subject).to receive(:import_file).and_return(true)
allow(subject).to receive(:check_version!).and_return(true)
allow(subject).to receive(:restorers).and_return(restorers)
allow(project).to receive(:import_data).and_return(double(data: { 'original_path' => existing_project.path }))
end
context 'when import_data' do
context 'has original_path' do
it 'overwrites existing project' do
expect_any_instance_of(::Projects::OverwriteProjectService).to receive(:execute).with(existing_project)
subject.execute
end
end
context 'has not original_path' do
before do
allow(project).to receive(:import_data).and_return(double(data: {}))
end
it 'does not call the overwrite service' do
expect_any_instance_of(::Projects::OverwriteProjectService).not_to receive(:execute).with(existing_project)
subject.execute
end
end
end
end
end
end
......@@ -10,9 +10,10 @@ describe Gitlab::ImportExport::Importer do
before do
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(test_path)
allow_any_instance_of(Gitlab::ImportExport::FileImporter).to receive(:remove_import_file)
FileUtils.mkdir_p(shared.export_path)
FileUtils.cp(Rails.root.join('spec/features/projects/import_export/test_project_export.tar.gz'), test_path)
allow(subject).to receive(:remove_import_file)
end
after do
......@@ -69,7 +70,7 @@ describe Gitlab::ImportExport::Importer do
let(:project) { create(:project, namespace: user.namespace, name: 'whatever', path: 'whatever') }
before do
restorers = double
restorers = double(:restorers, all?: true)
allow(subject).to receive(:import_file).and_return(true)
allow(subject).to receive(:check_version!).and_return(true)
......
......@@ -14,7 +14,7 @@ describe Gitlab::Kubernetes::Helm::InstallCommand do
let(:commands) do
<<~EOS
helm init --client-only >/dev/null
helm install #{application.chart} --name #{application.name} --namespace #{namespace} -f /data/helm/#{application.name}/config/values.yaml >/dev/null
helm install #{application.chart} --name #{application.name} --version #{application.version} --namespace #{namespace} -f /data/helm/#{application.name}/config/values.yaml >/dev/null
EOS
end
end
......@@ -42,7 +42,7 @@ describe Gitlab::Kubernetes::Helm::InstallCommand do
<<~EOS
helm init --client-only >/dev/null
helm repo add #{application.name} #{application.repository}
helm install #{application.chart} --name #{application.name} --namespace #{namespace} -f /data/helm/#{application.name}/config/values.yaml >/dev/null
helm install #{application.chart} --name #{application.name} --version #{application.version} --namespace #{namespace} -f /data/helm/#{application.name}/config/values.yaml >/dev/null
EOS
end
end
......@@ -56,7 +56,7 @@ describe Gitlab::Kubernetes::Helm::InstallCommand do
<<~EOS
helm init --client-only >/dev/null
helm repo add #{application.name} #{application.repository}
helm install #{application.chart} --name #{application.name} --namespace #{namespace} -f /data/helm/#{application.name}/config/values.yaml >/dev/null
helm install #{application.chart} --name #{application.name} --version #{application.version} --namespace #{namespace} -f /data/helm/#{application.name}/config/values.yaml >/dev/null
EOS
end
end
......
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180710162338_add_foreign_key_from_notification_settings_to_users.rb')
describe AddForeignKeyFromNotificationSettingsToUsers, :migration do
let(:notification_settings) { table(:notification_settings) }
let(:users) { table(:users) }
let(:projects) { table(:projects) }
before do
users.create!(email: 'email@email.com', name: 'foo', username: 'foo', projects_limit: 0)
projects.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce', namespace_id: 1)
end
describe 'removal of orphans without user' do
let!(:notification_setting_without_user) { create_notification_settings!(user_id: 123) }
let!(:notification_setting_with_user) { create_notification_settings!(user_id: users.last.id) }
it 'removes orphaned notification_settings without user' do
expect { migrate! }.to change { notification_settings.count }.by(-1)
end
it "doesn't remove notification_settings with valid user" do
expect { migrate! }.not_to change { notification_setting_with_user.reload }
end
end
def create_notification_settings!(**opts)
notification_settings.create!(
source_id: projects.last.id,
source_type: 'Project',
user_id: users.last.id,
**opts)
end
end
......@@ -23,6 +23,20 @@ describe Clusters::Applications::Ingress do
it { is_expected.to contain_exactly(cluster) }
end
describe '#make_installing!' do
before do
application.make_installing!
end
context 'application install previously errored with older version' do
let(:application) { create(:clusters_applications_ingress, :scheduled, version: '0.22.0') }
it 'updates the application version' do
expect(application.reload.version).to eq('0.23.0')
end
end
end
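This hunk, together with the matching hunks for Jupyter, Prometheus and Runner below, pins the chart version when an install (re)starts. A hedged, plain-Ruby sketch of that behaviour follows; the class name and LOCKED_VERSION constant are illustrative, and only the '0.23.0' value comes from the spec.

# Hypothetical simplification (no state machine gem): pin the locked chart
# version whenever an install begins, so an install that previously errored
# with an older version retries with the known-good chart.
class IngressVersionSketch
  LOCKED_VERSION = '0.23.0'.freeze

  attr_accessor :version, :status

  def make_installing!
    self.version = LOCKED_VERSION
    self.status  = :installing
  end
end

app = IngressVersionSketch.new
app.version = '0.22.0' # simulate a previously errored install
app.make_installing!
app.version # => "0.23.0"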
describe '#make_installed!' do
before do
application.make_installed!
......@@ -73,9 +87,17 @@ describe Clusters::Applications::Ingress do
it 'should be initialized with ingress arguments' do
expect(subject.name).to eq('ingress')
expect(subject.chart).to eq('stable/nginx-ingress')
expect(subject.version).to be_nil
expect(subject.version).to eq('0.23.0')
expect(subject.values).to eq(ingress.values)
end
context 'application failed to install previously' do
let(:ingress) { create(:clusters_applications_ingress, :errored, version: 'nginx') }
it 'should be initialized with the locked version' do
expect(subject.version).to eq('0.23.0')
end
end
end
describe '#values' do
......
......@@ -25,6 +25,20 @@ describe Clusters::Applications::Jupyter do
end
end
describe '#make_installing!' do
before do
application.make_installing!
end
context 'application install previously errored with older version' do
let(:application) { create(:clusters_applications_jupyter, :scheduled, version: 'v0.5') }
it 'updates the application version' do
expect(application.reload.version).to eq('v0.6')
end
end
end
describe '#install_command' do
let!(:ingress) { create(:clusters_applications_ingress, :installed, external_ip: '127.0.0.1') }
let!(:jupyter) { create(:clusters_applications_jupyter, cluster: ingress.cluster) }
......@@ -36,10 +50,18 @@ describe Clusters::Applications::Jupyter do
it 'should be initialized with 4 arguments' do
expect(subject.name).to eq('jupyter')
expect(subject.chart).to eq('jupyter/jupyterhub')
expect(subject.version).to be_nil
expect(subject.version).to eq('v0.6')
expect(subject.repository).to eq('https://jupyterhub.github.io/helm-chart/')
expect(subject.values).to eq(jupyter.values)
end
context 'application failed to install previously' do
let(:jupyter) { create(:clusters_applications_jupyter, :errored, version: '0.0.1') }
it 'should be initialized with the locked version' do
expect(subject.version).to eq('v0.6')
end
end
end
describe '#values' do
......
......@@ -16,6 +16,20 @@ describe Clusters::Applications::Prometheus do
it { is_expected.to contain_exactly(cluster) }
end
describe '#make_installing!' do
before do
application.make_installing!
end
context 'application install previously errored with older version' do
let(:application) { create(:clusters_applications_prometheus, :scheduled, version: '6.7.2') }
it 'updates the application version' do
expect(application.reload.version).to eq('6.7.3')
end
end
end
describe 'transition to installed' do
let(:project) { create(:project) }
let(:cluster) { create(:cluster, projects: [project]) }
......@@ -155,6 +169,14 @@ describe Clusters::Applications::Prometheus do
expect(command.version).to eq('6.7.3')
expect(command.values).to eq(prometheus.values)
end
context 'application failed to install previously' do
let(:prometheus) { create(:clusters_applications_prometheus, :errored, version: '2.0.0') }
it 'should be initialized with the locked version' do
expect(subject.version).to eq('6.7.3')
end
end
end
describe '#values' do
......
......@@ -8,6 +8,20 @@ describe Clusters::Applications::Runner do
it { is_expected.to belong_to(:runner) }
describe '#make_installing!' do
before do
application.make_installing!
end
context 'application install previously errored with older version' do
let(:application) { create(:clusters_applications_runner, :scheduled, version: '0.1.30') }
it 'updates the application version' do
expect(application.reload.version).to eq('0.1.31')
end
end
end
describe '.installed' do
subject { described_class.installed }
......@@ -31,10 +45,18 @@ describe Clusters::Applications::Runner do
it 'should be initialized with 4 arguments' do
expect(subject.name).to eq('runner')
expect(subject.chart).to eq('runner/gitlab-runner')
expect(subject.version).to be_nil
expect(subject.version).to eq('0.1.31')
expect(subject.repository).to eq('https://charts.gitlab.io')
expect(subject.values).to eq(gitlab_runner.values)
end
context 'application failed to install previously' do
let(:gitlab_runner) { create(:clusters_applications_runner, :errored, runner: ci_runner, version: '0.1.13') }
it 'should be initialized with the locked version' do
expect(subject.version).to eq('0.1.31')
end
end
end
describe '#values' do
......
......@@ -74,6 +74,14 @@ describe DeployToken do
expect(deploy_token.active?).to be_falsy
end
end
context "when it hasn't been revoked and has no expiry" do
let(:deploy_token) { create(:deploy_token, expires_at: nil) }
it 'should return true' do
expect(deploy_token.active?).to be_truthy
end
end
end
describe '#username' do
......
......@@ -127,6 +127,13 @@ describe MergeRequestDiff do
expect(diffs.map(&:new_path)).to contain_exactly('files/ruby/popen.rb')
end
it 'only serializes diff files found by query' do
expect(diff_with_commits.merge_request_diff_files.count).to be > 10
expect_any_instance_of(MergeRequestDiffFile).to receive(:to_hash).once
diffs
end
it 'uses the diffs from the DB' do
expect(diff_with_commits).to receive(:load_diffs)
......
......@@ -7,6 +7,8 @@ describe API::ProjectImport do
let(:namespace) { create(:group) }
before do
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
stub_feature_flags(import_export_object_storage: true)
stub_uploads_object_storage(FileUploader)
namespace.add_owner(user)
end
......
......@@ -3,7 +3,7 @@ require 'spec_helper'
describe Projects::GitlabProjectsImportService do
set(:namespace) { create(:namespace) }
let(:path) { 'test-path' }
let(:file) { fixture_file_upload('spec/fixtures/doc_sample.txt', 'text/plain') }
let(:file) { fixture_file_upload('spec/fixtures/project_export.tar.gz') }
let(:overwrite) { false }
let(:import_params) { { namespace_id: namespace.id, path: path, file: file, overwrite: overwrite } }
......
......@@ -28,6 +28,18 @@ describe Users::ActivityService do
end
end
context 'when a bad object is passed' do
let(:fake_object) { double(username: 'hello') }
it 'does not record activity' do
service = described_class.new(fake_object, 'pull')
expect(service).not_to receive(:record_activity)
service.execute
end
end
context 'when last activity is today' do
let(:last_activity_on) { Date.today }
......
......@@ -101,10 +101,4 @@ bla/bla.txt
handle.write('# hello'.encode(enc))
end
end
# Prevent developer git configurations from being persisted to test
# repositories
def git_env
{ 'GIT_TEMPLATE_DIR' => '' }
end
end
......@@ -243,6 +243,14 @@ module TestEnv
set_repo_refs(target_repo_path, refs)
end
def create_bare_repository(path)
FileUtils.mkdir_p(path)
system(git_env, *%W(#{Gitlab.config.git.bin_path} -C #{path} init --bare),
out: '/dev/null',
err: '/dev/null')
end
def repos_path
@repos_path ||= Gitlab.config.repositories.storages[REPOS_STORAGE].legacy_disk_path
end
......
......@@ -7,7 +7,7 @@ shared_examples 'helm commands' do
echo http://mirror.clarkson.edu/alpine/v$ALPINE_VERSION/main >> /etc/apk/repositories
echo http://mirror1.hs-esslingen.de/pub/Mirrors/alpine/v$ALPINE_VERSION/main >> /etc/apk/repositories
apk add -U wget ca-certificates openssl >/dev/null
wget -q -O - https://kubernetes-helm.storage.googleapis.com/helm-v2.7.0-linux-amd64.tar.gz | tar zxC /tmp >/dev/null
wget -q -O - https://kubernetes-helm.storage.googleapis.com/helm-v2.7.2-linux-amd64.tar.gz | tar zxC /tmp >/dev/null
mv /tmp/linux-amd64/helm /usr/bin/
EOS
end
......
RSpec.configure do |config|
config.before(:each, :repository) do
TestEnv.clean_test_path
end
config.before(:all, :broken_storage) do
FileUtils.rm_rf Gitlab.config.repositories.storages.broken.legacy_disk_path
end
......
......@@ -87,6 +87,27 @@ describe 'gitlab:app namespace rake task' do
expect { run_rake_task('gitlab:backup:restore') }.to output.to_stdout
end
end
context 'when the restore directory is not empty' do
before do
# We only need a backup of the repositories for this test
stub_env('SKIP', 'db,uploads,builds,artifacts,lfs,registry')
end
it 'removes stale data' do
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
excluded_project = create(:project, :repository, name: 'mepmep')
expect { run_rake_task('gitlab:backup:restore') }.to output.to_stdout
raw_repo = excluded_project.repository.raw
# The restore will not find the repository in the backup, but will create
# an empty one in its place
expect(raw_repo.empty?).to be(true)
end
end
end # backup_restore task
describe 'backup' do
......
......@@ -68,317 +68,86 @@ describe 'gitlab:cleanup rake tasks' do
end
end
# A single integration test that is redundant with one part of the
# Gitlab::Cleanup::ProjectUploads spec.
#
# Additionally, this tests DRY_RUN env var values, and the extra line of
# output that says you can disable DRY_RUN if it's enabled.
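The cases below treat DRY_RUN as disabled only when it is the literal string 'false' (unset, 'true' and 'foo' all stay a dry run). A hedged sketch of env-var handling consistent with that behaviour, not the rake task's actual body:

# Hypothetical illustration; the logger used by the real task may differ.
dry_run = ENV['DRY_RUN'] != 'false' # unset, 'true', 'foo' => dry run; 'false' => real run
Gitlab::Cleanup::ProjectUploads.new(logger: Rails.logger).run!(dry_run: dry_run)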
describe 'cleanup:project_uploads' do
context 'orphaned project upload file' do
context 'when an upload record matching the secret and filename is found' do
context 'when the project is still in legacy storage' do
let!(:orphaned) { create(:upload, :issuable_upload, :with_file, model: build(:project, :legacy_storage)) }
let!(:correct_path) { orphaned.absolute_path }
let!(:other_project) { create(:project, :legacy_storage) }
let!(:orphaned_path) { correct_path.sub(/#{orphaned.model.full_path}/, other_project.full_path) }
let!(:logger) { double(:logger) }
before do
FileUtils.mkdir_p(File.dirname(orphaned_path))
FileUtils.mv(correct_path, orphaned_path)
end
it 'moves the file to its proper location' do
expect(Rails.logger).to receive(:info).twice
expect(Rails.logger).to receive(:info).with("Did fix #{orphaned_path} -> #{correct_path}")
expect(File.exist?(orphaned_path)).to be_truthy
expect(File.exist?(correct_path)).to be_falsey
expect(main_object).to receive(:logger).and_return(logger).at_least(1).times
stub_env('DRY_RUN', 'false')
run_rake_task('gitlab:cleanup:project_uploads')
expect(File.exist?(orphaned_path)).to be_falsey
expect(File.exist?(correct_path)).to be_truthy
allow(logger).to receive(:info).at_least(1).times
allow(logger).to receive(:debug).at_least(1).times
end
it 'a dry run does not move the file' do
expect(Rails.logger).to receive(:info).twice
expect(Rails.logger).to receive(:info).with("Can fix #{orphaned_path} -> #{correct_path}")
expect(Rails.logger).to receive(:info)
expect(File.exist?(orphaned_path)).to be_truthy
expect(File.exist?(correct_path)).to be_falsey
run_rake_task('gitlab:cleanup:project_uploads')
expect(File.exist?(orphaned_path)).to be_truthy
expect(File.exist?(correct_path)).to be_falsey
end
context 'when the project record is missing (Upload#absolute_path raises error)' do
let!(:lost_and_found_path) { File.join(FileUploader.root, '-', 'project-lost-found', other_project.full_path, orphaned.path) }
context 'with a fixable orphaned project upload file' do
let(:orphaned) { create(:upload, :issuable_upload, :with_file, model: build(:project, :legacy_storage)) }
let(:new_path) { orphaned.absolute_path }
let(:path) { File.join(FileUploader.root, 'some', 'wrong', 'location', orphaned.path) }
before do
orphaned.model.delete
end
it 'moves the file to lost and found' do
expect(Rails.logger).to receive(:info).twice
expect(Rails.logger).to receive(:info).with("Did move to lost and found #{orphaned_path} -> #{lost_and_found_path}")
expect(File.exist?(orphaned_path)).to be_truthy
expect(File.exist?(lost_and_found_path)).to be_falsey
stub_env('DRY_RUN', 'false')
run_rake_task('gitlab:cleanup:project_uploads')
expect(File.exist?(orphaned_path)).to be_falsey
expect(File.exist?(lost_and_found_path)).to be_truthy
end
it 'a dry run does not move the file' do
expect(Rails.logger).to receive(:info).twice
expect(Rails.logger).to receive(:info).with("Can move to lost and found #{orphaned_path} -> #{lost_and_found_path}")
expect(Rails.logger).to receive(:info)
expect(File.exist?(orphaned_path)).to be_truthy
expect(File.exist?(lost_and_found_path)).to be_falsey
run_rake_task('gitlab:cleanup:project_uploads')
expect(File.exist?(orphaned_path)).to be_truthy
expect(File.exist?(lost_and_found_path)).to be_falsey
end
end
end
context 'when the project was moved to hashed storage' do
let!(:orphaned) { create(:upload, :issuable_upload, :with_file) }
let!(:correct_path) { orphaned.absolute_path }
let!(:orphaned_path) { File.join(FileUploader.root, 'foo', 'bar', orphaned.path) }
before do
FileUtils.mkdir_p(File.dirname(orphaned_path))
FileUtils.mv(correct_path, orphaned_path)
end
it 'moves the file to its proper location' do
expect(Rails.logger).to receive(:info).twice
expect(Rails.logger).to receive(:info).with("Did fix #{orphaned_path} -> #{correct_path}")
expect(File.exist?(orphaned_path)).to be_truthy
expect(File.exist?(correct_path)).to be_falsey
stub_env('DRY_RUN', 'false')
run_rake_task('gitlab:cleanup:project_uploads')
expect(File.exist?(orphaned_path)).to be_falsey
expect(File.exist?(correct_path)).to be_truthy
end
it 'a dry run does not move the file' do
expect(Rails.logger).to receive(:info).twice
expect(Rails.logger).to receive(:info).with("Can fix #{orphaned_path} -> #{correct_path}")
expect(Rails.logger).to receive(:info)
expect(File.exist?(orphaned_path)).to be_truthy
expect(File.exist?(correct_path)).to be_falsey
run_rake_task('gitlab:cleanup:project_uploads')
expect(File.exist?(orphaned_path)).to be_truthy
expect(File.exist?(correct_path)).to be_falsey
end
end
FileUtils.mkdir_p(File.dirname(path))
FileUtils.mv(new_path, path)
end
context 'when a matching upload record can not be found' do
context 'when the file path fits the known pattern' do
let!(:orphaned) { create(:upload, :issuable_upload, :with_file, model: build(:project, :legacy_storage)) }
let!(:orphaned_path) { orphaned.absolute_path }
let!(:lost_and_found_path) { File.join(FileUploader.root, '-', 'project-lost-found', orphaned.model.full_path, orphaned.path) }
context 'with DRY_RUN disabled' do
before do
orphaned.delete
end
it 'moves the file to lost and found' do
expect(Rails.logger).to receive(:info).twice
expect(Rails.logger).to receive(:info).with("Did move to lost and found #{orphaned_path} -> #{lost_and_found_path}")
expect(File.exist?(orphaned_path)).to be_truthy
expect(File.exist?(lost_and_found_path)).to be_falsey
stub_env('DRY_RUN', 'false')
run_rake_task('gitlab:cleanup:project_uploads')
expect(File.exist?(orphaned_path)).to be_falsey
expect(File.exist?(lost_and_found_path)).to be_truthy
end
it 'a dry run does not move the file' do
expect(Rails.logger).to receive(:info).twice
expect(Rails.logger).to receive(:info).with("Can move to lost and found #{orphaned_path} -> #{lost_and_found_path}")
expect(Rails.logger).to receive(:info)
expect(File.exist?(orphaned_path)).to be_truthy
expect(File.exist?(lost_and_found_path)).to be_falsey
run_rake_task('gitlab:cleanup:project_uploads')
expect(File.exist?(orphaned_path)).to be_truthy
expect(File.exist?(lost_and_found_path)).to be_falsey
end
end
context 'when the file path does not fit the known pattern' do
let!(:invalid_path) { File.join('group', 'file.jpg') }
let!(:orphaned_path) { File.join(FileUploader.root, invalid_path) }
let!(:lost_and_found_path) { File.join(FileUploader.root, '-', 'project-lost-found', invalid_path) }
before do
FileUtils.mkdir_p(File.dirname(orphaned_path))
FileUtils.touch(orphaned_path)
end
after do
File.delete(orphaned_path) if File.exist?(orphaned_path)
end
it 'moves the file to lost and found' do
expect(Rails.logger).to receive(:info).twice
expect(Rails.logger).to receive(:info).with("Did move to lost and found #{orphaned_path} -> #{lost_and_found_path}")
expect(File.exist?(orphaned_path)).to be_truthy
expect(File.exist?(lost_and_found_path)).to be_falsey
stub_env('DRY_RUN', 'false')
it 'moves the file to its proper location' do
run_rake_task('gitlab:cleanup:project_uploads')
expect(File.exist?(orphaned_path)).to be_falsey
expect(File.exist?(lost_and_found_path)).to be_truthy
expect(File.exist?(path)).to be_falsey
expect(File.exist?(new_path)).to be_truthy
end
it 'a dry run does not move the file' do
expect(Rails.logger).to receive(:info).twice
expect(Rails.logger).to receive(:info).with("Can move to lost and found #{orphaned_path} -> #{lost_and_found_path}")
expect(Rails.logger).to receive(:info)
expect(File.exist?(orphaned_path)).to be_truthy
expect(File.exist?(lost_and_found_path)).to be_falsey
it 'logs action as done' do
expect(logger).to receive(:info).with("Looking for orphaned project uploads to clean up...")
expect(logger).to receive(:info).with("Did fix #{path} -> #{new_path}")
run_rake_task('gitlab:cleanup:project_uploads')
expect(File.exist?(orphaned_path)).to be_truthy
expect(File.exist?(lost_and_found_path)).to be_falsey
end
end
end
end
context 'non-orphaned project upload file' do
shared_examples_for 'does not move the file' do
it 'does not move the file' do
tracked = create(:upload, :issuable_upload, :with_file, model: build(:project, :legacy_storage))
tracked_path = tracked.absolute_path
expect(Rails.logger).not_to receive(:info).with(/move|fix/i)
expect(File.exist?(tracked_path)).to be_truthy
stub_env('DRY_RUN', 'false')
run_rake_task('gitlab:cleanup:project_uploads')
expect(File.exist?(tracked_path)).to be_truthy
end
expect(File.exist?(path)).to be_truthy
expect(File.exist?(new_path)).to be_falsey
end
context 'ignorable cases' do
shared_examples_for 'does not move anything' do
it 'does not move even an orphan file' do
orphaned = create(:upload, :issuable_upload, :with_file, model: project)
orphaned_path = orphaned.absolute_path
orphaned.delete
expect(File.exist?(orphaned_path)).to be_truthy
it 'logs action as able to be done' do
expect(logger).to receive(:info).with("Looking for orphaned project uploads to clean up. Dry run...")
expect(logger).to receive(:info).with("Can fix #{path} -> #{new_path}")
expect(logger).to receive(:info).with(/To clean up these files run this command with DRY_RUN=false/)
run_rake_task('gitlab:cleanup:project_uploads')
expect(File.exist?(orphaned_path)).to be_truthy
end
end
# Because we aren't concerned about these, and can save a lot of
# processing time by ignoring them. If we wish to cleanup hashed storage
# directories, it should simply require removing this test and modifying
# the find command.
context 'when the file is already in hashed storage' do
let(:project) { create(:project) }
before do
stub_env('DRY_RUN', 'false')
expect(Rails.logger).not_to receive(:info).with(/move|fix/i)
end
it_behaves_like 'does not move anything'
end
context 'when DRY_RUN env var is unset' do
let(:project) { create(:project, :legacy_storage) }
it_behaves_like 'does not move anything'
end
context 'when DRY_RUN env var is true' do
let(:project) { create(:project, :legacy_storage) }
context 'with DRY_RUN explicitly enabled' do
before do
stub_env('DRY_RUN', 'true')
end
it_behaves_like 'does not move anything'
it_behaves_like 'does not move the file'
end
context 'when DRY_RUN env var is foo' do
let(:project) { create(:project, :legacy_storage) }
context 'with DRY_RUN set to an unknown value' do
before do
stub_env('DRY_RUN', 'foo')
end
it_behaves_like 'does not move anything'
it_behaves_like 'does not move the file'
end
it 'does not move any non-project (FileUploader) uploads' do
stub_env('DRY_RUN', 'false')
paths = []
orphaned1 = create(:upload, :personal_snippet_upload, :with_file)
orphaned2 = create(:upload, :namespace_upload, :with_file)
orphaned3 = create(:upload, :attachment_upload, :with_file)
paths << orphaned1.absolute_path
paths << orphaned2.absolute_path
paths << orphaned3.absolute_path
Upload.delete_all
expect(Rails.logger).not_to receive(:info).with(/move|fix/i)
paths.each do |path|
expect(File.exist?(path)).to be_truthy
end
run_rake_task('gitlab:cleanup:project_uploads')
paths.each do |path|
expect(File.exist?(path)).to be_truthy
end
end
it 'does not move any uploads in tmp (which would interfere with ongoing upload activity)' do
stub_env('DRY_RUN', 'false')
path = File.join(FileUploader.root, 'tmp', 'foo.jpg')
FileUtils.mkdir_p(File.dirname(path))
FileUtils.touch(path)
expect(Rails.logger).not_to receive(:info).with(/move|fix/i)
expect(File.exist?(path)).to be_truthy
run_rake_task('gitlab:cleanup:project_uploads')
expect(File.exist?(path)).to be_truthy
context 'with DRY_RUN unset' do
it_behaves_like 'does not move the file'
end
end
end
......
......@@ -4,10 +4,9 @@ describe CreateGpgSignatureWorker do
let(:project) { create(:project, :repository) }
let(:commits) { project.repository.commits('HEAD', limit: 3).commits }
let(:commit_shas) { commits.map(&:id) }
context 'when GpgKey is found' do
let(:gpg_commit) { instance_double(Gitlab::Gpg::Commit) }
context 'when GpgKey is found' do
before do
allow(Project).to receive(:find_by).with(id: project.id).and_return(project)
allow(project).to receive(:commits_by).with(oids: commit_shas).and_return(commits)
......@@ -36,6 +35,16 @@ describe CreateGpgSignatureWorker do
end
end
context 'handles when a string is passed in for the commit SHA' do
it 'creates a signature once' do
allow(Gitlab::Gpg::Commit).to receive(:new).with(commits.first).and_return(gpg_commit)
expect(gpg_commit).to receive(:signature).once
described_class.new.perform(commit_shas.first, project.id)
end
end
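The new example above passes a single SHA string where the worker previously received an array; behaviour like this is typically achieved by normalising the argument, for instance with Kernel#Array. A hedged sketch of that assumption, not the worker's actual code:

# Hypothetical normalisation so a single SHA and an array of SHAs are handled uniformly.
def normalize_shas(commit_shas)
  Array(commit_shas) # "deadbeef" => ["deadbeef"]; ["a", "b"] => ["a", "b"]
end

normalize_shas('0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a34').size # => 1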
context 'when Commit is not found' do
let(:nonexisting_commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a34' }
......