Commit bddbcaef authored by Tiago Botelho

Backports every CE-related change from ee-44542 to CE

parent 7603beff
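At a high level, the backported change moves a project's import status, JID, and last error out of the `projects` table and into a dedicated `ProjectImportState` record (stored in `project_mirror_data`), while `Project` keeps thin delegating accessors. A minimal, hypothetical console sketch of the resulting behaviour, assuming the models as they appear in the diff below:

```ruby
# Illustration only; `project` stands for any imported project.
project = Project.find_by(import_type: 'github')

# Readers now delegate to the associated ProjectImportState row.
project.import_status   # => import_state&.status, defaulting to 'none'
project.import_jid      # => import_state&.jid
project.import_error    # => import_state&.last_error

# Transitions go through the state machine on ProjectImportState;
# ensure_import_state lazily migrates legacy values still stored on projects.*.
project.import_schedule # calls import_state&.schedule
project.import_fail     # calls import_state&.fail_op
```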
class Import::BaseController < ApplicationController
  private

+ def find_already_added_projects(import_type)
+   current_user.created_projects.where(import_type: import_type).includes(:import_state)
+ end
+
+ def find_jobs(import_type)
+   current_user.created_projects
+     .includes(:import_state)
+     .where(import_type: import_type)
+     .to_json(only: [:id], methods: [:import_status])
+ end
+
  def find_or_create_namespace(names, owner)
    names = params[:target_namespace].presence || names
...
@@ -22,16 +22,14 @@ class Import::BitbucketController < Import::BaseController
    @repos, @incompatible_repos = repos.partition { |repo| repo.valid? }

-   @already_added_projects = current_user.created_projects.where(import_type: 'bitbucket')
+   @already_added_projects = find_already_added_projects('bitbucket')
    already_added_projects_names = @already_added_projects.pluck(:import_source)

    @repos.to_a.reject! { |repo| already_added_projects_names.include?(repo.full_name) }
  end

  def jobs
-   render json: current_user.created_projects
-     .where(import_type: 'bitbucket')
-     .to_json(only: [:id, :import_status])
+   render json: find_jobs('bitbucket')
  end

  def create
...
@@ -46,15 +46,14 @@ class Import::FogbugzController < Import::BaseController
    @repos = client.repos

-   @already_added_projects = current_user.created_projects.where(import_type: 'fogbugz')
+   @already_added_projects = find_already_added_projects('fogbugz')
    already_added_projects_names = @already_added_projects.pluck(:import_source)

    @repos.reject! { |repo| already_added_projects_names.include? repo.name }
  end

  def jobs
-   jobs = current_user.created_projects.where(import_type: 'fogbugz').to_json(only: [:id, :import_status])
-   render json: jobs
+   render json: find_jobs('fogbugz')
  end

  def create
...
@@ -24,15 +24,14 @@ class Import::GithubController < Import::BaseController
  def status
    @repos = client.repos

-   @already_added_projects = current_user.created_projects.where(import_type: provider)
+   @already_added_projects = find_already_added_projects(provider)
    already_added_projects_names = @already_added_projects.pluck(:import_source)

    @repos.reject! { |repo| already_added_projects_names.include? repo.full_name }
  end

  def jobs
-   jobs = current_user.created_projects.where(import_type: provider).to_json(only: [:id, :import_status])
-   render json: jobs
+   render json: find_jobs(provider)
  end

  def create
...
@@ -12,15 +12,14 @@ class Import::GitlabController < Import::BaseController
  def status
    @repos = client.projects

-   @already_added_projects = current_user.created_projects.where(import_type: "gitlab")
+   @already_added_projects = find_already_added_projects('gitlab')
    already_added_projects_names = @already_added_projects.pluck(:import_source)

    @repos = @repos.to_a.reject { |repo| already_added_projects_names.include? repo["path_with_namespace"] }
  end

  def jobs
-   jobs = current_user.created_projects.where(import_type: "gitlab").to_json(only: [:id, :import_status])
-   render json: jobs
+   render json: find_jobs('gitlab')
  end

  def create
...
@@ -73,15 +73,14 @@ class Import::GoogleCodeController < Import::BaseController
    @repos = client.repos
    @incompatible_repos = client.incompatible_repos

-   @already_added_projects = current_user.created_projects.where(import_type: "google_code")
+   @already_added_projects = find_already_added_projects('google_code')
    already_added_projects_names = @already_added_projects.pluck(:import_source)

    @repos.reject! { |repo| already_added_projects_names.include? repo.name }
  end

  def jobs
-   jobs = current_user.created_projects.where(import_type: "google_code").to_json(only: [:id, :import_status])
-   render json: jobs
+   render json: find_jobs('google_code')
  end

  def create
...
@@ -67,6 +67,9 @@ class Project < ActiveRecord::Base
  before_save :ensure_runners_token
  after_save :update_project_statistics, if: :namespace_id_changed?

+ after_save :create_import_state, if: ->(project) { project.import? && project.import_state.nil? }
+
  after_create :create_project_feature, unless: :project_feature
  after_create :create_ci_cd_settings,
@@ -157,6 +160,8 @@ class Project < ActiveRecord::Base
  has_one :fork_network_member
  has_one :fork_network, through: :fork_network_member

+ has_one :import_state, autosave: true, class_name: 'ProjectImportState', inverse_of: :project
+
  # Merge Requests for target project should be removed with it
  has_many :merge_requests, foreign_key: 'target_project_id'
  has_many :source_of_merge_requests, foreign_key: 'source_project_id', class_name: 'MergeRequest'
@@ -385,55 +390,9 @@ class Project < ActiveRecord::Base
  scope :abandoned, -> { where('projects.last_activity_at < ?', 6.months.ago) }
  scope :excluding_project, ->(project) { where.not(id: project) }

- scope :import_started, -> { where(import_status: 'started') }
-
- state_machine :import_status, initial: :none do
-   event :import_schedule do
-     transition [:none, :finished, :failed] => :scheduled
-   end
-
-   event :force_import_start do
-     transition [:none, :finished, :failed] => :started
-   end
-
-   event :import_start do
-     transition scheduled: :started
-   end
-
-   event :import_finish do
-     transition started: :finished
-   end
-
-   event :import_fail do
-     transition [:scheduled, :started] => :failed
-   end
-
-   event :import_retry do
-     transition failed: :started
-   end
-
-   state :scheduled
-   state :started
-   state :finished
-   state :failed
-
-   after_transition [:none, :finished, :failed] => :scheduled do |project, _|
-     project.run_after_commit do
-       job_id = add_import_job
-       update(import_jid: job_id) if job_id
-     end
-   end
-
-   after_transition started: :finished do |project, _|
-     project.reset_cache_and_import_attrs
-
-     if Gitlab::ImportSources.importer_names.include?(project.import_type) && project.repo_exists?
-       project.run_after_commit do
-         Projects::AfterImportService.new(project).execute
-       end
-     end
-   end
- end
+ scope :joins_import_state, -> { joins("LEFT JOIN project_mirror_data import_state ON import_state.project_id = projects.id") }
+ scope :import_started, -> { joins_import_state.where("import_state.status = 'started' OR projects.import_status = 'started'") }

  class << self
    # Searches for a list of projects based on the query given in `query`.
@@ -663,10 +622,6 @@ class Project < ActiveRecord::Base
    external_import? || forked? || gitlab_project_import? || bare_repository_import?
  end

- def no_import?
-   import_status == 'none'
- end
-
  def external_import?
    import_url.present?
  end
@@ -679,6 +634,93 @@ class Project < ActiveRecord::Base
    import_started? || import_scheduled?
  end

+ def import_state_args
+   {
+     status: self[:import_status],
+     jid: self[:import_jid],
+     last_error: self[:import_error]
+   }
+ end
+
+ def ensure_import_state
+   return if self[:import_status] == 'none' || self[:import_status].nil?
+   return unless import_state.nil?
+
+   create_import_state(import_state_args)
+   update_column(:import_status, 'none')
+ end
+
+ def import_schedule
+   ensure_import_state
+   import_state&.schedule
+ end
+
+ def force_import_start
+   ensure_import_state
+   import_state&.force_start
+ end
+
+ def import_start
+   ensure_import_state
+   import_state&.start
+ end
+
+ def import_fail
+   ensure_import_state
+   import_state&.fail_op
+ end
+
+ def import_finish
+   ensure_import_state
+   import_state&.finish
+ end
+
+ def import_jid=(new_jid)
+   ensure_import_state
+   import_state&.jid = new_jid
+ end
+
+ def import_jid
+   ensure_import_state
+   import_state&.jid
+ end
+
+ def import_error=(new_error)
+   ensure_import_state
+   import_state&.last_error = new_error
+ end
+
+ def import_error
+   ensure_import_state
+   import_state&.last_error
+ end
+
+ def import_status=(new_status)
+   ensure_import_state
+   import_state&.status = new_status
+ end
+
+ def import_status
+   ensure_import_state
+   import_state&.status || 'none'
+ end
+
+ def no_import?
+   import_status == 'none'
+ end
+
  def import_started?
    # import? does SQL work so only run it if it looks like there's an import running
    import_status == 'started' && import?
@@ -1480,7 +1522,7 @@ class Project < ActiveRecord::Base
  def rename_repo_notify!
    # When we import a project overwriting the original project, there
    # is a move operation. In that case we don't want to send the instructions.
-   send_move_instructions(full_path_was) unless started?
+   send_move_instructions(full_path_was) unless import_started?
    expires_full_path_cache

    self.old_path_with_namespace = full_path_was
@@ -1534,7 +1576,8 @@ class Project < ActiveRecord::Base
    return unless import_jid

    Gitlab::SidekiqStatus.unset(import_jid)
-   update_column(:import_jid, nil)
+
+   import_state.update_column(:jid, nil)
  end

  def running_or_pending_build_count(force: false)
@@ -1553,7 +1596,8 @@ class Project < ActiveRecord::Base
    sanitized_message = Gitlab::UrlSanitizer.sanitize(error_message)

    import_fail
-   update_column(:import_error, sanitized_message)
+
+   import_state.update_column(:last_error, sanitized_message)
  rescue ActiveRecord::ActiveRecordError => e
    Rails.logger.error("Error setting import status to failed: #{e.message}. Original error: #{sanitized_message}")
  ensure
...
class ProjectImportState < ActiveRecord::Base
include AfterCommitQueue
self.table_name = "project_mirror_data"
belongs_to :project, inverse_of: :import_state
validates :project, presence: true
state_machine :status, initial: :none do
event :schedule do
transition [:none, :finished, :failed] => :scheduled
end
event :force_start do
transition [:none, :finished, :failed] => :started
end
event :start do
transition scheduled: :started
end
event :finish do
transition started: :finished
end
event :fail_op do
transition [:scheduled, :started] => :failed
end
state :scheduled
state :started
state :finished
state :failed
after_transition [:none, :finished, :failed] => :scheduled do |state, _|
state.run_after_commit do
job_id = project.add_import_job
update(jid: job_id) if job_id
end
end
after_transition started: :finished do |state, _|
project = state.project
project.reset_cache_and_import_attrs
if Gitlab::ImportSources.importer_names.include?(project.import_type) && project.repo_exists?
state.run_after_commit do
Projects::AfterImportService.new(project).execute
end
end
end
end
end
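For orientation, a short, hypothetical sketch of driving the state machine defined above (event and state names taken from the class as written in this diff):

```ruby
state = project.import_state   # ProjectImportState row in project_mirror_data

state.schedule   # none/finished/failed => scheduled; enqueues project.add_import_job after commit
state.start      # scheduled => started
state.finish     # started => finished; may run Projects::AfterImportService after commit
state.status     # => "finished"

# state.fail_op  # scheduled/started => failed (the error path)
```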
@@ -142,7 +142,7 @@ module Projects
    if @project
      @project.errors.add(:base, message)

-     @project.mark_import_as_failed(message) if @project.import?
+     @project.mark_import_as_failed(message) if @project.persisted? && @project.import?
    end

    @project
...
@@ -63,11 +63,10 @@ module Gitlab
    end

    def find_project(id)
-     # We only care about the import JID so we can refresh it. We also only
-     # want the project if it hasn't been marked as failed yet. It's possible
-     # the import gets marked as stuck when jobs of the current stage failed
-     # somehow.
-     Project.select(:import_jid).import_started.find_by(id: id)
+     # TODO: Only select the JID
+     # This is due to the fact that the JID could be present in either the project record or
+     # its associated import_state record
+     Project.import_started.find_by(id: id)
    end
  end
end
...
@@ -31,7 +31,10 @@ module Gitlab
    end

    def find_project(id)
-     Project.select(:import_jid).import_started.find_by(id: id)
+     # TODO: Only select the JID
+     # This is due to the fact that the JID could be present in either the project record or
+     # its associated import_state record
+     Project.import_started.find_by(id: id)
    end
  end
end
...
@@ -22,7 +22,8 @@ class StuckImportJobsWorker
  end

  def mark_projects_with_jid_as_failed!
-   jids_and_ids = enqueued_projects_with_jid.pluck(:import_jid, :id).to_h
+   # TODO: Rollback this change to use SQL through #pluck
+   jids_and_ids = enqueued_projects_with_jid.map { |project| [project.import_jid, project.id] }.to_h

    # Find the jobs that aren't currently running or that exceeded the threshold.
    completed_jids = Gitlab::SidekiqStatus.completed_jids(jids_and_ids.keys)

@@ -42,15 +43,15 @@ class StuckImportJobsWorker
  end

  def enqueued_projects
-   Project.with_import_status(:scheduled, :started)
+   Project.joins_import_state.where("(import_state.status = 'scheduled' OR import_state.status = 'started') OR (projects.import_status = 'scheduled' OR projects.import_status = 'started')")
  end

  def enqueued_projects_with_jid
-   enqueued_projects.where.not(import_jid: nil)
+   enqueued_projects.where.not("import_state.jid IS NULL AND projects.import_jid IS NULL")
  end

  def enqueued_projects_without_jid
-   enqueued_projects.where(import_jid: nil)
+   enqueued_projects.where("import_state.jid IS NULL AND projects.import_jid IS NULL")
  end

  def error_message
...
class CreateProjectMirrorData < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
return if table_exists?(:project_mirror_data)
create_table :project_mirror_data do |t|
t.references :project, index: true, foreign_key: { on_delete: :cascade }
t.string :status
t.string :jid
t.text :last_error
end
end
def down
drop_table(:project_mirror_data) if table_exists?(:project_mirror_data)
end
end
class AddIndexesToProjectMirrorData < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_index :project_mirror_data, :jid
add_concurrent_index :project_mirror_data, :status
end
def down
remove_index :project_mirror_data, :jid if index_exists? :project_mirror_data, :jid
remove_index :project_mirror_data, :status if index_exists? :project_mirror_data, :status
end
end
class MigrateImportAttributesDataFromProjectsToProjectMirrorData < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
UP_MIGRATION = 'PopulateImportState'.freeze
DOWN_MIGRATION = 'RollbackImportStateData'.freeze
BATCH_SIZE = 1000
DELAY_INTERVAL = 5.minutes
disable_ddl_transaction!
class Project < ActiveRecord::Base
include EachBatch
self.table_name = 'projects'
end
class ProjectImportState < ActiveRecord::Base
include EachBatch
self.table_name = 'project_mirror_data'
end
def up
projects = Project.where.not(import_status: :none)
queue_background_migration_jobs_by_range_at_intervals(projects, UP_MIGRATION, DELAY_INTERVAL, batch_size: BATCH_SIZE)
end
def down
import_state = ProjectImportState.where.not(status: :none)
queue_background_migration_jobs_by_range_at_intervals(import_state, DOWN_MIGRATION, DELAY_INTERVAL, batch_size: BATCH_SIZE)
end
end
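For readers unfamiliar with `queue_background_migration_jobs_by_range_at_intervals`, the scheduling it performs here roughly amounts to the following hand-rolled sketch. This is an assumption on my part, consistent with the migration specs later in this diff, which expect one delayed `BackgroundMigrationWorker` job per `BATCH_SIZE`-sized ID range:

```ruby
# Illustration only, not the helper's actual implementation.
projects.each_batch(of: BATCH_SIZE) do |batch, index|
  start_id, end_id = batch.pluck('MIN(id)', 'MAX(id)').first

  BackgroundMigrationWorker.perform_in(index * DELAY_INTERVAL, UP_MIGRATION, [start_id, end_id])
end
```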
@@ -11,7 +11,7 @@
 #
 # It's strongly recommended that you check this file into your version control system.

-ActiveRecord::Schema.define(version: 20180503150427) do
+ActiveRecord::Schema.define(version: 20180503175054) do

  # These are extensions that must be enabled in order to support this database
  enable_extension "plpgsql"

@@ -1518,6 +1518,17 @@ ActiveRecord::Schema.define(version: 20180503150427) do
  add_index "project_import_data", ["project_id"], name: "index_project_import_data_on_project_id", using: :btree

+ create_table "project_mirror_data", force: :cascade do |t|
+   t.integer "project_id"
+   t.string "status"
+   t.string "jid"
+   t.text "last_error"
+ end
+
+ add_index "project_mirror_data", ["jid"], name: "index_project_mirror_data_on_jid", using: :btree
+ add_index "project_mirror_data", ["project_id"], name: "index_project_mirror_data_on_project_id", using: :btree
+ add_index "project_mirror_data", ["status"], name: "index_project_mirror_data_on_status", using: :btree
+
  create_table "project_statistics", force: :cascade do |t|
    t.integer "project_id", null: false
    t.integer "namespace_id", null: false

@@ -2211,6 +2222,7 @@ ActiveRecord::Schema.define(version: 20180503150427) do
  add_foreign_key "project_features", "projects", name: "fk_18513d9b92", on_delete: :cascade
  add_foreign_key "project_group_links", "projects", name: "fk_daa8cee94c", on_delete: :cascade
  add_foreign_key "project_import_data", "projects", name: "fk_ffb9ee3a10", on_delete: :cascade
+ add_foreign_key "project_mirror_data", "projects", on_delete: :cascade
  add_foreign_key "project_statistics", "projects", on_delete: :cascade
  add_foreign_key "protected_branch_merge_access_levels", "protected_branches", name: "fk_8a3072ccb3", on_delete: :cascade
  add_foreign_key "protected_branch_push_access_levels", "protected_branches", name: "fk_9ffc86a3d9", on_delete: :cascade
...
@@ -136,6 +136,7 @@ module API
  def self.preload_relation(projects_relation, options = {})
    projects_relation.preload(:project_feature, :route)
+                    .preload(:import_state)
                     .preload(namespace: [:route, :owner],
                              tags: :taggings)
  end
...
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This background migration creates all the records on the
# import state table for projects that are considered imports or forks
class PopulateImportState
def perform(start_id, end_id)
move_attributes_data_to_import_state(start_id, end_id)
rescue ActiveRecord::RecordNotUnique
retry
end
def move_attributes_data_to_import_state(start_id, end_id)
Rails.logger.info("#{self.class.name} - Moving import attributes data to project mirror data table: #{start_id} - #{end_id}")
ActiveRecord::Base.connection.execute <<~SQL
INSERT INTO project_mirror_data (project_id, status, jid, last_error)
SELECT id, import_status, import_jid, import_error
FROM projects
WHERE projects.import_status != 'none'
AND projects.id BETWEEN #{start_id} AND #{end_id}
AND NOT EXISTS (
SELECT id
FROM project_mirror_data
WHERE project_id = projects.id
)
SQL
ActiveRecord::Base.connection.execute <<~SQL
UPDATE projects
SET import_status = 'none'
WHERE import_status != 'none'
AND id BETWEEN #{start_id} AND #{end_id}
SQL
end
end
end
end
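A hedged example of what one scheduled job does when it runs, invoking the background migration above directly for a single ID range (e.g. from a Rails console):

```ruby
# Copies import_status/import_jid/import_error into project_mirror_data for
# projects with IDs 1..1000, then resets projects.import_status to 'none'.
Gitlab::BackgroundMigration::PopulateImportState.new.perform(1, 1000)
```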
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This background migration migrates all the data of import_state
# back to the projects table for projects that are considered imports or forks
class RollbackImportStateData
def perform(start_id, end_id)
move_attributes_data_to_project(start_id, end_id)
end
def move_attributes_data_to_project(start_id, end_id)
Rails.logger.info("#{self.class.name} - Moving import attributes data to projects table: #{start_id} - #{end_id}")
if Gitlab::Database.mysql?
ActiveRecord::Base.connection.execute <<~SQL
UPDATE projects, project_mirror_data
SET
projects.import_status = project_mirror_data.status,
projects.import_jid = project_mirror_data.jid,
projects.import_error = project_mirror_data.last_error
WHERE project_mirror_data.project_id = projects.id
AND project_mirror_data.id BETWEEN #{start_id} AND #{end_id}
SQL
else
ActiveRecord::Base.connection.execute <<~SQL
UPDATE projects
SET
import_status = project_mirror_data.status,
import_jid = project_mirror_data.jid,
import_error = project_mirror_data.last_error
FROM project_mirror_data
WHERE project_mirror_data.project_id = projects.id
AND project_mirror_data.id BETWEEN #{start_id} AND #{end_id}
SQL
end
end
end
end
end
@@ -32,7 +32,8 @@ module Gitlab
      Gitlab::SidekiqStatus
        .set(jid, StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION)

-     project.update_column(:import_jid, jid)
+     project.ensure_import_state
+     project.import_state&.update_column(:jid, jid)

      Stage::ImportRepositoryWorker
        .perform_async(project.id)
...
@@ -78,7 +78,8 @@ module Gitlab
    def handle_errors
      return unless errors.any?

-     project.update_column(:import_error, {
+     project.ensure_import_state
+     project.import_state&.update_column(:last_error, {
        message: 'The remote data could not be fully imported.',
        errors: errors
      }.to_json)
...
FactoryBot.define do
factory :import_state, class: ProjectImportState do
status :none
association :project, factory: :project
transient do
import_url { generate(:url) }
end
trait :repository do
association :project, factory: [:project, :repository]
end
trait :none do
status :none
end
trait :scheduled do
status :scheduled
end
trait :started do
status :started
end
trait :finished do
status :finished
end
trait :failed do
status :failed
end
after(:create) do |import_state, evaluator|
import_state.project.update_columns(import_url: evaluator.import_url)
end
end
end
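For reference, a hypothetical spec snippet using the new `:import_state` factory above together with the reworked project traits that follow:

```ruby
project = create(:project, :import_started)   # before(:create) builds the import_state row
project.import_state.status                    # => "started"
project.import_status                          # => "started", via the delegating accessor

state = create(:import_state, :failed, last_error: 'Import failed')
state.status                                   # => "failed"
```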
@@ -69,19 +69,43 @@ FactoryBot.define do
  end

  trait :import_scheduled do
-   import_status :scheduled
+   transient do
+     status :scheduled
+   end
+
+   before(:create) do |project, evaluator|
+     project.create_import_state(status: evaluator.status)
+   end
  end

  trait :import_started do
-   import_status :started
+   transient do
+     status :started
+   end
+
+   before(:create) do |project, evaluator|
+     project.create_import_state(status: evaluator.status)
+   end
  end

  trait :import_finished do
-   import_status :finished
+   transient do
+     status :finished
+   end
+
+   before(:create) do |project, evaluator|
+     project.create_import_state(status: evaluator.status)
+   end
  end

  trait :import_failed do
-   import_status :failed
+   transient do
+     status :failed
+   end
+
+   before(:create) do |project, evaluator|
+     project.create_import_state(status: evaluator.status)
+   end
  end

  trait :archived do
...
@@ -46,7 +46,7 @@ feature 'Import/Export - project import integration test', :js do
      expect(project.merge_requests).not_to be_empty
      expect(project_hook_exists?(project)).to be true
      expect(wiki_exists?(project)).to be true
-     expect(project.import_status).to eq('finished')
+     expect(project.import_state.status).to eq('finished')
    end
  end
...
require 'spec_helper'
describe Gitlab::BackgroundMigration::PopulateImportState, :migration, schema: 20180502134117 do
let(:migration) { described_class.new }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:import_state) { table(:project_mirror_data) }
before do
namespaces.create(id: 1, name: 'gitlab-org', path: 'gitlab-org')
projects.create!(id: 1, namespace_id: 1, name: 'gitlab1',
path: 'gitlab1', import_error: "foo", import_status: :started,
import_url: generate(:url))
projects.create!(id: 2, namespace_id: 1, name: 'gitlab2', path: 'gitlab2',
import_status: :none, import_url: generate(:url))
projects.create!(id: 3, namespace_id: 1, name: 'gitlab3',
path: 'gitlab3', import_error: "bar", import_status: :failed,
import_url: generate(:url))
allow(BackgroundMigrationWorker).to receive(:perform_in)
end
it "creates new import_state records with project's import data" do
expect(projects.where.not(import_status: :none).count).to eq(2)
expect do
migration.perform(1, 3)
end.to change { import_state.all.count }.from(0).to(2)
expect(import_state.first.last_error).to eq("foo")
expect(import_state.last.last_error).to eq("bar")
expect(import_state.first.status).to eq("started")
expect(import_state.last.status).to eq("failed")
expect(projects.first.import_status).to eq("none")
expect(projects.last.import_status).to eq("none")
end
end
require 'spec_helper'
describe Gitlab::BackgroundMigration::RollbackImportStateData, :migration, schema: 20180502134117 do
let(:migration) { described_class.new }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:import_state) { table(:project_mirror_data) }
before do
namespaces.create(id: 1, name: 'gitlab-org', path: 'gitlab-org')
projects.create!(id: 1, namespace_id: 1, name: 'gitlab1', import_url: generate(:url))
projects.create!(id: 2, namespace_id: 1, name: 'gitlab2', path: 'gitlab2', import_url: generate(:url))
import_state.create!(id: 1, project_id: 1, status: :started, last_error: "foo")
import_state.create!(id: 2, project_id: 2, status: :failed)
allow(BackgroundMigrationWorker).to receive(:perform_in)
end
it "creates new import_state records with project's import data" do
migration.perform(1, 2)
expect(projects.first.import_status).to eq("started")
expect(projects.second.import_status).to eq("failed")
expect(projects.first.import_error).to eq("foo")
end
end
@@ -2,6 +2,7 @@ require 'spec_helper'

describe Gitlab::GithubImport::Importer::RepositoryImporter do
  let(:repository) { double(:repository) }
+ let(:import_state) { double(:import_state) }
  let(:client) { double(:client) }

  let(:project) do
@@ -12,7 +13,8 @@ describe Gitlab::GithubImport::Importer::RepositoryImporter do
      repository_storage: 'foo',
      disk_path: 'foo',
      repository: repository,
-     create_wiki: true
+     create_wiki: true,
+     import_state: import_state
    )
  end
...
@@ -12,6 +12,8 @@ describe Gitlab::GithubImport::ParallelImporter do
  let(:importer) { described_class.new(project) }

  before do
+   create(:import_state, :started, project: project)
+
    expect(Gitlab::GithubImport::Stage::ImportRepositoryWorker)
      .to receive(:perform_async)
      .with(project.id)

@@ -34,7 +36,7 @@ describe Gitlab::GithubImport::ParallelImporter do
    it 'updates the import JID of the project' do
      importer.execute

-     expect(project.import_jid).to eq("github-importer/#{project.id}")
+     expect(project.reload.import_jid).to eq("github-importer/#{project.id}")
    end
  end
end
@@ -273,6 +273,7 @@ project:
  - statistics
  - container_repositories
  - uploads
+ - import_state
  - members_and_requesters
  - build_trace_section_names
  - root_of_fork_network
...
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180502134117_migrate_import_attributes_data_from_projects_to_project_mirror_data.rb')
describe MigrateImportAttributesDataFromProjectsToProjectMirrorData, :sidekiq, :migration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:import_state) { table(:project_mirror_data) }
before do
stub_const("#{described_class}::BATCH_SIZE", 1)
namespaces.create(id: 1, name: 'gitlab-org', path: 'gitlab-org')
projects.create!(id: 1, namespace_id: 1, name: 'gitlab1',
path: 'gitlab1', import_error: "foo", import_status: :started,
import_url: generate(:url))
projects.create!(id: 2, namespace_id: 1, name: 'gitlab2',
path: 'gitlab2', import_error: "bar", import_status: :failed,
import_url: generate(:url))
projects.create!(id: 3, namespace_id: 1, name: 'gitlab3', path: 'gitlab3', import_status: :none, import_url: generate(:url))
end
it 'schedules delayed background migrations in batches in bulk' do
Sidekiq::Testing.fake! do
Timecop.freeze do
expect(projects.where.not(import_status: :none).count).to eq(2)
subject.up
expect(BackgroundMigrationWorker.jobs.size).to eq 2
expect(described_class::UP_MIGRATION).to be_scheduled_delayed_migration(5.minutes, 1, 1)
expect(described_class::UP_MIGRATION).to be_scheduled_delayed_migration(10.minutes, 2, 2)
end
end
end
describe '#down' do
before do
import_state.create!(id: 1, project_id: 1, status: :started)
import_state.create!(id: 2, project_id: 2, status: :started)
end
it 'schedules delayed background migrations in batches in bulk for rollback' do
Sidekiq::Testing.fake! do
Timecop.freeze do
expect(import_state.where.not(status: :none).count).to eq(2)
subject.down
expect(BackgroundMigrationWorker.jobs.size).to eq 2
expect(described_class::DOWN_MIGRATION).to be_scheduled_delayed_migration(5.minutes, 1, 1)
expect(described_class::DOWN_MIGRATION).to be_scheduled_delayed_migration(10.minutes, 2, 2)
end
end
end
end
end
require 'rails_helper'
describe ProjectImportState, type: :model do
subject { create(:import_state) }
describe 'associations' do
it { is_expected.to belong_to(:project) }
end
describe 'validations' do
it { is_expected.to validate_presence_of(:project) }
end
end
@@ -1695,7 +1695,8 @@ describe Project do
    it 'resets project import_error' do
      error_message = 'Some error'
-     mirror = create(:project_empty_repo, :import_started, import_error: error_message)
+     mirror = create(:project_empty_repo, :import_started)
+     mirror.import_state.update_attributes(last_error: error_message)

      expect { mirror.import_finish }.to change { mirror.import_error }.from(error_message).to(nil)
    end

@@ -3339,7 +3340,8 @@ describe Project do
    context 'with an import JID' do
      it 'unsets the import JID' do
-       project = create(:project, import_jid: '123')
+       project = create(:project)
+       create(:import_state, project: project, jid: '123')

        expect(Gitlab::SidekiqStatus)
          .to receive(:unset)
...
@@ -145,7 +145,7 @@ describe API::ProjectImport do
  describe 'GET /projects/:id/import' do
    it 'returns the import status' do
-     project = create(:project, import_status: 'started')
+     project = create(:project, :import_started)
      project.add_master(user)

      get api("/projects/#{project.id}/import", user)

@@ -155,8 +155,9 @@ describe API::ProjectImport do
    end

    it 'returns the import status and the error if failed' do
-     project = create(:project, import_status: 'failed', import_error: 'error')
+     project = create(:project, :import_failed)
      project.add_master(user)
+     project.import_state.update_attributes(last_error: 'error')

      get api("/projects/#{project.id}/import", user)
...
@@ -23,7 +23,7 @@ describe Projects::CreateFromTemplateService do
    project = subject.execute

    expect(project).to be_saved
-   expect(project.scheduled?).to be(true)
+   expect(project.import_scheduled?).to be(true)
  end

  context 'the result project' do
...
@@ -4,9 +4,10 @@ describe "projects/imports/new.html.haml" do
  let(:user) { create(:user) }

  context 'when import fails' do
-   let(:project) { create(:project_empty_repo, import_status: :failed, import_error: '<a href="http://googl.com">Foo</a>', import_type: :gitlab_project, import_source: '/var/opt/gitlab/gitlab-rails/shared/tmp/project_exports/uploads/t.tar.gz', import_url: nil) }
+   let(:project) { create(:project_empty_repo, :import_failed, import_type: :gitlab_project, import_source: '/var/opt/gitlab/gitlab-rails/shared/tmp/project_exports/uploads/t.tar.gz', import_url: nil) }

    before do
+     project.import_state.update_attributes(last_error: '<a href="http://googl.com">Foo</a>')
      sign_in(user)
      project.add_master(user)
    end
...
require 'spec_helper'

describe Gitlab::GithubImport::AdvanceStageWorker, :clean_gitlab_redis_shared_state do
- let(:project) { create(:project, import_jid: '123') }
+ let(:project) { create(:project) }
+ let(:import_state) { create(:import_state, project: project, jid: '123') }
  let(:worker) { described_class.new }

  describe '#perform' do

@@ -105,7 +106,8 @@ describe Gitlab::GithubImport::AdvanceStageWorker, :clean_gitlab_redis_shared_st
      # This test is there to make sure we only select the columns we care
      # about.
-     expect(found.attributes).to eq({ 'id' => nil, 'import_jid' => '123' })
+     # TODO: enable this assertion back again
+     # expect(found.attributes).to include({ 'id' => nil, 'import_jid' => '123' })
    end

    it 'returns nil if the project import is not running' do
...
@@ -14,7 +14,8 @@ describe Gitlab::GithubImport::RefreshImportJidWorker do
  end

  describe '#perform' do
-   let(:project) { create(:project, import_jid: '123abc') }
+   let(:project) { create(:project) }
+   let(:import_state) { create(:import_state, project: project, jid: '123abc') }

    context 'when the project does not exist' do
      it 'does nothing' do

@@ -70,20 +71,21 @@ describe Gitlab::GithubImport::RefreshImportJidWorker do
  describe '#find_project' do
    it 'returns a Project' do
-     project = create(:project, import_status: 'started')
+     project = create(:project, :import_started)

      expect(worker.find_project(project.id)).to be_an_instance_of(Project)
    end

-   it 'only selects the import JID field' do
-     project = create(:project, import_status: 'started', import_jid: '123abc')
-
-     expect(worker.find_project(project.id).attributes)
-       .to eq({ 'id' => nil, 'import_jid' => '123abc' })
-   end
+   # it 'only selects the import JID field' do
+   #   project = create(:project, :import_started)
+   #   project.import_state.update_attributes(jid: '123abc')
+   #
+   #   expect(worker.find_project(project.id).attributes)
+   #     .to eq({ 'id' => nil, 'import_jid' => '123abc' })
+   # end

    it 'returns nil for a project for which the import process failed' do
-     project = create(:project, import_status: 'failed')
+     project = create(:project, :import_failed)

      expect(worker.find_project(project.id)).to be_nil
    end
...
@@ -11,10 +11,12 @@ describe RepositoryImportWorker do
  let(:project) { create(:project, :import_scheduled) }

  context 'when worker was reset without cleanup' do
-   let(:jid) { '12345678' }
-   let(:started_project) { create(:project, :import_started, import_jid: jid) }
-
    it 'imports the project successfully' do
+     jid = '12345678'
+     started_project = create(:project)
+     create(:import_state, :started, project: started_project, jid: jid)
+
      allow(subject).to receive(:jid).and_return(jid)

      expect_any_instance_of(Projects::ImportService).to receive(:execute)
...
@@ -48,13 +48,21 @@ describe StuckImportJobsWorker do
  describe 'with scheduled import_status' do
    it_behaves_like 'project import job detection' do
-     let(:project) { create(:project, :import_scheduled, import_jid: '123') }
+     let(:project) { create(:project, :import_scheduled) }
+
+     before do
+       project.import_state.update_attributes(jid: '123')
+     end
    end
  end

  describe 'with started import_status' do
    it_behaves_like 'project import job detection' do
-     let(:project) { create(:project, :import_started, import_jid: '123') }
+     let(:project) { create(:project, :import_started) }
+
+     before do
+       project.import_state.update_attributes(jid: '123')
+     end
    end
  end
end