Commit 39c1e1cc authored by Stan Hu's avatar Stan Hu

Merge branch 'import-improve-project-restore' into 'master'

Refactor ProjectTreeRestorer

See merge request gitlab-org/gitlab!18024
parents 42b27577 d08fbf5e
......@@ -1966,27 +1966,6 @@ class Project < ApplicationRecord
(auto_devops || build_auto_devops)&.predefined_variables
end
def append_or_update_attribute(name, value)
if Project.reflect_on_association(name).try(:macro) == :has_many
# if this is a 1-to-N relation, point each child object at this parent
value.each do |item|
item.update!(
Project.reflect_on_association(name).foreign_key => id)
end
# force to drop relation cache
public_send(name).reset # rubocop:disable GitlabSecurity/PublicSend
# succeeded
true
else
# if this is any other relation or attribute, just update the object
update_attribute(name, value)
end
rescue ActiveRecord::RecordInvalid => e
raise e, "Failed to set #{name}: #{e.message}"
end
# Tries to set repository as read_only, checking for existing Git transfers in progress beforehand
#
# @return [Boolean] true when set to read_only or false when an existing git transfer is in progress
......
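For context on the removed Project#append_or_update_attribute: its has_many branch used Active Record reflection to find the association's foreign key, wrote that key onto each imported record, and then reset the cached relation. A minimal standalone sketch of that pattern, assuming the activerecord and sqlite3 gems and a hypothetical Project/Issue pair rather than the real GitLab models:

require 'active_record'

ActiveRecord::Base.establish_connection(adapter: 'sqlite3', database: ':memory:')
ActiveRecord::Schema.define do
  create_table(:projects) { |t| t.string :name }
  create_table(:issues)   { |t| t.string :title; t.integer :project_id }
end

class Project < ActiveRecord::Base
  has_many :issues
end

class Issue < ActiveRecord::Base
  belongs_to :project, optional: true
end

project = Project.create!(name: 'import target')
orphans = [Issue.create!(title: 'a'), Issue.create!(title: 'b')]

# The removed helper's has_many branch in miniature: reflect on the association,
# write its foreign key onto each imported record, then reset the cached
# relation so the next read hits the database.
reflection = Project.reflect_on_association(:issues)
orphans.each { |issue| issue.update!(reflection.foreign_key => project.id) }
project.issues.reset

puts project.issues.count # => 2

The refactored restorer no longer needs this helper: as the diff below shows, it assigns relation_object.project = @project and calls save! on each built object directly.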
......@@ -12,8 +12,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
let(:restored_project_json) { project_tree_restorer.restore }
before do
allow(shared).to receive(:export_path).and_return('spec/fixtures/lib/gitlab/import_export/')
project_tree_restorer.instance_variable_set(:@path, 'ee/spec/fixtures/lib/gitlab/import_export/project.designs.json')
setup_import_export_config('designs', 'ee')
end
describe 'restoring design management data' do
......
......@@ -15,7 +15,6 @@ module Gitlab
@user = user
@shared = shared
@project = project
@saved = true
end
def restore
......@@ -33,7 +32,8 @@ module Gitlab
ActiveRecord::Base.uncached do
ActiveRecord::Base.no_touching do
update_project_params!
create_relations
create_project_relations!
post_import!
end
end
......@@ -69,68 +69,64 @@ module Gitlab
# in the DB. The structure and relationships between models are guessed from
# the configuration yaml file too.
# Finally, it updates each attribute in the newly imported project.
def create_relations
project_relations.each do |relation_key, relation_definition|
relation_key_s = relation_key.to_s
if relation_definition.present?
create_sub_relations(relation_key_s, relation_definition, @tree_hash)
elsif @tree_hash[relation_key_s].present?
save_relation_hash(relation_key_s, @tree_hash[relation_key_s])
end
def create_project_relations!
project_relations.each(&method(
:process_project_relation!))
end
def post_import!
@project.merge_requests.set_latest_merge_request_diff_ids!
@saved
end
def save_relation_hash(relation_key, relation_hash_batch)
relation_hash = create_relation(relation_key, relation_hash_batch)
def process_project_relation!(relation_key, relation_definition)
data_hashes = @tree_hash.delete(relation_key)
return unless data_hashes
remove_group_models(relation_hash) if relation_hash.is_a?(Array)
# we do not care whether we process an array or a hash
data_hashes = [data_hashes] unless data_hashes.is_a?(Array)
@saved = false unless @project.append_or_update_attribute(relation_key, relation_hash)
# consume and remove objects from memory
while data_hash = data_hashes.shift
process_project_relation_item!(relation_key, relation_definition, data_hash)
end
end
save_id_mappings(relation_key, relation_hash_batch, relation_hash)
def process_project_relation_item!(relation_key, relation_definition, data_hash)
relation_object = build_relation(relation_key, relation_definition, data_hash)
return unless relation_object
return if group_model?(relation_object)
@project.reset
relation_object.project = @project
relation_object.save!
save_id_mapping(relation_key, data_hash, relation_object)
end
# Older, serialized CI pipeline exports may only have a
# merge_request_id and not the full hash of the merge request. To
# import these pipelines, we need to preserve the mapping between
# the old and the new merge request IDs.
def save_id_mappings(relation_key, relation_hash_batch, relation_hash)
def save_id_mapping(relation_key, data_hash, relation_object)
return unless relation_key == 'merge_requests'
relation_hash = Array(relation_hash)
Array(relation_hash_batch).each_with_index do |raw_data, index|
merge_requests_mapping[raw_data['id']] = relation_hash[index]['id']
end
end
# Remove project models that became group models because we found them at the group level.
# They no longer require saving at the root project level.
# For example, an existing group label that matched the title.
def remove_group_models(relation_hash)
relation_hash.reject! do |value|
GROUP_MODELS.include?(value.class) && value.group_id
end
merge_requests_mapping[data_hash['id']] = relation_object.id
end
def project_relations
@project_relations ||= reader.attributes_finder.find_relations_tree(:project)
@project_relations ||=
reader
.attributes_finder
.find_relations_tree(:project)
.deep_stringify_keys
end
def update_project_params!
Gitlab::Timeless.timeless(@project) do
project_params = @tree_hash.reject do |key, value|
project_relations.include?(key.to_sym)
project_relations.include?(key)
end
project_params = project_params.merge(present_project_override_params)
project_params = project_params.merge(
present_project_override_params)
# Cleaning all imported and overridden params
project_params = Gitlab::ImportExport::AttributeCleaner.clean(
......@@ -140,6 +136,8 @@ module Gitlab
@project.assign_attributes(project_params)
@project.drop_visibility_level!
Gitlab::Timeless.timeless(@project) do
@project.save!
end
end
......@@ -156,73 +154,61 @@ module Gitlab
@project_override_params ||= @project.import_data&.data&.fetch('override_params', nil) || {}
end
# Given a relation hash containing one or more models and their relationships,
# loops through each model and each object of a model type and
# assigns its corresponding attributes hash from +tree_hash+
# Example:
# +relation_key+ issues: loops through the list of *issues* and, for each individual
# issue, finds any subrelations such as notes, creates them and assigns them back to the hash
#
# Recursively calls this method if the sub-relation is a hash containing more sub-relations
def create_sub_relations(relation_key, relation_definition, tree_hash, save: true)
return if tree_hash[relation_key].blank?
tree_array = [tree_hash[relation_key]].flatten
# Avoid keeping a possibly heavy object in memory once we are done with it
while relation_item = tree_array.shift
# A transaction at this level is slower than one single transaction,
# but we can't have it at the upper level or the GC won't get rid of the AR objects
# after we save the batch.
Project.transaction do
process_sub_relation(relation_key, relation_definition, relation_item)
# For every subrelation that hangs from Project, save the associated records together.
# This effectively batches all records per subrelation item, only keeping those in memory.
# Keep in mind that finer batch granularity means less memory but more slowness.
if save
save_relation_hash(relation_key, [relation_item])
tree_hash[relation_key].delete(relation_item)
end
end
def build_relations(relation_key, relation_definition, data_hashes)
data_hashes.map do |data_hash|
build_relation(relation_key, relation_definition, data_hash)
end.compact
end
tree_hash.delete(relation_key) if save
end
def build_relation(relation_key, relation_definition, data_hash)
# TODO: This is a hack to avoid creating a relation for the author.
# Rather, make `RelationFactory#set_note_author` take care of that
return data_hash if relation_key == 'author'
def process_sub_relation(relation_key, relation_definition, relation_item)
# create relation objects recursively for all sub-objects
relation_definition.each do |sub_relation_key, sub_relation_definition|
# We just use author to get the user ID, do not attempt to create an instance.
next if sub_relation_key == :author
sub_relation_key_s = sub_relation_key.to_s
# create dependent relations if present
if sub_relation_definition.present?
create_sub_relations(sub_relation_key_s, sub_relation_definition, relation_item, save: false)
end
# transform relation hash to actual object
sub_relation_hash = relation_item[sub_relation_key_s]
if sub_relation_hash.present?
relation_item[sub_relation_key_s] = create_relation(sub_relation_key, sub_relation_hash)
end
end
transform_sub_relations!(data_hash, sub_relation_key, sub_relation_definition)
end
def create_relation(relation_key, relation_hash_list)
relation_array = [relation_hash_list].flatten.map do |relation_hash|
Gitlab::ImportExport::RelationFactory.create(
relation_sym: relation_key.to_sym,
relation_hash: relation_hash,
relation_hash: data_hash,
members_mapper: members_mapper,
merge_requests_mapping: merge_requests_mapping,
user: @user,
project: @project,
excluded_keys: excluded_keys_for_relation(relation_key))
end.compact
end
def transform_sub_relations!(data_hash, sub_relation_key, sub_relation_definition)
sub_data_hash = data_hash[sub_relation_key]
return unless sub_data_hash
# if the object is a hash we can build a single object,
# as it means this is a 1-to-1 rather than a 1-to-many relation
sub_data_hash =
if sub_data_hash.is_a?(Array)
build_relations(
sub_relation_key,
sub_relation_definition,
sub_data_hash).presence
else
build_relation(
sub_relation_key,
sub_relation_definition,
sub_data_hash)
end
# persist object(s) or delete from relation
if sub_data_hash
data_hash[sub_relation_key] = sub_data_hash
else
data_hash.delete(sub_relation_key)
end
end
relation_hash_list.is_a?(Array) ? relation_array : relation_array.first
def group_model?(relation_object)
GROUP_MODELS.include?(relation_object.class) && relation_object.group_id
end
def reader
......
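The core behavioural change in ProjectTreeRestorer is that top-level relations are now consumed one item at a time: process_project_relation! shifts hashes off the array destructively so processed entries can be garbage collected mid-loop, and process_project_relation_item! saves each built object individually after assigning it to the project. A small plain-Ruby sketch of that consumption pattern (illustrative data, no GitLab dependencies):

tree_hash = {
  'issues' => [{ 'title' => 'first issue' }, { 'title' => 'second issue' }]
}

data_hashes = tree_hash.delete('issues')
# arrays and single hashes are handled alike: wrap a lone hash in an array
data_hashes = [data_hashes] unless data_hashes.is_a?(Array)

# consume and remove objects from memory: each hash is detached from the array
# before it is processed, so nothing holds a reference to it afterwards
while data_hash = data_hashes.shift
  puts "would build and save: #{data_hash['title']}"
end

puts data_hashes.empty? # => true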
......@@ -38,10 +38,13 @@ module Gitlab
IMPORTED_OBJECT_MAX_RETRIES = 5.freeze
EXISTING_OBJECT_CHECK = %i[milestone milestones label labels project_label project_labels group_label group_labels project_feature merge_request].freeze
EXISTING_OBJECT_CHECK = %i[milestone milestones label labels project_label project_labels group_label group_labels project_feature merge_request ProjectCiCdSetting].freeze
TOKEN_RESET_MODELS = %i[Project Namespace Ci::Trigger Ci::Build Ci::Runner ProjectHook].freeze
# This represents all relations that have a unique key on `project_id`
UNIQUE_RELATIONS = %i[project_feature ProjectCiCdSetting].freeze
def self.create(*args)
new(*args).create
end
......@@ -274,7 +277,7 @@ module Gitlab
end
def setup_pipeline
@relation_hash.fetch('stages').each do |stage|
@relation_hash.fetch('stages', []).each do |stage|
stage.statuses.each do |status|
status.pipeline = imported_object
end
......@@ -324,7 +327,8 @@ module Gitlab
end
def find_or_create_object!
return relation_class.find_or_create_by(project_id: @project.id) if @relation_name == :project_feature
return relation_class.find_or_create_by(project_id: @project.id) if UNIQUE_RELATIONS.include?(@relation_name)
return find_or_create_merge_request! if @relation_name == :merge_request
# Can't use IDs as validation exists calling `group` or `project` attributes
......
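Two small RelationFactory adjustments support the new fixture data below: ProjectCiCdSetting joins project_feature in UNIQUE_RELATIONS, so relations with a unique key on project_id are looked up with find_or_create_by(project_id: ...) instead of being built anew, and pipeline stages are now read with a default. A trivial plain-Ruby sketch of the fetch change, using an illustrative hash for a pipeline export that omits the 'stages' key (an assumed shape; the fixture pipeline added below carries an explicitly empty list):

# A pipeline hash whose export carries no 'stages' key.
relation_hash = { 'ref' => 'master', 'status' => 'failed' }

# Before: relation_hash.fetch('stages') would raise KeyError for such a hash.
# After: the default makes the setup loop a harmless no-op.
relation_hash.fetch('stages', []).each { |stage| puts stage }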
......@@ -6680,6 +6680,25 @@
]
}
]
},
{
"id": 41,
"project_id": 5,
"ref": "master",
"sha": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
"before_sha": null,
"push_data": null,
"created_at": "2016-03-22T15:20:35.763Z",
"updated_at": "2016-03-22T15:20:35.763Z",
"tag": null,
"yaml_errors": null,
"committed_at": null,
"status": "failed",
"started_at": null,
"finished_at": null,
"duration": null,
"stages": [
]
}
],
"triggers": [
......
......@@ -2,6 +2,8 @@ require 'spec_helper'
include ImportExport::CommonUtil
describe Gitlab::ImportExport::ProjectTreeRestorer do
include ImportExport::CommonUtil
let(:shared) { project.import_export_shared }
describe 'restore project tree' do
......@@ -16,7 +18,8 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
RSpec::Mocks.with_temporary_scope do
@project = create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project')
@shared = @project.import_export_shared
allow(@shared).to receive(:export_path).and_return('spec/fixtures/lib/gitlab/import_export/')
setup_import_export_config('complex')
allow_any_instance_of(Repository).to receive(:fetch_source_branch!).and_return(true)
allow_any_instance_of(Gitlab::Git::Repository).to receive(:branch_exists?).and_return(false)
......@@ -257,9 +260,9 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
it 'has the correct number of pipelines and statuses' do
expect(@project.ci_pipelines.size).to eq(5)
expect(@project.ci_pipelines.size).to eq(6)
@project.ci_pipelines.zip([2, 2, 2, 2, 2])
@project.ci_pipelines.zip([0, 2, 2, 2, 2, 2])
.each do |(pipeline, expected_status_size)|
expect(pipeline.statuses.size).to eq(expected_status_size)
end
......@@ -268,7 +271,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
context 'when restoring hierarchy of pipeline, stages and jobs' do
it 'restores pipelines' do
expect(Ci::Pipeline.all.count).to be 5
expect(Ci::Pipeline.all.count).to be 6
end
it 'restores pipeline stages' do
......@@ -314,21 +317,33 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
end
context 'Light JSON' do
context 'project.json file access check' do
let(:user) { create(:user) }
let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') }
let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) }
let(:restored_project_json) { project_tree_restorer.restore }
before do
allow(shared).to receive(:export_path).and_return('spec/fixtures/lib/gitlab/import_export/')
it 'does not read a symlink' do
Dir.mktmpdir do |tmpdir|
setup_symlink(tmpdir, 'project.json')
allow(shared).to receive(:export_path).and_call_original
expect(project_tree_restorer.restore).to eq(false)
expect(shared.errors).to include('Incorrect JSON format')
end
end
end
context 'Light JSON' do
let(:user) { create(:user) }
let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') }
let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) }
let(:restored_project_json) { project_tree_restorer.restore }
context 'with a simple project' do
before do
project_tree_restorer.instance_variable_set(:@path, "spec/fixtures/lib/gitlab/import_export/project.light.json")
restored_project_json
setup_import_export_config('light')
expect(restored_project_json).to eq(true)
end
it_behaves_like 'restores project correctly',
......@@ -339,19 +354,6 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
first_issue_labels: 1,
services: 1
context 'project.json file access check' do
it 'does not read a symlink' do
Dir.mktmpdir do |tmpdir|
setup_symlink(tmpdir, 'project.json')
allow(shared).to receive(:export_path).and_call_original
restored_project_json
expect(shared.errors).to be_empty
end
end
end
context 'when there is an existing build with build token' do
before do
create(:ci_build, token: 'abcd')
......@@ -367,6 +369,10 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
context 'when the project has overridden params in import data' do
before do
setup_import_export_config('light')
end
it 'handles string versions of visibility_level' do
# Project needs to be in a group for visibility level comparison
# to happen
......@@ -375,24 +381,21 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
project.create_import_data(data: { override_params: { visibility_level: Gitlab::VisibilityLevel::INTERNAL.to_s } })
restored_project_json
expect(restored_project_json).to eq(true)
expect(project.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
end
it 'overwrites the params stored in the JSON' do
project.create_import_data(data: { override_params: { description: "Overridden" } })
restored_project_json
expect(restored_project_json).to eq(true)
expect(project.description).to eq("Overridden")
end
it 'does not allow setting params that are excluded from import_export settings' do
project.create_import_data(data: { override_params: { lfs_enabled: true } })
restored_project_json
expect(restored_project_json).to eq(true)
expect(project.lfs_enabled).to be_falsey
end
......@@ -408,7 +411,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
project.create_import_data(data: { override_params: disabled_access_levels })
restored_project_json
expect(restored_project_json).to eq(true)
aggregate_failures do
access_level_keys.each do |key|
......@@ -429,9 +432,8 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
before do
project_tree_restorer.instance_variable_set(:@path, "spec/fixtures/lib/gitlab/import_export/project.group.json")
restored_project_json
setup_import_export_config('group')
expect(restored_project_json).to eq(true)
end
it_behaves_like 'restores project correctly',
......@@ -463,11 +465,11 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
before do
project_tree_restorer.instance_variable_set(:@path, "spec/fixtures/lib/gitlab/import_export/project.light.json")
setup_import_export_config('light')
end
it 'does not import any templated services' do
restored_project_json
expect(restored_project_json).to eq(true)
expect(project.services.where(template: true).count).to eq(0)
end
......@@ -477,8 +479,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
expect_any_instance_of(Gitlab::ImportExport::Shared).not_to receive(:error)
restored_project_json
expect(restored_project_json).to eq(true)
expect(project.labels.count).to eq(1)
end
......@@ -487,8 +488,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
expect_any_instance_of(Gitlab::ImportExport::Shared).not_to receive(:error)
restored_project_json
expect(restored_project_json).to eq(true)
expect(project.group.milestones.count).to eq(1)
expect(project.milestones.count).to eq(0)
end
......@@ -504,13 +504,14 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
group: create(:group))
end
it 'preserves the project milestone IID' do
project_tree_restorer.instance_variable_set(:@path, "spec/fixtures/lib/gitlab/import_export/project.milestone-iid.json")
before do
setup_import_export_config('milestone-iid')
end
it 'preserves the project milestone IID' do
expect_any_instance_of(Gitlab::ImportExport::Shared).not_to receive(:error)
restored_project_json
expect(restored_project_json).to eq(true)
expect(project.milestones.count).to eq(2)
expect(Milestone.find_by_title('Another milestone').iid).to eq(1)
expect(Milestone.find_by_title('Group-level milestone').iid).to eq(2)
......@@ -518,19 +519,21 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
context 'with external authorization classification labels' do
before do
setup_import_export_config('light')
end
it 'converts empty external classification authorization labels to nil' do
project.create_import_data(data: { override_params: { external_authorization_classification_label: "" } })
restored_project_json
expect(restored_project_json).to eq(true)
expect(project.external_authorization_classification_label).to be_nil
end
it 'preserves valid external classification authorization labels' do
project.create_import_data(data: { override_params: { external_authorization_classification_label: "foobar" } })
restored_project_json
expect(restored_project_json).to eq(true)
expect(project.external_authorization_classification_label).to eq("foobar")
end
end
......
......@@ -3,6 +3,8 @@
require 'spec_helper'
describe Gitlab::ImportExport::RelationRenameService do
include ImportExport::CommonUtil
let(:renames) do
{
'example_relation1' => 'new_example_relation1',
......@@ -21,12 +23,12 @@ describe Gitlab::ImportExport::RelationRenameService do
context 'when importing' do
let(:project_tree_restorer) { Gitlab::ImportExport::ProjectTreeRestorer.new(user: user, shared: shared, project: project) }
let(:import_path) { 'spec/fixtures/lib/gitlab/import_export' }
let(:file_content) { IO.read("#{import_path}/project.json") }
let!(:json_file) { ActiveSupport::JSON.decode(file_content) }
let(:file_content) { IO.read(File.join(shared.export_path, 'project.json')) }
let(:json_file) { ActiveSupport::JSON.decode(file_content) }
before do
allow(shared).to receive(:export_path).and_return(import_path)
setup_import_export_config('complex')
allow(ActiveSupport::JSON).to receive(:decode).and_call_original
allow(ActiveSupport::JSON).to receive(:decode).with(file_content).and_return(json_file)
end
......
......@@ -3342,22 +3342,6 @@ describe Project do
end
end
describe '#append_or_update_attribute' do
let(:project) { create(:project) }
it 'shows full error updating an invalid MR' do
expect { project.append_or_update_attribute(:merge_requests, [create(:merge_request)]) }
.to raise_error(ActiveRecord::RecordInvalid, /Failed to set merge_requests:/)
end
it 'updates the project successfully' do
merge_request = create(:merge_request, target_project: project, source_project: project)
expect { project.append_or_update_attribute(:merge_requests, [merge_request]) }
.not_to raise_error
end
end
describe '#update' do
let(:project) { create(:project) }
......
......@@ -8,5 +8,12 @@ module ImportExport
File.open("#{tmpdir}/test", 'w') { |file| file.write("test") }
FileUtils.ln_s("#{tmpdir}/test", "#{tmpdir}/#{symlink_name}")
end
def setup_import_export_config(name, prefix = nil)
export_path = [prefix, 'spec', 'fixtures', 'lib', 'gitlab', 'import_export', name].compact
export_path = File.join(*export_path)
allow_any_instance_of(Gitlab::ImportExport).to receive(:export_path) { export_path }
end
end
end
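A hypothetical call site for the new helper, mirroring how the specs in this MR use it; the resulting paths follow from the File.join above:

# stubs Gitlab::ImportExport's export_path, so fixtures are read from
# spec/fixtures/lib/gitlab/import_export/light
setup_import_export_config('light')

# with a prefix, fixtures come from ee/spec/fixtures/lib/gitlab/import_export/designs
setup_import_export_config('designs', 'ee')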