Commit 773ba745 authored by Robert Speicher

Merge branch '208803-streamin-serializer-and-writer' into 'master'

Resolve "Export via streaming serializer, introduce "Writer" abstraction"

Closes #208803

See merge request gitlab-org/gitlab!26501
parents f8585f97 38f289cf
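
At a glance, this MR separates building the export JSON from writing it: a streaming serializer walks the project's relation tree and hands each already-serialized fragment to a "writer", so the whole tree never has to be materialized as one in-memory hash. Below is a minimal sketch of the writer contract the serializer relies on; it is illustrative only, InMemoryWriter is a hypothetical name, and the real implementation added in this MR is Gitlab::ImportExport::JSON::LegacyWriter.

# Hypothetical writer, shown only to illustrate the interface StreamingSerializer targets.
class InMemoryWriter
  def initialize
    @data = {}
  end

  def set(hash)                 # root-level attributes, written once
    @data.merge!(hash)
  end

  def write(key, value)         # a single association, e.g. a has_one relation
    @data[key.to_s] = value
  end

  def append(key, value)        # one record of a collection, e.g. each issue in turn
    (@data[key.to_s] ||= []) << value
  end

  def close; end                # teardown hook; LegacyWriter closes its file here
end
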
# frozen_string_literal: true

module Projects
  module ImportExport
    class ProjectExportPresenter < Gitlab::View::Presenter::Delegated
      include ActiveModel::Serializers::JSON

      presents :project

      def project_members
        super + converted_group_members
      end

      def description
        self.respond_to?(:override_description) ? override_description : super
      end

      private

      def converted_group_members
        group_members.each do |group_member|
          group_member.source_type = 'Project' # Make group members project members of the future import
        end
      end

      # rubocop: disable CodeReuse/ActiveRecord
      def group_members
        return [] unless current_user.can?(:admin_group, project.group)

        # We need `.where.not(user_id: nil)` here otherwise when a group has an
        # invitee, it would make the following query return 0 rows since a NULL
        # user_id would be present in the subquery
        # See http://stackoverflow.com/questions/129077/not-in-clause-and-null-values
        non_null_user_ids = project.project_members.where.not(user_id: nil).select(:user_id)

        GroupMembersFinder.new(project.group).execute.where.not(user_id: non_null_user_ids)
      end
      # rubocop: enable CodeReuse/ActiveRecord
    end
  end
end
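
For context, the presenter above is what the new export path hands to the serializer in place of the bare project: it lets the exporter override the description and fold eligible group members into project_members without mutating the record. A quick, purely illustrative console sketch (admin_user is a stand-in variable, not something from this MR):

presenter = Projects::ImportExport::ProjectExportPresenter.new(
  project,
  current_user: admin_user,               # needs :admin_group on the project's group for group members to be exported
  override_description: 'Exported copy'   # optional; #description falls back to the project's own description
)

presenter.description      # => "Exported copy"
presenter.project_members  # project members plus group members re-typed with source_type 'Project'
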
@@ -54,7 +54,16 @@ module Projects
       end
 
       def project_tree_saver
-        Gitlab::ImportExport::Project::TreeSaver.new(project: project, current_user: current_user, shared: shared, params: params)
+        tree_saver_class.new(project: project, current_user: current_user, shared: shared, params: params)
+      end
+
+      def tree_saver_class
+        if ::Feature.enabled?(:streaming_serializer, project)
+          Gitlab::ImportExport::Project::TreeSaver
+        else
+          # Once we remove :streaming_serializer feature flag, Project::LegacyTreeSaver should be removed as well
+          Gitlab::ImportExport::Project::LegacyTreeSaver
+        end
       end
 
       def uploads_saver
...
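
The new code path is guarded by the :streaming_serializer feature flag checked above, so it can be rolled out per project. A hedged sketch of how the flag could be toggled from a Rails console, using the standard Feature API rather than anything introduced in this diff (the project path is illustrative):

# Route exports for a single project through the new streaming serializer
project = Project.find_by_full_path('some-group/some-project')
Feature.enable(:streaming_serializer, project)

# Fall back to Project::LegacyTreeSaver again
Feature.disable(:streaming_serializer, project)
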
@@ -185,6 +185,6 @@ describe Gitlab::ImportExport::Group::TreeSaver do
   end
 
   def group_json(filename)
-    JSON.parse(IO.read(filename))
+    ::JSON.parse(IO.read(filename))
   end
 end
@@ -80,6 +80,6 @@ describe Gitlab::ImportExport::Project::TreeSaver do
   end
 
   def project_json(filename)
-    JSON.parse(IO.read(filename))
+    ::JSON.parse(IO.read(filename))
   end
 end
@@ -49,7 +49,7 @@ module Gitlab
       end
 
       def tree_saver
-        @tree_saver ||= RelationTreeSaver.new
+        @tree_saver ||= LegacyRelationTreeSaver.new
       end
     end
   end
...
# frozen_string_literal: true

module Gitlab
  module ImportExport
    module JSON
      class LegacyWriter
        include Gitlab::ImportExport::CommandLineUtil

        attr_reader :path

        def initialize(path)
          @path = path
          @last_array = nil
          @keys = Set.new

          mkdir_p(File.dirname(@path))
          file.write('{}')
        end

        def close
          @file&.close
          @file = nil
        end

        def set(hash)
          hash.each do |key, value|
            write(key, value)
          end
        end

        def write(key, value)
          raise ArgumentError, "key '#{key}' already written" if @keys.include?(key)

          # rewind by one byte, to overwrite '}'
          file.pos = file.size - 1

          file.write(',') if @keys.any?
          file.write(key.to_json)
          file.write(':')
          file.write(value.to_json)
          file.write('}')

          @keys.add(key)
          @last_array = nil
          @last_array_count = nil
        end

        def append(key, value)
          unless @last_array == key
            write(key, [])

            @last_array = key
            @last_array_count = 0
          end

          # rewind by two bytes, to overwrite ']}'
          file.pos = file.size - 2

          file.write(',') if @last_array_count > 0
          file.write(value.to_json)
          file.write(']}')
          @last_array_count += 1
        end

        private

        def file
          @file ||= File.open(@path, "wb")
        end
      end
    end
  end
end
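
To make the writer's incremental behaviour concrete, here is a small usage sketch (illustrative path and values, not part of the MR). Each call patches the trailing '}' or ']}' of the file in place rather than rewriting the document:

writer = Gitlab::ImportExport::JSON::LegacyWriter.new('/tmp/export/project.json')

writer.set('description' => 'My project')       # top-level attributes: {"description":"My project"}
writer.append('issues', { 'title' => 'Bug' })   # first append opens the "issues" array
writer.append('issues', { 'title' => 'Task' })  # subsequent appends extend the same array
writer.close

# The file now contains:
# {"description":"My project","issues":[{"title":"Bug"},{"title":"Task"}]}
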
# frozen_string_literal: true

module Gitlab
  module ImportExport
    module JSON
      class StreamingSerializer
        include Gitlab::ImportExport::CommandLineUtil

        BATCH_SIZE = 100

        class Raw < String
          def to_json(*_args)
            to_s
          end
        end

        def initialize(exportable, relations_schema, json_writer)
          @exportable = exportable
          @relations_schema = relations_schema
          @json_writer = json_writer
        end

        def execute
          serialize_root

          includes.each do |relation_definition|
            serialize_relation(relation_definition)
          end
        end

        private

        attr_reader :json_writer, :relations_schema, :exportable

        def serialize_root
          attributes = exportable.as_json(
            relations_schema.merge(include: nil, preloads: nil))
          json_writer.set(attributes)
        end

        def serialize_relation(definition)
          raise ArgumentError, 'definition needs to be Hash' unless definition.is_a?(Hash)
          raise ArgumentError, 'definition needs to have exactly one Hash element' unless definition.one?

          key, options = definition.first

          record = exportable.public_send(key) # rubocop: disable GitlabSecurity/PublicSend
          if record.is_a?(ActiveRecord::Relation)
            serialize_many_relations(key, record, options)
          else
            serialize_single_relation(key, record, options)
          end
        end

        def serialize_many_relations(key, records, options)
          key_preloads = preloads&.dig(key)
          records = records.preload(key_preloads) if key_preloads

          records.find_each(batch_size: BATCH_SIZE) do |record|
            json = Raw.new(record.to_json(options))
            json_writer.append(key, json)
          end
        end

        def serialize_single_relation(key, record, options)
          json = Raw.new(record.to_json(options))

          json_writer.write(key, json)
        end

        def includes
          relations_schema[:include]
        end

        def preloads
          relations_schema[:preload]
        end
      end
    end
  end
end
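
The Raw subclass above exists because the writer calls to_json on every value it receives; each record is already serialized to a JSON string by record.to_json(options), and a plain String would be encoded a second time. A short illustration of the difference (return values shown as comments):

already_serialized = '{"id":1}'

already_serialized.to_json
# => "\"{\\\"id\\\":1}\""   a plain String is re-encoded as a JSON string literal

Gitlab::ImportExport::JSON::StreamingSerializer::Raw.new(already_serialized).to_json
# => "{\"id\":1}"           Raw passes the fragment through verbatim
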
@@ -2,7 +2,7 @@
 
 module Gitlab
   module ImportExport
-    class RelationTreeSaver
+    class LegacyRelationTreeSaver
       include Gitlab::ImportExport::CommandLineUtil
 
       def serialize(exportable, relations_tree)
...
# frozen_string_literal: true

module Gitlab
  module ImportExport
    module Project
      class LegacyTreeSaver
        attr_reader :full_path

        def initialize(project:, current_user:, shared:, params: {})
          @params = params
          @project = project
          @current_user = current_user
          @shared = shared
          @full_path = File.join(@shared.export_path, ImportExport.project_filename)
        end

        def save
          project_tree = tree_saver.serialize(@project, reader.project_tree)
          fix_project_tree(project_tree)
          tree_saver.save(project_tree, @shared.export_path, ImportExport.project_filename)

          true
        rescue => e
          @shared.error(e)
          false
        end

        private

        # Aware that the resulting hash needs to be pure-hash and
        # does not include any AR objects anymore, only objects that run `.to_json`
        def fix_project_tree(project_tree)
          if @params[:description].present?
            project_tree['description'] = @params[:description]
          end

          project_tree['project_members'] += group_members_array
        end

        def reader
          @reader ||= Gitlab::ImportExport::Reader.new(shared: @shared)
        end

        def group_members_array
          group_members.as_json(reader.group_members_tree).each do |group_member|
            group_member['source_type'] = 'Project' # Make group members project members of the future import
          end
        end

        def group_members
          return [] unless @current_user.can?(:admin_group, @project.group)

          # We need `.where.not(user_id: nil)` here otherwise when a group has an
          # invitee, it would make the following query return 0 rows since a NULL
          # user_id would be present in the subquery
          # See http://stackoverflow.com/questions/129077/not-in-clause-and-null-values
          non_null_user_ids = @project.project_members.where.not(user_id: nil).select(:user_id)

          GroupMembersFinder.new(@project.group).execute.where.not(user_id: non_null_user_ids)
        end

        def tree_saver
          @tree_saver ||= Gitlab::ImportExport::LegacyRelationTreeSaver.new
        end
      end
    end
  end
end
@@ -15,52 +15,40 @@ module Gitlab
         end
 
         def save
-          project_tree = tree_saver.serialize(@project, reader.project_tree)
-          fix_project_tree(project_tree)
-          tree_saver.save(project_tree, @shared.export_path, ImportExport.project_filename)
+          json_writer = ImportExport::JSON::LegacyWriter.new(@full_path)
+
+          serializer = ImportExport::JSON::StreamingSerializer.new(exportable, reader.project_tree, json_writer)
+          serializer.execute
 
           true
         rescue => e
           @shared.error(e)
           false
+        ensure
+          json_writer&.close
         end
 
         private
 
-        # Aware that the resulting hash needs to be pure-hash and
-        # does not include any AR objects anymore, only objects that run `.to_json`
-        def fix_project_tree(project_tree)
-          if @params[:description].present?
-            project_tree['description'] = @params[:description]
-          end
-
-          project_tree['project_members'] += group_members_array
-        end
-
         def reader
           @reader ||= Gitlab::ImportExport::Reader.new(shared: @shared)
         end
 
-        def group_members_array
-          group_members.as_json(reader.group_members_tree).each do |group_member|
-            group_member['source_type'] = 'Project' # Make group members project members of the future import
-          end
+        def exportable
+          @project.present(exportable_params)
         end
 
-        def group_members
-          return [] unless @current_user.can?(:admin_group, @project.group)
-
-          # We need `.where.not(user_id: nil)` here otherwise when a group has an
-          # invitee, it would make the following query return 0 rows since a NULL
-          # user_id would be present in the subquery
-          # See http://stackoverflow.com/questions/129077/not-in-clause-and-null-values
-          non_null_user_ids = @project.project_members.where.not(user_id: nil).select(:user_id)
-
-          GroupMembersFinder.new(@project.group).execute.where.not(user_id: non_null_user_ids)
+        def exportable_params
+          params = {
+            presenter_class: presenter_class,
+            current_user: @current_user
+          }
+          params[:override_description] = @params[:description] if @params[:description].present?
+          params
         end
 
-        def tree_saver
-          @tree_saver ||= RelationTreeSaver.new
+        def presenter_class
+          Projects::ImportExport::ProjectExportPresenter
         end
       end
     end
...
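
Putting the pieces together, the rewritten save above composes presenter, serializer and writer. A condensed sketch of the flow when :streaming_serializer is enabled (names are from this MR, local variables are illustrative):

exportable = project.present(
  presenter_class: Projects::ImportExport::ProjectExportPresenter,
  current_user: current_user
)

json_writer = Gitlab::ImportExport::JSON::LegacyWriter.new(full_path)

begin
  Gitlab::ImportExport::JSON::StreamingSerializer
    .new(exportable, reader.project_tree, json_writer)
    .execute                 # root attributes via #set, then relations in batches of 100 via #append/#write
ensure
  json_writer.close          # mirrors the ensure block in TreeSaver#save
end
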
@@ -19,7 +19,6 @@ namespace :gitlab do
 
       if ENV['EXPORT_DEBUG'].present?
         ActiveRecord::Base.logger = logger
-        Gitlab::Metrics::Exporter::SidekiqExporter.instance.start
         logger.level = Logger::DEBUG
       else
         logger.level = Logger::INFO
...
@@ -23,7 +23,6 @@ namespace :gitlab do
 
       if ENV['IMPORT_DEBUG'].present?
         ActiveRecord::Base.logger = logger
-        Gitlab::Metrics::Exporter::SidekiqExporter.instance.start
         logger.level = Logger::DEBUG
       else
         logger.level = Logger::INFO
...
@@ -197,6 +197,6 @@ describe Gitlab::ImportExport::Group::TreeSaver do
   end
 
   def group_json(filename)
-    JSON.parse(IO.read(filename))
+    ::JSON.parse(IO.read(filename))
   end
 end
# frozen_string_literal: true

require 'spec_helper'

describe Gitlab::ImportExport::JSON::LegacyWriter do
  let(:path) { "#{Dir.tmpdir}/legacy_writer_spec/test.json" }

  subject { described_class.new(path) }

  after do
    FileUtils.rm_rf(path)
  end

  describe "#write" do
    context "when key is already written" do
      it "raises exception" do
        key = "key"
        value = "value"
        subject.write(key, value)

        expect { subject.write(key, "new value") }.to raise_exception("key '#{key}' already written")
      end
    end

    context "when key is not already written" do
      context "when multiple key value pairs are stored" do
        it "writes correct json" do
          expected_hash = { "key" => "value_1", "key_1" => "value_2" }
          expected_hash.each do |key, value|
            subject.write(key, value)
          end
          subject.close

          expect(saved_json(path)).to eq(expected_hash)
        end
      end
    end
  end

  describe "#append" do
    context "when key is already written" do
      it "appends values under a given key" do
        key = "key"
        values = %w(value_1 value_2)
        expected_hash = { key => values }
        values.each do |value|
          subject.append(key, value)
        end
        subject.close

        expect(saved_json(path)).to eq(expected_hash)
      end
    end

    context "when key is not already written" do
      it "writes correct json" do
        expected_hash = { "key" => ["value"] }
        subject.append("key", "value")
        subject.close

        expect(saved_json(path)).to eq(expected_hash)
      end
    end
  end

  describe "#set" do
    it "writes correct json" do
      expected_hash = { "key" => "value_1", "key_1" => "value_2" }
      subject.set(expected_hash)
      subject.close

      expect(saved_json(path)).to eq(expected_hash)
    end
  end

  def saved_json(filename)
    ::JSON.parse(IO.read(filename))
  end
end
@@ -2,7 +2,7 @@
 
 require 'spec_helper'
 
-describe Gitlab::ImportExport::RelationTreeSaver do
+describe Gitlab::ImportExport::LegacyRelationTreeSaver do
   let(:exportable) { create(:group) }
  let(:relation_tree_saver) { described_class.new }
  let(:tree) { {} }
...
# frozen_string_literal: true

require 'spec_helper'

describe Gitlab::ImportExport::Project::LegacyTreeSaver do
  describe 'saves the project tree into a json object' do
    let(:shared) { project.import_export_shared }
    let(:project_tree_saver) { described_class.new(project: project, current_user: user, shared: shared) }
    let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
    let(:user) { create(:user) }
    let!(:project) { setup_project }

    before do
      project.add_maintainer(user)
      allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
      allow_any_instance_of(MergeRequest).to receive(:source_branch_sha).and_return('ABCD')
      allow_any_instance_of(MergeRequest).to receive(:target_branch_sha).and_return('DCBA')
    end

    after do
      FileUtils.rm_rf(export_path)
    end

    it 'saves project successfully' do
      expect(project_tree_saver.save).to be true
    end

    context ':export_fast_serialize feature flag checks' do
      before do
        expect(Gitlab::ImportExport::Reader).to receive(:new).with(shared: shared).and_return(reader)
        expect(reader).to receive(:project_tree).and_return(project_tree)
      end

      let(:serializer) { instance_double('Gitlab::ImportExport::FastHashSerializer') }
      let(:reader) { instance_double('Gitlab::ImportExport::Reader') }
      let(:project_tree) do
        {
          include: [{ issues: { include: [] } }],
          preload: { issues: nil }
        }
      end

      context 'when :export_fast_serialize feature is enabled' do
        before do
          stub_feature_flags(export_fast_serialize: true)
        end

        it 'uses FastHashSerializer' do
          expect(Gitlab::ImportExport::FastHashSerializer)
            .to receive(:new)
            .with(project, project_tree)
            .and_return(serializer)
          expect(serializer).to receive(:execute)

          project_tree_saver.save
        end
      end

      context 'when :export_fast_serialize feature is disabled' do
        before do
          stub_feature_flags(export_fast_serialize: false)
        end

        it 'is serialized via built-in `as_json`' do
          expect(project).to receive(:as_json).with(project_tree)

          project_tree_saver.save
        end
      end
    end

    # It is mostly duplicated in
    # `spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb`
    # except:
    # context 'with description override' do
    # context 'group members' do
    # ^ These are specific for the Project::TreeSaver
    context 'JSON' do
      let(:saved_project_json) do
        project_tree_saver.save
        project_json(project_tree_saver.full_path)
      end

      # It is not duplicated in
      # `spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb`
      context 'with description override' do
        let(:params) { { description: 'Foo Bar' } }
        let(:project_tree_saver) { described_class.new(project: project, current_user: user, shared: shared, params: params) }

        it 'overrides the project description' do
          expect(saved_project_json).to include({ 'description' => params[:description] })
        end
      end

      it 'saves the correct json' do
        expect(saved_project_json).to include({ 'description' => 'description', 'visibility_level' => 20 })
      end

      it 'has approvals_before_merge set' do
        expect(saved_project_json['approvals_before_merge']).to eq(1)
      end

      it 'has milestones' do
        expect(saved_project_json['milestones']).not_to be_empty
      end

      it 'has merge requests' do
        expect(saved_project_json['merge_requests']).not_to be_empty
      end

      it 'has merge request\'s milestones' do
        expect(saved_project_json['merge_requests'].first['milestone']).not_to be_empty
      end

      it 'has merge request\'s source branch SHA' do
        expect(saved_project_json['merge_requests'].first['source_branch_sha']).to eq('ABCD')
      end

      it 'has merge request\'s target branch SHA' do
        expect(saved_project_json['merge_requests'].first['target_branch_sha']).to eq('DCBA')
      end

      it 'has events' do
        expect(saved_project_json['merge_requests'].first['milestone']['events']).not_to be_empty
      end

      it 'has snippets' do
        expect(saved_project_json['snippets']).not_to be_empty
      end

      it 'has snippet notes' do
        expect(saved_project_json['snippets'].first['notes']).not_to be_empty
      end

      it 'has releases' do
        expect(saved_project_json['releases']).not_to be_empty
      end

      it 'has no author on releases' do
        expect(saved_project_json['releases'].first['author']).to be_nil
      end

      it 'has the author ID on releases' do
        expect(saved_project_json['releases'].first['author_id']).not_to be_nil
      end

      it 'has issues' do
        expect(saved_project_json['issues']).not_to be_empty
      end

      it 'has issue comments' do
        notes = saved_project_json['issues'].first['notes']

        expect(notes).not_to be_empty
        expect(notes.first['type']).to eq('DiscussionNote')
      end

      it 'has issue assignees' do
        expect(saved_project_json['issues'].first['issue_assignees']).not_to be_empty
      end

      it 'has author on issue comments' do
        expect(saved_project_json['issues'].first['notes'].first['author']).not_to be_empty
      end

      it 'has project members' do
        expect(saved_project_json['project_members']).not_to be_empty
      end

      it 'has merge requests diffs' do
        expect(saved_project_json['merge_requests'].first['merge_request_diff']).not_to be_empty
      end

      it 'has merge request diff files' do
        expect(saved_project_json['merge_requests'].first['merge_request_diff']['merge_request_diff_files']).not_to be_empty
      end

      it 'has merge request diff commits' do
        expect(saved_project_json['merge_requests'].first['merge_request_diff']['merge_request_diff_commits']).not_to be_empty
      end

      it 'has merge requests comments' do
        expect(saved_project_json['merge_requests'].first['notes']).not_to be_empty
      end

      it 'has author on merge requests comments' do
        expect(saved_project_json['merge_requests'].first['notes'].first['author']).not_to be_empty
      end

      it 'has pipeline stages' do
        expect(saved_project_json.dig('ci_pipelines', 0, 'stages')).not_to be_empty
      end

      it 'has pipeline statuses' do
        expect(saved_project_json.dig('ci_pipelines', 0, 'stages', 0, 'statuses')).not_to be_empty
      end

      it 'has pipeline builds' do
        builds_count = saved_project_json
          .dig('ci_pipelines', 0, 'stages', 0, 'statuses')
          .count { |hash| hash['type'] == 'Ci::Build' }

        expect(builds_count).to eq(1)
      end

      it 'has no when YML attributes but only the DB column' do
        expect_any_instance_of(Gitlab::Ci::YamlProcessor).not_to receive(:build_attributes)

        saved_project_json
      end

      it 'has pipeline commits' do
        expect(saved_project_json['ci_pipelines']).not_to be_empty
      end

      it 'has ci pipeline notes' do
        expect(saved_project_json['ci_pipelines'].first['notes']).not_to be_empty
      end

      it 'has labels with no associations' do
        expect(saved_project_json['labels']).not_to be_empty
      end

      it 'has labels associated to records' do
        expect(saved_project_json['issues'].first['label_links'].first['label']).not_to be_empty
      end

      it 'has project and group labels' do
        label_types = saved_project_json['issues'].first['label_links'].map { |link| link['label']['type'] }

        expect(label_types).to match_array(%w(ProjectLabel GroupLabel))
      end

      it 'has priorities associated to labels' do
        priorities = saved_project_json['issues'].first['label_links'].flat_map { |link| link['label']['priorities'] }

        expect(priorities).not_to be_empty
      end

      it 'has issue resource label events' do
        expect(saved_project_json['issues'].first['resource_label_events']).not_to be_empty
      end

      it 'has merge request resource label events' do
        expect(saved_project_json['merge_requests'].first['resource_label_events']).not_to be_empty
      end

      it 'saves the correct service type' do
        expect(saved_project_json['services'].first['type']).to eq('CustomIssueTrackerService')
      end

      it 'saves the properties for a service' do
        expect(saved_project_json['services'].first['properties']).to eq('one' => 'value')
      end

      it 'has project feature' do
        project_feature = saved_project_json['project_feature']
        expect(project_feature).not_to be_empty
        expect(project_feature["issues_access_level"]).to eq(ProjectFeature::DISABLED)
        expect(project_feature["wiki_access_level"]).to eq(ProjectFeature::ENABLED)
        expect(project_feature["builds_access_level"]).to eq(ProjectFeature::PRIVATE)
      end

      it 'has custom attributes' do
        expect(saved_project_json['custom_attributes'].count).to eq(2)
      end

      it 'has badges' do
        expect(saved_project_json['project_badges'].count).to eq(2)
      end

      it 'does not complain about non UTF-8 characters in MR diff files' do
        ActiveRecord::Base.connection.execute("UPDATE merge_request_diff_files SET diff = '---\n- :diff: !binary |-\n LS0tIC9kZXYvbnVsbAorKysgYi9pbWFnZXMvbnVjb3IucGRmCkBAIC0wLDAg\n KzEsMTY3OSBAQAorJVBERi0xLjUNJeLjz9MNCisxIDAgb2JqDTw8L01ldGFk\n YXR'")

        expect(project_tree_saver.save).to be true
      end

      context 'group members' do
        let(:user2) { create(:user, email: 'group@member.com') }
        let(:member_emails) do
          saved_project_json['project_members'].map do |pm|
            pm['user']['email']
          end
        end

        before do
          Group.first.add_developer(user2)
        end

        it 'does not export group members if it has no permission' do
          Group.first.add_developer(user)

          expect(member_emails).not_to include('group@member.com')
        end

        it 'does not export group members as maintainer' do
          Group.first.add_maintainer(user)

          expect(member_emails).not_to include('group@member.com')
        end

        it 'exports group members as group owner' do
          Group.first.add_owner(user)

          expect(member_emails).to include('group@member.com')
        end

        context 'as admin' do
          let(:user) { create(:admin) }

          it 'exports group members as admin' do
            expect(member_emails).to include('group@member.com')
          end

          it 'exports group members as project members' do
            member_types = saved_project_json['project_members'].map { |pm| pm['source_type'] }

            expect(member_types).to all(eq('Project'))
          end
        end
      end

      context 'project attributes' do
        it 'does not contain the runners token' do
          expect(saved_project_json).not_to include("runners_token" => 'token')
        end
      end

      it 'has a board and a list' do
        expect(saved_project_json['boards'].first['lists']).not_to be_empty
      end
    end
  end

  def setup_project
    release = create(:release)
    group = create(:group)

    project = create(:project,
                     :public,
                     :repository,
                     :issues_disabled,
                     :wiki_enabled,
                     :builds_private,
                     description: 'description',
                     releases: [release],
                     group: group,
                     approvals_before_merge: 1
                    )
    allow(project).to receive(:commit).and_return(Commit.new(RepoHelpers.sample_commit, project))

    issue = create(:issue, assignees: [user], project: project)
    snippet = create(:project_snippet, project: project)
    project_label = create(:label, project: project)
    group_label = create(:group_label, group: group)
    create(:label_link, label: project_label, target: issue)
    create(:label_link, label: group_label, target: issue)
    create(:label_priority, label: group_label, priority: 1)
    milestone = create(:milestone, project: project)
    merge_request = create(:merge_request, source_project: project, milestone: milestone)

    ci_build = create(:ci_build, project: project, when: nil)
    ci_build.pipeline.update(project: project)
    create(:commit_status, project: project, pipeline: ci_build.pipeline)

    create(:milestone, project: project)
    create(:discussion_note, noteable: issue, project: project)
    create(:note, noteable: merge_request, project: project)
    create(:note, noteable: snippet, project: project)
    create(:note_on_commit,
           author: user,
           project: project,
           commit_id: ci_build.pipeline.sha)

    create(:resource_label_event, label: project_label, issue: issue)
    create(:resource_label_event, label: group_label, merge_request: merge_request)

    create(:event, :created, target: milestone, project: project, author: user)
    create(:service, project: project, type: 'CustomIssueTrackerService', category: 'issue_tracker', properties: { one: 'value' })

    create(:project_custom_attribute, project: project)
    create(:project_custom_attribute, project: project)

    create(:project_badge, project: project)
    create(:project_badge, project: project)

    board = create(:board, project: project, name: 'TestBoard')
    create(:list, board: board, position: 0, label: project_label)

    project
  end

  def project_json(filename)
    ::JSON.parse(IO.read(filename))
  end
end
@@ -25,57 +25,6 @@ describe Gitlab::ImportExport::Project::TreeSaver do
       expect(project_tree_saver.save).to be true
     end
 
-    context ':export_fast_serialize feature flag checks' do
-      before do
-        expect(Gitlab::ImportExport::Reader).to receive(:new).with(shared: shared).and_return(reader)
-        expect(reader).to receive(:project_tree).and_return(project_tree)
-      end
-
-      let(:serializer) { instance_double('Gitlab::ImportExport::FastHashSerializer') }
-      let(:reader) { instance_double('Gitlab::ImportExport::Reader') }
-      let(:project_tree) do
-        {
-          include: [{ issues: { include: [] } }],
-          preload: { issues: nil }
-        }
-      end
-
-      context 'when :export_fast_serialize feature is enabled' do
-        before do
-          stub_feature_flags(export_fast_serialize: true)
-        end
-
-        it 'uses FastHashSerializer' do
-          expect(Gitlab::ImportExport::FastHashSerializer)
-            .to receive(:new)
-            .with(project, project_tree)
-            .and_return(serializer)
-          expect(serializer).to receive(:execute)
-
-          project_tree_saver.save
-        end
-      end
-
-      context 'when :export_fast_serialize feature is disabled' do
-        before do
-          stub_feature_flags(export_fast_serialize: false)
-        end
-
-        it 'is serialized via built-in `as_json`' do
-          expect(project).to receive(:as_json).with(project_tree)
-
-          project_tree_saver.save
-        end
-      end
-    end
-
-    # It is mostly duplicated in
-    # `spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb`
-    # except:
-    # context 'with description override' do
-    # context 'group members' do
-    # ^ These are specific for the Project::TreeSaver
     context 'JSON' do
       let(:saved_project_json) do
         project_tree_saver.save
...
@@ -392,6 +341,6 @@ describe Gitlab::ImportExport::Project::TreeSaver do
   end
 
   def project_json(filename)
-    JSON.parse(IO.read(filename))
+    ::JSON.parse(IO.read(filename))
   end
 end
# frozen_string_literal: true

require 'spec_helper'

describe Projects::ImportExport::ProjectExportPresenter do
  let_it_be(:group) { create(:group) }
  let_it_be(:project) { create(:project, group: group) }
  let_it_be(:user) { create(:user) }

  subject { described_class.new(project, current_user: user) }

  describe '#description' do
    context "override_description not provided" do
      it "keeps original description" do
        expect(subject.description).to eq(project.description)
      end
    end

    context "override_description provided" do
      let(:description) { "overridden description" }

      subject { described_class.new(project, current_user: user, override_description: description) }

      it "overrides description" do
        expect(subject.description).to eq(description)
      end
    end
  end

  describe '#as_json' do
    context "override_description not provided" do
      it "keeps original description" do
        expect(subject.as_json["description"]).to eq(project.description)
      end
    end

    context "override_description provided" do
      let(:description) { "overridden description" }

      subject { described_class.new(project, current_user: user, override_description: description) }

      it "overrides description" do
        expect(subject.as_json["description"]).to eq(description)
      end
    end
  end

  describe '#project_members' do
    let(:user2) { create(:user, email: 'group@member.com') }
    let(:member_emails) do
      subject.project_members.map do |pm|
        pm.user.email
      end
    end

    before do
      group.add_developer(user2)
    end

    it 'does not export group members if it has no permission' do
      group.add_developer(user)

      expect(member_emails).not_to include('group@member.com')
    end

    it 'does not export group members as maintainer' do
      group.add_maintainer(user)

      expect(member_emails).not_to include('group@member.com')
    end

    it 'exports group members as group owner' do
      group.add_owner(user)

      expect(member_emails).to include('group@member.com')
    end

    context 'as admin' do
      let(:user) { create(:admin) }

      it 'exports group members as admin' do
        expect(member_emails).to include('group@member.com')
      end

      it 'exports group members as project members' do
        member_types = subject.project_members.map { |pm| pm.source_type }

        expect(member_types).to all(eq('Project'))
      end
    end
  end
end
@@ -26,11 +26,29 @@ describe Projects::ImportExport::ExportService do
       service.execute
     end
 
+    context 'when :streaming_serializer feature is enabled' do
+      before do
+        stub_feature_flags(streaming_serializer: true)
+      end
+
     it 'saves the models' do
       expect(Gitlab::ImportExport::Project::TreeSaver).to receive(:new).and_call_original
 
       service.execute
     end
+    end
+
+    context 'when :streaming_serializer feature is disabled' do
+      before do
+        stub_feature_flags(streaming_serializer: false)
+      end
+
+      it 'saves the models' do
+        expect(Gitlab::ImportExport::Project::LegacyTreeSaver).to receive(:new).and_call_original
+
+        service.execute
+      end
+    end
 
     it 'saves the uploads' do
       expect(Gitlab::ImportExport::UploadsSaver).to receive(:new).and_call_original
...