Commit 6879fa53 authored by George Koltsov

Add merge requests to Project Migration

  - Add merge requests to Project Migration when
    using Group Migration tool
parent 106972a8
# frozen_string_literal: true

module BulkImports
  module Projects
    module Pipelines
      class MergeRequestsPipeline
        include NdjsonPipeline

        relation_name 'merge_requests'

        extractor ::BulkImports::Common::Extractors::NdjsonExtractor, relation: relation

        def after_run(_)
          context.portable.merge_requests.set_latest_merge_request_diff_ids!
        end
      end
    end
  end
end
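For context, the NDJSON extractor wired in above hands the pipeline one [attributes, relative_index] pair per exported merge request, which is also the shape stubbed in the spec further down (data: [[mr, 0]]). A minimal sketch of that data shape, assuming a merge_requests.ndjson relation file with one JSON document per line; the file name and parsing here are illustrative, not the actual extractor implementation:

require 'json'

# Hypothetical merge_requests.ndjson content: one JSON document per line.
ndjson = <<~NDJSON
  {"iid":7,"title":"Imported MR","state":"opened","source_branch":"feature","target_branch":"main"}
  {"iid":8,"title":"Another MR","state":"closed","source_branch":"fix","target_branch":"main"}
NDJSON

# Each line becomes an [attributes, relative_index] pair, roughly what
# BulkImports::Pipeline::ExtractedData wraps for the pipeline to transform and load.
tuples = ndjson.each_line.with_index.map { |line, index| [JSON.parse(line), index] }

tuples.each do |attributes, index|
  puts "#{index}: MR !#{attributes['iid']} (#{attributes['state']}) - #{attributes['title']}"
end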
@@ -17,10 +17,18 @@ module BulkImports
       def load(context, data)
         url = data['httpUrlToRepo']
         url = url.sub("://", "://oauth2:#{context.configuration.access_token}@")
+        project = context.portable
 
         Gitlab::UrlBlocker.validate!(url, allow_local_network: allow_local_requests?, allow_localhost: allow_local_requests?)
 
-        context.portable.repository.import_repository(url)
+        project.ensure_repository
+        project.repository.fetch_as_mirror(url)
       end
 
+      # The initial fetch can bring in lots of loose refs and objects.
+      # Running a `git gc` will make importing merge requests faster.
+      def after_run(_)
+        ::Repositories::HousekeepingService.new(context.portable, :gc).execute
+      end
+
       private
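As a quick illustration of the credential injection in load above: the bulk import access token is embedded into the clone URL as HTTP oauth2 credentials before the repository is fetched as a mirror. A standalone sketch with a made-up URL and token (both hypothetical):

url = 'https://gitlab.example.com/source/full/path.git' # hypothetical source URL
access_token = 'glpat-example-token'                    # hypothetical token

# Same substitution the pipeline performs on data['httpUrlToRepo'].
authenticated_url = url.sub('://', "://oauth2:#{access_token}@")

puts authenticated_url
# => https://oauth2:glpat-example-token@gitlab.example.com/source/full/path.git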
@@ -27,6 +27,10 @@ module BulkImports
           pipeline: BulkImports::Common::Pipelines::BoardsPipeline,
           stage: 4
         },
+        merge_requests: {
+          pipeline: BulkImports::Projects::Pipelines::MergeRequestsPipeline,
+          stage: 4
+        },
         uploads: {
           pipeline: BulkImports::Common::Pipelines::UploadsPipeline,
           stage: 5
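The numeric stage values group pipelines into ordered batches: the new merge_requests entry runs in stage 4 alongside boards, while uploads waits for stage 5 (the stage spec further down asserts the same pairs). A rough sketch of that grouping over a plain hash like the config above; this is only an illustration, not the actual BulkImports::Projects::Stage implementation:

config = {
  boards:         { pipeline: 'BulkImports::Common::Pipelines::BoardsPipeline', stage: 4 },
  merge_requests: { pipeline: 'BulkImports::Projects::Pipelines::MergeRequestsPipeline', stage: 4 },
  uploads:        { pipeline: 'BulkImports::Common::Pipelines::UploadsPipeline', stage: 5 }
}

# Pipelines sharing a stage number are grouped together; higher stages run later.
config.values.group_by { |entry| entry[:stage] }.sort.each do |stage, entries|
  puts "stage #{stage}: #{entries.map { |e| e[:pipeline] }.join(', ')}"
end
# stage 4: BulkImports::Common::Pipelines::BoardsPipeline, BulkImports::Projects::Pipelines::MergeRequestsPipeline
# stage 5: BulkImports::Common::Pipelines::UploadsPipeline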
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe BulkImports::Projects::Pipelines::MergeRequestsPipeline do
  let_it_be(:user) { create(:user) }
  let_it_be(:group) { create(:group) }
  let_it_be(:project) { create(:project, :repository, group: group) }
  let_it_be(:bulk_import) { create(:bulk_import, user: user) }

  let_it_be(:entity) do
    create(
      :bulk_import_entity,
      :project_entity,
      project: project,
      bulk_import: bulk_import,
      source_full_path: 'source/full/path',
      destination_name: 'My Destination Project',
      destination_namespace: group.full_path
    )
  end

  let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
  let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }

  let(:mr) do
    {
      'iid' => 7,
      'author_id' => 22,
      'source_project_id' => 1234,
      'target_project_id' => 1234,
      'title' => 'Imported MR',
      'description' => 'Description',
      'state' => 'opened',
      'source_branch' => 'feature',
      'target_branch' => 'main',
      'source_branch_sha' => 'ABCD',
      'target_branch_sha' => 'DCBA',
      'created_at' => '2020-06-14T15:02:47.967Z',
      'updated_at' => '2020-06-14T15:03:47.967Z',
      'merge_request_diff' => {
        'state' => 'collected',
        'base_commit_sha' => 'ae73cb07c9eeaf35924a10f713b364d32b2dd34f',
        'head_commit_sha' => 'a97f74ddaa848b707bea65441c903ae4bf5d844d',
        'start_commit_sha' => '9eea46b5c72ead701c22f516474b95049c9d9462',
        'merge_request_diff_commits' => [
          {
            'sha' => 'COMMIT1',
            'relative_order' => 0,
            'message' => 'commit message',
            'authored_date' => '2014-08-06T08:35:52.000+02:00',
            'committed_date' => '2014-08-06T08:35:52.000+02:00',
            'commit_author' => {
              'name' => 'Commit Author',
              'email' => 'gitlab@example.com'
            },
            'committer' => {
              'name' => 'Committer',
              'email' => 'committer@example.com'
            }
          }
        ],
        'merge_request_diff_files' => [
          {
            'relative_order' => 0,
            'utf8_diff' => '--- a/.gitignore\n+++ b/.gitignore\n@@ -1 +1 @@ test\n',
            'new_path' => '.gitignore',
            'old_path' => '.gitignore',
            'a_mode' => '100644',
            'b_mode' => '100644',
            'new_file' => false,
            'renamed_file' => false,
            'deleted_file' => false,
            'too_large' => false
          }
        ]
      }
    }.merge(attributes)
  end

  let(:attributes) { {} }
  let(:imported_mr) { project.merge_requests.find_by_title(mr['title']) }

  subject(:pipeline) { described_class.new(context) }

  describe '#run' do
    before do
      group.add_owner(user)

      allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
        allow(extractor).to receive(:remove_tmp_dir)
        allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[mr, 0]]))
      end

      allow(project.repository).to receive(:fetch_source_branch!).and_return(true)
      allow(project.repository).to receive(:branch_exists?).and_return(false)
      allow(project.repository).to receive(:create_branch)

      pipeline.run
    end

    it 'imports a merge request' do
      expect(project.merge_requests.count).to eq(1)
      expect(imported_mr.title).to eq(mr['title'])
      expect(imported_mr.description).to eq(mr['description'])
      expect(imported_mr.state).to eq(mr['state'])
      expect(imported_mr.iid).to eq(mr['iid'])
      expect(imported_mr.created_at).to eq(mr['created_at'])
      expect(imported_mr.updated_at).to eq(mr['updated_at'])
      expect(imported_mr.author).to eq(user)
    end

    context 'merge request state' do
      context 'when mr is closed' do
        let(:attributes) { { 'state' => 'closed' } }

        it 'imported mr as closed' do
          expect(imported_mr.state).to eq(attributes['state'])
        end
      end

      context 'when mr is merged' do
        let(:attributes) { { 'state' => 'merged' } }

        it 'imported mr as merged' do
          expect(imported_mr.state).to eq(attributes['state'])
        end
      end
    end

    context 'source & target project' do
      it 'has the new project as target' do
        expect(imported_mr.target_project).to eq(project)
      end

      it 'has the new project as source' do
        expect(imported_mr.source_project).to eq(project)
      end

      context 'when source/target projects differ' do
        let(:attributes) { { 'source_project_id' => 4321 } }

        it 'has no source' do
          expect(imported_mr.source_project).to be_nil
        end

        context 'when diff_head_sha is present' do
          let(:attributes) { { 'diff_head_sha' => 'HEAD', 'source_project_id' => 4321 } }

          it 'has the new project as source' do
            expect(imported_mr.source_project).to eq(project)
          end
        end
      end
    end

    context 'resource label events' do
      let(:attributes) { { 'resource_label_events' => [{ 'action' => 'add', 'user_id' => 1 }] } }

      it 'restores resource label events' do
        expect(imported_mr.resource_label_events.first.action).to eq('add')
      end
    end

    context 'award emoji' do
      let(:attributes) { { 'award_emoji' => [{ 'name' => 'tada', 'user_id' => 22 }] } }

      it 'has award emoji' do
        expect(imported_mr.award_emoji.first.name).to eq(attributes['award_emoji'].first['name'])
      end
    end

    context 'notes' do
      let(:note) { imported_mr.notes.first }
      let(:attributes) do
        {
          'notes' => [
            {
              'note' => 'Issue note',
              'note_html' => '<p>something else entirely</p>',
              'cached_markdown_version' => 917504,
              'author_id' => 22,
              'author' => { 'name' => 'User 22' },
              'created_at' => '2016-06-14T15:02:56.632Z',
              'updated_at' => '2016-06-14T15:02:47.770Z',
              'award_emoji' => [{ 'name' => 'clapper', 'user_id' => 22 }]
            }
          ]
        }
      end

      it 'imports mr note' do
        expect(note).to be_present
        expect(note.note).to include('By User 22')
        expect(note.note).to include(attributes['notes'].first['note'])
        expect(note.author).to eq(user)
      end

      it 'has award emoji' do
        emoji = note.award_emoji.first

        expect(emoji.name).to eq('clapper')
        expect(emoji.user).to eq(user)
      end

      it 'does not import note_html' do
        expect(note.note_html).to match(attributes['notes'].first['note'])
        expect(note.note_html).not_to match(attributes['notes'].first['note_html'])
      end
    end

    context 'system note metadata' do
      let(:attributes) do
        {
          'notes' => [
            {
              'note' => 'added 3 commits',
              'system' => true,
              'author_id' => 22,
              'author' => { 'name' => 'User 22' },
              'created_at' => '2016-06-14T15:02:56.632Z',
              'updated_at' => '2016-06-14T15:02:47.770Z',
              'system_note_metadata' => { 'action' => 'commit', 'commit_count' => 3 }
            }
          ]
        }
      end

      it 'restores system note metadata' do
        note = imported_mr.notes.first

        expect(note.system).to eq(true)
        expect(note.noteable_type).to eq('MergeRequest')
        expect(note.system_note_metadata.action).to eq('commit')
        expect(note.system_note_metadata.commit_count).to eq(3)
      end
    end

    context 'diffs' do
      it 'imports merge request diff' do
        expect(imported_mr.merge_request_diff).to be_present
      end

      it 'has the correct data for merge request latest_merge_request_diff' do
        expect(imported_mr.latest_merge_request_diff_id).to eq(imported_mr.merge_request_diffs.maximum(:id))
      end

      it 'imports diff files' do
        expect(imported_mr.merge_request_diff.merge_request_diff_files.count).to eq(1)
      end

      context 'diff commits' do
        it 'imports diff commits' do
          expect(imported_mr.merge_request_diff.merge_request_diff_commits.count).to eq(1)
        end

        it 'assigns committer and author details to diff commits' do
          commit = imported_mr.merge_request_diff.merge_request_diff_commits.first

          expect(commit.commit_author_id).not_to be_nil
          expect(commit.committer_id).not_to be_nil
        end

        it 'assigns the correct commit users to diff commits' do
          commit = MergeRequestDiffCommit.find_by(sha: 'COMMIT1')

          expect(commit.commit_author.name).to eq('Commit Author')
          expect(commit.commit_author.email).to eq('gitlab@example.com')
          expect(commit.committer.name).to eq('Committer')
          expect(commit.committer.email).to eq('committer@example.com')
        end
      end
    end

    context 'labels' do
      let(:attributes) do
        {
          'label_links' => [
            { 'label' => { 'title' => 'imported label 1', 'type' => 'ProjectLabel' } },
            { 'label' => { 'title' => 'imported label 2', 'type' => 'ProjectLabel' } }
          ]
        }
      end

      it 'imports labels' do
        expect(imported_mr.labels.pluck(:title)).to contain_exactly('imported label 1', 'imported label 2')
      end
    end

    context 'milestone' do
      let(:attributes) { { 'milestone' => { 'title' => 'imported milestone' } } }

      it 'imports milestone' do
        expect(imported_mr.milestone.title).to eq(attributes.dig('milestone', 'title'))
      end
    end
  end
end
@@ -3,71 +3,72 @@
 require 'spec_helper'
 RSpec.describe BulkImports::Projects::Pipelines::RepositoryPipeline do
-  describe '#run' do
-    let_it_be(:user) { create(:user) }
-    let_it_be(:parent) { create(:project) }
-    let_it_be(:bulk_import) { create(:bulk_import, user: user) }
-    let_it_be(:bulk_import_configuration) { create(:bulk_import_configuration, bulk_import: bulk_import) }
-    let_it_be(:entity) do
-      create(
-        :bulk_import_entity,
-        :project_entity,
-        bulk_import: bulk_import,
-        source_full_path: 'source/full/path',
-        destination_name: 'My Destination Repository',
-        destination_namespace: parent.full_path,
-        project: parent
-      )
-    end
+  let_it_be(:user) { create(:user) }
+  let_it_be(:parent) { create(:project) }
+  let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+  let_it_be(:bulk_import_configuration) { create(:bulk_import_configuration, bulk_import: bulk_import) }
+  let_it_be(:entity) do
+    create(
+      :bulk_import_entity,
+      :project_entity,
+      bulk_import: bulk_import,
+      source_full_path: 'source/full/path',
+      destination_name: 'My Destination Repository',
+      destination_namespace: parent.full_path,
+      project: parent
+    )
+  end
-    let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
-    let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+  let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+  let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
-    context 'successfully imports repository' do
-      let(:project_data) do
-        {
-          'httpUrlToRepo' => 'http://test.git'
-        }
-      end
+  let(:extracted_data) { BulkImports::Pipeline::ExtractedData.new(data: project_data) }
-      subject { described_class.new(context) }
+  subject(:pipeline) { described_class.new(context) }
+  before do
+    allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
+      allow(extractor).to receive(:extract).and_return(extracted_data)
+    end
+  end
+  describe '#run' do
+    context 'successfully imports repository' do
+      let(:project_data) { { 'httpUrlToRepo' => 'http://test.git' } }
       it 'imports new repository into destination project' do
-        allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
-          allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: project_data))
-        end
+        url = project_data['httpUrlToRepo'].sub("://", "://oauth2:#{bulk_import_configuration.access_token}@")
-        expect_next_instance_of(Gitlab::GitalyClient::RepositoryService) do |repository_service|
-          url = project_data['httpUrlToRepo'].sub("://", "://oauth2:#{bulk_import_configuration.access_token}@")
-          expect(repository_service).to receive(:import_repository).with(url).and_return 0
-        end
+        expect(context.portable).to receive(:ensure_repository)
+        expect(context.portable.repository).to receive(:fetch_as_mirror).with(url)
-        subject.run
+        pipeline.run
       end
     end
     context 'blocked local networks' do
-      let(:project_data) do
-        {
-          'httpUrlToRepo' => 'http://localhost/foo.git'
-        }
-      end
+      let(:project_data) { { 'httpUrlToRepo' => 'http://localhost/foo.git' } }
-      before do
+      it 'imports new repository into destination project' do
         allow(Gitlab.config.gitlab).to receive(:host).and_return('notlocalhost.gitlab.com')
         allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_web_hooks_and_services?).and_return(false)
-        allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
-          allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: project_data))
-        end
-      end
-      subject { described_class.new(context) }
+        pipeline.run
-      it 'imports new repository into destination project' do
-        subject.run
-        expect(context.entity.failed?).to be_truthy
+        expect(context.entity.failed?).to eq(true)
       end
     end
   end
+
+  describe '#after_run' do
+    it 'executes housekeeping service after import' do
+      service = instance_double(Repositories::HousekeepingService)
+
+      expect(Repositories::HousekeepingService).to receive(:new).with(context.portable, :gc).and_return(service)
+      expect(service).to receive(:execute)
+
+      pipeline.after_run(context)
+    end
+  end
 end
@@ -10,6 +10,7 @@ RSpec.describe BulkImports::Projects::Stage do
       [2, BulkImports::Common::Pipelines::LabelsPipeline],
       [3, BulkImports::Projects::Pipelines::IssuesPipeline],
       [4, BulkImports::Common::Pipelines::BoardsPipeline],
+      [4, BulkImports::Projects::Pipelines::MergeRequestsPipeline],
       [5, BulkImports::Common::Pipelines::UploadsPipeline],
       [6, BulkImports::Common::Pipelines::EntityFinisher]
     ]