Commit 03b2fe02 authored by George Koltsov

Import group epics via ndjson when using Bulk Import

  - Use ndjson files when importing group epics via the
    Bulk Import functionality
  - This approach is required instead of GraphQL
    in order to preserve epic subrelation associations
  - The ndjson approach downloads an ndjson file containing
    all source group epics and imports them

Changelog: changed
EE: true
parent 9f5bdd47
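
For readers unfamiliar with the export format: ndjson is newline-delimited JSON, one complete record per line, which GitLab ships gzip-compressed. The sketch below illustrates how such a file can be streamed in plain Ruby; the fixture path matches the one used in the specs later in this diff, while the each_epic helper and the usage at the bottom are illustrative, not part of this commit.

require 'json'
require 'zlib'

# Minimal sketch: stream epics out of a gzipped ndjson export.
# Each line is one self-contained JSON document holding an epic
# together with its nested subrelations (notes, award emoji, events),
# which is what lets the importer preserve those associations.
def each_epic(path)
  Zlib::GzipReader.open(path) do |gz|
    gz.each_line do |line|
      next if line.strip.empty?

      yield JSON.parse(line)
    end
  end
end

each_epic('spec/fixtures/bulk_imports/epics.ndjson.gz') do |epic|
  puts epic['title']
end
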
# frozen_string_literal: true
module BulkImports
module Groups
module Graphql
module GetEpicAwardEmojiQuery
extend self
def to_s
<<-'GRAPHQL'
query($full_path: ID!, $epic_iid: ID!, $cursor: String, $per_page: Int) {
group(fullPath: $full_path) {
epic(iid: $epic_iid) {
award_emoji: awardEmoji(first: $per_page, after: $cursor) {
page_info: pageInfo {
next_page: endCursor
has_next_page: hasNextPage
}
nodes {
name
user {
public_email: publicEmail
}
}
}
}
}
}
GRAPHQL
end
def variables(context)
iid = context.extra[:epic_iid]
{
full_path: context.entity.source_full_path,
cursor: context.tracker.next_page,
epic_iid: iid,
per_page: ::BulkImports::Tracker::DEFAULT_PAGE_SIZE
}
end
def data_path
base_path << 'nodes'
end
def page_info_path
base_path << 'page_info'
end
private
def base_path
%w[data group epic award_emoji]
end
end
end
end
end
# frozen_string_literal: true
module BulkImports
module Groups
module Graphql
module GetEpicEventsQuery
extend self
def to_s
<<-'GRAPHQL'
query($full_path: ID!, $epic_iid: ID!, $cursor: String, $per_page: Int) {
group(fullPath: $full_path) {
epic(iid: $epic_iid) {
events(first: $per_page, after: $cursor) {
page_info: pageInfo {
next_page: endCursor
has_next_page: hasNextPage
}
nodes {
action
created_at: createdAt
updated_at: updatedAt
author {
public_email: publicEmail
}
}
}
}
}
}
GRAPHQL
end
def variables(context)
iid = context.extra[:epic_iid]
{
full_path: context.entity.source_full_path,
cursor: context.tracker.next_page,
epic_iid: iid,
per_page: ::BulkImports::Tracker::DEFAULT_PAGE_SIZE
}
end
def data_path
base_path << 'nodes'
end
def page_info_path
base_path << 'page_info'
end
private
def base_path
%w[data group epic events]
end
end
end
end
end
# frozen_string_literal: true
module BulkImports
module Groups
module Graphql
module GetEpicsQuery
extend self
def to_s
<<-'GRAPHQL'
query($full_path: ID!, $cursor: String, $per_page: Int) {
group(fullPath: $full_path) {
epics(
includeDescendantGroups: false,
first: $per_page,
after: $cursor
) {
page_info: pageInfo {
next_page: endCursor
has_next_page: hasNextPage
}
nodes {
id
iid
title
description
state
created_at: createdAt
closed_at: closedAt
start_date: startDate
start_date_fixed: startDateFixed
start_date_is_fixed: startDateIsFixed
due_date_fixed: dueDateFixed
due_date_is_fixed: dueDateIsFixed
relative_position: relativePosition
confidential
author {
public_email: publicEmail
}
parent {
iid
}
children {
nodes {
iid
}
}
labels {
nodes {
title
}
}
}
}
}
}
GRAPHQL
end
def variables(context)
{
full_path: context.entity.source_full_path,
cursor: context.tracker.next_page,
per_page: ::BulkImports::Tracker::DEFAULT_PAGE_SIZE
}
end
def base_path
%w[data group epics]
end
def data_path
base_path << 'nodes'
end
def page_info_path
base_path << 'page_info'
end
end
end
end
end
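
The three query modules above share one small contract: to_s returns the GraphQL document, variables reads the pagination cursor from the tracker, and data_path / page_info_path tell the extractor where to dig in the response hash. A minimal sketch of the cursor loop a consumer might run follows; execute stands in for a real GraphQL client call and is hypothetical, as is persisting the cursor via update!.

# Hypothetical pagination loop over the query contract above.
# `execute` is a stand-in for an HTTP GraphQL client; the dig
# calls mirror the module API (to_s, variables, *_path helpers).
def each_page(query, context)
  loop do
    response = execute(query: query.to_s, variables: query.variables(context))

    yield response.dig(*query.data_path)

    page_info = response.dig(*query.page_info_path)
    break unless page_info['has_next_page']

    # Persisting the cursor on the tracker makes the next call to
    # query.variables(context) resume from this page.
    context.tracker.update!(next_page: page_info['next_page'])
  end
end
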
# frozen_string_literal: true
module BulkImports
module Groups
module Pipelines
class EpicAwardEmojiPipeline < ::BulkImports::Pipeline::EpicBase
extractor ::BulkImports::Common::Extractors::GraphqlExtractor,
query: ::BulkImports::Groups::Graphql::GetEpicAwardEmojiQuery
transformer ::BulkImports::Common::Transformers::ProhibitedAttributesTransformer
transformer ::BulkImports::Common::Transformers::UserReferenceTransformer
# rubocop: disable CodeReuse/ActiveRecord
def load(context, data)
return unless data
epic = context.group.epics.find_by(iid: context.extra[:epic_iid])
return if award_emoji_exists?(epic, data)
raise NotAllowedError unless Ability.allowed?(context.current_user, :award_emoji, epic)
epic.award_emoji.create!(data)
end
private
def award_emoji_exists?(epic, data)
epic.award_emoji.exists?(user_id: data['user_id'], name: data['name'])
end
# rubocop: enable CodeReuse/ActiveRecord
end
end
end
end
# frozen_string_literal: true
module BulkImports
module Groups
module Pipelines
class EpicEventsPipeline < ::BulkImports::Pipeline::EpicBase
extractor ::BulkImports::Common::Extractors::GraphqlExtractor,
query: ::BulkImports::Groups::Graphql::GetEpicEventsQuery
transformer ::BulkImports::Common::Transformers::ProhibitedAttributesTransformer
transformer ::BulkImports::Common::Transformers::UserReferenceTransformer, reference: 'author'
def transform(context, data)
# Only create 'reopened' & 'closed' events.
# A 'created' event gets created when the epic is persisted.
# Avoid creating duplicates & protect from additional
# potentially undesired events.
return unless data['action'] == 'REOPENED' || data['action'] == 'CLOSED'
data.merge!(
'group_id' => context.group.id,
'action' => data['action'].downcase
)
end
def load(context, data)
return unless data
epic = context.group.epics.find_by_iid(context.extra[:epic_iid])
return unless epic
::Event.transaction do
create_event!(epic, data)
create_resource_state_event!(epic, data)
end
end
private
def create_event!(epic, data)
epic.events.create!(data)
end
# In order for events to be shown in the UI we need to create
# a `ResourceStateEvent` record
def create_resource_state_event!(epic, data)
state_event_data = {
user_id: data['author_id'],
state: data['action'],
created_at: data['created_at']
}
epic.resource_state_events.create!(state_event_data)
end
end
end
end
end
@@ -4,45 +4,45 @@ module BulkImports
module Groups
module Pipelines
class EpicsPipeline
include ::BulkImports::Pipeline
include BulkImports::NdjsonPipeline
extractor ::BulkImports::Common::Extractors::GraphqlExtractor,
query: ::BulkImports::Groups::Graphql::GetEpicsQuery
RELATION = 'epics'
transformer ::BulkImports::Common::Transformers::ProhibitedAttributesTransformer
transformer ::BulkImports::Common::Transformers::UserReferenceTransformer, reference: 'author'
transformer ::BulkImports::Groups::Transformers::EpicAttributesTransformer
extractor ::BulkImports::Common::Extractors::NdjsonExtractor, relation: RELATION
def transform(_, data)
cache_epic_source_params(data)
end
def load(context, data)
raise ::BulkImports::Pipeline::NotAllowedError unless authorized?
def transform(context, data)
relation_hash = data.first
relation_index = data.last
relation_definition = import_export_config.top_relation_tree(RELATION)
context.group.epics.create!(data)
deep_transform_relation!(relation_hash, RELATION, relation_definition) do |key, hash|
Gitlab::ImportExport::Group::RelationFactory.create(
relation_index: relation_index,
relation_sym: key.to_sym,
relation_hash: hash,
importable: context.portable,
members_mapper: members_mapper,
object_builder: object_builder,
user: context.current_user,
excluded_keys: import_export_config.relation_excluded_keys(key)
)
end
private
def authorized?
context.current_user.can?(:admin_epic, context.group)
end
def cache_epic_source_params(data)
source_id = GlobalID.parse(data['id'])&.model_id
source_iid = data['iid']
def load(_, epic)
return unless epic
if source_id
cache_key = "bulk_import:#{context.bulk_import.id}:entity:#{context.entity.id}:epic:#{source_iid}"
source_params = { source_id: source_id }
::Gitlab::Redis::Cache.with do |redis|
redis.set(cache_key, source_params.to_json, ex: ::BulkImports::Pipeline::CACHE_KEY_EXPIRATION)
end
epic.save! unless epic.persisted?
end
data
private
def members_mapper
@members_mapper ||= Gitlab::ImportExport::MembersMapper.new(
exported_members: [], # importer user is authoring everything for now
user: context.current_user,
importable: context.portable
)
end
end
end
......
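
The reworked pipeline above hands each ndjson row (a [relation_hash, relation_index] pair) to deep_transform_relation!, which walks the epic's relation definition tree and lets the block build every record through RelationFactory. A conceptual sketch of that traversal follows; it is simplified and not a verbatim copy of GitLab's NdjsonPipeline helper.

require 'active_support/core_ext/array/wrap'

# Conceptual sketch of deep_transform_relation!: depth-first walk of
# the relation definition tree, building children before the parent.
def deep_transform_relation!(relation_hash, relation_key, definition, &block)
  definition.each do |sub_relation, sub_definition|
    sub_data = relation_hash[sub_relation.to_s]
    next unless sub_data

    # Replace raw subrelation hashes (e.g. notes, award_emoji) with
    # built records so the parent is created with real associations.
    relation_hash[sub_relation.to_s] =
      Array.wrap(sub_data).map do |nested|
        deep_transform_relation!(nested, sub_relation.to_s, sub_definition, &block)
      end
  end

  yield(relation_key, relation_hash)
end
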
# frozen_string_literal: true
module BulkImports
module Groups
module Transformers
class EpicAttributesTransformer
def transform(context, data)
data
.then { |data| add_parent(context, data) }
.then { |data| add_children(context, data) }
.then { |data| add_labels(context, data) }
end
private
def add_parent(context, data)
data.merge(
'parent' => context.group.epics.find_by_iid(data.dig('parent', 'iid'))
)
end
def add_children(context, data)
nodes = Array.wrap(data.dig('children', 'nodes'))
children_iids = nodes.filter_map { |child| child['iid'] }
data.merge('children' => context.group.epics.where(iid: children_iids)) # rubocop: disable CodeReuse/ActiveRecord
end
def add_labels(context, data)
data['labels'] = data.dig('labels', 'nodes').filter_map do |node|
context.group.labels.find_by_title(node['title'])
end
data
end
end
end
end
end
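
A side note on the transformer above: it chains its steps with Object#then (Ruby 2.6+), which yields the receiver to the block and returns the block's result, so each step hands a rewritten hash to the next. A tiny standalone illustration:

# Object#then threads a value through a series of rewrites.
data = { 'title' => 'epic' }
result = data
  .then { |d| d.merge('parent' => nil) }
  .then { |d| d.merge('labels' => []) }

result # => { 'title' => 'epic', 'parent' => nil, 'labels' => [] }
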
@@ -14,14 +14,6 @@ module EE
pipeline: ::BulkImports::Groups::Pipelines::EpicsPipeline,
stage: 2
},
epic_award_emojis: {
pipeline: ::BulkImports::Groups::Pipelines::EpicAwardEmojiPipeline,
stage: 3
},
epic_events: {
pipeline: ::BulkImports::Groups::Pipelines::EpicEventsPipeline,
stage: 3
},
# Override the CE stage value for the EntityFinisher Pipeline
finisher: {
stage: 4
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Groups::Graphql::GetEpicAwardEmojiQuery do
it 'has a valid query' do
context = BulkImports::Pipeline::Context.new(create(:bulk_import_tracker), epic_iid: 1)
result = GitlabSchema.execute(
described_class.to_s,
variables: described_class.variables(context)
).to_h
expect(result['errors']).to be_blank
end
describe '#data_path' do
it 'returns data path' do
expected = %w[data group epic award_emoji nodes]
expect(described_class.data_path).to eq(expected)
end
end
describe '#page_info_path' do
it 'returns pagination information path' do
expected = %w[data group epic award_emoji page_info]
expect(described_class.page_info_path).to eq(expected)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Groups::Graphql::GetEpicEventsQuery do
it 'has a valid query' do
context = BulkImports::Pipeline::Context.new(create(:bulk_import_tracker), epic_iid: 1)
result = GitlabSchema.execute(
described_class.to_s,
variables: described_class.variables(context)
).to_h
expect(result['errors']).to be_blank
end
describe '#data_path' do
it 'returns data path' do
expected = %w[data group epic events nodes]
expect(described_class.data_path).to eq(expected)
end
end
describe '#page_info_path' do
it 'returns pagination information path' do
expected = %w[data group epic events page_info]
expect(described_class.page_info_path).to eq(expected)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Groups::Graphql::GetEpicsQuery do
it 'has a valid query' do
context = BulkImports::Pipeline::Context.new(create(:bulk_import_tracker))
result = GitlabSchema.execute(
described_class.to_s,
variables: described_class.variables(context)
).to_h
expect(result['errors']).to be_blank
end
describe '#data_path' do
it 'returns data path' do
expected = %w[data group epics nodes]
expect(described_class.data_path).to eq(expected)
end
end
describe '#page_info_path' do
it 'returns pagination information path' do
expected = %w[data group epics page_info]
expect(described_class.page_info_path).to eq(expected)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::EpicAwardEmojiPipeline do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:epic) { create(:epic, group: group) }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
let_it_be(:entity) do
create(
:bulk_import_entity,
group: group,
bulk_import: bulk_import,
source_full_path: 'source/full/path',
destination_name: 'My Destination Group',
destination_namespace: group.full_path
)
end
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
before do
stub_licensed_features(epics: true)
group.add_owner(user)
end
subject { described_class.new(context) }
describe '#initialize' do
it 'updates the context with the next epic iid' do
subject
expect(context.extra[:epic_iid]).to eq(epic.iid)
end
end
describe '#run' do
it 'imports epic award emoji' do
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
allow(extractor)
.to receive(:extract)
.and_return(extracted_data)
end
expect { subject.run }.to change(::AwardEmoji, :count).by(1)
expect(epic.award_emoji.first.name).to eq('thumbsup')
end
context 'when extracted data has many pages' do
it 'runs pipeline for the second page' do
first_page = extracted_data(has_next_page: true)
last_page = extracted_data
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
allow(extractor)
.to receive(:extract)
.and_return(first_page, last_page)
end
subject.run
end
end
context 'when there are many epics to import' do
let_it_be(:second_epic) { create(:epic, group: group) }
it 'runs the pipeline for the next epic' do
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
allow(extractor)
.to receive(:extract)
.twice # for each epic
.and_return(extracted_data)
end
expect(context.extra)
.to receive(:[]=)
.with(:epic_iid, epic.iid)
.and_call_original
expect(context.extra)
.to receive(:[]=)
.with(:epic_iid, second_epic.iid)
.and_call_original
expect(context.extra)
.to receive(:[]=)
.with(:epic_iid, nil)
.and_call_original
subject.run
end
end
end
describe '#load' do
let(:data) { { 'name' => 'thumbsup', 'user_id' => user.id } }
context 'when emoji does not exist' do
it 'creates new emoji' do
expect { subject.load(context, data) }.to change(::AwardEmoji, :count).by(1)
epic = group.epics.last
emoji = epic.award_emoji.first
expect(emoji.name).to eq(data['name'])
expect(emoji.user).to eq(user)
end
end
context 'when same emoji exists' do
it 'does not create a new emoji' do
epic.award_emoji.create!(data)
expect { subject.load(context, data) }.not_to change(::AwardEmoji, :count)
end
end
context 'when user is not allowed to award emoji' do
it 'raises NotAllowedError exception' do
allow(Ability).to receive(:allowed?).with(user, :award_emoji, epic).and_return(false)
expect { subject.load(context, data) }.to raise_error(described_class::NotAllowedError)
end
end
end
describe 'pipeline parts' do
it { expect(described_class).to include_module(BulkImports::Pipeline) }
it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
it 'has extractors' do
expect(described_class.get_extractor)
.to eq(
klass: BulkImports::Common::Extractors::GraphqlExtractor,
options: {
query: BulkImports::Groups::Graphql::GetEpicAwardEmojiQuery
}
)
end
it 'has transformers' do
expect(described_class.transformers)
.to contain_exactly(
{ klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil },
{ klass: BulkImports::Common::Transformers::UserReferenceTransformer, options: nil }
)
end
end
def extracted_data(has_next_page: false)
data = [{ 'name' => 'thumbsup' }]
page_info = {
'has_next_page' => has_next_page,
'next_page' => has_next_page ? 'cursor' : nil
}
BulkImports::Pipeline::ExtractedData.new(data: data, page_info: page_info)
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::EpicEventsPipeline do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:epic) { create(:epic, group: group) }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
let_it_be(:entity) do
create(
:bulk_import_entity,
group: group,
bulk_import: bulk_import,
source_full_path: 'source/full/path',
destination_name: 'My Destination Group',
destination_namespace: group.full_path
)
end
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
before do
stub_licensed_features(epics: true)
group.add_owner(user)
end
subject { described_class.new(context) }
describe '#initialize' do
it 'updates the context with the next epic iid' do
subject
expect(context.extra[:epic_iid]).to eq(epic.iid)
end
end
describe '#run' do
it 'imports epic events and resource state events' do
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
allow(extractor)
.to receive(:extract)
.and_return(extracted_data)
end
subject.run
expect(epic.events.first.action).to eq('closed')
expect(epic.resource_state_events.first.state).to eq('closed')
end
end
describe '#transform' do
it 'downcases action & adds group_id' do
data = { 'action' => 'CLOSED' }
result = subject.transform(context, data)
expect(result['group_id']).to eq(group.id)
expect(result['action']).to eq(data['action'].downcase)
end
context 'when action is not listed as permitted' do
it 'returns' do
data = { 'action' => 'created' }
expect(subject.transform(nil, data)).to eq(nil)
end
end
end
describe '#load' do
context 'when exception occurs during resource state event creation' do
it 'reverts created event' do
allow(subject).to receive(:create_resource_state_event!).and_raise(StandardError)
data = { 'action' => 'reopened', 'author_id' => user.id }
expect { subject.load(context, data) }.to raise_error(StandardError)
expect(epic.events.count).to eq(0)
expect(epic.resource_state_events.count).to eq(0)
end
end
context 'when epic could not be found' do
it 'does not create new event' do
context.extra[:epic_iid] = 'not_iid'
expect { subject.load(context, nil) }.to not_change { Event.count }.and not_change { ResourceStateEvent.count }
end
end
end
context 'when extracted data has many pages' do
it 'runs pipeline for the second page' do
first_page = extracted_data(has_next_page: true)
last_page = extracted_data
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
allow(extractor)
.to receive(:extract)
.and_return(first_page, last_page)
end
subject.run
end
end
context 'when there are many epics to import' do
let_it_be(:second_epic) { create(:epic, group: group) }
it 'runs the pipeline for the next epic' do
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
allow(extractor)
.to receive(:extract)
.twice # for each epic
.and_return(extracted_data)
end
expect(context.extra)
.to receive(:[]=)
.with(:epic_iid, epic.iid)
.and_call_original
expect(context.extra)
.to receive(:[]=)
.with(:epic_iid, second_epic.iid)
.and_call_original
expect(context.extra)
.to receive(:[]=)
.with(:epic_iid, nil)
.and_call_original
subject.run
end
end
describe 'pipeline parts' do
it { expect(described_class).to include_module(BulkImports::Pipeline) }
it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
it 'has extractors' do
expect(described_class.get_extractor)
.to eq(
klass: BulkImports::Common::Extractors::GraphqlExtractor,
options: {
query: BulkImports::Groups::Graphql::GetEpicEventsQuery
}
)
end
it 'has transformers' do
expect(described_class.transformers)
.to contain_exactly(
{ klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil },
{ klass: BulkImports::Common::Transformers::UserReferenceTransformer, options: { reference: 'author' } }
)
end
end
def extracted_data(has_next_page: false)
data = [
{
'action' => 'CLOSED',
'created_at' => '2021-02-15T15:08:57Z',
'updated_at' => '2021-02-15T16:08:57Z',
'author' => {
'public_email' => user.email
}
}
]
page_info = {
'has_next_page' => has_next_page,
'next_page' => has_next_page ? 'cursor' : nil
}
BulkImports::Pipeline::ExtractedData.new(
data: data,
page_info: page_info
)
end
end
@@ -2,11 +2,11 @@
require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::EpicsPipeline, :clean_gitlab_redis_cache do
RSpec.describe BulkImports::Groups::Pipelines::EpicsPipeline do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
let_it_be(:filepath) { 'spec/fixtures/bulk_imports/epics.ndjson.gz' }
let_it_be(:entity) do
create(
:bulk_import_entity,
@@ -21,7 +21,10 @@ RSpec.describe BulkImports::Groups::Pipelines::EpicsPipeline, :clean_gitlab_redi
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
let(:tmpdir) { Dir.mktmpdir }
before do
FileUtils.copy_file(filepath, File.join(tmpdir, 'epics.ndjson.gz'))
stub_licensed_features(epics: true)
group.add_owner(user)
end
@@ -30,126 +33,65 @@ RSpec.describe BulkImports::Groups::Pipelines::EpicsPipeline, :clean_gitlab_redi
describe '#run' do
it 'imports group epics into destination group' do
first_page = extracted_data(has_next_page: true)
last_page = extracted_data
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
allow(extractor)
.to receive(:extract)
.and_return(first_page, last_page)
allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
allow_next_instance_of(BulkImports::FileDownloadService) do |service|
allow(service).to receive(:execute)
end
expect { subject.run }.to change(::Epic, :count).by(2)
expect { subject.run }.to change(::Epic, :count).by(5)
expect(group.epics.first.award_emoji.first.name).to eq('thumbsup')
expect(group.epics.first.state).to eq('opened')
expect(group.epics.first.notes.count).to eq(4)
expect(group.epics.first.notes.first.award_emoji.first.name).to eq('drum')
label = group.epics.first.labels.first
expect(group.epics.first.labels.count).to eq(1)
expect(label.title).to eq('title')
expect(label.description).to eq('description')
expect(label.color).to eq('#cd2c5c')
end
end
describe '#load' do
context 'when user is authorized to create the epic' do
it 'creates the epic' do
author = create(:user, email: 'member@email.com')
parent_epic = create(:epic, group: group)
child_epic = create(:epic, group: group)
label = create(:group_label, group: group)
group.add_developer(author)
data = {
'id' => 99,
'iid' => 99,
'title' => 'epic',
'state' => 'opened',
'confidential' => false,
'author_id' => author.id,
'parent' => parent_epic,
'children' => [child_epic],
'labels' => [
label
]
}
expect { subject.load(context, data) }.to change(::Epic, :count).by(1)
epic = group.epics.find_by_iid(99)
expect(epic.group).to eq(group)
expect(epic.author).to eq(author)
expect(epic.title).to eq('epic')
expect(epic.state).to eq('opened')
expect(epic.confidential).to eq(false)
expect(epic.parent).to eq(parent_epic)
expect(epic.children).to contain_exactly(child_epic)
expect(epic.labels).to contain_exactly(label)
end
end
context 'when epic is not persisted' do
it 'saves the epic' do
epic = build(:epic, group: group)
context 'when user is not authorized to create the epic' do
before do
allow(user).to receive(:can?).with(:admin_epic, group).and_return(false)
end
expect(epic).to receive(:save!)
it 'raises NotAllowedError' do
expect { subject.load(context, extracted_data) }
.to raise_error(::BulkImports::Pipeline::NotAllowedError)
end
subject.load(context, epic)
end
end
describe '#transform' do
it 'caches epic source id in redis' do
data = { 'id' => 'gid://gitlab/Epic/1', 'iid' => 1 }
cache_key = "bulk_import:#{bulk_import.id}:entity:#{entity.id}:epic:#{data['iid']}"
source_params = { source_id: '1' }.to_json
context 'when epic is persisted' do
it 'does not save epic' do
epic = create(:epic, group: group)
expect(epic).not_to receive(:save!)
::Gitlab::Redis::Cache.with do |redis|
expect(redis).to receive(:set).with(cache_key, source_params, ex: ::BulkImports::Pipeline::CACHE_KEY_EXPIRATION)
subject.load(context, epic)
end
end
subject.transform(context, data)
context 'when epic is missing' do
it 'returns' do
expect(subject.load(context, nil)).to be_nil
end
end
end
describe 'pipeline parts' do
it { expect(described_class).to include_module(BulkImports::Pipeline) }
it { expect(described_class).to include_module(BulkImports::NdjsonPipeline) }
it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
it 'has extractors' do
it 'has extractor' do
expect(described_class.get_extractor)
.to eq(
klass: BulkImports::Common::Extractors::GraphqlExtractor,
options: {
query: BulkImports::Groups::Graphql::GetEpicsQuery
}
klass: BulkImports::Common::Extractors::NdjsonExtractor,
options: { relation: described_class::RELATION }
)
end
it 'has transformers' do
expect(described_class.transformers)
.to contain_exactly(
{ klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil },
{ klass: BulkImports::Common::Transformers::UserReferenceTransformer, options: { reference: 'author' } },
{ klass: BulkImports::Groups::Transformers::EpicAttributesTransformer, options: nil }
)
end
end
def extracted_data(has_next_page: false)
data = [
{
'id' => "gid://gitlab/Epic/99",
'iid' => has_next_page ? 2 : 1,
'title' => 'epic1',
'state' => 'closed',
'confidential' => true,
'labels' => {
'nodes' => []
}
}
]
page_info = {
'has_next_page' => has_next_page,
'next_page' => has_next_page ? 'cursor' : nil
}
BulkImports::Pipeline::ExtractedData.new(data: data, page_info: page_info)
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Groups::Transformers::EpicAttributesTransformer do
let_it_be(:importer_user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:bulk_import) { create(:bulk_import, user: importer_user) }
let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import, group: group) }
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
describe '#transform' do
it 'transforms the epic attributes' do
transformed = subject.transform(context, epic_data)
expect(transformed).to eq({
'title' => 'title',
'description' => 'description',
'state' => 'opened',
'created_at' => '2021-01-27T03:42:30Z',
'closed_at' => nil,
'start_date' => nil,
'start_date_fixed' => nil,
'start_date_is_fixed' => false,
'due_date_fixed' => nil,
'due_date_is_fixed' => false,
'relative_position' => 1073716855,
'confidential' => false,
'parent' => nil,
'children' => [],
'labels' => []
})
end
context 'labels' do
it 'maps the existing labels' do
label = create(:group_label, group: group)
data = epic_data(labels_titles: [label.title, 'NON EXISTING TITLE'])
transformed_data = subject.transform(context, data)
expect(transformed_data['labels'].to_a).to contain_exactly(label)
end
end
context 'parent and children epics' do
it 'sets parent and child epics when they exist' do
parent = create(:epic, group: group)
child = create(:epic, group: group)
data = epic_data(parent_iid: parent.iid, children_iids: [child.iid])
transformed_data = subject.transform(context, data)
expect(transformed_data['parent']).to eq(parent)
expect(transformed_data['children']).to contain_exactly(child)
end
it 'removes the parent and children ids when they do not exist' do
data = epic_data(parent_iid: 9998, children_iids: [9999])
transformed_data = subject.transform(context, data)
expect(transformed_data['parent']).to be_nil
expect(transformed_data['children']).to be_empty
end
end
def epic_data(parent_iid: nil, children_iids: [], labels_titles: [], public_email: '')
{
'title' => 'title',
'description' => 'description',
'state' => 'opened',
'created_at' => '2021-01-27T03:42:30Z',
'closed_at' => nil,
'start_date' => nil,
'start_date_fixed' => nil,
'start_date_is_fixed' => false,
'due_date_fixed' => nil,
'due_date_is_fixed' => false,
'relative_position' => 1073716855,
'confidential' => false,
'parent' => {
'iid' => parent_iid
},
'children' => {
'nodes' => children_iids.map { |iid| { 'iid' => iid } }
},
'labels' => {
'nodes' => labels_titles.map { |title| { 'title' => title } }
}
}
end
end
end
@@ -13,8 +13,6 @@ RSpec.describe BulkImports::Stage do
[1, BulkImports::Groups::Pipelines::BadgesPipeline],
[1, BulkImports::Groups::Pipelines::IterationsPipeline],
[2, BulkImports::Groups::Pipelines::EpicsPipeline],
[3, BulkImports::Groups::Pipelines::EpicAwardEmojiPipeline],
[3, BulkImports::Groups::Pipelines::EpicEventsPipeline],
[4, BulkImports::Groups::Pipelines::EntityFinisher]
]
end
......
@@ -69,6 +69,7 @@ module Gitlab
# the relation_hash, updating references with new object IDs, mapping users using
# the "members_mapper" object, also updating notes if required.
def create
return @relation_hash if author_relation?
return if invalid_relation? || predefined_relation?
setup_base_models
@@ -95,6 +96,10 @@ module Gitlab
relation_class.try(:predefined_id?, @relation_hash['id'])
end
def author_relation?
@relation_name == :author
end
def setup_models
raise NotImplementedError
end
......
@@ -43,6 +43,15 @@ RSpec.describe Gitlab::ImportExport::Base::RelationFactory do
end
end
context 'when author relation' do
let(:relation_sym) { :author }
let(:relation_hash) { { 'name' => 'User', 'project_id' => project.id } }
it 'returns author hash unchanged' do
expect(subject).to eq(relation_hash)
end
end
context 'when #setup_models is not implemented' do
it 'raises NotImplementedError' do
expect { subject }.to raise_error(NotImplementedError)
......