Commit 20ea35d5 authored by Jan Provaznik

Merge branch 'add-issues-importer' into 'master'

Add issues importer

See merge request gitlab-org/gitlab!27229
parents 47fe1973 281ebf28
......@@ -39,4 +39,8 @@ class JiraImportData < ProjectImportData
data['jira'].delete(FORCE_IMPORT_KEY)
end
def current_project
projects.last
end
end
......@@ -556,6 +556,13 @@
:resource_boundary: :unknown
:weight: 1
:idempotent:
- :name: jira_importer:jira_import_import_issue
:feature_category: :importers
:has_external_dependencies:
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent:
- :name: jira_importer:jira_import_stage_finish_import
:feature_category: :importers
:has_external_dependencies:
......
# frozen_string_literal: true
module Gitlab
module JiraImport
class ImportIssueWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include NotifyUponDeath
include Gitlab::JiraImport::QueueOptions
include Gitlab::Import::DatabaseHelpers
def perform(project_id, jira_issue_id, issue_attributes, waiter_key)
issue_id = insert_and_return_id(issue_attributes, Issue)
cache_issue_mapping(issue_id, jira_issue_id, project_id)
rescue => ex
# TODO: Record the Jira issue id (or better, the Jira issue key)
# so that we can report the list of issues that failed to import to the user.
# See https://gitlab.com/gitlab-org/gitlab/-/issues/211653
#
# It's possible the project has been deleted since this job was scheduled;
# in that case we simply skip creating the issue.
Gitlab::ErrorTracking.track_exception(ex, project_id: project_id)
JiraImport.increment_issue_failures(project_id)
ensure
# ensure we notify job waiter that the job has finished
JobWaiter.notify(waiter_key, jid) if waiter_key
end
private
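# Caches the mapping from the Jira issue id to the newly inserted GitLab issue id
# under the project's jira_issue_cache_key.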
def cache_issue_mapping(issue_id, jira_issue_id, project_id)
cache_key = JiraImport.jira_issue_cache_key(project_id, jira_issue_id)
Gitlab::Cache::Import::Caching.write(cache_key, issue_id)
end
end
end
end
......@@ -11,6 +11,7 @@ module Gitlab
def import(project)
project.after_import
ensure
JiraImport.cache_cleanup(project.id)
project.import_data.becomes(JiraImportData).finish_import!
project.import_data.save!
end
......
......@@ -9,12 +9,19 @@ module Gitlab
private
def import(project)
# fake issues import workers for now
# new job waiter will have zero jobs_remaining by default, so it will just pass on to next stage
jobs_waiter = JobWaiter.new
jobs_waiter = Gitlab::JiraImport::IssuesImporter.new(project).execute
project.import_state.refresh_jid_expiration
Gitlab::JiraImport::AdvanceStageWorker.perform_async(project.id, { jobs_waiter.key => jobs_waiter.jobs_remaining }, :attachments)
Gitlab::JiraImport::AdvanceStageWorker.perform_async(
project.id,
{ jobs_waiter.key => jobs_waiter.jobs_remaining },
next_stage(project)
)
end
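# The importer stores -1 as the next start_at once the last page has been fetched
# (see Gitlab::JiraImport::IssuesImporter#update_start_at_with), so a negative value means
# all issues have been scheduled and we can advance to :attachments; otherwise we re-run the :issues stage.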
def next_stage(project)
Gitlab::JiraImport.get_issues_next_start_at(project.id) < 0 ? :attachments : :issues
end
end
end
......
......@@ -26,6 +26,7 @@ module Gitlab
def start_import
return false unless project
return false if Feature.disabled?(:jira_issue_import, project)
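# Only proceed when the Jira import was explicitly forced (FORCE_IMPORT_KEY in JiraImportData);
# this keeps an ordinary mirror sync of a Jira-imported project from re-triggering the import.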
return false unless project.jira_force_import?
return true if start(project.import_state)
Gitlab::Import::Logger.info(
......
......@@ -70,6 +70,21 @@ module Gitlab
value
end
# Increments the integer value of a key by one.
# If the key does not exist, it is initialized to zero before incrementing.
#
# raw_key - The cache key to increment.
# timeout - The time after which the cache key should expire.
#
# Returns the incremented value.
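#
# Illustrative usage (the key string below is hypothetical):
#
#   Gitlab::Cache::Import::Caching.increment('jira-import/failed/42/issues')
#   Gitlab::Cache::Import::Caching.read('jira-import/failed/42/issues') # => "1"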
def self.increment(raw_key, timeout: TIMEOUT)
key = cache_key_for(raw_key)
Redis::Cache.with do |redis|
value = redis.incr(key)
redis.expire(key, timeout)
value
end
end
# Adds a value to a set.
#
# raw_key - The key of the set to add the value to.
......
# frozen_string_literal: true
module Gitlab
module JiraImport
JIRA_IMPORT_CACHE_TIMEOUT = 10.seconds.to_i
FAILED_ISSUES_COUNTER_KEY = 'jira-import/failed/%{project_id}/%{collection_type}'
NEXT_ITEMS_START_AT_KEY = 'jira-import/paginator/%{project_id}/%{collection_type}'
ITEMS_MAPPER_CACHE_KEY = 'jira-import/items-mapper/%{project_id}/%{collection_type}/%{jira_issue_id}'
ALREADY_IMPORTED_ITEMS_CACHE_KEY = 'jira-importer/already-imported/%{project}/%{collection_type}'
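# Illustrative expanded keys for a hypothetical project id 42 and Jira issue id DEMO-123:
#   jira-import/failed/42/issues
#   jira-import/paginator/42/issues
#   jira-import/items-mapper/42/issues/DEMO-123
#   jira-importer/already-imported/42/issues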
def self.jira_issue_cache_key(project_id, jira_issue_id)
ITEMS_MAPPER_CACHE_KEY % { project_id: project_id, collection_type: :issues, jira_issue_id: jira_issue_id }
end
def self.already_imported_cache_key(collection_type, project_id)
ALREADY_IMPORTED_ITEMS_CACHE_KEY % { collection_type: collection_type, project: project_id }
end
def self.jira_issues_next_page_cache_key(project_id)
NEXT_ITEMS_START_AT_KEY % { project_id: project_id, collection_type: :issues }
end
def self.failed_issues_counter_cache_key(project_id)
FAILED_ISSUES_COUNTER_KEY % { project_id: project_id, collection_type: :issues }
end
def self.increment_issue_failures(project_id)
Gitlab::Cache::Import::Caching.increment(self.failed_issues_counter_cache_key(project_id))
end
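# Returns the cached start_at for the next page of issues to fetch:
# zero when nothing has been cached yet, negative once the importer has recorded the last page.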
def self.get_issues_next_start_at(project_id)
Gitlab::Cache::Import::Caching.read(self.jira_issues_next_page_cache_key(project_id)).to_i
end
def self.store_issues_next_started_at(project_id, value)
cache_key = self.jira_issues_next_page_cache_key(project_id)
Gitlab::Cache::Import::Caching.write(cache_key, value)
end
def self.cache_cleanup(project_id)
Gitlab::Cache::Import::Caching.expire(self.failed_issues_counter_cache_key(project_id), JIRA_IMPORT_CACHE_TIMEOUT)
Gitlab::Cache::Import::Caching.expire(self.jira_issues_next_page_cache_key(project_id), JIRA_IMPORT_CACHE_TIMEOUT)
Gitlab::Cache::Import::Caching.expire(self.already_imported_cache_key(:issues, project_id), JIRA_IMPORT_CACHE_TIMEOUT)
end
end
end
# frozen_string_literal: true
module Gitlab
module JiraImport
class BaseImporter
attr_reader :project, :client, :formatter, :jira_project_key
def initialize(project)
raise Projects::ImportService::Error, _('Jira import feature is disabled.') unless Feature.enabled?(:jira_issue_import, project)
raise Projects::ImportService::Error, _('Jira integration not configured.') unless project.jira_service&.active?
@jira_project_key = project&.import_data&.becomes(JiraImportData)&.current_project&.key
raise Projects::ImportService::Error, _('Unable to find Jira project to import data from.') unless @jira_project_key
@project = project
@client = project.jira_service.client
@formatter = Gitlab::ImportFormatter.new
end
private
def imported_items_cache_key
raise NotImplementedError
end
def mark_as_imported(id)
Gitlab::Cache::Import::Caching.set_add(imported_items_cache_key, id)
end
def already_imported?(id)
Gitlab::Cache::Import::Caching.set_includes?(imported_items_cache_key, id)
end
end
end
end
# frozen_string_literal: true
module Gitlab
module JiraImport
class IssueSerializer
def initialize(project, jira_issue, params = {})
end
def execute
# this is going to be implemented in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/27201
{}
end
end
end
end
# frozen_string_literal: true
module Gitlab
module JiraImport
class IssuesImporter < BaseImporter
# Jira limits the number of items fetched per request to a maximum of 100,
# see https://jira.atlassian.com/browse/JRACLOUD-67570
# We set BATCH_SIZE to 1000 in case they lift that limit.
BATCH_SIZE = 1000
attr_reader :imported_items_cache_key, :start_at, :job_waiter
def initialize(project)
super
# get cached start_at value, or zero if not cached yet
@start_at = Gitlab::JiraImport.get_issues_next_start_at(project.id)
@imported_items_cache_key = JiraImport.already_imported_cache_key(:issues, project.id)
@job_waiter = JobWaiter.new
end
def execute
import_issues
end
private
def import_issues
return job_waiter if jira_last_page_reached?
issues = fetch_issues(start_at)
update_start_at_with(issues)
schedule_issue_import_workers(issues)
end
def jira_last_page_reached?
start_at < 0
end
def update_start_at_with(issues)
@start_at += issues.size
# Store -1 if this was the last page to be imported, so that
# Gitlab::JiraImport::Stage::ImportIssuesWorker#perform stops scheduling further :issues runs.
@start_at = -1 if issues.blank?
Gitlab::JiraImport.store_issues_next_started_at(project.id, start_at)
end
def schedule_issue_import_workers(issues)
next_iid = project.issues.maximum(:iid).to_i + 1
issues.each do |jira_issue|
# Technically it's possible that the same work is performed multiple
# times, as Sidekiq doesn't guarantee there will only ever be one
# instance of a job, or the paginated results returned from Jira may
# include issues that were already returned before.
# For such cases we skip the issue if it was already imported.
next if already_imported?(jira_issue.id)
issue_attrs = IssueSerializer.new(project, jira_issue, { iid: next_iid }).execute
Gitlab::JiraImport::ImportIssueWorker.perform_async(project.id, jira_issue.id, issue_attrs, job_waiter.key)
job_waiter.jobs_remaining += 1
next_iid += 1
# Mark the issue as imported immediately so we don't end up
# importing it multiple times within the same import.
# These ids are cleaned up when the import finishes,
# see Gitlab::JiraImport::Stage::FinishImportWorker.
mark_as_imported(jira_issue.id)
end
job_waiter
end
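# Fetches a single page of issues from the Jira project, ordered by creation date,
# requesting up to BATCH_SIZE results (Jira may cap the page size lower).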
def fetch_issues(start_at)
client.Issue.jql("PROJECT='#{jira_project_key}' ORDER BY created ASC", { max_results: BATCH_SIZE, start_at: start_at })
end
end
end
end
......@@ -11287,6 +11287,12 @@ msgstr ""
msgid "Jira Issue Import"
msgstr ""
msgid "Jira import feature is disabled."
msgstr ""
msgid "Jira integration not configured."
msgstr ""
msgid "JiraService|Events for %{noteable_model_name} are disabled."
msgstr ""
......@@ -21398,6 +21404,9 @@ msgstr ""
msgid "Unable to fetch vulnerable projects"
msgstr ""
msgid "Unable to find Jira project to import data from."
msgstr ""
msgid "Unable to generate new instance ID"
msgstr ""
......
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::JiraImport::BaseImporter do
let(:project) { create(:project) }
describe 'with any inheriting class' do
context 'when feature flag disabled' do
before do
stub_feature_flags(jira_issue_import: false)
end
it 'raises exception' do
expect { described_class.new(project) }.to raise_error(Projects::ImportService::Error, 'Jira import feature is disabled.')
end
end
context 'when feature flag enabled' do
before do
stub_feature_flags(jira_issue_import: true)
end
context 'when Jira service was not setup' do
it 'raises exception' do
expect { described_class.new(project) }.to raise_error(Projects::ImportService::Error, 'Jira integration not configured.')
end
end
context 'when Jira service exists' do
let!(:jira_service) { create(:jira_service, project: project) }
context 'when Jira import data is not present' do
it 'raises exception' do
expect { described_class.new(project) }.to raise_error(Projects::ImportService::Error, 'Unable to find Jira project to import data from.')
end
end
context 'when import data exists' do
let(:jira_import_data) do
data = JiraImportData.new
data << JiraImportData::JiraProjectDetails.new('xx', Time.now.strftime('%Y-%m-%d %H:%M:%S'), { user_id: 1, name: 'root' })
data
end
let(:project) { create(:project, import_data: jira_import_data) }
let(:subject) { described_class.new(project) }
context 'when #imported_items_cache_key is not implemented' do
it { expect { subject.send(:imported_items_cache_key) }.to raise_error(NotImplementedError) }
end
context 'when #imported_items_cache_key is implemented' do
before do
allow(subject).to receive(:imported_items_cache_key).and_return('dummy-importer-key')
end
describe '#imported_items_cache_key' do
it { expect(subject.send(:imported_items_cache_key)).to eq('dummy-importer-key') }
end
describe '#mark_as_imported', :clean_gitlab_redis_cache do
it 'stores id in redis cache' do
expect(Gitlab::Cache::Import::Caching).to receive(:set_add).once.and_call_original
subject.send(:mark_as_imported, 'some-id')
expect(Gitlab::Cache::Import::Caching.set_includes?(subject.send(:imported_items_cache_key), 'some-id')).to be true
end
end
describe '#already_imported?', :clean_gitlab_redis_cache do
it 'returns false if value is not in cache' do
expect(Gitlab::Cache::Import::Caching).to receive(:set_includes?).once.and_call_original
expect(subject.send(:already_imported?, 'some-id')).to be false
end
it 'returns true if value already stored in cache' do
Gitlab::Cache::Import::Caching.set_add(subject.send(:imported_items_cache_key), 'some-id')
expect(subject.send(:already_imported?, 'some-id')).to be true
end
end
end
end
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::JiraImport::IssuesImporter do
let(:user) { create(:user) }
let(:jira_import_data) do
data = JiraImportData.new
data << JiraImportData::JiraProjectDetails.new('XX', Time.now.strftime('%Y-%m-%d %H:%M:%S'), { user_id: user.id, name: user.name })
data
end
let(:project) { create(:project, import_data: jira_import_data) }
let!(:jira_service) { create(:jira_service, project: project) }
subject { described_class.new(project) }
before do
stub_feature_flags(jira_issue_import: true)
end
describe '#imported_items_cache_key' do
it_behaves_like 'raise exception if not implemented'
it { expect(subject.imported_items_cache_key).to eq("jira-importer/already-imported/#{project.id}/issues") }
end
describe '#execute', :clean_gitlab_redis_cache do
context 'when no returned issues' do
it 'does not schedule any import jobs' do
expect(subject).to receive(:fetch_issues).with(0).and_return([])
expect(subject).not_to receive(:already_imported?)
expect(subject).not_to receive(:mark_as_imported)
expect(Gitlab::JiraImport::ImportIssueWorker).not_to receive(:perform_async)
job_waiter = subject.execute
expect(job_waiter.jobs_remaining).to eq(0)
expect(Gitlab::JiraImport.get_issues_next_start_at(project.id)).to eq(-1)
end
end
context 'with results returned' do
JiraIssue = Struct.new(:id)
let_it_be(:jira_issue1) { JiraIssue.new(1) }
let_it_be(:jira_issue2) { JiraIssue.new(2) }
context 'when single page of results is returned' do
before do
stub_const("#{described_class.name}::BATCH_SIZE", 3)
end
it 'schedules 2 import jobs' do
expect(subject).to receive(:fetch_issues).and_return([jira_issue1, jira_issue2])
expect(Gitlab::JiraImport::ImportIssueWorker).to receive(:perform_async).twice
expect(Gitlab::Cache::Import::Caching).to receive(:set_add).twice.and_call_original
expect(Gitlab::Cache::Import::Caching).to receive(:set_includes?).twice.and_call_original
allow_next_instance_of(Gitlab::JiraImport::IssueSerializer) do |instance|
allow(instance).to receive(:execute).and_return({ key: 'data' })
end
job_waiter = subject.execute
expect(job_waiter.jobs_remaining).to eq(2)
expect(Gitlab::JiraImport.get_issues_next_start_at(project.id)).to eq(2)
end
end
context 'when there is more than one page of results' do
before do
stub_const("#{described_class.name}::BATCH_SIZE", 2)
end
it 'schedules 2 import jobs' do
expect(subject).to receive(:fetch_issues).with(0).and_return([jira_issue1, jira_issue2])
expect(Gitlab::JiraImport::ImportIssueWorker).to receive(:perform_async).twice
expect(Gitlab::Cache::Import::Caching).to receive(:set_add).twice.and_call_original
expect(Gitlab::Cache::Import::Caching).to receive(:set_includes?).twice.and_call_original
allow_next_instance_of(Gitlab::JiraImport::IssueSerializer) do |instance|
allow(instance).to receive(:execute).and_return({ key: 'data' })
end
job_waiter = subject.execute
expect(job_waiter.jobs_remaining).to eq(2)
expect(Gitlab::JiraImport.get_issues_next_start_at(project.id)).to eq(2)
end
end
context 'when duplicate results are returned' do
before do
stub_const("#{described_class.name}::BATCH_SIZE", 2)
end
it 'schedules only 1 import job for the unique issue' do
expect(subject).to receive(:fetch_issues).with(0).and_return([jira_issue1, jira_issue1])
expect(Gitlab::JiraImport::ImportIssueWorker).to receive(:perform_async).once
expect(Gitlab::Cache::Import::Caching).to receive(:set_add).once.and_call_original
expect(Gitlab::Cache::Import::Caching).to receive(:set_includes?).twice.and_call_original
allow_next_instance_of(Gitlab::JiraImport::IssueSerializer) do |instance|
allow(instance).to receive(:execute).and_return({ key: 'data' })
end
job_waiter = subject.execute
expect(job_waiter.jobs_remaining).to eq(1)
expect(Gitlab::JiraImport.get_issues_next_start_at(project.id)).to eq(2)
end
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::JiraImport do
let(:project_id) { 321 }
describe '.jira_issue_cache_key' do
it 'returns cache key for Jira issue imported to given project' do
expect(described_class.jira_issue_cache_key(project_id, 'DEMO-123')).to eq("jira-import/items-mapper/#{project_id}/issues/DEMO-123")
end
end
describe '.already_imported_cache_key' do
it 'returns cache key for already imported items' do
expect(described_class.already_imported_cache_key(:issues, project_id)).to eq("jira-importer/already-imported/#{project_id}/issues")
end
end
describe '.jira_issues_next_page_cache_key' do
it 'returns cache key for next issues' do
expect(described_class.jira_issues_next_page_cache_key(project_id)).to eq("jira-import/paginator/#{project_id}/issues")
end
end
describe '.get_issues_next_start_at', :clean_gitlab_redis_cache do
it 'returns zero when not defined' do
expect(Gitlab::Cache::Import::Caching.read("jira-import/paginator/#{project_id}/issues")).to be nil
expect(described_class.get_issues_next_start_at(project_id)).to eq(0)
end
it 'returns a cached negative value as the start_at for the next issues to be imported' do
Gitlab::Cache::Import::Caching.write("jira-import/paginator/#{project_id}/issues", -10)
expect(Gitlab::Cache::Import::Caching.read("jira-import/paginator/#{project_id}/issues")).to eq('-10')
expect(described_class.get_issues_next_start_at(project_id)).to eq(-10)
end
it 'returns the cached value as the start_at for the next issues to be imported' do
Gitlab::Cache::Import::Caching.write("jira-import/paginator/#{project_id}/issues", 10)
expect(Gitlab::Cache::Import::Caching.read("jira-import/paginator/#{project_id}/issues")).to eq('10')
expect(described_class.get_issues_next_start_at(project_id)).to eq(10)
end
end
describe '.store_issues_next_started_at', :clean_gitlab_redis_cache do
it 'stores nil value' do
described_class.store_issues_next_started_at(project_id, nil)
expect(Gitlab::Cache::Import::Caching.read("jira-import/paginator/#{project_id}/issues")).to eq ''
expect(Gitlab::Cache::Import::Caching.read("jira-import/paginator/#{project_id}/issues").to_i).to eq(0)
end
it 'stores positive value' do
described_class.store_issues_next_started_at(project_id, 10)
expect(Gitlab::Cache::Import::Caching.read("jira-import/paginator/#{project_id}/issues").to_i).to eq(10)
end
it 'stores negative value' do
described_class.store_issues_next_started_at(project_id, -10)
expect(Gitlab::Cache::Import::Caching.read("jira-import/paginator/#{project_id}/issues").to_i).to eq(-10)
end
end
end
# frozen_string_literal: true
shared_examples 'raise exception if not implemented' do
it { expect { described_class.new(project).imported_items_cache_key }.not_to raise_error }
end
......@@ -16,7 +16,7 @@ shared_examples 'exit import not started' do
it 'does nothing, and exits' do
expect(Gitlab::JiraImport::AdvanceStageWorker).not_to receive(:perform_async)
worker.perform(project.id)
described_class.new.perform(project.id)
end
end
......@@ -25,8 +25,8 @@ shared_examples 'advance to next stage' do |next_stage|
it "advances to #{next_stage} stage" do
expect(Gitlab::JobWaiter).to receive(:new).and_return(job_waiter)
expect(Gitlab::JiraImport::AdvanceStageWorker).to receive(:perform_async).with(project.id, { job_waiter.key => job_waiter.jobs_remaining }, next_stage.to_sym)
expect(Gitlab::JiraImport::AdvanceStageWorker).to receive(:perform_async).with(project.id, { job_waiter.key => job_waiter.jobs_remaining }, next_stage.to_sym).and_return([])
worker.perform(project.id)
described_class.new.perform(project.id)
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::JiraImport::ImportIssueWorker do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
describe 'modules' do
it { expect(described_class).to include_module(ApplicationWorker) }
it { expect(described_class).to include_module(Gitlab::NotifyUponDeath) }
it { expect(described_class).to include_module(Gitlab::JiraImport::QueueOptions) }
it { expect(described_class).to include_module(Gitlab::Import::DatabaseHelpers) }
end
subject { described_class.new }
describe '#perform', :clean_gitlab_redis_cache do
let(:issue_attrs) { build(:issue, project_id: project.id).as_json.compact }
context 'when an exception is raised while inserting into the DB' do
before do
allow(subject).to receive(:insert_and_return_id).and_raise(StandardError)
expect(Gitlab::JobWaiter).to receive(:notify)
subject.perform(project.id, 123, issue_attrs, 'some-key')
end
it 'records the issue as failed to import' do
expect(Gitlab::Cache::Import::Caching.read(Gitlab::JiraImport.failed_issues_counter_cache_key(project.id)).to_i).to eq(1)
end
end
context 'when record is successfully inserted' do
before do
subject.perform(project.id, 123, issue_attrs, 'some-key')
end
it 'does not record import failure' do
expect(Gitlab::Cache::Import::Caching.read(Gitlab::JiraImport.failed_issues_counter_cache_key(project.id)).to_i).to eq(0)
end
end
end
end
......@@ -3,8 +3,8 @@
require 'spec_helper'
describe Gitlab::JiraImport::Stage::FinishImportWorker do
let(:project) { create(:project) }
let(:worker) { described_class.new }
let_it_be(:project) { create(:project) }
let_it_be(:worker) { described_class.new }
describe 'modules' do
it_behaves_like 'include import workers modules'
......@@ -46,7 +46,7 @@ describe Gitlab::JiraImport::Stage::FinishImportWorker do
it 'changes import state to finished' do
worker.perform(project.id)
expect(project.reload.import_state.status).to eq "finished"
expect(project.reload.import_state.status).to eq("finished")
end
it 'removes force-import flag' do
......
......@@ -3,8 +3,7 @@
require 'spec_helper'
describe Gitlab::JiraImport::Stage::ImportAttachmentsWorker do
let(:project) { create(:project) }
let(:worker) { described_class.new }
let_it_be(:project) { create(:project) }
describe 'modules' do
it_behaves_like 'include import workers modules'
......
......@@ -3,8 +3,8 @@
require 'spec_helper'
describe Gitlab::JiraImport::Stage::ImportIssuesWorker do
let(:project) { create(:project) }
let(:worker) { described_class.new }
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
describe 'modules' do
it_behaves_like 'include import workers modules'
......@@ -30,10 +30,49 @@ describe Gitlab::JiraImport::Stage::ImportIssuesWorker do
it_behaves_like 'exit import not started'
end
context 'when import started' do
context 'when import started', :clean_gitlab_redis_cache do
let(:jira_import_data) do
data = JiraImportData.new
data << JiraImportData::JiraProjectDetails.new('XX', Time.now.strftime('%Y-%m-%d %H:%M:%S'), { user_id: user.id, name: user.name })
data
end
let(:project) { create(:project, import_data: jira_import_data) }
let!(:jira_service) { create(:jira_service, project: project) }
let!(:import_state) { create(:import_state, status: :started, project: project) }
it_behaves_like 'advance to next stage', :attachments
before do
allow_next_instance_of(Gitlab::JiraImport::IssuesImporter) do |instance|
allow(instance).to receive(:fetch_issues).and_return([])
end
end
context 'when start_at is nil' do
it_behaves_like 'advance to next stage', :attachments
end
context 'when start_at is zero' do
before do
allow(Gitlab::Cache::Import::Caching).to receive(:read).and_return(0)
end
it_behaves_like 'advance to next stage', :issues
end
context 'when start_at is greater than zero' do
before do
allow(Gitlab::Cache::Import::Caching).to receive(:read).and_return(25)
end
it_behaves_like 'advance to next stage', :issues
end
context 'when start_at is below zero' do
before do
allow(Gitlab::Cache::Import::Caching).to receive(:read).and_return(-1)
end
it_behaves_like 'advance to next stage', :attachments
end
end
end
end
......
......@@ -3,8 +3,7 @@
require 'spec_helper'
describe Gitlab::JiraImport::Stage::ImportLabelsWorker do
let(:project) { create(:project) }
let(:worker) { described_class.new }
let_it_be(:project) { create(:project) }
describe 'modules' do
it_behaves_like 'include import workers modules'
......
......@@ -3,8 +3,7 @@
require 'spec_helper'
describe Gitlab::JiraImport::Stage::ImportNotesWorker do
let(:project) { create(:project) }
let(:worker) { described_class.new }
let_it_be(:project) { create(:project) }
describe 'modules' do
it_behaves_like 'include import workers modules'
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::JiraImport::Stage::StartImportWorker do
let(:project) { create(:project) }
let(:project) { create(:project, import_type: 'jira') }
let(:worker) { described_class.new }
let(:jid) { '12345678' }
......@@ -24,13 +24,19 @@ describe Gitlab::JiraImport::Stage::StartImportWorker do
end
end
context 'when feature flag not enabled' do
context 'when feature flag enabled' do
let(:symbol_keys_project) do
{ key: 'AA', scheduled_at: 2.days.ago.strftime('%Y-%m-%d %H:%M:%S'), scheduled_by: { 'user_id' => 1, 'name' => 'tester1' } }
end
let(:import_data) { JiraImportData.new(data: { 'jira' => { JiraImportData::FORCE_IMPORT_KEY => true, projects: [symbol_keys_project] } }) }
before do
stub_feature_flags(jira_issue_import: true)
end
context 'when import is not scheudled' do
let!(:import_state) { create(:import_state, project: project, status: :none, jid: jid) }
context 'when import is not scheduled' do
let(:project) { create(:project, import_type: 'jira') }
let(:import_state) { create(:import_state, project: project, status: :none, jid: jid) }
it 'exits because import not started' do
expect(Gitlab::JiraImport::Stage::ImportLabelsWorker).not_to receive(:perform_async)
......@@ -40,17 +46,32 @@ describe Gitlab::JiraImport::Stage::StartImportWorker do
end
context 'when import is scheduled' do
let!(:import_state) { create(:import_state, project: project, status: :scheduled, jid: jid) }
let(:import_state) { create(:import_state, status: :scheduled, jid: jid) }
let(:project) { create(:project, import_type: 'jira', import_state: import_state) }
it 'advances to importing labels' do
expect(Gitlab::JiraImport::Stage::ImportLabelsWorker).to receive(:perform_async)
context 'when this is a mirror sync in a Jira-imported project' do
it 'exits early' do
expect(Gitlab::Import::SetAsyncJid).not_to receive(:set_jid)
expect(Gitlab::JiraImport::Stage::ImportLabelsWorker).not_to receive(:perform_async)
worker.perform(project.id)
worker.perform(project.id)
end
end
context 'when the scheduled import is a force-triggered Jira import and not a mirror' do
let!(:project) { create(:project, import_type: 'jira', import_data: import_data, import_state: import_state) }
it 'advances to importing labels' do
expect(Gitlab::JiraImport::Stage::ImportLabelsWorker).to receive(:perform_async)
worker.perform(project.id)
end
end
end
context 'when import is started' do
let!(:import_state) { create(:import_state, project: project, status: :started, jid: jid) }
let!(:import_state) { create(:import_state, status: :started, jid: jid) }
let!(:project) { create(:project, import_type: 'jira', import_data: import_data, import_state: import_state) }
context 'when this is the same worker that started the import' do
it 'advances to importing labels' do
......@@ -72,7 +93,8 @@ describe Gitlab::JiraImport::Stage::StartImportWorker do
end
context 'when import is finished' do
let!(:import_state) { create(:import_state, project: project, status: :finished, jid: jid) }
let!(:import_state) { create(:import_state, status: :finished, jid: jid) }
let!(:project) { create(:project, import_type: 'jira', import_data: import_data, import_state: import_state) }
it 'advances to importing labels' do
allow(worker).to receive(:jid).and_return(jid)
......