Commit e0622faa authored by Josianne Hyson

Create BulkImport model to group import models

We want to start importing Group and Project data directly from
another GitLab instance, via the API. To do this, we need somewhere
to store authentication data, entity mapping data and other metadata
so that we can perform the import in background jobs.

Create the BulkImport model, which is associated with a user and groups
together all the data relating to a single bulk import request from that
user. This will be followed up by the introduction of the other models
that store the import data.

This model introduces:

1. `source_type` -> where the data for this import is being sourced
   from. For now this will just be GitLab imports, but in the future we
   could expand this structure to other services like GitHub and
   Bitbucket.
2. `user_id` -> the user that initiated the import.
3. `status` -> to be used by the state machine for recording the status
   of the overall import.
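
As a minimal sketch (illustrative only, not code in this commit; here
`current_user` stands in for whichever user object initiates the
request), a record could be created like so, relying on the state
machine to fill in the initial `created` status:

    bulk_import = BulkImport.create!(
      user: current_user,   # the initiating user (illustrative variable)
      source_type: :gitlab  # enum value, stored as the integer 0
    )
    bulk_import.status # => 0, the integer value of the :created state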

This is a component of the Group Migration MVC epic:
https://gitlab.com/groups/gitlab-org/-/epics/4374

MR: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/42978
Issue: https://gitlab.com/gitlab-org/gitlab/-/issues/250280
parent 5f3de8d2
# frozen_string_literal: true

class BulkImport < ApplicationRecord
  belongs_to :user, optional: false

  validates :source_type, :status, presence: true

  enum source_type: { gitlab: 0 }

  state_machine :status, initial: :created do
    state :created, value: 0
  end
end
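
The enum and the state machine both persist plain integers on the same
row. A console-style sketch of the expected behaviour, assuming the
state_machines-activerecord gem that backs `state_machine` blocks
elsewhere in GitLab (illustrative only):

    import = BulkImport.new(user: User.first, source_type: :gitlab)
    import.source_type # => "gitlab"  (enum reader; the column stores 0)
    import.status      # => 0         (raw column value for the :created state)
    import.status_name # => :created  (state name helper provided by the gem)
    import.valid?      # => true      (status is initialised, so presence passes)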
@@ -167,6 +167,8 @@ class User < ApplicationRecord
has_many :assigned_issues, class_name: "Issue", through: :issue_assignees, source: :issue
has_many :assigned_merge_requests, class_name: "MergeRequest", through: :merge_request_assignees, source: :merge_request
has_many :bulk_imports
has_many :custom_attributes, class_name: 'UserCustomAttribute'
has_many :callouts, class_name: 'UserCallout'
has_many :term_agreements
---
title: Create a set of models to store the temporary data needed for a bulk import
merge_request: 42978
author:
type: changed
# frozen_string_literal: true

class CreateBulkImport < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  def up
    with_lock_retries do
      create_table :bulk_imports do |t|
        t.references :user, type: :integer, index: true, null: false, foreign_key: { on_delete: :cascade }
        t.integer :source_type, null: false, limit: 2
        t.integer :status, null: false, limit: 2

        t.timestamps_with_timezone
      end
    end
  end

  def down
    with_lock_retries do
      drop_table :bulk_imports
    end
  end
end
8196e28f6fe8cdb4cf710922b5cd218030ba587c629de7ee75dc061d05c7e1a9
\ No newline at end of file
@@ -9816,6 +9816,24 @@ CREATE SEQUENCE broadcast_messages_id_seq
ALTER SEQUENCE broadcast_messages_id_seq OWNED BY broadcast_messages.id;
CREATE TABLE bulk_imports (
id bigint NOT NULL,
user_id integer NOT NULL,
source_type smallint NOT NULL,
status smallint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL
);
CREATE SEQUENCE bulk_imports_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE bulk_imports_id_seq OWNED BY bulk_imports.id;
CREATE TABLE chat_names (
id integer NOT NULL,
user_id integer NOT NULL,
@@ -17304,6 +17322,8 @@ ALTER TABLE ONLY boards_epic_user_preferences ALTER COLUMN id SET DEFAULT nextva
ALTER TABLE ONLY broadcast_messages ALTER COLUMN id SET DEFAULT nextval('broadcast_messages_id_seq'::regclass);
ALTER TABLE ONLY bulk_imports ALTER COLUMN id SET DEFAULT nextval('bulk_imports_id_seq'::regclass);
ALTER TABLE ONLY chat_names ALTER COLUMN id SET DEFAULT nextval('chat_names_id_seq'::regclass);
ALTER TABLE ONLY chat_teams ALTER COLUMN id SET DEFAULT nextval('chat_teams_id_seq'::regclass);
@@ -18280,6 +18300,9 @@ ALTER TABLE ONLY boards
ALTER TABLE ONLY broadcast_messages
ADD CONSTRAINT broadcast_messages_pkey PRIMARY KEY (id);
ALTER TABLE ONLY bulk_imports
ADD CONSTRAINT bulk_imports_pkey PRIMARY KEY (id);
ALTER TABLE ONLY chat_names
ADD CONSTRAINT chat_names_pkey PRIMARY KEY (id);
@@ -19785,6 +19808,8 @@ CREATE INDEX index_boards_on_project_id ON boards USING btree (project_id);
CREATE INDEX index_broadcast_message_on_ends_at_and_broadcast_type_and_id ON broadcast_messages USING btree (ends_at, broadcast_type, id);
CREATE INDEX index_bulk_imports_on_user_id ON bulk_imports USING btree (user_id);
CREATE UNIQUE INDEX index_chat_names_on_service_id_and_team_id_and_chat_id ON chat_names USING btree (service_id, team_id, chat_id);
CREATE UNIQUE INDEX index_chat_names_on_user_id_and_service_id ON chat_names USING btree (user_id, service_id);
@@ -22835,6 +22860,9 @@ ALTER TABLE ONLY project_statistics
ALTER TABLE ONLY user_details
ADD CONSTRAINT fk_rails_12e0b3043d FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
ALTER TABLE ONLY bulk_imports
ADD CONSTRAINT fk_rails_130a09357d FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
ALTER TABLE ONLY diff_note_positions
ADD CONSTRAINT fk_rails_13c7212859 FOREIGN KEY (note_id) REFERENCES notes(id) ON DELETE CASCADE;
# frozen_string_literal: true

FactoryBot.define do
  factory :bulk_import, class: 'BulkImport' do
    user
    source_type { :gitlab }
  end
end
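
In specs this factory can then be used in the usual FactoryBot way, for
example (illustrative only):

    # Builds an unsaved BulkImport with an associated user and a
    # :gitlab source_type.
    bulk_import = build(:bulk_import)

    # Persists a record; the state machine sets status to :created (0).
    created_import = create(:bulk_import)
    created_import.status # => 0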
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe BulkImport, type: :model do
  describe 'associations' do
    it { is_expected.to belong_to(:user).required }
  end

  describe 'validations' do
    it { is_expected.to validate_presence_of(:source_type) }
    it { is_expected.to validate_presence_of(:status) }
    it { is_expected.to define_enum_for(:source_type).with_values(%i[gitlab]) }
  end
end