Commit 672e86ee authored by João Alexandre Cunha, committed by Mayra Cabrera

Resolve "GitLab Migration - Migrate Snippets Repositories"

parent c1f3d5e8
......@@ -197,6 +197,13 @@ class Snippet < ApplicationRecord
      Snippet.find_by(id: id, project: project)
    end

    def find_by_project_title_trunc_created_at(project, title, created_at)
      where(project: project, title: title)
        .find_by(
          "date_trunc('second', created_at at time zone :tz) at time zone :tz = :created_at",
          tz: created_at.zone, created_at: created_at)
    end

    def max_file_limit
      MAX_FILE_COUNT
    end
......
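A brief usage sketch of the new lookup (illustrative values only, not part of the diff): the SQL truncates the stored created_at to the second, in the zone of the supplied timestamp, so a timestamp parsed from a GraphQL createdAt string, which carries no sub-second precision, can still match a row stored with microseconds.

    # Hypothetical values; the method is the one added above.
    created_at = DateTime.parse('2021-05-04T12:30:45Z') # e.g. from data['createdAt']
    Snippet.find_by_project_title_trunc_created_at(project, 'my snippet title', created_at)
    # matches a snippet created at e.g. 2021-05-04 12:30:45.123456 UTC with that title and project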
......@@ -12,6 +12,7 @@ RSpec.describe BulkImports::Projects::Stage do
      [2, BulkImports::Common::Pipelines::BadgesPipeline],
      [3, BulkImports::Projects::Pipelines::IssuesPipeline],
      [3, BulkImports::Projects::Pipelines::SnippetsPipeline],
      [4, BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline],
      [4, BulkImports::Common::Pipelines::BoardsPipeline],
      [4, BulkImports::Projects::Pipelines::MergeRequestsPipeline],
      [4, BulkImports::Projects::Pipelines::ExternalPullRequestsPipeline],
......
# frozen_string_literal: true

module BulkImports
  module Projects
    module Graphql
      module GetSnippetRepositoryQuery
        extend Queryable
        extend self

        def to_s
          <<-'GRAPHQL'
          query($full_path: ID!) {
            project(fullPath: $full_path) {
              snippets {
                page_info: pageInfo {
                  next_page: endCursor
                  has_next_page: hasNextPage
                }
                nodes {
                  title
                  createdAt
                  httpUrlToRepo
                }
              }
            }
          }
          GRAPHQL
        end

        def variables(context)
          {
            full_path: context.entity.source_full_path,
            cursor: context.tracker.next_page,
            per_page: ::BulkImports::Tracker::DEFAULT_PAGE_SIZE
          }
        end

        def base_path
          %w[data project snippets]
        end

        def data_path
          base_path << 'nodes'
        end
      end
    end
  end
end
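For orientation, a sketch of how the paths above would be used to walk a GraphQL response, assuming the response is a plain nested Hash and that page_info_path is supplied by the Queryable mixin (it is exercised by the spec further down but not defined in this file):

    query = BulkImports::Projects::Graphql::GetSnippetRepositoryQuery
    response = {
      'data' => {
        'project' => {
          'snippets' => {
            'page_info' => { 'next_page' => nil, 'has_next_page' => false },
            'nodes' => [{ 'title' => 'example', 'createdAt' => '2021-05-04T12:30:45Z' }]
          }
        }
      }
    }

    response.dig(*query.data_path)      # => the 'nodes' array
    response.dig(*query.page_info_path) # => the 'page_info' hash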
# frozen_string_literal: true

module BulkImports
  module Projects
    module Pipelines
      class SnippetsRepositoryPipeline
        include Pipeline

        extractor Common::Extractors::GraphqlExtractor, query: Graphql::GetSnippetRepositoryQuery

        def transform(_context, data)
          data.tap do |d|
            d['createdAt'] = DateTime.parse(data['createdAt'])
          end
        end

        def load(context, data)
          return unless data['httpUrlToRepo'].present?

          oauth2_url = oauth2(data['httpUrlToRepo'])
          validate_url(oauth2_url)

          matched_snippet = find_matched_snippet(data)

          # Skip snippets for which we couldn't find a match, most likely because they
          # were created after the migration had already started, i.e. after the
          # SnippetsPipeline had already run.
          return unless matched_snippet

          matched_snippet.create_repository
          matched_snippet.repository.fetch_as_mirror(oauth2_url)

          response = Snippets::RepositoryValidationService.new(nil, matched_snippet).execute

          # Clean up the newly created repository if the fetched repository is invalid
          return cleanup_snippet_repository(matched_snippet) if response.error?

          Snippets::UpdateStatisticsService.new(matched_snippet).execute
        end

        private

        def find_matched_snippet(data)
          Snippet.find_by_project_title_trunc_created_at(
            context.portable, data['title'], data['createdAt'])
        end

        def allow_local_requests?
          Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
        end

        def oauth2(url)
          url.sub("://", "://oauth2:#{context.configuration.access_token}@")
        end

        def validate_url(url)
          Gitlab::UrlBlocker.validate!(
            url,
            allow_local_network: allow_local_requests?,
            allow_localhost: allow_local_requests?)
        end

        def cleanup_snippet_repository(snippet)
          snippet.repository.remove
          snippet.snippet_repository.delete
          snippet.repository.expire_exists_cache
        end
      end
    end
  end
end
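A worked example of the oauth2 URL rewrite the pipeline performs before mirroring, using a placeholder token (the real value comes from context.configuration.access_token); it mirrors the expectation in the pipeline spec further down:

    url = 'https://example.com/foo/bar/snippets/42.git'
    token = 'placeholder-token'

    url.sub("://", "://oauth2:#{token}@")
    # => "https://oauth2:placeholder-token@example.com/foo/bar/snippets/42.git"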
......@@ -35,6 +35,10 @@ module BulkImports
          pipeline: BulkImports::Projects::Pipelines::SnippetsPipeline,
          stage: 3
        },
        snippets_repository: {
          pipeline: BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline,
          stage: 4
        },
        boards: {
          pipeline: BulkImports::Common::Pipelines::BoardsPipeline,
          stage: 4
......
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe BulkImports::Projects::Graphql::GetSnippetRepositoryQuery do
  describe 'query repository based on full_path' do
    let_it_be(:entity) { create(:bulk_import_entity) }
    let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
    let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }

    it 'has a valid query' do
      query = GraphQL::Query.new(
        GitlabSchema,
        described_class.to_s,
        variables: described_class.variables(context)
      )
      result = GitlabSchema.static_validator.validate(query)

      expect(result[:errors]).to be_empty
    end

    it 'returns snippet httpUrlToRepo' do
      expect(described_class.to_s).to include('httpUrlToRepo')
    end

    it 'returns snippet createdAt' do
      expect(described_class.to_s).to include('createdAt')
    end

    it 'returns snippet title' do
      expect(described_class.to_s).to include('title')
    end

    describe '.variables' do
      it 'queries project based on source_full_path and pagination' do
        expected = { full_path: entity.source_full_path, cursor: nil, per_page: 500 }

        expect(described_class.variables(context)).to eq(expected)
      end
    end

    describe '.data_path' do
      it 'returns the data path' do
        expected = %w[data project snippets nodes]

        expect(described_class.data_path).to eq(expected)
      end
    end

    describe '.page_info_path' do
      it 'returns the pagination information path' do
        expected = %w[data project snippets page_info]

        expect(described_class.page_info_path).to eq(expected)
      end
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline do
  let(:user) { create(:user) }
  let(:project) { create(:project) }
  let(:bulk_import) { create(:bulk_import, user: user) }
  let(:bulk_import_configuration) { create(:bulk_import_configuration, bulk_import: bulk_import) }
  let!(:matched_snippet) { create(:snippet, project: project, created_at: "1981-12-13T23:59:59Z") }
  let(:entity) do
    create(
      :bulk_import_entity,
      :project_entity,
      project: project,
      bulk_import: bulk_import_configuration.bulk_import,
      source_full_path: 'source/full/path',
      destination_name: 'My Destination Project',
      destination_namespace: project.full_path
    )
  end

  let(:tracker) { create(:bulk_import_tracker, entity: entity) }
  let(:context) { BulkImports::Pipeline::Context.new(tracker) }

  subject(:pipeline) { described_class.new(context) }

  let(:http_url_to_repo) { 'https://example.com/foo/bar/snippets/42.git' }
  let(:data) do
    [
      {
        'title' => matched_snippet.title,
        'httpUrlToRepo' => http_url_to_repo,
        'createdAt' => matched_snippet.created_at.to_s
      }
    ]
  end

  let(:page_info) do
    {
      'next_page' => 'eyJpZCI6IjIyMDA2OTYifQ',
      'has_next_page' => false
    }
  end

  let(:extracted_data) { BulkImports::Pipeline::ExtractedData.new(data: data, page_info: page_info) }

  describe 'extractor' do
    it 'is a GraphqlExtractor with Graphql::GetSnippetRepositoryQuery' do
      expect(described_class.get_extractor).to eq(
        klass: BulkImports::Common::Extractors::GraphqlExtractor,
        options: {
          query: BulkImports::Projects::Graphql::GetSnippetRepositoryQuery
        })
    end
  end

  describe '#run' do
    let(:validation_response) { double(Hash, 'error?': false) }

    before do
      allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
        allow(extractor).to receive(:extract).and_return(extracted_data)
      end

      allow_next_instance_of(Snippets::RepositoryValidationService) do |repository_validation|
        allow(repository_validation).to receive(:execute).and_return(validation_response)
      end
    end

    shared_examples 'skippable snippet' do
      it 'does not create snippet repo' do
        pipeline.run

        expect(Gitlab::GlRepository::SNIPPET.repository_for(matched_snippet).exists?).to be false
      end
    end

    context 'when a snippet is not matched' do
      let(:data) do
        [
          {
            'title' => 'unmatched title',
            'httpUrlToRepo' => http_url_to_repo,
            'createdAt' => matched_snippet.created_at.to_s
          }
        ]
      end

      it_behaves_like 'skippable snippet'
    end

    context 'when httpUrlToRepo is empty' do
      let(:data) do
        [
          {
            'title' => matched_snippet.title,
            'createdAt' => matched_snippet.created_at.to_s
          }
        ]
      end

      it_behaves_like 'skippable snippet'
    end

    context 'when a snippet matches' do
      context 'when snippet url is valid' do
        it 'creates snippet repo' do
          expect { pipeline.run }
            .to change { Gitlab::GlRepository::SNIPPET.repository_for(matched_snippet).exists? }.to true
        end

        it 'updates snippets statistics' do
          allow_next_instance_of(Repository) do |repository|
            allow(repository).to receive(:fetch_as_mirror)
          end

          service = double(Snippets::UpdateStatisticsService)

          expect(Snippets::UpdateStatisticsService).to receive(:new).with(kind_of(Snippet)).and_return(service)
          expect(service).to receive(:execute)

          pipeline.run
        end

        it 'fetches snippet repo from url' do
          expect_next_instance_of(Repository) do |repository|
            expect(repository)
              .to receive(:fetch_as_mirror)
              .with("https://oauth2:#{bulk_import_configuration.access_token}@example.com/foo/bar/snippets/42.git")
          end

          pipeline.run
        end
      end

      context 'when url is invalid' do
        let(:http_url_to_repo) { 'http://0.0.0.0' }

        it_behaves_like 'skippable snippet'
      end

      context 'when snippet is invalid' do
        let(:validation_response) { double(Hash, 'error?': true) }

        before do
          allow_next_instance_of(Repository) do |repository|
            allow(repository).to receive(:fetch_as_mirror)
          end
        end

        it 'does not leave a hanging SnippetRepository behind' do
          pipeline.run

          expect(SnippetRepository.where(snippet_id: matched_snippet.id).exists?).to be false
        end

        it 'does not call UpdateStatisticsService' do
          expect(Snippets::UpdateStatisticsService).not_to receive(:new)

          pipeline.run
        end

        it_behaves_like 'skippable snippet'
      end
    end
  end
end
......@@ -14,6 +14,7 @@ RSpec.describe BulkImports::Projects::Stage do
      [2, BulkImports::Common::Pipelines::BadgesPipeline],
      [3, BulkImports::Projects::Pipelines::IssuesPipeline],
      [3, BulkImports::Projects::Pipelines::SnippetsPipeline],
      [4, BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline],
      [4, BulkImports::Common::Pipelines::BoardsPipeline],
      [4, BulkImports::Projects::Pipelines::MergeRequestsPipeline],
      [4, BulkImports::Projects::Pipelines::ExternalPullRequestsPipeline],
......
......@@ -403,6 +403,51 @@ RSpec.describe Snippet do
    end
  end

  describe '.find_by_project_title_trunc_created_at' do
    let_it_be(:snippet) { create(:snippet) }
    let_it_be(:created_at_without_ms) { snippet.created_at.change(usec: 0) }

    it 'returns a record if arguments match' do
      result = described_class.find_by_project_title_trunc_created_at(
        snippet.project,
        snippet.title,
        created_at_without_ms
      )

      expect(result).to eq(snippet)
    end

    it 'returns nil if project does not match' do
      result = described_class.find_by_project_title_trunc_created_at(
        'unmatched project',
        snippet.title,
        created_at_without_ms # created_at matches; only the project differs
      )

      expect(result).to be(nil)
    end

    it 'returns nil if title does not match' do
      result = described_class.find_by_project_title_trunc_created_at(
        snippet.project,
        'unmatched title',
        created_at_without_ms # created_at matches; only the title differs
      )

      expect(result).to be(nil)
    end

    it 'returns nil if created_at does not match' do
      result = described_class.find_by_project_title_trunc_created_at(
        snippet.project,
        snippet.title,
        snippet.created_at # sub-second precision prevents a match
      )

      expect(result).to be(nil)
    end
  end

  describe '#participants' do
    let_it_be(:project) { create(:project, :public) }
    let_it_be(:snippet) { create(:snippet, content: 'foo', project: project) }
......