Commit 02ce9400 authored by Mark Lapierre

Merge branch 'acunskis-large-github-import-improvements-v2' into 'master'

E2E: Add import run time data and improve skip logic for large github import

See merge request gitlab-org/gitlab!67001
parents 38c501ee 5a7f6f36
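Note on the diff that follows: the spec now records how long the import takes, only saves comparison data when the import actually finished, and trims per-item debug logging. A minimal, self-contained sketch of the timing plus conditional-save pattern is below; the file name `data.json` and the `sleep` stand-in are illustrative, not part of the MR.

# Sketch only: timing + conditional save, in the shape used by the spec below.
require 'rspec/autorun'
require 'json'

RSpec.describe 'import timing sketch' do
  after do |example|
    # skip saving data if example is skipped or failed before import finished
    next if example.pending?
    next unless defined?(@import_time)

    File.write('data.json', JSON.pretty_generate(import_time: @import_time))
  end

  it 'records how long the import took' do
    start = Time.now
    sleep 0.1 # stand-in for waiting until the GitHub import finishes
    @import_time = Time.now - start

    expect(@import_time).to be > 0
  end
end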
 # frozen_string_literal: true

 require 'octokit'
-require 'parallel'

 # rubocop:disable Rails/Pluck
 module QA
@@ -21,11 +20,10 @@ module QA
       let(:user) do
         Resource::User.fabricate_via_api! do |resource|
           resource.api_client = api_client
-          resource.hard_delete_on_api_removal = true
         end
       end

-      let(:github_repo) { 'rspec/rspec-core' }
+      let(:github_repo) { ENV['QA_LARGE_GH_IMPORT_REPO'] || 'rspec/rspec-core' }

       let(:github_client) do
         Octokit.middleware = Faraday::RackBuilder.new do |builder|
           builder.response(:logger, logger, headers: false, bodies: false)
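The repository under test becomes configurable through an environment variable, falling back to the original default. The same pattern in isolation (the script body is illustrative; the variable name comes from the diff):

# ENV-driven default, as used for github_repo above.
github_repo = ENV['QA_LARGE_GH_IMPORT_REPO'] || 'rspec/rspec-core'
puts "importing #{github_repo}"

# QA_LARGE_GH_IMPORT_REPO=rails/rails ruby env_default_sketch.rb
# => importing rails/rails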
@@ -98,13 +96,19 @@ module QA
       end

       after do |example|
+        # skip saving data if example is skipped or failed before import finished
         next if example.pending?
-        user.remove_via_api!
+        next unless defined?(@import_time)

         # save data for comparison after run finished
         save_json(
           "data",
           {
+            import_time: @import_time,
             github: {
+              project_name: github_repo,
               branches: gh_branches,
               commits: gh_commits,
               labels: gh_labels,
@@ -113,6 +117,7 @@
               issues: gh_issues
             },
             gitlab: {
+              project_name: imported_project.path_with_namespace,
               branches: gl_branches,
               commits: gl_commits,
               labels: gl_labels,
@@ -125,6 +130,8 @@ module QA
       end

       it 'imports large Github repo via api' do
+        start = Time.now
+
         imported_project # import the project
         fetch_github_objects # fetch all objects right after import has started
@@ -132,6 +139,7 @@
           duration: 3600,
           interval: 30
         )
+        @import_time = Time.now - start

         aggregate_failures do
           verify_repository_import
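The run time is captured by taking a timestamp before the import starts and another once the wait for completion returns. A standalone sketch of that bracket, with a plain polling loop standing in for the spec's eventually-style wait:

# Sketch: bracket a polled wait with timestamps to measure elapsed time.
# import_finished is a stand-in for the real "project imported" check.
attempts = 0
import_finished = lambda do
  attempts += 1
  attempts >= 3
end

start = Time.now
sleep 0.1 until import_finished.call # the spec polls every 30s for up to 3600s
import_time = Time.now - start

puts format('import finished in %.2f seconds after %d checks', import_time, attempts)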
@@ -146,7 +154,7 @@
       #
       # @return [void]
       def fetch_github_objects
-        logger.debug("Fetching objects for github repo: '#{github_repo}'")
+        logger.debug("== Fetching objects for github repo: '#{github_repo}' ==")
         gh_repo
         gh_branches
@@ -161,7 +169,7 @@
       #
       # @return [void]
       def verify_repository_import
-        logger.debug("Verifying repository import")
+        logger.debug("== Verifying repository import ==")
         expect(imported_project.description).to eq(gh_repo.description)
         # check via include, importer creates more branches
         # https://gitlab.com/gitlab-org/gitlab/-/issues/332711
@@ -173,7 +181,7 @@
       #
       # @return [void]
       def verify_merge_requests_import
-        logger.debug("Verifying merge request import")
+        logger.debug("== Verifying merge request import ==")
         verify_mrs_or_issues('mr')
       end
@@ -181,7 +189,7 @@
       #
       # @return [void]
       def verify_issues_import
-        logger.debug("Verifying issue import")
+        logger.debug("== Verifying issue import ==")
         verify_mrs_or_issues('issue')
       end
@@ -189,15 +197,16 @@
       #
       # @return [void]
       def verify_labels_import
-        logger.debug("Verifying label import")
-        expect(gl_labels).to match_array(gh_labels)
+        logger.debug("== Verifying label import ==")
+        # check via include, additional labels can be inherited from parent group
+        expect(gl_labels).to include(*gh_labels)
       end

       # Verify milestones import
       #
       # @return [void]
       def verify_milestones_import
-        logger.debug("Verifying milestones import")
+        logger.debug("== Verifying milestones import ==")
         expect(gl_milestones).to match_array(gh_milestones)
       end
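The label assertion above is relaxed from an exact match to a subset check, because the target group can contribute labels that the GitHub repo never had. The difference between the two RSpec matchers, shown on plain hashes:

require 'rspec/autorun'

RSpec.describe 'label matcher sketch' do
  let(:gh_labels) { [{ name: 'bug', color: '#d73a4a' }] }
  let(:gl_labels) { gh_labels + [{ name: 'inherited-from-group', color: '#428bca' }] }

  it 'include(*expected) tolerates extra inherited labels' do
    expect(gl_labels).to include(*gh_labels)
  end

  it 'match_array requires exactly the same elements' do
    expect(gl_labels).not_to match_array(gh_labels) # the extra label breaks equality
  end
end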
@@ -217,8 +226,9 @@
           eq(actual.length),
           "Expected to contain same amount of #{type}s. Expected: #{expected.length}, actual: #{actual.length}"
         )

+        logger.debug("= Comparing #{type}s =")
         actual.each do |title, actual_item|
-          logger.debug("Comparing #{type} with title '#{title}'")
+          print "." # indicate that it is still going but don't spam the output with newlines
           expected_item = expected[title]
@@ -235,34 +245,47 @@
           )
           expect(expected_item[:comments]).to match_array(actual_item[:comments])
         end
+        puts # print newline after last print to make output pretty
       end

       # Imported project branches
       #
       # @return [Array]
       def gl_branches
-        @gl_branches ||= imported_project.repository_branches(auto_paginate: true).map { |b| b[:name] }
+        @gl_branches ||= begin
+          logger.debug("= Fetching branches =")
+          imported_project.repository_branches(auto_paginate: true).map { |b| b[:name] }
+        end
       end

       # Imported project commits
       #
       # @return [Array]
       def gl_commits
-        @gl_commits ||= imported_project.commits(auto_paginate: true).map { |c| c[:id] }
+        @gl_commits ||= begin
+          logger.debug("= Fetching commits =")
+          imported_project.commits(auto_paginate: true).map { |c| c[:id] }
+        end
       end

       # Imported project labels
       #
       # @return [Array]
       def gl_labels
-        @gl_labels ||= imported_project.labels(auto_paginate: true).map { |label| label.slice(:name, :color) }
+        @gl_labels ||= begin
+          logger.debug("= Fetching labels =")
+          imported_project.labels(auto_paginate: true).map { |label| label.slice(:name, :color) }
+        end
       end

       # Imported project milestones
       #
       # @return [<Type>] <description>
       def gl_milestones
-        @gl_milestones ||= imported_project.milestones(auto_paginate: true).map { |ms| ms.slice(:title, :description) }
+        @gl_milestones ||= begin
+          logger.debug("= Fetching milestones =")
+          imported_project.milestones(auto_paginate: true).map { |ms| ms.slice(:title, :description) }
+        end
       end

       # Imported project merge requests
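Each GitLab-side collection above is wrapped in a memoized `@var ||= begin ... end` block, so the fetch runs (and logs its "Fetching" line) only once, however many verifications read it. The shape of that pattern, with a fake fetcher in place of the project API:

require 'logger'

# Memoized fetch that logs only on the first call, mirroring the
# `@gl_branches ||= begin ... end` blocks above. The fetcher is fake.
class BranchCache
  def initialize(logger)
    @logger = logger
  end

  def branches
    @branches ||= begin
      @logger.debug('= Fetching branches =')
      expensive_fetch
    end
  end

  private

  def expensive_fetch
    sleep 0.1 # stand-in for the paginated API call
    %w[main develop]
  end
end

cache = BranchCache.new(Logger.new($stdout))
cache.branches # logs and fetches
cache.branches # served from memory, nothing logged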
@@ -270,19 +293,17 @@
       # @return [Hash]
       def mrs
         @mrs ||= begin
-          logger.debug("Fetching merge requests")
+          logger.debug("= Fetching merge requests =")
           imported_mrs = imported_project.merge_requests(auto_paginate: true)
-          # fetch comments in parallel since we need to do it for each mr separately
-          logger.debug("Transforming merge request objects for comparison")
-          mrs_hashes = Parallel.map(imported_mrs) do |mr|
+          logger.debug("= Transforming merge request objects for comparison =")
+          imported_mrs.each_with_object({}) do |mr, hash|
             resource = Resource::MergeRequest.init do |resource|
               resource.project = imported_project
               resource.iid = mr[:iid]
               resource.api_client = api_client
             end

-            {
-              title: mr[:title],
+            hash[mr[:title]] = {
               body: mr[:description],
               comments: resource.comments(auto_paginate: true)
                 # remove system notes
@@ -290,13 +311,6 @@
                 .map { |c| sanitize(c[:body]) }
             }
           end
-
-          mrs_hashes.each_with_object({}) do |mr, hash|
-            hash[mr[:title]] = {
-              body: mr[:body],
-              comments: mr[:comments]
-            }
-          end
         end
       end
@@ -305,30 +319,21 @@
       # @return [Hash]
       def gl_issues
         @gl_issues ||= begin
-          logger.debug("Fetching issues")
+          logger.debug("= Fetching issues =")
           imported_issues = imported_project.issues(auto_paginate: true)
-          # fetch comments in parallel since we need to do it for each mr separately
-          logger.debug("Transforming issue objects for comparison")
-          issue_hashes = Parallel.map(imported_issues) do |issue|
+          logger.debug("= Transforming issue objects for comparison =")
+          imported_issues.each_with_object({}) do |issue, hash|
             resource = Resource::Issue.init do |issue_resource|
               issue_resource.project = imported_project
               issue_resource.iid = issue[:iid]
               issue_resource.api_client = api_client
             end

-            {
-              title: issue[:title],
+            hash[issue[:title]] = {
               body: issue[:description],
               comments: resource.comments(auto_paginate: true).map { |c| sanitize(c[:body]) }
             }
           end
-
-          issue_hashes.each_with_object({}) do |issue, hash|
-            hash[issue[:title]] = {
-              body: issue[:body],
-              comments: issue[:comments]
-            }
-          end
         end
       end
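The merge request and issue helpers above drop the Parallel.map pass plus a second aggregation loop in favour of a single each_with_object pass keyed by title. A reduced version, with plain hashes standing in for the API objects:

# Sketch: build a title-keyed hash in one pass with each_with_object,
# replacing a map step followed by a separate aggregation loop.
imported_mrs = [
  { iid: 1, title: 'Fix flaky spec', description: 'retry on timeout' },
  { iid: 2, title: 'Add docs',       description: 'README update' }
]

mrs = imported_mrs.each_with_object({}) do |mr, hash|
  hash[mr[:title]] = {
    body: mr[:description],
    comments: [] # the real helper fetches and sanitizes comments per MR here
  }
end

p mrs.keys        # => ["Fix flaky spec", "Add docs"]
p mrs['Add docs'] # => {:body=>"README update", :comments=>[]}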
@@ -55,12 +55,13 @@ module Matchers
       def wait_and_check(actual, expectation_name)
         attempt = 0

+        QA::Runtime::Logger.debug("Running eventually matcher with '#{operator_msg}' operator")
         QA::Support::Retrier.retry_until(
           max_attempts: @attempts,
           max_duration: @duration,
           sleep_interval: @interval || 0.5
         ) do
-          QA::Runtime::Logger.debug("Evaluating expectation '#{operator_msg}', attempt: #{attempt += 1}")
+          QA::Runtime::Logger.debug("evaluating expectation, attempt: #{attempt += 1}")
           public_send(expectation_name, actual)
         rescue RSpec::Expectations::ExpectationNotMetError, QA::Resource::ApiFabricator::ResourceNotFoundError
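The matcher change logs the comparison operator once before retrying and keeps the per-attempt line short. The attempt-counting retry shape, with a local helper standing in for QA::Support::Retrier.retry_until (a simplified sketch, not the framework's API):

# Sketch of the attempt-counting retry used by the eventually matcher.
def retry_until(max_attempts:, sleep_interval:)
  attempts = 0
  loop do
    attempts += 1
    return if yield(attempts)
    raise 'retries exhausted' if attempts >= max_attempts

    sleep sleep_interval
  end
end

puts "Running eventually matcher with '=~' operator"
retry_until(max_attempts: 5, sleep_interval: 0.1) do |attempt|
  puts "evaluating expectation, attempt: #{attempt}"
  attempt >= 3 # the expectation starts passing on the third try
end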