Commit 37ac8f83 authored by Andrejs Cunskis, committed by Mark Lapierre

E2E: Test large github import

parent ef4d5c50
@@ -14,11 +14,11 @@ module QA
        end
      end

-      attribute :id
-      attribute :iid
-      attribute :assignee_ids
-      attribute :labels
-      attribute :title
+      attributes :id,
+                 :iid,
+                 :assignee_ids,
+                 :labels,
+                 :title

      def initialize
        @assignee_ids = []
@@ -41,13 +41,21 @@ module QA
      end

      def api_get_path
-        "/projects/#{project.id}/issues/#{id}"
+        "/projects/#{project.id}/issues/#{iid}"
      end

      def api_post_path
        "/projects/#{project.id}/issues"
      end

+      def api_put_path
+        "/projects/#{project.id}/issues/#{iid}"
+      end
+
+      def api_comments_path
+        "#{api_get_path}/notes"
+      end
+
      def api_post_body
        {
          assignee_ids: assignee_ids,
@@ -59,20 +67,28 @@ module QA
        end
      end

-      def api_put_path
-        "/projects/#{project.id}/issues/#{iid}"
-      end
-
      def set_issue_assignees(assignee_ids:)
        put_body = { assignee_ids: assignee_ids }
        response = put Runtime::API::Request.new(api_client, api_put_path).url, put_body

        unless response.code == HTTP_STATUS_OK
-          raise ResourceUpdateFailedError, "Could not update issue assignees to #{assignee_ids}. Request returned (#{response.code}): `#{response}`."
+          raise(
+            ResourceUpdateFailedError,
+            "Could not update issue assignees to #{assignee_ids}. Request returned (#{response.code}): `#{response}`."
+          )
        end

        QA::Runtime::Logger.debug("Successfully updated issue assignees to #{assignee_ids}")
      end
+
+      # Get issue comments
+      #
+      # @return [Array]
+      def comments(auto_paginate: false)
+        return parse_body(api_get_from(api_comments_path)) unless auto_paginate
+
+        auto_paginated_response(Runtime::API::Request.new(api_client, api_comments_path, per_page: '100').url)
+      end
    end
  end
end
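The hunk above gives the issue resource an auto-paginating comments accessor built on the new api_comments_path. A minimal usage sketch, assuming a project and api_client fabricated elsewhere (both names are placeholders, not part of this change):

  issue = QA::Resource::Issue.fabricate_via_api! do |issue|
    issue.project = project        # hypothetical, fabricated elsewhere
    issue.api_client = api_client  # hypothetical, fabricated elsewhere
    issue.title = 'Pagination example'
  end

  issue.comments                       # single GET of .../notes, first page only
  issue.comments(auto_paginate: true)  # follows every page, 100 notes per request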
...@@ -160,9 +160,10 @@ module QA ...@@ -160,9 +160,10 @@ module QA
# Get MR comments # Get MR comments
# #
# @return [Array] # @return [Array]
def comments def comments(auto_paginate: false)
response = get(Runtime::API::Request.new(api_client, api_comments_path).url) return parse_body(api_get_from(api_comments_path)) unless auto_paginate
parse_body(response)
auto_paginated_response(Runtime::API::Request.new(api_client, api_comments_path, per_page: '100').url)
end end
private private
......
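Merge request comments follow the same pattern. The spec further below initialises the resource from an already imported merge request rather than fabricating one; a sketch under those assumptions (imported_project, api_client and the iid are placeholders taken from elsewhere):

  mr = Resource::MergeRequest.init do |mr|
    mr.project = imported_project  # hypothetical existing project
    mr.iid = 42                    # hypothetical merge request iid
    mr.api_client = api_client
  end

  mr.comments(auto_paginate: true).reject { |c| c[:system] }  # all human comments across pages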
@@ -32,7 +32,7 @@ module QA
      end

      attribute :path_with_namespace do
-        "#{sandbox_path}#{group.path}/#{name}" if group
+        "#{group.full_path}/#{name}"
      end

      alias_method :full_path, :path_with_namespace
@@ -268,14 +268,16 @@ module QA
        result[:import_status]
      end

-      def commits
-        response = get(request_url(api_commits_path))
-        parse_body(response)
+      def commits(auto_paginate: false)
+        return parse_body(api_get_from(api_commits_path)) unless auto_paginate
+
+        auto_paginated_response(request_url(api_commits_path, per_page: '100'))
      end

-      def merge_requests
-        response = get(request_url(api_merge_requests_path))
-        parse_body(response)
+      def merge_requests(auto_paginate: false)
+        return parse_body(api_get_from(api_merge_requests_path)) unless auto_paginate
+
+        auto_paginated_response(request_url(api_merge_requests_path, per_page: '100'))
      end

      def merge_request_with_title(title)
@@ -299,9 +301,10 @@ module QA
        parse_body(response)
      end

-      def repository_branches
-        response = get(request_url(api_repository_branches_path))
-        parse_body(response)
+      def repository_branches(auto_paginate: false)
+        return parse_body(api_get_from(api_repository_branches_path)) unless auto_paginate
+
+        auto_paginated_response(request_url(api_repository_branches_path, per_page: '100'))
      end

      def repository_tags
@@ -324,19 +327,22 @@ module QA
        parse_body(response)
      end

-      def issues
-        response = get(request_url(api_issues_path))
-        parse_body(response)
+      def issues(auto_paginate: false)
+        return parse_body(api_get_from(api_issues_path)) unless auto_paginate
+
+        auto_paginated_response(request_url(api_issues_path, per_page: '100'))
      end

-      def labels
-        response = get(request_url(api_labels_path))
-        parse_body(response)
+      def labels(auto_paginate: false)
+        return parse_body(api_get_from(api_labels_path)) unless auto_paginate
+
+        auto_paginated_response(request_url(api_labels_path, per_page: '100'))
      end

-      def milestones
-        response = get(request_url(api_milestones_path))
-        parse_body(response)
+      def milestones(auto_paginate: false)
+        return parse_body(api_get_from(api_milestones_path)) unless auto_paginate
+
+        auto_paginated_response(request_url(api_milestones_path, per_page: '100'))
      end

      def wikis
...
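Each project collection accessor now takes the same auto_paginate flag, defaulting to the old single-request behaviour. A rough sketch, assuming project is a fabricated Resource::Project (the variable name is a placeholder):

  project.commits                         # first page only, as before
  project.issues(auto_paginate: true)     # every issue, fetched 100 per page
  project.labels(auto_paginate: true).map { |label| label[:name] }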
# frozen_string_literal: true

require 'octokit'
require 'parallel'

# rubocop:disable Rails/Pluck
module QA
  # Only executes in custom job/pipeline
  RSpec.describe 'Manage', :github, :requires_admin, only: { job: 'large-github-import' } do
    describe 'Project import' do
      let(:api_client) { Runtime::API::Client.as_admin }

      let(:group) do
        Resource::Group.fabricate_via_api! do |resource|
          resource.api_client = api_client
        end
      end

      let(:user) do
        Resource::User.fabricate_via_api! do |resource|
          resource.api_client = api_client
          resource.hard_delete_on_api_removal = true
        end
      end

      let(:differ) { RSpec::Support::Differ.new(color: true) }

      let(:github_repo) { 'allure-framework/allure-ruby' }

      let(:github_client) do
        Octokit.middleware = Faraday::RackBuilder.new do |builder|
          builder.response(:logger, Runtime::Logger.logger, headers: false, bodies: false)
        end

        Octokit::Client.new(access_token: Runtime::Env.github_access_token, auto_paginate: true)
      end

      let(:gh_branches) { github_client.branches(github_repo).map(&:name) }
      let(:gh_commits) { github_client.commits(github_repo).map(&:sha) }
      let(:gh_repo) { github_client.repository(github_repo) }
      let(:gh_labels) { github_client.labels(github_repo) }
      let(:gh_milestones) { github_client.list_milestones(github_repo, state: 'all') }

      let(:gh_all_issues) do
        github_client.list_issues(github_repo, state: 'all')
      end

      let(:gh_prs) do
        gh_all_issues.select(&:pull_request).each_with_object({}) do |pr, hash|
          hash[pr.title] = {
            body: pr.body || '',
            comments: [*gh_pr_comments[pr.html_url], *gh_issue_comments[pr.html_url]].compact.sort
          }
        end
      end

      let(:gh_issues) do
        gh_all_issues.reject(&:pull_request).each_with_object({}) do |issue, hash|
          hash[issue.title] = {
            body: issue.body || '',
            comments: gh_issue_comments[issue.html_url]
          }
        end
      end

      let(:gh_issue_comments) do
        github_client.issues_comments(github_repo).each_with_object(Hash.new { |h, k| h[k] = [] }) do |c, hash|
          hash[c.html_url.gsub(/\#\S+/, "")] << c.body # use base html url as key
        end
      end

      let(:gh_pr_comments) do
        github_client.pull_requests_comments(github_repo).each_with_object(Hash.new { |h, k| h[k] = [] }) do |c, hash|
          hash[c.html_url.gsub(/\#\S+/, "")] << c.body # use base html url as key
        end
      end

      let(:imported_project) do
        Resource::ProjectImportedFromGithub.fabricate_via_api! do |project|
          project.add_name_uuid = false
          project.name = 'imported-project'
          project.group = group
          project.github_personal_access_token = Runtime::Env.github_access_token
          project.github_repository_path = github_repo
          project.api_client = api_client
        end
      end

      before do
        group.add_member(user, Resource::Members::AccessLevel::MAINTAINER)
      end
      it 'imports large Github repo via api' do
        imported_project # import the project
        fetch_github_objects # fetch all objects right after import has started

        expect { imported_project.reload!.import_status }.to eventually_eq('finished').within(
          duration: 3600,
          interval: 30
        )

        aggregate_failures do
          verify_repository_import
          verify_merge_requests_import
          verify_issues_import
          verify_labels_import
          verify_milestones_import
        end
      end

      # Persist all objects from repository being imported
      #
      # @return [void]
      def fetch_github_objects
        Runtime::Logger.debug("Fetching objects for github repo: '#{github_repo}'")

        gh_repo
        gh_branches
        gh_commits
        gh_prs
        gh_issues
        gh_labels
        gh_milestones
      end

      # Verify repository imported correctly
      #
      # @return [void]
      def verify_repository_import
        branches = imported_project.repository_branches(auto_paginate: true).map { |b| b[:name] }
        commits = imported_project.commits(auto_paginate: true).map { |c| c[:id] }

        expect(imported_project.description).to eq(gh_repo.description)
        # check via include, importer creates more branches
        # https://gitlab.com/gitlab-org/gitlab/-/issues/332711
        expect(branches).to include(*gh_branches)
        expect(commits).to match_array(gh_commits)
      end

      # Verify imported merge requests and mr comments
      #
      # @return [void]
      def verify_merge_requests_import
        verify_mrs_or_issues('mrs')
      end

      # Verify imported issues and issue comments
      #
      # @return [void]
      def verify_issues_import
        verify_mrs_or_issues('issues')
      end

      # Verify imported labels
      #
      # @return [void]
      def verify_labels_import
        labels = imported_project.labels(auto_paginate: true).map { |label| label.slice(:name, :color) }
        actual_labels = gh_labels.map { |label| { name: label.name, color: "##{label.color}" } }

        expect(labels.length).to eq(actual_labels.length)
        expect(labels).to match_array(actual_labels)
      end

      # Verify milestones import
      #
      # @return [void]
      def verify_milestones_import
        milestones = imported_project.milestones(auto_paginate: true).map { |ms| ms.slice(:title, :description) }
        actual_milestones = gh_milestones.map { |ms| { title: ms.title, description: ms.description } }

        expect(milestones.length).to eq(actual_milestones.length)
        expect(milestones).to match_array(actual_milestones)
      end
      private

      # Verify imported mrs or issues
      #
      # @param [String] type verification object, 'mrs' or 'issues'
      # @return [void]
      def verify_mrs_or_issues(type)
        msg = ->(title) { "expected #{type} with title '#{title}' to have" }
        expected = type == 'mrs' ? mrs : gl_issues
        actual = type == 'mrs' ? gh_prs : gh_issues

        expect(expected.keys).to match_array(actual.keys)
        actual.each do |title, actual_item|
          expected_item = expected[title]

          expect(expected_item).to be_truthy, "#{msg.call(title)} been imported"
          next unless expected_item

          expect(expected_item[:body]).to(
            include(actual_item[:body]),
            "#{msg.call(title)} same description. #{diff(expected_item[:body], actual_item[:body])}"
          )
          expect(expected_item[:comments].length).to(
            eq(actual_item[:comments].length),
            "#{msg.call(title)} same amount of comments"
          )
          expect(expected_item[:comments]).to match_array(actual_item[:comments])
        end
      end

      # Imported project merge requests
      #
      # @return [Hash]
      def mrs
        @mrs ||= begin
          imported_mrs = imported_project.merge_requests(auto_paginate: true)
          # fetch comments in parallel since we need to do it for each mr separately
          mrs_hashes = Parallel.map(imported_mrs, in_processes: 5) do |mr|
            resource = Resource::MergeRequest.init do |resource|
              resource.project = imported_project
              resource.iid = mr[:iid]
              resource.api_client = api_client
            end

            {
              title: mr[:title],
              body: mr[:description],
              comments: resource.comments(auto_paginate: true)
                # remove system notes
                .reject { |c| c[:system] || c[:body].match?(/^(\*\*Review:\*\*)|(\*Merged by:).*/) }
                .map { |c| sanitize(c[:body]) }
            }
          end

          mrs_hashes.each_with_object({}) do |mr, hash|
            hash[mr[:title]] = {
              body: mr[:body],
              comments: mr[:comments]
            }
          end
        end
      end

      # Imported project issues
      #
      # @return [Hash]
      def gl_issues
        @gl_issues ||= begin
          imported_issues = imported_project.issues(auto_paginate: true)
          # fetch comments in parallel since we need to do it for each issue separately
          issue_hashes = Parallel.map(imported_issues, in_processes: 5) do |issue|
            resource = Resource::Issue.init do |issue_resource|
              issue_resource.project = imported_project
              issue_resource.iid = issue[:iid]
              issue_resource.api_client = api_client
            end

            {
              title: issue[:title],
              body: issue[:description],
              comments: resource.comments(auto_paginate: true).map { |c| sanitize(c[:body]) }
            }
          end

          issue_hashes.each_with_object({}) do |issue, hash|
            hash[issue[:title]] = {
              body: issue[:body],
              comments: issue[:comments]
            }
          end
        end
      end

      # Remove prefixes added by the importer
      #
      # @param [String] body
      # @return [String]
      def sanitize(body)
        body.gsub(/\*Created by: \S+\*\n\n/, "")
      end

      # Diff of 2 objects
      #
      # @param [Object] actual
      # @param [Object] expected
      # @return [String]
      def diff(actual, expected)
        "diff:\n#{differ.diff(actual, expected)}"
      end
    end
  end
end
# rubocop:enable Rails/Pluck
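For illustration, the sanitize helper above strips the author prefix that the GitHub importer prepends to imported notes before the comment bodies are compared; the body below is a made-up example:

  sanitize("*Created by: octocat*\n\nLooks good to me") # => "Looks good to me"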
@@ -79,11 +79,20 @@ module QA
        error.response
      end

+      def auto_paginated_response(url)
+        pages = []
+        with_paginated_response_body(url) { |response| pages << response }
+
+        pages.flatten
+      end
+
      def with_paginated_response_body(url)
        loop do
          response = get(url)

-          QA::Runtime::Logger.debug("Fetching page #{response.headers[:x_page]} of #{response.headers[:x_total_pages]}...")
+          page, pages = response.headers.values_at(:x_page, :x_total_pages)
+          api_endpoint = url.match(%r{v4/(\S+)\?})[1]
+
+          QA::Runtime::Logger.debug("Fetching page (#{page}/#{pages}) for '#{api_endpoint}' ...") unless pages.to_i <= 1

          yield parse_body(response)
@@ -96,7 +105,7 @@ module QA
      def pagination_links(response)
        response.headers[:link].split(',').map do |link|
-          match = link.match(/\<(?<url>.*)\>\; rel=\"(?<rel>\w+)\"/)
+          match = link.match(/<(?<url>.*)>; rel="(?<rel>\w+)"/)
          break nil unless match

          { url: match[:url], rel: match[:rel] }
...
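The new auto_paginated_response simply collects every page yielded by with_paginated_response_body into one flat array. A rough usage sketch from inside a resource that includes this API support module, with api_client and the project id as placeholders:

  url = Runtime::API::Request.new(api_client, '/projects/1/merge_requests', per_page: '100').url
  all_mrs = auto_paginated_response(url) # array of merge request hashes across all pages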
@@ -24,6 +24,7 @@ module Matchers
      chain(:within) do |options = {}|
        @duration = options[:duration]
        @attempts = options[:attempts]
+        @interval = options[:interval]
      end

      def supports_block_expectations?
@@ -55,7 +56,7 @@ module Matchers
        QA::Support::Retrier.retry_until(
          max_attempts: @attempts,
          max_duration: @duration,
-          sleep_interval: 0.5
+          sleep_interval: @interval || 0.5
        ) do
          public_send(expectation_name, actual)
        rescue RSpec::Expectations::ExpectationNotMetError, QA::Resource::ApiFabricator::ResourceNotFoundError
...
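With the new :interval option the matcher can poll less aggressively than the default 0.5 second sleep, which suits a long-running import. The example mirrors the expectation used in the spec above, with shorter placeholder values:

  expect { imported_project.reload!.import_status }.to eventually_eq('finished').within(
    duration: 240,
    interval: 10
  )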