Commit a315961d authored by Toon Claes's avatar Toon Claes

Remove /geo/receive_events API endpoint

And all related services and workers.
parent 26e1561d
......@@ -72,10 +72,6 @@ class GeoNode < ActiveRecord::Base
geo_api_url('refresh_wikis')
end
# URL of this node's Geo events API endpoint (removed by this commit).
def geo_events_url
geo_api_url('receive_events')
end
# URL for transferring a replicated file of the given type/id from this node.
def geo_transfers_url(file_type, file_id)
geo_api_url("transfers/#{file_type}/#{file_id}")
end
......
module Geo
# Fetches/refreshes a project's repository from its Geo mirror, guarded by
# an exclusive lease so only one sync per project runs at a time.
class RepositoryUpdateService
include Gitlab::Geo::ProjectLogHelpers
attr_reader :project, :clone_url, :logger
# Lease is held for at most one hour; it is released in try_obtain_lease's
# ensure block once the sync finishes.
LEASE_TIMEOUT = 1.hour.freeze
LEASE_KEY_PREFIX = 'geo_repository_fetch'.freeze
# project   - the Project whose repository is synced
# clone_url - URL to fetch the mirror from
# logger    - defaults to Rails.logger
def initialize(project, clone_url, logger = Rails.logger)
@project = project
@clone_url = clone_url
@logger = logger
end
# Creates the repository if missing, fetches from the Geo mirror, then
# expires repository caches. Shell and NoRepository errors are logged,
# not raised.
def execute
try_obtain_lease do
project.create_repository unless project.repository_exists?
project.repository.after_create if project.empty_repo?
project.repository.fetch_geo_mirror(clone_url)
project.repository.expire_all_method_caches
project.repository.expire_branch_cache
project.repository.expire_content_cache
end
rescue Gitlab::Shell::Error => e
log_error('Error fetching repository for project', e)
rescue Gitlab::Git::Repository::NoRepository => e
log_error('Error invalid repository', e)
log_info('Invalidating cache for project')
# after_create re-initializes repository caches for the broken repo.
project.repository.after_create
end
private
# Runs the block only when the per-project lease can be obtained; always
# cancels the lease afterwards. Silently returns when the lease is taken.
def try_obtain_lease
log_info('Trying to obtain lease to sync repository')
repository_lease = Gitlab::ExclusiveLease.new(lease_key, timeout: LEASE_TIMEOUT).try_obtain
unless repository_lease.present?
log_info('Could not obtain lease to sync repository')
return
end
begin
yield
ensure
log_info('Releasing leases to sync repository')
Gitlab::ExclusiveLease.cancel(lease_key, repository_lease)
end
end
# Lease key is scoped per project id.
def lease_key
@lease_key ||= "#{LEASE_KEY_PREFIX}:#{project.id}"
end
end
end
module Geo
# Schedules an asynchronous SSH-key refresh (create/destroy) on this node.
class ScheduleKeyChangeService
attr_reader :id, :key, :action
# params: system-hook payload with string keys 'id', 'key' and 'event_name'.
def initialize(params)
@id, @key, @action = params.values_at('id', 'key', 'event_name')
end
# Enqueues the key refresh worker; returns the Sidekiq jid (truthy).
def execute
GeoKeyRefreshWorker.perform_async(id, key, action)
end
end
end
module Geo
# Schedules creation of a project's repository on this secondary node.
class ScheduleRepoCreateService
attr_reader :id
# params: system-hook payload; only the string key 'project_id' is used.
def initialize(params)
@id = params['project_id']
end
# Enqueues the create worker; returns the Sidekiq jid (truthy).
def execute
GeoRepositoryCreateWorker.perform_async(@id)
end
end
end
module Geo
# Schedules removal of a project's repository from this secondary node.
class ScheduleRepoDestroyService
attr_reader :id, :name, :path_with_namespace
# params: system-hook payload with string keys.
def initialize(params)
@id, @name, @path_with_namespace =
params.values_at('project_id', 'name', 'path_with_namespace')
end
# Enqueues the destroy worker; returns the Sidekiq jid (truthy).
def execute
GeoRepositoryDestroyWorker.perform_async(@id, @name, @path_with_namespace)
end
end
end
module Geo
# Schedules a repository fetch from the primary for a pushed project.
class ScheduleRepoFetchService
# params: push payload with symbol keys; the SSH clone URL comes from
# the nested :project hash.
def initialize(params)
@project_id = params[:project_id]
project_attrs = params[:project]
@remote_url = project_attrs[:git_ssh_url]
end
# Enqueues the fetch worker with project id and clone URL.
def execute
GeoRepositoryFetchWorker.perform_async(@project_id, @remote_url)
end
end
end
module Geo
# Schedules moving a project's repository to a new path on this node.
class ScheduleRepoMoveService
attr_reader :id, :name, :old_path_with_namespace, :path_with_namespace
# params: system-hook payload with string keys.
def initialize(params)
@id, @name, @old_path_with_namespace, @path_with_namespace =
params.values_at('project_id', 'name', 'old_path_with_namespace', 'path_with_namespace')
end
# Enqueues the move worker; returns the Sidekiq jid (truthy).
def execute
GeoRepositoryMoveWorker.perform_async(@id, @name, @old_path_with_namespace, @path_with_namespace)
end
end
end
module Geo
# Schedules post-push processing for a replicated push event.
class ScheduleRepoUpdateService
attr_reader :id, :clone_url, :push_data
# params: push payload with symbol keys.
# NOTE(review): 'after' is read from params[:newref], but push payloads
# elsewhere in this commit carry the new SHA under :after (see the
# GeoRepositoryUpdateWorker spec) — :newref may always be nil here;
# confirm against the producer's payload schema.
def initialize(params)
@id = params[:project_id]
@clone_url = params[:project][:git_ssh_url]
@push_data = { 'type' => params[:object_kind], 'before' => params[:before],
'after' => params[:newref], 'ref' => params[:ref] }
end
# Enqueues the update worker with id, clone URL and normalized push data.
def execute
GeoRepositoryUpdateWorker.perform_async(@id, @clone_url, @push_data)
end
end
end
require 'active_support/concern'
# Sidekiq retry backoff shared by Geo workers: linear delays for the first
# 30 attempts, geometric growth afterwards, up to 55 retries total.
module GeoDynamicBackoff
extend ActiveSupport::Concern
included do
sidekiq_options retry: 55
sidekiq_retry_in do |count|
count <= 30 ? linear_backoff_strategy(count) : geometric_backoff_strategy(count)
end
end
class_methods do
private
# Roughly `count` seconds plus 1..20 seconds of jitter.
def linear_backoff_strategy(count)
rand(1..20) + count
end
# Quartic growth with jitter, restarted from 1 after the linear phase.
def geometric_backoff_strategy(count)
# This strategy is based on the original one from sidekiq
count = count - 30 # we must start counting after 30
(count**4) + 15 + (rand(30) * (count + 1))
end
end
end
# Applies a replicated SSH-key change to this node's gitlab-shell.
class GeoKeyRefreshWorker
include Sidekiq::Worker
include ::GeoDynamicBackoff
include GeoQueue
# key_id - id of the Key record
# key    - the key material (needed for destroy, when the row is gone)
# action - 'key_create' or 'key_destroy'
def perform(key_id, key, action)
case action.to_sym
when :key_create
# ActiveRecord::RecordNotFound when not found (so job will retry)
Key.find(key_id).add_to_shell
when :key_destroy
# The model row is already removed, so rebuild a transient Key
# with the original id/key to schedule removal from the shell.
Key.new(id: key_id, key: key).remove_from_shell
else
raise "Invalid action: #{action}"
end
end
end
# Creates a project's repository on this node unless it already exists or
# the project is being imported (import creates the repository itself).
class GeoRepositoryCreateWorker
include Sidekiq::Worker
include GeoQueue
def perform(id)
project = Project.find(id)
project.ensure_storage_path_exists
return if project.repository_exists? || project.import?
project.create_repository
end
end
# Fetches a project's repository from its Geo mirror via the update service.
class GeoRepositoryFetchWorker
include Sidekiq::Worker
include ::GeoDynamicBackoff
include GeoQueue
include Gitlab::ShellAdapter
sidekiq_options queue: 'geo_repository_update'
def perform(project_id, clone_url)
service = Geo::RepositoryUpdateService.new(Project.find(project_id), clone_url, logger)
service.execute
end
end
# Moves a project's repository on disk by delegating to the move service.
class GeoRepositoryMoveWorker
include Sidekiq::Worker
include GeoQueue
def perform(id, name, old_path_with_namespace, new_path_with_namespace)
service = Geo::MoveRepositoryService.new(id, name, old_path_with_namespace, new_path_with_namespace)
service.execute
end
end
# Re-runs repository post-push hooks on the secondary for a replicated push.
class GeoRepositoryUpdateWorker
include Sidekiq::Worker
include Gitlab::ShellAdapter
include DedicatedSidekiqQueue
attr_accessor :project
# _clone_url is accepted but unused; kept so already-enqueued jobs with the
# old 3-argument signature still run.
def perform(project_id, _clone_url, push_data = nil)
@project = Project.find(project_id)
@push_data = push_data
process_hooks if push_data # we should be compatible with old unprocessed data
end
private
# NOTE(review): not called from #perform in this revision — presumably kept
# for older queued jobs; confirm before deleting.
def fetch_repository(remote_url)
@project.create_repository unless @project.repository_exists?
@project.repository.after_create if @project.empty_repo?
@project.repository.fetch_geo_mirror(remote_url)
end
# Only 'push' events trigger hook processing.
def process_hooks
if @push_data['type'] == 'push'
branch = Gitlab::Git.ref_name(@push_data['ref'])
process_push(branch)
end
end
# Fires branch lifecycle hooks and refreshes the project cache.
def process_push(branch)
@project.repository.after_push_commit(branch)
if push_remove_branch?
@project.repository.after_remove_branch
elsif push_to_new_branch?
@project.repository.after_create_branch
end
ProjectCacheWorker.perform_async(@project.id)
end
# True when the push deleted a branch ('after' is the blank SHA).
def push_remove_branch?
Gitlab::Git.branch_ref?(@push_data['ref']) && Gitlab::Git.blank_ref?(@push_data['after'])
end
# True when the push created a branch ('before' is the blank SHA).
def push_to_new_branch?
Gitlab::Git.branch_ref?(@push_data['ref']) && Gitlab::Git.blank_ref?(@push_data['before'])
end
end
......@@ -56,17 +56,7 @@ screen.
Previous implementation (GitLab <= 8.6.x) used custom code to handle
notification from **Primary** to **Secondary** by HTTP requests.
We decided to move away from custom code and integrate by using
**System Webhooks**, as they are more widely used, so any
improvements we make to this communication layer will
benefit many others as well.
There is a specific **internal** endpoint in our api code (Grape),
that receives all requests from this System Hooks:
`/api/v3/geo/receive_events`.
We switch and filter from each event by the `event_name` field.
TODO
## Readonly
......
......@@ -50,45 +50,6 @@ module API
required_attributes! [:projects]
::Geo::ScheduleWikiRepoUpdateService.new(params[:projects]).execute
end
# Receive event streams from primary and enqueue changes
#
# Example request:
#   POST /geo/receive_events
# Dispatches each System Hook event to the matching schedule service based
# on the 'event_name' field. Removed by this commit.
post 'receive_events' do
authenticate_by_gitlab_geo_token!
require_node_to_be_enabled!
check_node_restricted_project_ids!
required_attributes! %w(event_name)
case params['event_name']
when 'key_create', 'key_destroy'
required_attributes! %w(key id)
::Geo::ScheduleKeyChangeService.new(params).execute
when 'repository_update'
required_attributes! %w(event_name project_id project)
::Geo::ScheduleRepoFetchService.new(params).execute
when 'push'
required_attributes! %w(event_name project_id project)
::Geo::ScheduleRepoUpdateService.new(params).execute
when 'tag_push'
required_attributes! %w(event_name project_id project)
::Geo::ScheduleWikiRepoUpdateService.new(params).execute
when 'project_create'
required_attributes! %w(event_name project_id)
::Geo::ScheduleRepoCreateService.new(params).execute
when 'project_destroy'
required_attributes! %w(event_name project_id path_with_namespace)
::Geo::ScheduleRepoDestroyService.new(params).execute
when 'project_rename'
required_attributes! %w(event_name project_id path_with_namespace old_path_with_namespace)
::Geo::ScheduleRepoMoveService.new(params).execute
when 'project_transfer'
required_attributes! %w(event_name project_id path_with_namespace old_path_with_namespace)
::Geo::ScheduleRepoMoveService.new(params).execute
end
end
end
helpers do
......@@ -111,14 +72,6 @@ module API
# 403s unless the current Geo node is configured as a secondary.
def require_node_to_be_secondary!
forbidden! 'Geo node is not secondary node.' unless Gitlab::Geo.current_node&.secondary?
end
# 404s when the request targets a project outside this node's replicated
# namespaces. No-op when no project_id param is present.
def check_node_restricted_project_ids!
return unless params.key?(:project_id)
unless Gitlab::Geo.current_node&.projects_include?(params[:project_id].to_i)
not_found!
end
end
end
end
end
......@@ -161,13 +161,6 @@ module API
end
end
# 401s unless the X-Gitlab-Token header matches the Geo token
# (constant-time comparison via Devise.secure_compare).
def authenticate_by_gitlab_geo_token!
token = headers['X-Gitlab-Token'].try(:chomp)
unless token && Devise.secure_compare(geo_token, token)
unauthorized!
end
end
def authenticated_as_admin!
authenticate!
forbidden! unless current_user.admin?
......
......@@ -37,7 +37,7 @@ module Gitlab
end
# Geo API routes across all supported API versions.
def geo_routes
geo_routes = %w(refresh_wikis receive_events)
geo_routes = %w(refresh_wikis)
# NOTE(review): the first assignment above is dead — this is the diff's
# removed line rendered alongside its replacement; only the second applies.
API_VERSIONS.flat_map { |version| geo_routes.map { |route| "api/v#{version}/geo/#{route}" } }
end
......
......@@ -248,14 +248,6 @@ describe GeoNode, type: :model do
end
end
# Spec for the removed GeoNode#geo_events_url helper.
describe '#geo_events_url' do
let(:events_url) { "https://localhost:3000/gitlab/api/#{api_version}/geo/receive_events" }
it 'returns api url based on node uri' do
expect(new_node.geo_events_url).to eq(events_url)
end
end
describe '#geo_transfers_url' do
let(:transfers_url) { "https://localhost:3000/gitlab/api/#{api_version}/geo/transfers/lfs/1" }
......
......@@ -15,20 +15,6 @@ describe API::Geo do
allow(Gitlab::Geo).to receive(:current_node) { secondary_node }
end
# Token authentication: missing or wrong X-Gitlab-Token must yield 401.
describe 'POST /geo/receive_events authentication' do
it 'denies access if token is not present' do
post api('/geo/receive_events')
expect(response).to have_http_status(401)
end
it 'denies access if token is invalid' do
post api('/geo/receive_events'), nil, { 'X-Gitlab-Token' => 'nothing' }
expect(response).to have_http_status(401)
end
end
describe 'POST /geo/refresh_wikis disabled node' do
it 'responds with forbidden' do
secondary_node.enabled = false
......@@ -39,137 +25,6 @@ describe API::Geo do
end
end
# A disabled secondary must reject events with 403.
describe 'POST /geo/receive_events disabled node' do
it 'responds with forbidden' do
secondary_node.enabled = false
post api('/geo/receive_events'), nil, geo_token_header
expect(response).to have_http_status(403)
end
end
# Namespace-restricted nodes only accept events for replicated projects.
describe 'POST /geo/receive_events when node has namespace restrictions' do
let(:synced_group) { create(:group) }
let(:secondary_node) { create(:geo_node, namespaces: [synced_group]) }
let(:push_payload) do
{
'event_name' => 'push',
'project' => {
'git_ssh_url' => 'git@example.com:mike/diaspora.git'
}
}
end
before do
allow(Gitlab::Geo).to receive(:current_node) { secondary_node }
allow_any_instance_of(::Geo::ScheduleRepoUpdateService).to receive(:execute)
allow_any_instance_of(::Geo::ScheduleRepoFetchService).to receive(:execute)
end
it 'responds with not found for projects that do not belong to selected namespaces to replicate' do
unsynced_project = create(:project)
post api('/geo/receive_events'), push_payload.merge('project_id' => unsynced_project.id), geo_token_header
expect(response).to have_http_status(404)
end
it 'responds with success for projects that belong to selected namespaces to replicate' do
project_in_synced_group = create(:project, group: synced_group)
post api('/geo/receive_events'), push_payload.merge('project_id' => project_in_synced_group.id), geo_token_header
expect(response).to have_http_status(201)
end
end
# key_create / key_destroy events must enqueue the key change service (201).
describe 'POST /geo/receive_events key events' do
before do
allow_any_instance_of(::Geo::ScheduleKeyChangeService).to receive(:execute)
end
let(:key_create_payload) do
{
'event_name' => 'key_create',
'created_at' => '2014-08-18 18:45:16 UTC',
'updated_at' => '2012-07-21T07:38:22Z',
'username' => 'root',
'key' => 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC58FwqHUbebw2SdT7SP4FxZ0w+lAO/erhy2ylhlcW/tZ3GY3mBu9VeeiSGoGz8hCx80Zrz+aQv28xfFfKlC8XQFpCWwsnWnQqO2Lv9bS8V1fIHgMxOHIt5Vs+9CAWGCCvUOAurjsUDoE2ALIXLDMKnJxcxD13XjWdK54j6ZXDB4syLF0C2PnAQSVY9X7MfCYwtuFmhQhKaBussAXpaVMRHltie3UYSBUUuZaB3J4cg/7TxlmxcNd+ppPRIpSZAB0NI6aOnqoBCpimscO/VpQRJMVLr3XiSYeT6HBiDXWHnIVPfQc03OGcaFqOit6p8lYKMaP/iUQLm+pgpZqrXZ9vB john@localhost',
'id' => 1
}
end
let(:key_destroy_payload) do
{
'event_name' => 'key_destroy',
'created_at' => '2014-08-18 18:45:16 UTC',
'updated_at' => '2012-07-21T07:38:22Z',
'username' => 'root',
'key' => 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC58FwqHUbebw2SdT7SP4FxZ0w+lAO/erhy2ylhlcW/tZ3GY3mBu9VeeiSGoGz8hCx80Zrz+aQv28xfFfKlC8XQFpCWwsnWnQqO2Lv9bS8V1fIHgMxOHIt5Vs+9CAWGCCvUOAurjsUDoE2ALIXLDMKnJxcxD13XjWdK54j6ZXDB4syLF0C2PnAQSVY9X7MfCYwtuFmhQhKaBussAXpaVMRHltie3UYSBUUuZaB3J4cg/7TxlmxcNd+ppPRIpSZAB0NI6aOnqoBCpimscO/VpQRJMVLr3XiSYeT6HBiDXWHnIVPfQc03OGcaFqOit6p8lYKMaP/iUQLm+pgpZqrXZ9vB john@localhost',
'id' => 1
}
end
it 'enqueues on disk key creation if admin and correct params' do
post api('/geo/receive_events'), key_create_payload, geo_token_header
expect(response).to have_http_status(201)
end
it 'enqueues on disk key removal if admin and correct params' do
post api('/geo/receive_events'), key_destroy_payload, geo_token_header
expect(response).to have_http_status(201)
end
end
# push events must be accepted (201) with the schedule services stubbed.
describe 'POST /geo/receive_events push events' do
before do
allow_any_instance_of(::Geo::ScheduleRepoUpdateService).to receive(:execute)
allow_any_instance_of(::Geo::ScheduleRepoFetchService).to receive(:execute)
end
let(:push_payload) do
{
'event_name' => 'push',
'project_id' => 1,
'project' => {
'git_ssh_url' => 'git@example.com:mike/diaspora.git'
}
}
end
it 'starts refresh process if admin and correct params' do
post api('/geo/receive_events'), push_payload, geo_token_header
expect(response).to have_http_status(201)
end
end
# tag_push events must be accepted (201) and routed to the wiki updater.
describe 'POST /geo/receive_events push_tag events' do
before do
allow_any_instance_of(::Geo::ScheduleWikiRepoUpdateService).to receive(:execute)
end
let(:tag_push_payload) do
{
'event_name' => 'tag_push',
'project_id' => 1,
'project' => {
'git_ssh_url' => 'git@example.com:mike/diaspora.git'
}
}
end
it 'starts refresh process if admin and correct params' do
post api('/geo/receive_events'), tag_push_payload, geo_token_header
expect(response).to have_http_status(201)
end
end
describe 'GET /geo/transfers/attachment/1' do
let!(:secondary_node) { create(:geo_node) }
let(:note) { create(:note, :with_attachment) }
......
require 'spec_helper'
# Specs for Geo::RepositoryUpdateService: lease handling, repository
# creation/fetch, cache expiry, and error rescue paths.
describe Geo::RepositoryUpdateService do
let(:project) { create(:project) }
let(:clone_url) { project.ssh_url_to_repo }
subject { described_class.new(project, clone_url) }
describe '#execute' do
before do
allow_any_instance_of(Gitlab::Geo).to receive_messages(secondary?: true)
allow(project.repository).to receive(:fetch_geo_mirror).and_return(true)
allow(project).to receive(:repository_exists?) { false }
allow(project).to receive(:empty_repo?) { true }
allow(project.repository).to receive(:expire_all_method_caches)
allow(project.repository).to receive(:expire_branch_cache)
allow(project.repository).to receive(:expire_content_cache)
end
it 'releases the lease' do
expect(Gitlab::ExclusiveLease).to receive(:cancel).once.and_call_original
subject.execute
end
it 'creates a new repository' do
expect(project).to receive(:create_repository)
subject.execute
end
it 'executes after_create hook' do
expect(project.repository).to receive(:after_create).at_least(:once)
subject.execute
end
it 'fetches the Geo mirror' do
expect(project.repository).to receive(:fetch_geo_mirror)
subject.execute
end
it 'expires repository caches' do
expect(project.repository).to receive(:expire_all_method_caches)
expect(project.repository).to receive(:expire_branch_cache)
expect(project.repository).to receive(:expire_content_cache)
subject.execute
end
# The service logs and swallows these two error classes.
it 'rescues Gitlab::Shell::Error failures' do
expect(project.repository).to receive(:fetch_geo_mirror).and_raise(Gitlab::Shell::Error)
expect { subject.execute }.not_to raise_error
end
it 'rescues Gitlab::Git::Repository::NoRepository failures and fires after_create hook' do
expect(project.repository).to receive(:fetch_geo_mirror).and_raise(Gitlab::Git::Repository::NoRepository)
expect_any_instance_of(Repository).to receive(:after_create)
expect { subject.execute }.not_to raise_error
end
end
end
require 'spec_helper'
# Specs for Geo::ScheduleKeyChangeService.
describe Geo::ScheduleKeyChangeService do
# The service reads the string keys 'id', 'key' and 'event_name' from the
# system-hook payload (the old 'action' key was never read, leaving the
# scheduled action nil), so build the params exactly as the endpoint does.
subject(:key_create) { described_class.new('id' => 1, 'key' => key.key, 'event_name' => 'key_create') }
subject(:key_delete) { described_class.new('id' => 1, 'key' => key.key, 'event_name' => 'key_destroy') }
let(:key) { FactoryGirl.build(:key) }
before do
allow_any_instance_of(GeoKeyRefreshWorker).to receive(:perform)
end
context 'key creation' do
it 'executes action' do
expect(key_create.execute).to be_truthy
end
end
context 'key removal' do
it 'executes action' do
expect(key_delete.execute).to be_truthy
end
end
end
require 'spec_helper'
# Specs for Geo::ScheduleRepoCreateService.
describe Geo::ScheduleRepoCreateService do
let(:project) { create(:project, :repository) }
# The service reads the string key 'project_id' (system-hook payloads use
# string keys); passing the symbol key project_id: left the id nil.
subject { described_class.new('project_id' => project.id) }
describe '#execute' do
it 'schedules the repository creation' do
Sidekiq::Worker.clear_all
Sidekiq::Testing.fake! do
expect { subject.execute }.to change(GeoRepositoryCreateWorker.jobs, :size).by(1)
end
end
end
end
require 'spec_helper'
# Specs for Geo::ScheduleRepoUpdateService: payload parsing and enqueueing.
describe Geo::ScheduleRepoUpdateService do
include RepoHelpers
let(:user) { create :user }
let(:project) { create :project, :repository }
let(:blankrev) { Gitlab::Git::BLANK_SHA }
let(:oldrev) { sample_commit.parent_id }
let(:newrev) { sample_commit.id }
let(:ref) { 'refs/heads/master' }
let(:service) { execute_push_service(project, user, oldrev, newrev, ref) }
before do
project.team << [user, :master]
end
subject { described_class.new(service.push_data) }
context 'parsed push_data' do
it 'includes required params' do
expect(subject.push_data).to include('type', 'before', 'after', 'ref')
end
end
context '#execute' do
let(:push_data) { service.push_data }
# NOTE(review): 'after' expects push_data[:newref], mirroring the service's
# parsing; GitPushService payloads expose the new SHA as :after, so both
# sides may be carrying nil here — verify against the payload schema.
let(:args) do
[
project.id,
push_data[:project][:git_ssh_url],
{
'type' => push_data[:object_kind],
'before' => push_data[:before],
'after' => push_data[:newref],
'ref' => push_data[:ref]
}
]
end
it 'schedule update service' do
expect(GeoRepositoryUpdateWorker).to receive(:perform_async).with(*args)
subject.execute
end
end
# Runs GitPushService to produce a realistic push_data payload.
def execute_push_service(project, user, oldrev, newrev, ref)
service = GitPushService.new(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
service.execute
service
end
end
require 'spec_helper'
# Specs for the GeoDynamicBackoff retry strategy.
describe GeoDynamicBackoff do
class TestWorkerBackOff
include Sidekiq::Worker
include GeoDynamicBackoff
def perform(options)
false
end
end
let(:worker) do
TestWorkerBackOff
end
context 'retry strategy' do
it 'sets a custom strategy for retrying' do
expect(worker.sidekiq_retry_in_block).to be_a(Proc)
end
it 'when retry_count is in 1..30, retries with linear_backoff_strategy' do
expect(worker).to receive(:linear_backoff_strategy)
worker.sidekiq_retry_in_block.call(1)
expect(worker).to receive(:linear_backoff_strategy)
worker.sidekiq_retry_in_block.call(30)
end
it 'when retry_count is > 30, retries with geometric_backoff_strategy' do
expect(worker).to receive(:geometric_backoff_strategy)
worker.sidekiq_retry_in_block.call(31)
end
end
context '.linear_backoff_strategy' do
# rand(1..20) stubbed to 1, plus count 1 => 2.
it 'returns rand + retry_count' do
allow(worker).to receive(:rand).and_return(1)
expect(worker.sidekiq_retry_in_block.call(1)).to eq(2)
end
end
context '.geometric_backoff_strategy' do
# count restarts at (retry_count - 30): 1**4 + 15 + 1 * 2 = 18.
it 'when retry_count is 31 for a fixed rand()=1 returns 18' do
allow(worker).to receive(:rand).and_return(1)
expect(worker.sidekiq_retry_in_block.call(31)).to eq(18)
end
# 2**4 + 15 + 1 * 3 = 34; the old description wrongly said "returns 18".
it 'when retry_count is 32 for a fixed rand()=1 returns 34' do
allow(worker).to receive(:rand).and_return(1)
expect(worker.sidekiq_retry_in_block.call(32)).to eq(34)
end
end
end
require 'spec_helper'
# Specs for GeoKeyRefreshWorker: key creation and removal via gitlab-shell.
describe GeoKeyRefreshWorker do
subject(:key_create) { described_class.new.perform(key.id, key.key, 'key_create') }
subject(:key_delete) { described_class.new.perform(key.id, key.key, 'key_destroy') }
let(:key) { FactoryGirl.create(:key) }
context 'key creation' do
it 'adds key to shell' do
expect(Key).to receive(:find).with(key.id) { key }
expect(key).to receive(:add_to_shell)
expect { key_create }.not_to raise_error
end
end
context 'key removal' do
# Destroy rebuilds a transient Key from id/key, since the row is gone.
it 'removes key from the shell' do
expect(Key).to receive(:new).with(id: key.id, key: key.key) { key }
expect(key).to receive(:remove_from_shell)
expect { key_delete }.not_to raise_error
end
end
end
require 'spec_helper'
# Specs for GeoRepositoryCreateWorker: create only when missing and not importing.
describe GeoRepositoryCreateWorker do
let(:user) { create :user }
let(:project) { create :project, :repository }
let(:perform!) { subject.perform(project.id) }
before do
expect(Project).to receive(:find).at_least(:once).with(project.id) { project }
end
context 'when no repository' do
before do
expect(project).to receive(:repository_exists?) { false }
end
it 'creates the repository' do
expect(project).to receive(:create_repository)
perform!
end
it 'does not create the repository when its being imported' do
expect(project).to receive(:import?) { true }
expect(project).not_to receive(:create_repository)
perform!
end
end
context 'when repository exists' do
before do
expect(project).to receive(:repository_exists?) { true }
end
it 'does not try to create the repository again' do
expect(project).not_to receive(:create_repository)
perform!
end
end
end
require 'spec_helper'
# Specs for GeoRepositoryFetchWorker: delegates to the update service.
describe GeoRepositoryFetchWorker do
describe '#perform' do
let(:project) { create(:project) }
it 'delegates to Geo::RepositoryUpdateService' do
expect_any_instance_of(Geo::RepositoryUpdateService).to receive(:execute)
perform
end
end
def perform
subject.perform(project.id, project.ssh_url_to_repo)
end
end
require 'spec_helper'
# Specs for GeoRepositoryUpdateWorker: hook processing from push_data.
describe GeoRepositoryUpdateWorker do
include RepoHelpers
let(:user) { create :user }
let(:project) { create :project, :repository }
let(:blankrev) { Gitlab::Git::BLANK_SHA }
let(:oldrev) { sample_commit.parent_id }
let(:newrev) { sample_commit.id }
let(:ref) { 'refs/heads/master' }
let(:service) { execute_push_service(project, user, oldrev, newrev, ref) }
let(:push_data) { service.push_data }
# Normalized payload mirroring what the schedule service enqueues (note
# this spec reads the new SHA from push_data[:after]).
let(:parsed_push_data) do
{
'type' => push_data[:object_kind],
'before' => push_data[:before],
'after' => push_data[:after],
'ref' => push_data[:ref]
}
end
let(:clone_url) { push_data[:project][:git_ssh_url] }
let(:performed) { subject.perform(project.id, clone_url, parsed_push_data) }
before do
project.team << [user, :master]
expect(Project).to receive(:find).at_least(:once).with(project.id) { project }
end
context 'when empty repository' do
before do
allow(project.repository).to receive(:fetch_geo_mirror)
allow(project).to receive(:empty_repo?) { true }
end
it 'executes after_create hook' do
expect(project.repository).to receive(:after_create).at_least(:once)
performed
end
end
context '#process_hooks' do
before do
allow(subject).to receive(:fetch_repository)
end
it 'calls if push_data is present' do
expect(subject).to receive(:process_hooks)
performed
end
context 'when no push_data is present' do
let(:parsed_push_data) { nil }
it 'skips process_hooks' do
expect(subject).not_to receive(:process_hooks)
performed
end
end
end
context '#process_push' do
before do
allow(subject).to receive(:fetch_repository)
end
it 'executes after_push_commit' do
expect(project.repository).to receive(:after_push_commit).at_least(:once).with('master')
performed
end
context 'when removing branch' do
it 'executes after_remove_branch' do
allow(subject).to receive(:push_remove_branch?) { true }
expect(project.repository).to receive(:after_remove_branch)
performed
end
end
context 'when updating a new branch' do
it 'executes after_create_branch' do
allow(subject).to receive(:push_to_new_branch?) { true }
expect(project.repository).to receive(:after_create_branch)
performed
end
end
end
# Runs GitPushService to produce a realistic push_data payload.
def execute_push_service(project, user, oldrev, newrev, ref)
service = GitPushService.new(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
service.execute
service
end
end
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment