Commit e8d68b12 authored by Ash McKenzie

New RateLimitedService module

This module is responsible for rate limit
checking and throttling for service classes
such as Issues::CreateService.
parent 4c6d90ad
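
In practice, a service class opts in by prepending the concern and declaring its rate limit, as the Issues::CreateService hunk further down does. Below is a minimal illustrative sketch; WidgetsCreateService and the :widgets_create key are hypothetical and not part of this commit:

# Hypothetical service for illustration only; the real adoption in this commit
# is Issues::CreateService.
class WidgetsCreateService
  prepend RateLimitedService

  # Throttle per (project, current_user) pair; allowlisted usernames bypass the limit.
  rate_limit key: :widgets_create,
             opts: { scope: [:project, :current_user], users_allowlist: -> { [User.support_bot.username] } }

  attr_reader :project, :current_user

  def initialize(project:, current_user:)
    @project = project
    @current_user = current_user
  end

  def execute
    # Only reached when the prepended RateLimitedService#execute did not raise
    # RateLimitedError for this (project, current_user) scope.
    :created
  end
end

# Trusted callers can skip throttling explicitly:
#   WidgetsCreateService.new(project: project, current_user: user).execute_without_rate_limiting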
# frozen_string_literal: true

module RateLimitedService
  extend ActiveSupport::Concern

  RateLimitedNotSetupError = Class.new(StandardError)

  class RateLimitedError < StandardError
    def initialize(key:, rate_limiter:)
      @key = key
      @rate_limiter = rate_limiter
    end

    def headers
      # TODO: This will be fleshed out in https://gitlab.com/gitlab-org/gitlab/-/issues/342370
      {}
    end

    def log_request(request, current_user)
      rate_limiter.class.log_request(request, "#{key}_request_limit".to_sym, current_user)
    end

    private

    attr_reader :key, :rate_limiter
  end

  class RateLimiterScopedAndKeyed
    attr_reader :key, :opts, :rate_limiter_klass

    def initialize(key:, opts:, rate_limiter_klass:)
      @key = key
      @opts = opts
      @rate_limiter_klass = rate_limiter_klass
    end

    def rate_limit!(service)
      evaluated_scope = evaluated_scope_for(service)
      return if feature_flag_disabled?(evaluated_scope[:project])

      rate_limiter = new_rate_limiter(evaluated_scope)
      if rate_limiter.throttled?
        raise RateLimitedError.new(key: key, rate_limiter: rate_limiter), _('This endpoint has been requested too many times. Try again later.')
      end
    end

    private

    def users_allowlist
      @users_allowlist ||= opts[:users_allowlist] ? opts[:users_allowlist].call : []
    end

    def evaluated_scope_for(service)
      opts[:scope].each_with_object({}) do |var, all|
        all[var] = service.public_send(var) # rubocop: disable GitlabSecurity/PublicSend
      end
    end

    def feature_flag_disabled?(project)
      Feature.disabled?("rate_limited_service_#{key}", project, default_enabled: :yaml)
    end

    def new_rate_limiter(evaluated_scope)
      rate_limiter_klass.new(key, **opts.merge(scope: evaluated_scope.values, users_allowlist: users_allowlist))
    end
  end

  prepended do
    attr_accessor :rate_limiter_bypassed
    cattr_accessor :rate_limiter_scoped_and_keyed

    def self.rate_limit(key:, opts:, rate_limiter_klass: ::Gitlab::ApplicationRateLimiter)
      self.rate_limiter_scoped_and_keyed = RateLimiterScopedAndKeyed.new(key: key,
                                                                         opts: opts,
                                                                         rate_limiter_klass: rate_limiter_klass)
    end
  end

  def execute_without_rate_limiting(*args, **kwargs)
    self.rate_limiter_bypassed = true
    execute(*args, **kwargs)
  ensure
    self.rate_limiter_bypassed = false
  end

  def execute(*args, **kwargs)
    raise RateLimitedNotSetupError if rate_limiter_scoped_and_keyed.nil?

    rate_limiter_scoped_and_keyed.rate_limit!(self) unless rate_limiter_bypassed

    super
  end
end
@@ -3,6 +3,10 @@
module Issues
  class CreateService < Issues::BaseService
    include ResolveDiscussions
    prepend RateLimitedService

    rate_limit key: :issues_create,
               opts: { scope: [:project, :current_user], users_allowlist: -> { [User.support_bot.username] } }

    # NOTE: For Issues::CreateService, we require the spam_params and do not default it to nil, because
    # spam_checking is likely to be necessary. However, if there is not a request available in scope
......
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe RateLimitedService do
  let(:key) { :issues_create }
  let(:scope) { [:project, :current_user] }
  let(:opts) { { scope: scope, users_allowlist: -> { [User.support_bot.username] } } }
  let(:rate_limiter_klass) { ::Gitlab::ApplicationRateLimiter }
  let(:rate_limiter_instance) { rate_limiter_klass.new(key, **opts) }

  describe 'RateLimitedError' do
    subject { described_class::RateLimitedError.new(key: key, rate_limiter: rate_limiter_instance) }

    describe '#headers' do
      it 'returns a Hash of HTTP headers' do
        # TODO: This will be fleshed out in https://gitlab.com/gitlab-org/gitlab/-/issues/342370
        expected_headers = {}

        expect(subject.headers).to eq(expected_headers)
      end
    end

    describe '#log_request' do
      it 'logs the request' do
        request = instance_double(Grape::Request)
        user = instance_double(User)

        expect(rate_limiter_klass).to receive(:log_request).with(request, "#{key}_request_limit".to_sym, user)

        subject.log_request(request, user)
      end
    end
  end

  describe 'RateLimiterScopedAndKeyed' do
    subject { described_class::RateLimiterScopedAndKeyed.new(key: key, opts: opts, rate_limiter_klass: rate_limiter_klass) }

    describe '#rate_limit!' do
      let(:project_with_feature_enabled) { create(:project) }
      let(:project_without_feature_enabled) { create(:project) }
      let(:project) { nil }
      let(:current_user) { create(:user) }
      let(:service) { instance_double(Issues::CreateService, project: project, current_user: current_user) }
      let(:evaluated_scope) { [project, current_user] }
      let(:evaluated_opts) { { scope: evaluated_scope, users_allowlist: %w[support-bot] } }
      let(:rate_limited_service_issues_create_feature_enabled) { nil }

      before do
        allow(rate_limiter_klass).to receive(:new).with(key, **evaluated_opts).and_return(rate_limiter_instance)
        stub_feature_flags(rate_limited_service_issues_create: rate_limited_service_issues_create_feature_enabled)
      end

      shared_examples 'a service that does not attempt to throttle' do
        it 'does not attempt to throttle' do
          expect(rate_limiter_instance).not_to receive(:throttled?)

          expect(subject.rate_limit!(service)).to be_nil
        end
      end

      shared_examples 'a service that does attempt to throttle' do
        before do
          allow(rate_limiter_instance).to receive(:throttled?).and_return(throttled)
        end

        context 'when rate limiting is not in effect' do
          let(:throttled) { false }

          it 'does not raise an exception' do
            expect(subject.rate_limit!(service)).to be_nil
          end
        end

        context 'when rate limiting is in effect' do
          let(:throttled) { true }

          it 'raises a RateLimitedError exception' do
            expect { subject.rate_limit!(service) }.to raise_error(described_class::RateLimitedError, 'This endpoint has been requested too many times. Try again later.')
          end
        end
      end

      context 'when :rate_limited_service_issues_create feature is globally disabled' do
        let(:rate_limited_service_issues_create_feature_enabled) { false }

        it_behaves_like 'a service that does not attempt to throttle'
      end

      context 'when :rate_limited_service_issues_create feature is globally enabled' do
        let(:throttled) { nil }
        let(:rate_limited_service_issues_create_feature_enabled) { true }
        let(:project) { project_without_feature_enabled }

        it_behaves_like 'a service that does attempt to throttle'
      end

      context 'when :rate_limited_service_issues_create feature is enabled for project_with_feature_enabled' do
        let(:throttled) { nil }
        let(:rate_limited_service_issues_create_feature_enabled) { project_with_feature_enabled }

        context 'for project_without_feature_enabled' do
          let(:project) { project_without_feature_enabled }

          it_behaves_like 'a service that does not attempt to throttle'
        end

        context 'for project_with_feature_enabled' do
          let(:project) { project_with_feature_enabled }

          it_behaves_like 'a service that does attempt to throttle'
        end
      end
    end
  end

  describe '#execute_without_rate_limiting' do
    let(:rate_limiter_scoped_and_keyed) { instance_double(RateLimitedService::RateLimiterScopedAndKeyed) }
    let(:subject) do
      local_key = key
      local_opts = opts

      Class.new do
        prepend RateLimitedService

        rate_limit key: local_key, opts: local_opts

        def execute(*args, **kwargs)
          'main logic here'
        end
      end.new
    end

    before do
      allow(RateLimitedService::RateLimiterScopedAndKeyed).to receive(:new).with(key: key, opts: opts, rate_limiter_klass: rate_limiter_klass).and_return(rate_limiter_scoped_and_keyed)
    end

    context 'bypasses rate limiting' do
      it 'calls super' do
        expect(rate_limiter_scoped_and_keyed).not_to receive(:rate_limit!).with(subject)

        expect(subject.execute_without_rate_limiting).to eq('main logic here')
      end
    end
  end

  describe '#execute' do
    context 'when rate_limit has not been called' do
      let(:subject) { Class.new { prepend RateLimitedService }.new }

      it 'raises a RateLimitedNotSetupError exception' do
        expect { subject.execute }.to raise_error(described_class::RateLimitedNotSetupError)
      end
    end

    context 'when rate_limit has been called' do
      let(:rate_limiter_scoped_and_keyed) { instance_double(RateLimitedService::RateLimiterScopedAndKeyed) }
      let(:subject) do
        local_key = key
        local_opts = opts

        Class.new do
          prepend RateLimitedService

          rate_limit key: local_key, opts: local_opts

          def execute(*args, **kwargs)
            'main logic here'
          end
        end.new
      end

      before do
        allow(RateLimitedService::RateLimiterScopedAndKeyed).to receive(:new).with(key: key, opts: opts, rate_limiter_klass: rate_limiter_klass).and_return(rate_limiter_scoped_and_keyed)
      end

      context 'and applies rate limiting' do
        it 'raises a RateLimitedService::RateLimitedError exception' do
          expect(rate_limiter_scoped_and_keyed).to receive(:rate_limit!).with(subject).and_raise(RateLimitedService::RateLimitedError.new(key: key, rate_limiter: rate_limiter_instance))

          expect { subject.execute }.to raise_error(RateLimitedService::RateLimitedError)
        end
      end

      context 'but does not apply rate limiting' do
        it 'calls super' do
          expect(rate_limiter_scoped_and_keyed).to receive(:rate_limit!).with(subject).and_return(nil)

          expect(subject.execute).to eq('main logic here')
        end
      end
    end
  end
end
@@ -10,6 +10,25 @@ RSpec.describe Issues::CreateService do
  let(:spam_params) { double }

  describe '.rate_limiter_scoped_and_keyed' do
    it 'is set via the rate_limit call' do
      expect(described_class.rate_limiter_scoped_and_keyed).to be_a(RateLimitedService::RateLimiterScopedAndKeyed)

      expect(described_class.rate_limiter_scoped_and_keyed.key).to eq(:issues_create)
      expect(described_class.rate_limiter_scoped_and_keyed.opts[:scope]).to eq(%i[project current_user])
      expect(described_class.rate_limiter_scoped_and_keyed.opts[:users_allowlist].call).to eq(%w[support-bot])
      expect(described_class.rate_limiter_scoped_and_keyed.rate_limiter_klass).to eq(Gitlab::ApplicationRateLimiter)
    end
  end

  describe '#rate_limiter_bypassed' do
    let(:subject) { described_class.new(project: project, spam_params: {}) }

    it 'is nil by default' do
      expect(subject.rate_limiter_bypassed).to be_nil
    end
  end

  describe '#execute' do
    let_it_be(:assignee) { create(:user) }
    let_it_be(:milestone) { create(:milestone, project: project) }
......