Commit a1eb44bc authored by Ash McKenzie

Merge branch 'graphql-resolvers-complexity-ee' into 'master'

Compute resolver complexity based on items

Closes gitlab-ce#58408

See merge request gitlab-org/gitlab-ee!11575
parents 3708bb50 b8bb2b19
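
A resolver can now declare two class methods, resolver_complexity(args) and complexity_multiplier(args), which Types::BaseField combines with the child complexity and the page size whenever a resolver class is attached to a field. A minimal standalone sketch of that arithmetic, assuming the logic added to Types::BaseField further down (the method name estimated_complexity is illustrative only and not part of this merge request):

def estimated_complexity(child_complexity, resolver_complexity, limit_value, multiplier)
  complexity = child_complexity + resolver_complexity
  complexity += complexity * limit_value * multiplier
  complexity.to_i
end

# For example, an issues field with no extra arguments and the default page size of 100:
estimated_complexity(1, 1, 100, 0.01) # => 4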
......@@ -9,5 +9,24 @@ module Resolvers
end
end
end
def self.resolver_complexity(args)
complexity = 1
complexity += 1 if args[:sort]
complexity += 5 if args[:search]
complexity
end
def self.complexity_multiplier(args)
# When fetching many items, additional complexity is added to the field
# depending on how many items are fetched. For each item we add 1% of the
# original complexity - this means that loading 100 items (our default
# max_page_size limit) doubles the original complexity.
#
# Complexity is not increased when searching by specific ID(s), because
# complexity difference is minimal in this case.
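# For example, a field with a base complexity of 3 ends up at
# 3 + 3 * 100 * 0.01 = 6 when 100 items are fetched.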
[args[:iid], args[:iids]].any? ? 0 : 0.01
end
end
end
......@@ -19,6 +19,16 @@ module ResolvesPipelines
description: "Filter pipelines by the sha of the commit they are run for"
end
class_methods do
def resolver_complexity(args)
complexity = super
complexity += 2 if args[:sha]
complexity += 2 if args[:ref]
complexity
end
end
def resolve_pipelines(project, params = {})
PipelinesFinder.new(project, context[:current_user], params).execute
end
......
......@@ -57,5 +57,12 @@ module Resolvers
IssuesFinder.new(context[:current_user], args).execute
end
def self.resolver_complexity(args)
complexity = super
complexity += 2 if args[:labelName]
complexity
end
end
end
......@@ -9,5 +9,9 @@ module Resolvers
def resolve(full_path:)
model_by_full_path(Project, full_path)
end
def self.complexity_multiplier(args)
0
end
end
end
......@@ -7,10 +7,40 @@ module Types
DEFAULT_COMPLEXITY = 1
def initialize(*args, **kwargs, &block)
# complexity is already defaulted to 1, but let's make it explicit
kwargs[:complexity] ||= DEFAULT_COMPLEXITY
kwargs[:complexity] ||= field_complexity(kwargs[:resolver_class])
super(*args, **kwargs, &block)
end
private
def field_complexity(resolver_class)
if resolver_class
field_resolver_complexity
else
DEFAULT_COMPLEXITY
end
end
def field_resolver_complexity
# Complexity can be either an integer or a proc. If a proc is used, it is
# called when computing the query complexity, with the context and the
# query arguments available. For resolvers we use a proc because the
# complexity depends on the arguments and on the number of items that
# can be loaded.
proc do |ctx, args, child_complexity|
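# Estimate how many items can be loaded: the smallest of the :first / :last
# arguments and the field's (or the schema's) max page size.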
page_size = @max_page_size || ctx.schema.default_max_page_size
limit_value = [args[:first], args[:last], page_size].compact.min
# Resolvers may add extra complexity depending on used arguments
complexity = child_complexity + self.resolver&.try(:resolver_complexity, args).to_i
# Resolvers may add extra complexity depending on number of items being loaded.
multiplier = self.resolver&.try(:complexity_multiplier, args).to_f
complexity += complexity * limit_value * multiplier
complexity.to_i
end
end
end
end
......@@ -19,9 +19,11 @@ module Types
null: false,
resolve: -> (obj, _args, _ctx) { Gitlab::Graphql::Loaders::BatchModelLoader.new(User, obj.author_id).find }
field :assignees, Types::UserType.connection_type, null: true
# Remove complexity when BatchLoader is used
field :assignees, Types::UserType.connection_type, null: true, complexity: 5
field :labels, Types::LabelType.connection_type, null: true
# Remove complexity when BatchLoader is used
field :labels, Types::LabelType.connection_type, null: true, complexity: 5
field :milestone, Types::MilestoneType,
null: true,
resolve: -> (obj, _args, _ctx) { Gitlab::Graphql::Loaders::BatchModelLoader.new(Milestone, obj.milestone_id).find }
......
---
title: 'GraphQL: improve evaluation of query complexity based on arguments and query limits.'
merge_request: 28017
author:
type: added
......@@ -28,4 +28,19 @@ describe Resolvers::BaseResolver do
expect(result).to eq(test: 1)
end
end
it 'increases complexity based on arguments' do
field = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: described_class, null: false, max_page_size: 1)
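# 1 (child) + 1 (base) + 1 for :sort => 3, and 1 + 1 + 5 for :search => 7;
# at max_page_size: 1 the 1% per-item multiplier is truncated away.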
expect(field.to_graphql.complexity.call({}, { sort: 'foo' }, 1)).to eq 3
expect(field.to_graphql.complexity.call({}, { search: 'foo' }, 1)).to eq 7
end
it 'does not increase complexity when filtering by iids' do
field = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: described_class, null: false, max_page_size: 100)
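# At max_page_size: 100 the 1% per-item multiplier doubles the base of 3 to 6;
# filtering by iid or iids sets the multiplier to 0, leaving 3.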
expect(field.to_graphql.complexity.call({}, { sort: 'foo' }, 1)).to eq 6
expect(field.to_graphql.complexity.call({}, { sort: 'foo', iid: 1 }, 1)).to eq 3
expect(field.to_graphql.complexity.call({}, { sort: 'foo', iids: [1, 2, 3] }, 1)).to eq 3
end
end
......@@ -46,6 +46,14 @@ describe ResolvesPipelines do
expect(resolve_pipelines({}, {})).to be_empty
end
it 'increases field complexity based on arguments' do
field = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: resolver, null: false, max_page_size: 1)
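# 1 (child) + 1 (from super) => 2; :sha or :ref each add 2 => 4.
# At max_page_size: 1 the per-item multiplier is truncated away.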
expect(field.to_graphql.complexity.call({}, {}, 1)).to eq 2
expect(field.to_graphql.complexity.call({}, { sha: 'foo' }, 1)).to eq 4
expect(field.to_graphql.complexity.call({}, { ref: 'foo' }, 1)).to eq 4
end
def resolve_pipelines(args = {}, context = { current_user: current_user })
resolve(resolver, obj: project, args: args, ctx: context)
end
......
......@@ -120,6 +120,13 @@ describe Resolvers::IssuesResolver do
end
end
it 'increases field complexity based on arguments' do
field = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: described_class, null: false, max_page_size: 100)
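# 1 (child) + 1 (base) = 2, doubled by the 1% multiplier at 100 items => 4;
# :labelName adds 2 before the multiplier, giving 8.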
expect(field.to_graphql.complexity.call({}, {}, 1)).to eq 4
expect(field.to_graphql.complexity.call({}, { labelName: 'foo' }, 1)).to eq 8
end
def resolve_issues(args = {}, context = { current_user: current_user })
resolve(described_class, obj: project, args: args, ctx: context)
end
......
......@@ -26,6 +26,14 @@ describe Resolvers::ProjectResolver do
end
end
it 'does not increase complexity based on the number of items that can be loaded' do
field1 = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: described_class, null: false, max_page_size: 100)
field2 = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: described_class, null: false, max_page_size: 1)
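# ProjectResolver's complexity_multiplier is 0, so the page size does not affect
# the result: 1 (child) + 1 (base) => 2 for both fields.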
expect(field1.to_graphql.complexity.call({}, {}, 1)).to eq 2
expect(field2.to_graphql.complexity.call({}, {}, 1)).to eq 2
end
def resolve_project(full_path)
resolve(described_class, args: { full_path: full_path })
end
......
......@@ -4,6 +4,18 @@ require 'spec_helper'
describe Types::BaseField do
context 'when considering complexity' do
let(:resolver) do
Class.new(described_class) do
def self.resolver_complexity(args)
2 if args[:foo]
end
def self.complexity_multiplier(args)
0.01
end
end
end
it 'defaults to 1' do
field = described_class.new(name: 'test', type: GraphQL::STRING_TYPE, null: true)
......@@ -15,5 +27,19 @@ describe Types::BaseField do
expect(field.to_graphql.complexity).to eq 12
end
it 'sets complexity depending on arguments for resolvers' do
field = described_class.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: resolver, max_page_size: 100, null: true)
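# 2 (child) doubled by the 1% multiplier at the default limit of 100 => 4;
# first: 50 halves the multiplier's contribution => 3.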
expect(field.to_graphql.complexity.call({}, {}, 2)).to eq 4
expect(field.to_graphql.complexity.call({}, { first: 50 }, 2)).to eq 3
end
it 'sets complexity depending on the number of items that can be loaded' do
field = described_class.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: resolver, max_page_size: 100, null: true)
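# With first: 1 the multiplier's contribution is truncated away => 2;
# foo: true adds 2 via resolver_complexity => 4.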
expect(field.to_graphql.complexity.call({}, { first: 1 }, 2)).to eq 2
expect(field.to_graphql.complexity.call({}, { first: 1, foo: true }, 2)).to eq 4
end
end
end