Commit e1aa7f50 authored by Ash McKenzie

Merge branch 'graphql-resolvers-complexity' into 'master'

[CE] Compute resolver complexity based on items

See merge request gitlab-org/gitlab-ce!28017
parents 863f2bcf 5ee7884d
...@@ -9,5 +9,24 @@ module Resolvers
end
end
end
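# Base cost of a resolver-backed field: sorting and (especially) searching
# make the underlying query more expensive, so those arguments add to it.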
def self.resolver_complexity(args)
complexity = 1
complexity += 1 if args[:sort]
complexity += 5 if args[:search]
complexity
end
def self.complexity_multiplier(args)
# When fetching many items, additional complexity is added to the field
# depending on how many items are fetched. For each item we add 1% of the
# original complexity - this means that loading 100 items (our default
# max_page_size limit) doubles the original complexity.
#
# Complexity is not increased when searching by specific ID(s), because
# complexity difference is minimal in this case.
[args[:iid], args[:iids]].any? ? 0 : 0.01
end
end
end
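The two class methods above are combined by Types::BaseField (see the field hunk further down): resolver_complexity sets the per-call base cost from the arguments, while complexity_multiplier scales that cost by the number of items requested. A minimal sketch of the arithmetic, assuming both hooks behave as defined above; the helper name and values are illustrative only:

# Illustrative helper, not part of the MR: mirrors the formula used in Types::BaseField.
def estimated_complexity(child_complexity:, args:, limit:)
  base = child_complexity + Resolvers::BaseResolver.resolver_complexity(args)
  multiplier = Resolvers::BaseResolver.complexity_multiplier(args)
  (base + base * limit * multiplier).to_i
end

estimated_complexity(child_complexity: 1, args: {}, limit: 100)         # => 4 (base of 2, doubled by 100 items at 1% each)
estimated_complexity(child_complexity: 1, args: { iid: 1 }, limit: 100) # => 2 (ID lookups skip the multiplier)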
...@@ -19,6 +19,16 @@ module ResolvesPipelines
description: "Filter pipelines by the sha of the commit they are run for" description: "Filter pipelines by the sha of the commit they are run for"
end
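# Filtering pipelines by sha or ref requires extra lookups on top of the
# base query, so each of those arguments adds 2 to the resolver complexity.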
class_methods do
def resolver_complexity(args)
complexity = super
complexity += 2 if args[:sha]
complexity += 2 if args[:ref]
complexity
end
end
def resolve_pipelines(project, params = {})
PipelinesFinder.new(project, context[:current_user], params).execute
end
...
...@@ -57,5 +57,12 @@ module Resolvers
IssuesFinder.new(context[:current_user], args).execute
end
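# Filtering issues by label name is noticeably more expensive than the base
# issue query, so it adds to the complexity computed by BaseResolver.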
def self.resolver_complexity(args)
complexity = super
complexity += 2 if args[:labelName]
complexity
end
end
end
...@@ -9,5 +9,9 @@ module Resolvers
def resolve(full_path:)
model_by_full_path(Project, full_path)
end
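# A project is looked up by a single full path, so the result set never grows
# with the page size; a zero multiplier keeps the field complexity flat.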
def self.complexity_multiplier(args)
0
end
end
end
...@@ -7,10 +7,40 @@ module Types
DEFAULT_COMPLEXITY = 1
def initialize(*args, **kwargs, &block)
kwargs[:complexity] ||= field_complexity(kwargs[:resolver_class])
super(*args, **kwargs, &block)
end
private
def field_complexity(resolver_class)
if resolver_class
field_resolver_complexity
else
DEFAULT_COMPLEXITY
end
end
def field_resolver_complexity
# Complexity can be either an integer or a proc. If a proc is used, it is
# called when computing the query complexity, with the context and query
# arguments available to it. For resolvers we use a proc because the
# complexity depends on the arguments and on the number of items that can
# be loaded.
proc do |ctx, args, child_complexity|
page_size = @max_page_size || ctx.schema.default_max_page_size
limit_value = [args[:first], args[:last], page_size].compact.min
# Resolvers may add extra complexity depending on the arguments used
complexity = child_complexity + self.resolver&.try(:resolver_complexity, args).to_i
# Resolvers may add extra complexity depending on the number of items being loaded.
multiplier = self.resolver&.try(:complexity_multiplier, args).to_f
complexity += complexity * limit_value * multiplier
complexity.to_i
end
end
end
end
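Because the proc receives the context, the field arguments, and the accumulated child complexity, the final cost of a resolver-backed field reflects both its argument costs and the effective page size (the smallest of first, last and max_page_size). A hedged usage sketch; the field name, page size and argument values are arbitrary:

# Illustrative only: evaluates the complexity proc for a field backed by BaseResolver.
field = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE,
                             resolver_class: Resolvers::BaseResolver, null: false, max_page_size: 100)

# limit_value is min(first, last, max_page_size) = 10 here;
# (1 + 6) + (1 + 6) * 10 * 0.01 = 7.7, truncated to 7.
field.to_graphql.complexity.call({}, { first: 10, search: 'foo' }, 1) # => 7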
...@@ -19,9 +19,11 @@ module Types
null: false,
resolve: -> (obj, _args, _ctx) { Gitlab::Graphql::Loaders::BatchModelLoader.new(User, obj.author_id).find }
# Remove complexity when BatchLoader is used
field :assignees, Types::UserType.connection_type, null: true, complexity: 5
# Remove complexity when BatchLoader is used
field :labels, Types::LabelType.connection_type, null: true, complexity: 5
field :milestone, Types::MilestoneType,
null: true,
resolve: -> (obj, _args, _ctx) { Gitlab::Graphql::Loaders::BatchModelLoader.new(Milestone, obj.milestone_id).find }
...
---
title: 'GraphQL: improve evaluation of query complexity based on arguments and query
limits.'
merge_request: 28017
author:
type: added
...@@ -28,4 +28,19 @@ describe Resolvers::BaseResolver do
expect(result).to eq(test: 1)
end
end
it 'increases complexity based on arguments' do
field = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: described_class, null: false, max_page_size: 1)
expect(field.to_graphql.complexity.call({}, { sort: 'foo' }, 1)).to eq 3
expect(field.to_graphql.complexity.call({}, { search: 'foo' }, 1)).to eq 7
end
it 'does not increase complexity when filtering by iids' do
field = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: described_class, null: false, max_page_size: 100)
expect(field.to_graphql.complexity.call({}, { sort: 'foo' }, 1)).to eq 6
expect(field.to_graphql.complexity.call({}, { sort: 'foo', iid: 1 }, 1)).to eq 3
expect(field.to_graphql.complexity.call({}, { sort: 'foo', iids: [1, 2, 3] }, 1)).to eq 3
end
end
...@@ -46,6 +46,14 @@ describe ResolvesPipelines do
expect(resolve_pipelines({}, {})).to be_empty
end
it 'increases field complexity based on arguments' do
field = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: resolver, null: false, max_page_size: 1)
expect(field.to_graphql.complexity.call({}, {}, 1)).to eq 2
expect(field.to_graphql.complexity.call({}, { sha: 'foo' }, 1)).to eq 4
expect(field.to_graphql.complexity.call({}, { ref: 'bar' }, 1)).to eq 4
end
def resolve_pipelines(args = {}, context = { current_user: current_user })
resolve(resolver, obj: project, args: args, ctx: context)
end
...
...@@ -120,6 +120,13 @@ describe Resolvers::IssuesResolver do
end
end
it 'increases field complexity based on arguments' do
field = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: described_class, null: false, max_page_size: 100)
expect(field.to_graphql.complexity.call({}, {}, 1)).to eq 4
expect(field.to_graphql.complexity.call({}, { labelName: 'foo' }, 1)).to eq 8
end
def resolve_issues(args = {}, context = { current_user: current_user })
resolve(described_class, obj: project, args: args, ctx: context)
end
...
...@@ -26,6 +26,14 @@ describe Resolvers::ProjectResolver do
end
end
it 'does not increase complexity depending on the number of items that can be loaded' do
field1 = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: described_class, null: false, max_page_size: 100)
field2 = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: described_class, null: false, max_page_size: 1)
expect(field1.to_graphql.complexity.call({}, {}, 1)).to eq 2
expect(field2.to_graphql.complexity.call({}, {}, 1)).to eq 2
end
def resolve_project(full_path)
resolve(described_class, args: { full_path: full_path })
end
...
...@@ -4,6 +4,18 @@ require 'spec_helper'
describe Types::BaseField do
context 'when considering complexity' do
let(:resolver) do
Class.new(described_class) do
def self.resolver_complexity(args)
2 if args[:foo]
end
def self.complexity_multiplier(args)
0.01
end
end
end
it 'defaults to 1' do
field = described_class.new(name: 'test', type: GraphQL::STRING_TYPE, null: true)
...@@ -15,5 +27,19 @@ describe Types::BaseField do
expect(field.to_graphql.complexity).to eq 12
end
it 'sets complexity depending on arguments for resolvers' do
field = described_class.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: resolver, max_page_size: 100, null: true)
expect(field.to_graphql.complexity.call({}, {}, 2)).to eq 4
expect(field.to_graphql.complexity.call({}, { first: 50 }, 2)).to eq 3
end
it 'sets complexity depending on number load limits for resolvers' do
field = described_class.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: resolver, max_page_size: 100, null: true)
expect(field.to_graphql.complexity.call({}, { first: 1 }, 2)).to eq 2
expect(field.to_graphql.complexity.call({}, { first: 1, foo: true }, 2)).to eq 4
end
end
end