Merge pull request #4183 from rmosolgo/simplify-runtime
Simplify some runtime code
rmosolgo authored Sep 7, 2022
2 parents ff48425 + d9c647d commit 9cc79a9
Showing 7 changed files with 190 additions and 272 deletions.
1 change: 0 additions & 1 deletion lib/graphql/execution.rb
@@ -1,6 +1,5 @@
 # frozen_string_literal: true
 require "graphql/execution/directive_checks"
-require "graphql/execution/instrumentation"
 require "graphql/execution/interpreter"
 require "graphql/execution/lazy"
 require "graphql/execution/lookahead"
92 changes: 0 additions & 92 deletions lib/graphql/execution/instrumentation.rb

This file was deleted.

244 changes: 185 additions & 59 deletions lib/graphql/execution/interpreter.rb
@@ -11,76 +11,202 @@
 module GraphQL
   module Execution
     class Interpreter
-      def self.begin_multiplex(multiplex)
-        # Since this is basically the batching context,
-        # share it for a whole multiplex
-        multiplex.context[:interpreter_instance] ||= self.new
-      end
-
-      def self.begin_query(query, multiplex)
-        # The batching context is shared by the multiplex,
-        # so fetch it out and use that instance.
-        interpreter =
-          query.context.namespace(:interpreter)[:interpreter_instance] =
-          multiplex.context[:interpreter_instance]
-        interpreter.evaluate(query)
-        query
-      end
-
-      def self.finish_multiplex(_results, multiplex)
-        interpreter = multiplex.context[:interpreter_instance]
-        interpreter.sync_lazies(multiplex: multiplex)
-      end
-
-      def self.finish_query(query, _multiplex)
-        {
-          "data" => query.context.namespace(:interpreter)[:runtime].final_result
-        }
-      end
-
-      # Run the eager part of `query`
-      # @return {Interpreter::Runtime}
-      def evaluate(query)
-        # Although queries in a multiplex _share_ an Interpreter instance,
-        # they also have another item of state, which is private to that query
-        # in particular, assign it here:
-        runtime = Runtime.new(query: query)
-        query.context.namespace(:interpreter)[:runtime] = runtime
-
-        query.trace("execute_query", {query: query}) do
-          runtime.run_eager
-        end
-
-        runtime
-      end
-
-      # Run the lazy part of `query` or `multiplex`.
-      # @return [void]
-      def sync_lazies(query: nil, multiplex: nil)
-        tracer = query || multiplex
-        if query.nil? && multiplex.queries.length == 1
-          query = multiplex.queries[0]
-        end
-        queries = multiplex ? multiplex.queries : [query]
-        final_values = queries.map do |query|
-          runtime = query.context.namespace(:interpreter)[:runtime]
-          # it might not be present if the query has an error
-          runtime ? runtime.final_result : nil
-        end
-        final_values.compact!
-        tracer.trace("execute_query_lazy", {multiplex: multiplex, query: query}) do
-          Interpreter::Resolve.resolve_all(final_values, multiplex.dataloader)
-        end
-        queries.each do |query|
-          runtime = query.context.namespace(:interpreter)[:runtime]
-          if runtime
-            runtime.delete_interpreter_context(:current_path)
-            runtime.delete_interpreter_context(:current_field)
-            runtime.delete_interpreter_context(:current_object)
-            runtime.delete_interpreter_context(:current_arguments)
-          end
-        end
-        nil
-      end
+      class << self
+        # Used internally to signal that the query shouldn't be executed
+        # @api private
+        NO_OPERATION = {}.freeze
+
+        # @param schema [GraphQL::Schema]
+        # @param queries [Array<GraphQL::Query, Hash>]
+        # @param context [Hash]
+        # @param max_complexity [Integer, nil]
+        # @return [Array<Hash>] One result per query
+        def run_all(schema, query_options, context: {}, max_complexity: schema.max_complexity)
+          queries = query_options.map do |opts|
+            case opts
+            when Hash
+              GraphQL::Query.new(schema, nil, **opts)
+            when GraphQL::Query
+              opts
+            else
+              raise "Expected Hash or GraphQL::Query, not #{opts.class} (#{opts.inspect})"
+            end
+          end
+
+          multiplex = Execution::Multiplex.new(schema: schema, queries: queries, context: context, max_complexity: max_complexity)
+          multiplex.trace("execute_multiplex", { multiplex: multiplex }) do
+            schema = multiplex.schema
+            queries = multiplex.queries
+            query_instrumenters = schema.instrumenters[:query]
+            multiplex_instrumenters = schema.instrumenters[:multiplex]
+
+            # First, run multiplex instrumentation, then query instrumentation for each query
+            call_hooks(multiplex_instrumenters, multiplex, :before_multiplex, :after_multiplex) do
+              each_query_call_hooks(query_instrumenters, queries) do
+                schema = multiplex.schema
+                multiplex_analyzers = schema.multiplex_analyzers
+                queries = multiplex.queries
+                if multiplex.max_complexity
+                  multiplex_analyzers += [GraphQL::Analysis::AST::MaxQueryComplexity]
+                end
+
+                schema.analysis_engine.analyze_multiplex(multiplex, multiplex_analyzers)
+                begin
+                  # Since this is basically the batching context,
+                  # share it for a whole multiplex
+                  multiplex.context[:interpreter_instance] ||= multiplex.schema.query_execution_strategy.new
+                  # Do as much eager evaluation of the query as possible
+                  results = []
+                  queries.each_with_index do |query, idx|
+                    multiplex.dataloader.append_job {
+                      operation = query.selected_operation
+                      result = if operation.nil? || !query.valid? || query.context.errors.any?
+                        NO_OPERATION
+                      else
+                        begin
+                          # Although queries in a multiplex _share_ an Interpreter instance,
+                          # they also have another item of state, which is private to that query
+                          # in particular, assign it here:
+                          runtime = Runtime.new(query: query)
+                          query.context.namespace(:interpreter)[:runtime] = runtime
+
+                          query.trace("execute_query", {query: query}) do
+                            runtime.run_eager
+                          end
+                        rescue GraphQL::ExecutionError => err
+                          query.context.errors << err
+                          NO_OPERATION
+                        end
+                      end
+                      results[idx] = result
+                    }
+                  end
+
+                  multiplex.dataloader.run
+
+                  # Then, work through lazy results in a breadth-first way
+                  multiplex.dataloader.append_job {
+                    tracer = multiplex
+                    query = multiplex.queries.length == 1 ? multiplex.queries[0] : nil
+                    queries = multiplex ? multiplex.queries : [query]
+                    final_values = queries.map do |query|
+                      runtime = query.context.namespace(:interpreter)[:runtime]
+                      # it might not be present if the query has an error
+                      runtime ? runtime.final_result : nil
+                    end
+                    final_values.compact!
+                    tracer.trace("execute_query_lazy", {multiplex: multiplex, query: query}) do
+                      Interpreter::Resolve.resolve_all(final_values, multiplex.dataloader)
+                    end
+                    queries.each do |query|
+                      runtime = query.context.namespace(:interpreter)[:runtime]
+                      if runtime
+                        runtime.delete_interpreter_context(:current_path)
+                        runtime.delete_interpreter_context(:current_field)
+                        runtime.delete_interpreter_context(:current_object)
+                        runtime.delete_interpreter_context(:current_arguments)
+                      end
+                    end
+                  }
+                  multiplex.dataloader.run
+
+                  # Then, find all errors and assign the result to the query object
+                  results.each_with_index do |data_result, idx|
+                    query = queries[idx]
+                    # Assign the result so that it can be accessed in instrumentation
+                    query.result_values = if data_result.equal?(NO_OPERATION)
+                      if !query.valid? || query.context.errors.any?
+                        # A bit weird, but `Query#static_errors` _includes_ `query.context.errors`
+                        { "errors" => query.static_errors.map(&:to_h) }
+                      else
+                        data_result
+                      end
+                    else
+                      result = {
+                        "data" => query.context.namespace(:interpreter)[:runtime].final_result
+                      }
+
+                      if query.context.errors.any?
+                        error_result = query.context.errors.map(&:to_h)
+                        result["errors"] = error_result
+                      end
+
+                      result
+                    end
+                    if query.context.namespace?(:__query_result_extensions__)
+                      query.result_values["extensions"] = query.context.namespace(:__query_result_extensions__)
+                    end
+                    # Get the Query::Result, not the Hash
+                    results[idx] = query.result
+                  end
+
+                  results
+                rescue Exception
+                  # TODO rescue at a higher level so it will catch errors in analysis, too
+                  # Assign values here so that the query's `@executed` becomes true
+                  queries.map { |q| q.result_values ||= {} }
+                  raise
+                end
+              end
+            end
+          end
+        end
+
+        private
+
+        # Call the before_ hooks of each query,
+        # Then yield if no errors.
+        # `call_hooks` takes care of appropriate cleanup.
+        def each_query_call_hooks(instrumenters, queries, i = 0)
+          if i >= queries.length
+            yield
+          else
+            query = queries[i]
+            call_hooks(instrumenters, query, :before_query, :after_query) {
+              each_query_call_hooks(instrumenters, queries, i + 1) {
+                yield
+              }
+            }
+          end
+        end
+
+        # Call each before hook, and if they all succeed, yield.
+        # If they don't all succeed, call after_ for each one that succeeded.
+        def call_hooks(instrumenters, object, before_hook_name, after_hook_name)
+          begin
+            successful = []
+            instrumenters.each do |instrumenter|
+              instrumenter.public_send(before_hook_name, object)
+              successful << instrumenter
+            end
+
+            # if any before hooks raise an exception, quit calling before hooks,
+            # but call the after hooks on anything that succeeded but also
+            # raise the exception that came from the before hook.
+          rescue GraphQL::ExecutionError => err
+            object.context.errors << err
+          rescue => e
+            raise call_after_hooks(successful, object, after_hook_name, e)
+          end
+
+          begin
+            yield # Call the user code
+          ensure
+            ex = call_after_hooks(successful, object, after_hook_name, nil)
+            raise ex if ex
+          end
+        end
+
+        def call_after_hooks(instrumenters, object, after_hook_name, ex)
+          instrumenters.reverse_each do |instrumenter|
+            begin
+              instrumenter.public_send(after_hook_name, object)
+            rescue => e
+              ex = e
+            end
+          end
+          ex
+        end
+      end
 
       class ListResultFailedError < GraphQL::Error
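
For orientation, here is a minimal usage sketch (not part of this commit) of the consolidated `run_all` entry point shown above. `HelloType`, `HelloSchema`, and the `{ hello }` documents are hypothetical; in applications this path is normally reached through `HelloSchema.execute` / `HelloSchema.multiplex` rather than by calling `run_all` directly.

require "graphql"

# Hypothetical example types, for illustration only.
class HelloType < GraphQL::Schema::Object
  field :hello, String, null: false

  def hello
    "world"
  end
end

class HelloSchema < GraphQL::Schema
  query(HelloType)
end

# `run_all` builds the Multiplex, runs eager execution through the Dataloader,
# resolves lazy values, then returns one result per query.
results = GraphQL::Execution::Interpreter.run_all(
  HelloSchema,
  [{ query: "{ hello }" }, { query: "{ hello }" }]
)
results.each { |res| p res.to_h }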
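The instrumentation plumbing that moved into `Interpreter` (`call_hooks`, `each_query_call_hooks`, `call_after_hooks`) drives instrumenters via `public_send(:before_query, ...)` and the matching `after_` hooks. Below is a hedged sketch of an object those hooks could call; the class name, the timing logic, and the `instrument(:query, ...)` registration are illustrative assumptions, not part of this diff.

# Illustrative instrumenter: any object responding to the hook names
# that `call_hooks` sends (`before_query` / `after_query` here).
class TimingInstrumenter
  def before_query(query)
    query.context[:started_at] = Time.now
  end

  def after_query(query)
    elapsed = Time.now - query.context[:started_at]
    puts "#{query.selected_operation_name || "anonymous"} finished in #{elapsed.round(4)}s"
  end
end

class InstrumentedSchema < GraphQL::Schema
  query(HelloType) # reuses the hypothetical type from the sketch above
  # Registration makes the instance appear in `schema.instrumenters[:query]`,
  # which `run_all` reads before calling `each_query_call_hooks`.
  instrument(:query, TimingInstrumenter.new)
end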
