# frozen_string_literal: true
module GraphQL
  module Execution
    class Interpreter
      module Resolve
        # Continue field results in `results` until there's nothing else to continue.
        # @return [void]
        def self.resolve_all(results, dataloader)
          dataloader.append_job { resolve(results, dataloader) }
          nil
        end
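
        # An illustrative sketch of how this might be driven (the dataloader
        # setup and `results` value below are assumptions for the example, not
        # the interpreter's actual call site):
        #
        #   dataloader = GraphQL::Dataloader.new
        #   results = [result_hash_from_evaluation] # assumed placeholder
        #   Resolve.resolve_all(results, dataloader)
        #   dataloader.run # drains jobs until every Lazy is replaced by a plain value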

        # Resolve the {Lazy} instances in `lazies_at_depth`, shallowest depth first,
        # running the dataloader after each depth so that any jobs it enqueued
        # finish before moving on to deeper lazies.
        # @param lazies_at_depth [Hash{Integer => Array<Lazy>}]
        # @return [void]
        def self.resolve_each_depth(lazies_at_depth, dataloader)
          depths = lazies_at_depth.keys
          depths.sort!
          next_depth = depths.first
          if next_depth
            lazies = lazies_at_depth[next_depth]
            lazies_at_depth.delete(next_depth)
            if lazies.any?
              dataloader.append_job {
                lazies.each(&:value) # resolve these Lazy instances
              }
              # Run lazies _and_ dataloader, see if more are enqueued
              dataloader.run
              resolve_each_depth(lazies_at_depth, dataloader)
            end
          end
          nil
        end
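
        # For illustration (these depth keys and lazy objects are assumed, not
        # produced in this file): given
        #
        #   lazies_at_depth = { 2 => [lazy_a, lazy_b], 3 => [lazy_c] }
        #
        # the depth-2 lazies are resolved and the dataloader is run to completion
        # before `lazy_c` at depth 3 is touched.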

        # After getting `results` back from an interpreter evaluation,
        # continue it until you get a response-ready Ruby value.
        #
        # `results` is one level of _depth_ of a query or multiplex.
        #
        # Resolve all lazy values in that depth before moving on
        # to the next level.
        #
        # It's assumed that the lazies will
        # return {Lazy} instances if there's more work to be done,
        # or return {Hash}/{Array} if the query should be continued.
        #
        # @return [void]
        def self.resolve(results, dataloader)
          # There might be pending jobs here that _will_ write lazies
          # into the result hash. We should run them out, so we
          # can be sure that all lazies will be present in the result hashes.
          # A better implementation would somehow interleave (or unify)
          # these approaches.
          dataloader.run
          next_results = []
          while results.any?
            result_value = results.shift
            if result_value.is_a?(Runtime::GraphQLResultHash) || result_value.is_a?(Hash)
              results.concat(result_value.values)
              next
            elsif result_value.is_a?(Runtime::GraphQLResultArray)
              results.concat(result_value.values)
              next
            elsif result_value.is_a?(Array)
              results.concat(result_value)
              next
            elsif result_value.is_a?(Lazy)
              loaded_value = result_value.value
              if loaded_value.is_a?(Lazy)
                # Since this field returned another lazy,
                # add it to the same queue
                results << loaded_value
              elsif loaded_value.is_a?(Runtime::GraphQLResultHash) || loaded_value.is_a?(Runtime::GraphQLResultArray) ||
                  loaded_value.is_a?(Hash) || loaded_value.is_a?(Array)
                # Add these values in wholesale --
                # they might be modified by later work in the dataloader.
                next_results << loaded_value
              end
            end
          end

          if next_results.any?
            # Any pending data loader jobs may populate the
            # result arrays or result hashes accumulated in
            # `next_results`. Run those **to completion**
            # before continuing to resolve `next_results`.
            # (Just `.append_job` doesn't work if any pending
            # jobs require multiple passes.)
            dataloader.run
            dataloader.append_job { resolve(next_results, dataloader) }
          end

          nil
        end
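
        # A sketch of one pass through the loop above (names assumed for
        # illustration): given `results = [{ "user" => lazy_user }]`, the hash's
        # values are flattened back into `results`, `lazy_user.value` is called,
        # and a Hash or Array return value is queued in `next_results` for the
        # next recursive `resolve` pass.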
      end
    end
  end
end