diff --git a/graphql/accessor_general.lua b/graphql/accessor_general.lua
index 625a25e..388536d 100644
--- a/graphql/accessor_general.lua
+++ b/graphql/accessor_general.lua
@@ -798,7 +798,8 @@ end
 --- * `pivot_filter` (table, set of fields to match the objected pointed by
 ---   `offset` arqument of the GraphQL query),
 --- * `resulting_object_cnt_max` (number),
---- * `fetched_object_cnt_max` (number).
+--- * `fetched_object_cnt_max` (number),
+--- * `resolveField` (function) for subrequests, see @{tarantool_graphql.new}.
 ---
 --- @return nil
 ---
@@ -822,6 +823,7 @@ local function process_tuple(state, tuple, opts)
         'query execution timeout exceeded, use `timeout_ms` to increase it')
     local collection_name = opts.collection_name
     local pcre = opts.pcre
+    local resolveField = opts.resolveField

     -- convert tuple -> object
     local obj = opts.unflatten_tuple(collection_name, tuple,
@@ -835,6 +837,20 @@ local function process_tuple(state, tuple, opts)
         return true -- skip pivot item too
     end

+    -- make subrequests if needed
+    for k, v in pairs(filter) do
+        if obj[k] == nil then
+            local field_name = k
+            local sub_filter = v
+            local sub_opts = {dont_force_nullability = true}
+            local field = resolveField(field_name, obj, sub_filter, sub_opts)
+            if field == nil then return true end
+            obj[k] = field
+            -- XXX: Remove the value from a filter? But then we need to copy
+            -- the filter each time in the case.
+        end
+    end
+
     -- filter out non-matching objects
     local match = utils.is_subtable(obj, filter) and
         match_using_re(obj, pcre)
@@ -961,6 +977,7 @@ local function select_internal(self, collection_name, from, filter, args, extra)
         unflatten_tuple = self.funcs.unflatten_tuple,
         default_unflatten_tuple = default_unflatten_tuple,
         pcre = args.pcre,
+        resolveField = extra.resolveField,
     }

     if index == nil then
diff --git a/graphql/core/rules.lua b/graphql/core/rules.lua
index 41ab022..bfe2d9d 100644
--- a/graphql/core/rules.lua
+++ b/graphql/core/rules.lua
@@ -357,7 +357,11 @@ function rules.uniqueInputObjectFields(node, context)
     end
   end

-  validateValue(node.value)
+  if node.kind == 'inputObject' then
+    validateValue(node)
+  else
+    validateValue(node.value)
+  end
 end

 function rules.directivesAreDefined(node, context)
diff --git a/graphql/core/validate.lua b/graphql/core/validate.lua
index de685ab..516af36 100644
--- a/graphql/core/validate.lua
+++ b/graphql/core/validate.lua
@@ -268,9 +268,19 @@ local visitors = {
     rules = { rules.uniqueInputObjectFields }
   },

+  inputObject = {
+    children = function(node)
+      return util.map(node.values or {}, function(value)
+        return value.value
+      end)
+    end,
+
+    rules = { rules.uniqueInputObjectFields }
+  },
+
   variable = {
     enter = function(node, context)
-      context.variableReferences[node.name.value] = true
+      context.variableReferences[node.name.value] = true
     end
   },

diff --git a/graphql/tarantool_graphql.lua b/graphql/tarantool_graphql.lua
index 3bcb865..eace032 100644
--- a/graphql/tarantool_graphql.lua
+++ b/graphql/tarantool_graphql.lua
@@ -359,7 +359,7 @@ local function separate_args_instance(args_instance, connection_args,
         else
             error(('cannot found "%s" field ("%s" value) ' ..
                 'within allowed fields'):format(tostring(k),
-                tostring(v)))
+                json.encode(v)))
         end
     end
     return object_args_instance, list_args_instance
 end
@@ -388,23 +388,44 @@ local function convert_simple_connection(state, connection, collection_name)
     -- gql type of connection field
     local destination_type =
         state.nullable_collection_types[c.destination_collection]
-
     assert(destination_type ~= nil,
         ('destination_type (named %s) must not be nil'):format(
         c.destination_collection))
-
+    local raw_destination_type = destination_type
     local c_args = args_from_destination_collection(state,
-        c.destination_collection, c.type)
+        c.destination_collection, c.type)
     destination_type = specify_destination_type(destination_type, c.type)
     local c_list_args = state.list_arguments[c.destination_collection]

+    -- capture `raw_destination_type`
+    local function genResolveField(info)
+        return function(field_name, object, filter, opts)
+            assert(raw_destination_type.fields[field_name],
+                ('performing a subrequest by the non-existent ' ..
+                'field "%s" of the collection "%s"'):format(field_name,
+                c.destination_collection))
+            return raw_destination_type.fields[field_name].resolve(
+                object, filter, info, opts)
+        end
+    end
+
     local field = {
         name = c.name,
         kind = destination_type,
         arguments = c_args,
-        resolve = function(parent, args_instance, info)
+        resolve = function(parent, args_instance, info, opts)
+            local opts = opts or {}
+            assert(type(opts) == 'table',
+                'opts must be nil or a table, got ' .. type(opts))
+            local dont_force_nullability =
+                opts.dont_force_nullability or false
+            assert(type(dont_force_nullability) == 'boolean',
+                'opts.dont_force_nullability ' ..
+                'must be nil or a boolean, got ' ..
+                type(dont_force_nullability))
+
             local destination_args_names, destination_args_values =
                 parent_args_values(parent, c.parts)
@@ -432,8 +453,10 @@ local function convert_simple_connection(state, connection, collection_name)
                 destination_args_names = destination_args_names,
                 destination_args_values = destination_args_values,
             }
+            local resolveField = genResolveField(info)
             local extra = {
-                qcontext = info.qcontext
+                qcontext = info.qcontext,
+                resolveField = resolveField, -- for subrequests
             }

             -- object_args_instance will be passed to 'filter'
@@ -451,7 +474,8 @@ local function convert_simple_connection(state, connection, collection_name)
                 -- we expect here exactly one object even for 1:1*
                 -- connections because we processed all-parts-are-null
                 -- situation above
-                assert(#objs == 1, 'expect one matching object, got ' ..
+                assert(#objs == 1 or dont_force_nullability,
+                    'expect one matching object, got ' ..
                     tostring(#objs))
                 return objs[1]
             else -- c.type == '1:N'
@@ -778,6 +802,84 @@ local function create_root_collection(state)
     })
 end

+--- Execute a function for each 1:1 or 1:1* connection of each collection.
+---
+--- @tparam table state tarantool_graphql instance
+---
+--- @tparam function func a function with the following parameters:
+---
+--- * source collection name (string);
+--- * connection (table).
+local function for_each_1_1_connection(state, func)
+    for collection_name, collection in pairs(state.collections) do
+        for _, c in ipairs(collection.connections or {}) do
+            if c.type == '1:1' or c.type == '1:1*' then
+                func(collection_name, c)
+            end
+        end
+    end
+end
+
+--- Add arguments corresponding to 1:1 and 1:1* connections (nested filters).
+---
+--- @tparam table state tarantool_graphql instance
+local function add_connection_arguments(state)
+    -- map destination collection to list of input objects
+    local input_objects = {}
+    -- map source collection and connection name to an input object
+    local lookup_input_objects = {}
+
+    -- create InputObjects for each 1:1 or 1:1* connection of each collection
+    for_each_1_1_connection(state, function(collection_name, c)
+        -- XXX: support union collections
+        if c.variants ~= nil then return end
+
+        local object = types.inputObject({
+            name = c.name,
+            description = ('generated from the connection "%s" ' ..
+                'of collection "%s" using collection "%s"'):format(
+                c.name, collection_name, c.destination_collection),
+            fields = state.object_arguments[c.destination_collection],
+        })
+
+        if input_objects[c.destination_collection] == nil then
+            input_objects[c.destination_collection] = {}
+        end
+        table.insert(input_objects[c.destination_collection], object)
+
+        if lookup_input_objects[collection_name] == nil then
+            lookup_input_objects[collection_name] = {}
+        end
+        lookup_input_objects[collection_name][c.name] = object
+    end)
+
+    -- update fields of collection arguments and input objects with other input
+    -- objects
+    for_each_1_1_connection(state, function(collection_name, c)
+        -- XXX: support union collections
+        if c.variants ~= nil then return end
+
+        local new_object = lookup_input_objects[collection_name][c.name]
+        -- collection arguments
+        local fields = state.object_arguments[collection_name]
+        assert(fields[c.name] == nil,
+            'we must not add an input object twice to the same collection ' ..
+            'arguments list')
+        fields[c.name] = new_object
+        -- input objects
+        for _, input_object in ipairs(input_objects[collection_name] or {}) do
+            local fields = input_object.fields
+            assert(fields[c.name] == nil,
+                'we must not add an input object twice to the same input ' ..
+                'object')
+            fields[c.name] = {
+                name = c.name,
+                kind = new_object,
+            }
+        end
+    end)
+end
+
 local function parse_cfg(cfg)
     local state = {}

@@ -839,14 +941,25 @@ local function parse_cfg(cfg)
             {skip_compound = true})
         local list_args = convert_record_fields_to_args(
             accessor:list_args(collection_name))

-        local args = utils.merge_tables(object_args, list_args)

         state.object_arguments[collection_name] = object_args
         state.list_arguments[collection_name] = list_args
+    end
+
+    add_connection_arguments(state)
+
+    -- fill all_arguments with object_arguments + list_arguments
+    for collection_name, collection in pairs(state.collections) do
+        local object_args = state.object_arguments[collection_name]
+        local list_args = state.list_arguments[collection_name]
+
+        local args = utils.merge_tables(object_args, list_args)
         state.all_arguments[collection_name] = args
     end
+
     -- create fake root `Query` collection
     create_root_collection(state)
+
     return state
 end
@@ -967,10 +1080,14 @@ end
 ---             --     destination_args_values = <...>,
 ---             -- }
 ---             --
----             -- extra is a table which contains additional data for the
----             -- query; by now it consists of a single qcontext table,
----             -- which can be used by accessor to store any query-related
----             -- data
+---             -- `extra` is a table which contains additional data for
+---             -- the query:
+---             --
+---             -- * `qcontext` (table) can be used by an accessor to store
+---             --   any query-related data;
+---             -- * `resolveField(field_name, object, filter, opts)`
+---             --   (function) for performing a subrequest on a field
+---             --   connected using a 1:1 or 1:1* connection.
 ---             --
 ---             return ...
 ---         end,
diff --git a/test/local/space_nested_args.result b/test/local/space_nested_args.result
new file mode 100644
index 0000000..56473d5
--- /dev/null
+++ b/test/local/space_nested_args.result
@@ -0,0 +1,66 @@
+
+
+ +---------------------+
+ | a-+ h x y           |
+ | |\ \  |\            |
+ | b c d k l           |
+ | |    |\ \           |
+ | e f g m             |
+ +---------------------+
+RESULT
+---
+order_collection:
+- order_id: order_id_1
+  description: first order of Ivan
+  user_connection:
+    user_id: user_id_1
+    last_name: Ivanov
+    first_name: Ivan
+- order_id: order_id_2
+  description: second order of Ivan
+  user_connection:
+    user_id: user_id_1
+    last_name: Ivanov
+    first_name: Ivan
+...
+
+RUN upside {{{
+QUERY
+    query emails_trace_upside($upside_body: String) {
+        email(in_reply_to: {in_reply_to: {body: $upside_body}}) {
+            body
+            in_reply_to {
+                body
+                in_reply_to {
+                    body
+                }
+            }
+        }
+    }
+VARIABLES
+---
+upside_body: a
+...
+
+RESULT
+---
+email:
+- body: g
+  in_reply_to:
+    body: d
+    in_reply_to:
+      body: a
+- body: f
+  in_reply_to:
+    body: d
+    in_reply_to:
+      body: a
+- body: e
+  in_reply_to:
+    body: b
+    in_reply_to:
+      body: a
+...
+
+}}}
+
diff --git a/test/local/space_nested_args.test.lua b/test/local/space_nested_args.test.lua
new file mode 100755
index 0000000..d4b9003
--- /dev/null
+++ b/test/local/space_nested_args.test.lua
@@ -0,0 +1,154 @@
+#!/usr/bin/env tarantool
+
+local fio = require('fio')
+
+-- require in-repo version of graphql/ sources despite current working directory
+package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)")
+    :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' ..
+    package.path
+
+local yaml = require('yaml')
+local graphql = require('graphql')
+local utils = require('graphql.utils')
+local common_testdata = require('test.testdata.common_testdata')
+local emails_testdata = require('test.testdata.nullable_1_1_conn_testdata')
+
+-- init box, upload test data and acquire metadata
+-- -----------------------------------------------
+
+-- init box and data schema
+box.cfg{background = false}
+common_testdata.init_spaces()
+emails_testdata.init_spaces()
+
+-- upload test data
+common_testdata.fill_test_data()
+emails_testdata.fill_test_data()
+
+local LOCALPART_FN = 1
+local DOMAIN_FN = 2
+local BODY_FN = 7
+
+for _, tuple in box.space.email:pairs() do
+    local body = tuple[BODY_FN]
+    if body:match('^[xy]$') then
+        local key = {tuple[LOCALPART_FN], tuple[DOMAIN_FN]}
+        box.space.email:delete(key)
+    end
+end
+
+-- acquire metadata
+local common_metadata = common_testdata.get_test_metadata()
+local emails_metadata = emails_testdata.get_test_metadata()
+
+-- build accessor and graphql schemas
+-- ----------------------------------
+
+local common_accessor = graphql.accessor_space.new({
+    schemas = common_metadata.schemas,
+    collections = common_metadata.collections,
+    service_fields = common_metadata.service_fields,
+    indexes = common_metadata.indexes,
+})
+
+local common_gql_wrapper = graphql.new({
+    schemas = common_metadata.schemas,
+    collections = common_metadata.collections,
+    accessor = common_accessor,
+})
+
+local emails_accessor = graphql.accessor_space.new({
+    schemas = emails_metadata.schemas,
+    collections = emails_metadata.collections,
+    service_fields = emails_metadata.service_fields,
+    indexes = emails_metadata.indexes,
+})
+
+local emails_gql_wrapper = graphql.new({
+    schemas = emails_metadata.schemas,
+    collections = emails_metadata.collections,
+    accessor = emails_accessor,
+})
+
+-- run queries
+-- -----------
+
+local function print_and_return(...)
+    print(...)
+    return table.concat({...}, ' ') .. '\n'
+end
+
+local function format_result(name, query, variables, result)
+    return ('RUN %s {{{\nQUERY\n%s\nVARIABLES\n%s\nRESULT\n%s\n}}}\n'):format(
+        name, query:rstrip(), yaml.encode(variables), yaml.encode(result))
+end
+
+local function run_common_queries(gql_wrapper)
+    local results = ''
+
+    local query_1 = [[
+        query user_by_order($user_id: String) {
+            order_collection(user_connection: {user_id: $user_id}) {
+                order_id
+                description
+                user_connection {
+                    user_id
+                    last_name
+                    first_name
+                }
+            }
+        }
+    ]]
+
+    utils.show_trace(function()
+        local variables_1 = {user_id = 'user_id_1'}
+        local gql_query_1 = gql_wrapper:compile(query_1)
+        local result = gql_query_1:execute(variables_1)
+        results = results .. print_and_return(
+            ('RESULT\n%s'):format(yaml.encode(result)))
+    end)
+
+    return results
+end
+
+local function run_emails_queries(gql_wrapper)
+    local results = ''
+
+    -- upside traversal (1:1 connections)
+    -- ----------------------------------
+
+    local query_upside = [[
+        query emails_trace_upside($upside_body: String) {
+            email(in_reply_to: {in_reply_to: {body: $upside_body}}) {
+                body
+                in_reply_to {
+                    body
+                    in_reply_to {
+                        body
+                    }
+                }
+            }
+        }
+    ]]
+
+    utils.show_trace(function()
+        local variables_upside = {upside_body = 'a'}
+        local gql_query_upside = gql_wrapper:compile(query_upside)
+        local result = gql_query_upside:execute(variables_upside)
+        results = results .. print_and_return(format_result(
+            'upside', query_upside, variables_upside, result))
+    end)
+
+    return results
+end
+
+run_common_queries(common_gql_wrapper)
+run_emails_queries(emails_gql_wrapper)
+
+-- clean up
+-- --------
+
+common_testdata.drop_spaces()
+emails_testdata.drop_spaces()
+
+os.exit()
diff --git a/test/testdata/array_and_map_testdata.lua b/test/testdata/array_and_map_testdata.lua
old mode 100755
new mode 100644
diff --git a/test/testdata/nullable_1_1_conn_testdata.lua b/test/testdata/nullable_1_1_conn_testdata.lua
index 8ec2932..cbf11af 100644
--- a/test/testdata/nullable_1_1_conn_testdata.lua
+++ b/test/testdata/nullable_1_1_conn_testdata.lua
@@ -125,7 +125,7 @@ function nullable_1_1_conn_testdata.init_spaces()
     local IN_REPLY_TO_DOMAIN_FN = 6
     local BODY_FN = 7 -- luacheck: ignore

-    box.once('test_space_init_spaces', function()
+    box.once('init_spaces_nullable_1_1_conn', function()
         box.schema.create_space('email')
         box.space.email:create_index('message_id',
             {type = 'tree', unique = true, parts = {
@@ -302,7 +302,7 @@ function nullable_1_1_conn_testdata.fill_test_data(virtbox)
 end

 function nullable_1_1_conn_testdata.drop_spaces()
-    box.space._schema:delete('oncetest_space_init_spaces')
+    box.space._schema:delete('onceinit_spaces_nullable_1_1_conn')
     box.space.email:drop()
 end
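
Usage sketch (illustrative, assuming a `gql_wrapper` built from the
`common_testdata` schema as in test/local/space_nested_args.test.lua; the
variable names here are hypothetical): how the nested connection arguments
introduced by this patch are exercised through the public API.

    -- Filter orders by a field of the connected user object: the
    -- `user_connection` argument is the InputObject that
    -- add_connection_arguments() generates from the 1:1 connection of
    -- order_collection.
    local query = [[
        query user_by_order($user_id: String) {
            order_collection(user_connection: {user_id: $user_id}) {
                order_id
                description
            }
        }
    ]]
    local gql_query = gql_wrapper:compile(query)
    local result = gql_query:execute({user_id = 'user_id_1'})
    -- If a filtered field is absent from the fetched object itself,
    -- accessor_general.process_tuple() resolves it through the connection
    -- (extra.resolveField) before applying the filter.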