This repository was archived by the owner on Apr 14, 2022. It is now read-only.

Commit ffe9630

WIP: PoC of filtering over 1:1 connections borders
Related to #39.
1 parent 4ded4d7 commit ffe9630

4 files changed: +149 -1 lines changed

graphql/accessor_general.lua

Lines changed: 13 additions & 0 deletions
@@ -756,6 +756,8 @@ end
 --- `offset` arqument of the GraphQL query),
 --- * `resulting_object_cnt_max` (number),
 --- * `fetched_object_cnt_max` (number).
+--- * `fields` (table) XXX
+--- * `info` (table) XXX
 ---
 --- @return nil
 ---
@@ -779,6 +781,8 @@ local function process_tuple(state, tuple, opts)
         'query execution timeout exceeded, use `timeout_ms` to increase it')
     local collection_name = opts.collection_name
     local pcre = opts.pcre
+    local fields = opts.fields
+    local info = opts.info
 
     -- convert tuple -> object
     local obj = opts.unflatten_tuple(collection_name, tuple,
@@ -792,6 +796,13 @@ local function process_tuple(state, tuple, opts)
         return true -- skip pivot item too
     end
 
+    -- make subrequests if needed
+    for k, v in pairs(filter) do
+        if obj[k] == nil and fields[k] ~= nil then
+            obj[k] = fields[k].resolve(obj, {}, info)
+        end
+    end
+
     -- filter out non-matching objects
     local match = utils.is_subtable(obj, filter) and
         match_using_re(obj, pcre)
@@ -918,6 +929,8 @@ local function select_internal(self, collection_name, from, filter, args, extra)
         unflatten_tuple = self.funcs.unflatten_tuple,
         default_unflatten_tuple = default_unflatten_tuple,
         pcre = args.pcre,
+        fields = extra.fields,
+        info = extra.info,
     }
 
     if index == nil then
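The intent of the new "make subrequests if needed" block is to lazily resolve connection fields that the filter mentions but that the object unflattened from a tuple does not yet contain, so that the ordinary utils.is_subtable check can match nested values. A minimal sketch of that intent follows; match_with_subrequests is a hypothetical helper (not part of the patch), the names fields, info and filter mirror the diff, and note that the loop in the diff refers to filter directly, so it presumably relies on the filter table being reachable in that scope (e.g. passed via opts).

-- Sketch only: resolve 1:1 connection fields on demand before matching,
-- so a nested filter such as {user_connection = {user_id = 'user_id_1'}}
-- can be checked with the usual subtable test.
local utils = require('graphql.utils')

local function match_with_subrequests(obj, filter, fields, info)
    for field_name, _ in pairs(filter) do
        -- the object built from the tuple has no connection fields yet;
        -- run the connection's resolver only for keys the filter needs
        if obj[field_name] == nil and fields[field_name] ~= nil then
            obj[field_name] = fields[field_name].resolve(obj, {}, info)
        end
    end
    return utils.is_subtable(obj, filter)
end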

graphql/tarantool_graphql.lua

Lines changed: 28 additions & 1 deletion
@@ -323,6 +323,7 @@ gql_type = function(state, avro_schema, collection, collection_name)
         assert(destination_type ~= nil,
             ('destination_type (named %s) must not be nil'):format(
             c.destination_collection))
+        local raw_destination_type = destination_type
 
         local c_args
         if c.type == '1:1' then
@@ -408,7 +409,11 @@ gql_type = function(state, avro_schema, collection, collection_name)
                 destination_args_values = destination_args_values,
             }
             local extra = {
-                qcontext = info.qcontext
+                qcontext = info.qcontext,
+                -- XXX: add only those fields that are for 1:1 or 1:1*
+                -- connections or even table of its resolve function
+                fields = raw_destination_type.fields,
+                info = info, -- for subrequests
             }
             local object_args_instance = {} -- passed to 'filter'
             local list_args_instance = {} -- passed to 'args'
@@ -433,6 +438,9 @@ gql_type = function(state, avro_schema, collection, collection_name)
             -- we expect here exactly one object even for 1:1*
             -- connections because we processed all-parts-are-null
             -- situation above
+
+            -- XXX: what when a filter passed and no one object
+            -- matched?
             assert(#objs == 1,
                 'expect one matching object, got ' ..
                 tostring(#objs))
@@ -596,6 +604,24 @@ local function parse_cfg(cfg)
         state.list_arguments[collection_name] = list_args
         state.all_arguments[collection_name] = args
     end
+
+    -- add arguments for 1:1 and 1:1* connections for each collection type
+    for collection_name, collection in pairs(state.collections) do
+        for _, c in ipairs(collection.connections or {}) do
+            local destination_type =
+                state.nullable_collection_types[c.destination_collection]
+            if c.type == '1:1' or c.type == '1:1*' then
+                state.all_arguments[collection_name][c.name] =
+                    types.inputObject({
+                        name = c.name,
+                        description = 'generated from the connection ' ..
+                            c.name,
+                        fields = destination_type.fields,
+                    }
+                )
+            end
+        end
+    end
     -- create fake root `Query` collection
     create_root_collection(state)
     return state
@@ -722,6 +748,7 @@ end
 --- -- query; by now it consists of a single qcontext table,
 --- -- which can be used by accessor to store any query-related
 --- -- data
+--- -- XXX: extra.fields, extra.info
 --- --
 --- return ...
 --- end,
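Taken together with the accessor change, the parse_cfg hunk registers a dedicated input argument for every 1:1 and 1:1* connection, named after the connection and carrying the destination collection's field set. For the test data used below, the generated argument looks roughly like this (a sketch only; the collection and type names 'order_collection', 'user_connection' and 'user_collection' are taken from the test data, and types.inputObject is the constructor already used in the diff):

-- Sketch of the argument generated for the 1:1 connection 'user_connection'
-- of order_collection; reusing the destination's field set is what lets a
-- query pass order_collection(user_connection: {user_id: "user_id_1"}).
state.all_arguments['order_collection']['user_connection'] = types.inputObject({
    name = 'user_connection',
    description = 'generated from the connection user_connection',
    fields = state.nullable_collection_types['user_collection'].fields,
})

The two XXX comments in the diff mark the open questions of this PoC: extra.fields currently exposes all destination fields rather than only the 1:1/1:1* connection resolvers, and the #objs == 1 assertion does not yet handle the case where a nested filter matches nothing.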

test/local/space_nested_args.result

Lines changed: 17 additions & 0 deletions
@@ -0,0 +1,17 @@
+RESULT
+---
+order_collection:
+- order_id: order_id_1
+  description: first order of Ivan
+  user_connection:
+    user_id: user_id_1
+    last_name: Ivanov
+    first_name: Ivan
+- order_id: order_id_2
+  description: second order of Ivan
+  user_connection:
+    user_id: user_id_1
+    last_name: Ivanov
+    first_name: Ivan
+...
+
test/local/space_nested_args.test.lua

Lines changed: 91 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,91 @@
1+
#!/usr/bin/env tarantool
2+
3+
local fio = require('fio')
4+
5+
-- require in-repo version of graphql/ sources despite current working directory
6+
package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)")
7+
:gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' ..
8+
package.path
9+
10+
local yaml = require('yaml')
11+
local graphql = require('graphql')
12+
local utils = require('graphql.utils')
13+
local testdata = require('test.testdata.common_testdata')
14+
15+
-- init box, upload test data and acquire metadata
16+
-- -----------------------------------------------
17+
18+
-- init box and data schema
19+
box.cfg{background = false}
20+
testdata.init_spaces()
21+
22+
-- upload test data
23+
testdata.fill_test_data()
24+
25+
-- acquire metadata
26+
local metadata = testdata.get_test_metadata()
27+
local schemas = metadata.schemas
28+
local collections = metadata.collections
29+
local service_fields = metadata.service_fields
30+
local indexes = metadata.indexes
31+
32+
-- build accessor and graphql schemas
33+
-- ----------------------------------
34+
35+
local accessor = graphql.accessor_space.new({
36+
schemas = schemas,
37+
collections = collections,
38+
service_fields = service_fields,
39+
indexes = indexes,
40+
})
41+
42+
local gql_wrapper = graphql.new({
43+
schemas = schemas,
44+
collections = collections,
45+
accessor = accessor,
46+
})
47+
48+
-- run queries
49+
-- -----------
50+
51+
local function print_and_return(...)
52+
print(...)
53+
return table.concat({...}, ' ') .. '\n'
54+
end
55+
56+
local function run_queries(gql_wrapper)
57+
local results = ''
58+
59+
local query_1 = [[
60+
query user_by_order($user_id: String) {
61+
order_collection(user_connection: {user_id: $user_id}) {
62+
order_id
63+
description
64+
user_connection {
65+
user_id
66+
last_name
67+
first_name
68+
}
69+
}
70+
}
71+
]]
72+
73+
utils.show_trace(function()
74+
local variables_1 = {user_id = 'user_id_1'}
75+
local gql_query_1 = gql_wrapper:compile(query_1)
76+
local result = gql_query_1:execute(variables_1)
77+
results = results .. print_and_return(
78+
('RESULT\n%s'):format(yaml.encode(result)))
79+
end)
80+
81+
return results
82+
end
83+
84+
run_queries(gql_wrapper)
85+
86+
-- clean up
87+
-- --------
88+
89+
testdata.drop_spaces()
90+
91+
os.exit()
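The test prints the RESULT block and compares it against the reference output in test/local/space_nested_args.result above. If one preferred to assert on the returned table directly instead of comparing YAML text, a sketch could look like this (check_result is a hypothetical helper, not part of the patch; the expected values are copied from the reference output):

-- Hypothetical helper: verify the nested connection fields on the table
-- returned by gql_query_1:execute() (values mirror space_nested_args.result).
local function check_result(result)
    local orders = result.order_collection
    assert(#orders == 2, 'expected both orders of user_id_1')
    for _, order in ipairs(orders) do
        assert(order.user_connection.user_id == 'user_id_1')
        assert(order.user_connection.last_name == 'Ivanov')
        assert(order.user_connection.first_name == 'Ivan')
    end
end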
