Skip to content
This repository was archived by the owner on Apr 14, 2022. It is now read-only.

Add creation of avro-schema from query with multi-head connections, Unions, Maps and directives #201

Merged
merged 7 commits into from
Aug 10, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 13 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -124,6 +124,15 @@ local compiled_query = graphql_lib.compile(query)
local result = compiled_query:execute(variables)
```

### Multi-head connections
A parent object is matched against the variants of a multi-head connection in
the order of the variants. The parent object should match the determinant of
at least one variant, except in the following case: when the source fields of
all variants are null, the multi-head connection is obligated to give a null
object as the result. In this case the parent object is allowed to match no
variant. One can use this feature to avoid setting any specific determinant
value when a multi-head connection is known to have no connected object.

### Mutations

Mutations are disabled for avro-schema-2\*, because it can work incorrectly for
Expand Down Expand Up @@ -368,6 +377,10 @@ git clone https://github.com/tarantool/graphql.git
git submodule update --recursive --init
make test
```
To run specific test:
```
TEST_RUN_TESTS=common/mutation make test
```

## Requirements

Expand Down
22 changes: 20 additions & 2 deletions graphql/convert_schema/resolve.lua
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ end
---
--- Note that connection key parts can be prefix of index key parts. Zero parts
--- count considered as ok by this check.
local function are_all_parts_null(parent, connection_parts)
local function are_all_parts_null(parent, connection_parts, opts)
local are_all_parts_null = true
local are_all_parts_non_null = true
for _, part in ipairs(connection_parts) do
Expand All @@ -47,7 +47,9 @@ local function are_all_parts_null(parent, connection_parts)
end

local ok = are_all_parts_null or are_all_parts_non_null
if not ok then -- avoid extra json.encode()
local opts = opts or {}
local no_assert = opts.no_assert or false
if not ok and not no_assert then -- avoid extra json.encode()
assert(ok,
'FULL MATCH constraint was failed: connection ' ..
'key parts must be all non-nulls or all nulls; ' ..
Expand Down Expand Up @@ -190,6 +192,22 @@ function resolve.gen_resolve_function_multihead(collection_name, connection,
end

return function(parent, _, info)
-- If a parent object does not have all source fields (for any of
-- variants) non-null then we do not resolve variant and just return
-- box.NULL.
local is_source_fields_found = false
for _, variant in ipairs(c.variants) do
is_source_fields_found =
not are_all_parts_null(parent, variant.parts, {no_assert = true})
if is_source_fields_found then
break
end
end

if not is_source_fields_found then
return box.NULL, nil
end

local v, variant_num, box_field_name = resolve_variant(parent)
local destination_type = union_types[variant_num]

Expand Down
10 changes: 6 additions & 4 deletions graphql/convert_schema/types.lua
Original file line number Diff line number Diff line change
Expand Up @@ -448,10 +448,12 @@ function types.convert(state, avro_schema, opts)
'got %s (avro_schema %s)'):format(type(avro_schema.values),
json.encode(avro_schema)))

-- validate avro schema format inside 'values'
types.convert(state, avro_schema.values, {context = context})

local res = core_types.map
table.insert(context.path, 'Map')
local converted_values = types.convert(state, avro_schema.values,
{context = context})
table.remove(context.path, #context.path)
local map_name = helpers.full_name('Map', context)
local res = core_types.map({values = converted_values, name = map_name})
return avro_t == 'map' and core_types.nonNull(res) or res
elseif avro_t == 'union' then
return union.convert(avro_schema, {
Expand Down
31 changes: 20 additions & 11 deletions graphql/core/types.lua
Original file line number Diff line number Diff line change
Expand Up @@ -191,17 +191,26 @@ function types.union(config)
return instance
end

types.map = types.scalar({
name = 'Map',
description = 'Map is a dictionary with string keys and values of ' ..
'arbitrary but same among all values type',
serialize = function(value) return value end,
parseValue = function(value) return value end,
parseLiteral = function(node)
error('Literal parsing is implemented in util.coerceValue; ' ..
'we should not go here')
end,
})
--- Constructor of the GraphQL Map scalar type.
---
--- Unlike other scalar types a Map is instantiated per-schema, because each
--- instance carries the GraphQL type of its values; hence it is a factory
--- function rather than a single shared type object.
---
--- @tparam table config the following fields:
---
--- * name (string) full name of the resulting type
--- * values (table) GraphQL type of the map values
---
--- @treturn table the type instance; `instance.nonNull` holds the non-null
--- wrapper for unification with other type objects
function types.map(config)
    local instance = {
        __type = 'Scalar',
        subtype = 'Map',
        name = config.name,
        description = 'Map is a dictionary with string keys and values of ' ..
            'arbitrary but same among all values type',
        -- Serialization is the identity: a Lua table already is the
        -- wire representation of a map.
        serialize = function(value) return value end,
        parseValue = function(value) return value end,
        parseLiteral = function(node)
            error('Literal parsing is implemented in util.coerceValue; ' ..
                'we should not go here')
        end,
        values = config.values,
    }

    -- Expose a non-null variant on the instance for unification with other
    -- type objects (see types.inputMap).
    instance.nonNull = types.nonNull(instance)

    return instance
end

function types.inputObject(config)
assert(type(config.name) == 'string', 'type name must be provided as a string')
Expand Down
6 changes: 3 additions & 3 deletions graphql/impl.lua
Original file line number Diff line number Diff line change
Expand Up @@ -73,8 +73,8 @@ end
--- @treturn table result of the operation
local function compile_and_execute(state, query, variables, operation_name,
opts)
assert(type(state) == 'table', 'use :gql_execute(...) instead of ' ..
'.execute(...)')
assert(type(state) == 'table', 'use :compile_and_execute(...) ' ..
'instead of .compile_and_execute(...)')
assert(state.schema ~= nil, 'have not compiled schema')
check(query, 'query', 'string')
check(variables, 'variables', 'table', 'nil')
Expand Down Expand Up @@ -103,7 +103,7 @@ end
--- @treturn table compiled query with `execute` and `avro_schema` functions
local function gql_compile(state, query, opts)
assert(type(state) == 'table' and type(query) == 'string',
'use :validate(...) instead of .validate(...)')
'use :gql_compile(...) instead of .gql_compile(...)')
assert(state.schema ~= nil, 'have not compiled schema')
check(query, 'query', 'string')
check(opts, 'opts', 'table', 'nil')
Expand Down
131 changes: 128 additions & 3 deletions graphql/query_to_avro.lua
Original file line number Diff line number Diff line change
Expand Up @@ -10,12 +10,16 @@ local introspection = require(path .. '.introspection')
local query_util = require(path .. '.query_util')
local avro_helpers = require('graphql.avro_helpers')
local convert_schema_helpers = require('graphql.convert_schema.helpers')
local utils = require('graphql.utils')
local check = utils.check

-- module functions
local query_to_avro = {}

-- forward declaration
local object_to_avro
local map_to_avro
local union_to_avro

local gql_scalar_to_avro_index = {
String = "string",
Expand All @@ -29,7 +33,9 @@ local gql_scalar_to_avro_index = {

--- Convert a GraphQL scalar type to the corresponding avro-schema type.
---
--- @tparam table fieldType GraphQL Scalar type object
---
--- @treturn string|table avro-schema primitive type name or, for the Map
--- subtype, an avro-schema map definition
local function gql_scalar_to_avro(fieldType)
    assert(fieldType.__type == "Scalar", "GraphQL scalar field expected")
    -- The Map type is represented as a Scalar with the 'Map' subtype; it
    -- converts to an avro-schema map rather than to a primitive type.
    if fieldType.subtype == "Map" then
        return map_to_avro(fieldType)
    end
    local result = gql_scalar_to_avro_index[fieldType.name]
    assert(result ~= nil, "Unexpected scalar type: " .. fieldType.name)
    return result
end
Expand Down Expand Up @@ -71,8 +77,10 @@ local function gql_type_to_avro(fieldType, subSelections, context)
result = gql_scalar_to_avro(fieldType)
elseif fieldTypeName == 'Object' then
result = object_to_avro(fieldType, subSelections, context)
elseif fieldTypeName == 'Interface' or fieldTypeName == 'Union' then
error('Interfaces and Unions are not supported yet')
elseif fieldTypeName == 'Union' then
result = union_to_avro(fieldType, subSelections, context)
elseif fieldTypeName == 'Interface' then
error('Interfaces are not supported yet')
else
error(string.format('Unknown type "%s"', tostring(fieldTypeName)))
end
Expand All @@ -85,6 +93,101 @@ local function gql_type_to_avro(fieldType, subSelections, context)
return result
end

--- Build an avro-schema map definition from a GraphQL Map type.
---
--- @tparam table mapType GraphQL Map type object; must carry the 'values'
--- field holding the GraphQL type of the map values
---
--- @treturn table avro-schema definition of the form {type = 'map', ...}
map_to_avro = function(mapType)
    assert(mapType.values ~= nil, "GraphQL Map type must have 'values' field")
    local values_avro = gql_type_to_avro(mapType.values)
    return {
        type = "map",
        values = values_avro,
    }
end

--- Converts a GraphQL Union type to avro-schema type.
---
--- Currently we use GraphQL Unions to implement both multi-head connections
--- and avro-schema unions. The function distinguishes between them relying on
--- 'fieldType.resolveType'. GraphQL Union implementing multi-head
--- connection does not have such field, as it has another mechanism of union
--- type resolving.
---
--- We have to distinguish between these two types of GraphQL Unions because
--- we want to create different avro-schemas for them.
---
--- GraphQL Unions implementing avro-schema unions are to be converted back
--- to avro-schema unions.
---
--- GraphQL Unions implementing multi-head connections are to be converted to
--- avro-schema records. Each field represents one union variant. Variant type
--- name is taken as a field name. Such records must have all fields nullable.
---
--- We convert Unions implementing multi-head connections to records instead of
--- unions because in case of 1:N connections we would not have valid
--- avro-schema (if use unions). Avro-schema unions may not contain more than
--- one schema with the same non-named type (in case of 1:N multi-head
--- connections we would have more than one 'array' in union).
---
--- @tparam table fieldType GraphQL Union type object
--- @tparam table subSelections selections of the query under this union
--- @tparam table context avro conversion context (passed through)
---
--- @treturn table avro-schema record (multi-head case) or union (list of
--- avro-schema types)
union_to_avro = function(fieldType, subSelections, context)
    assert(fieldType.types ~= nil, "GraphQL Union must have 'types' field")
    check(fieldType.types, "fieldType.types", "table")
    -- Multi-head connections resolve variants by their own mechanism and so
    -- carry no 'resolveType'; avro-schema unions always have it.
    local is_multihead = (fieldType.resolveType == nil)
    local result

    if is_multihead then
        check(fieldType.name, "fieldType.name", "string")
        result = {
            type = 'record',
            name = fieldType.name,
            fields = {}
        }
    else
        result = {}
    end

    for _, box_type in ipairs(fieldType.types) do
        -- In GraphQL schema all types in Unions are 'boxed'. Here we
        -- 'Unbox' types and selectionSets. More info on 'boxing' can be
        -- found at @{convert_schema.types.convert_multihead_connection}
        -- and at @{convert_schema.union}.
        check(box_type, "box_type", "table")
        assert(box_type.__type == "Object", "Box type must be a GraphQL Object")
        assert(utils.table_size(box_type.fields) == 1, 'Box Object must ' ..
            'have exactly one field')
        -- Note: 'unboxed_type' avoids shadowing the builtin 'type' function.
        local unboxed_type = select(2, next(box_type.fields))

        -- Find the inline fragment of the query that matches this variant.
        local box_sub_selections
        for _, s in pairs(subSelections) do
            if s.typeCondition.name.value == box_type.name then
                box_sub_selections = s
                break
            end
        end
        assert(box_sub_selections ~= nil)

        -- We have to extract subSelections from 'box' type.
        local type_sub_selections
        if box_sub_selections.selectionSet.selections[1].selectionSet ~= nil then
            -- Object GraphQL type case.
            type_sub_selections = box_sub_selections.selectionSet
                .selections[1].selectionSet.selections
        else
            -- Scalar GraphQL type case.
            type_sub_selections = box_sub_selections.selectionSet.selections[1]
        end
        assert(type_sub_selections ~= nil)

        if is_multihead then
            -- Multi-head variant fields must all be nullable (see the
            -- function comment above).
            local avro_type = gql_type_to_avro(unboxed_type.kind,
                type_sub_selections, context)
            avro_type = avro_helpers.make_avro_type_nullable(avro_type)
            table.insert(result.fields,
                {name = unboxed_type.name, type = avro_type})
        else
            table.insert(result, gql_type_to_avro(unboxed_type.kind,
                type_sub_selections, context))
        end
    end

    return result
end

--- The function converts a single Object field to avro format.
local function field_to_avro(object_type, fields, context)
local firstField = fields[1]
Expand All @@ -97,6 +200,28 @@ local function field_to_avro(object_type, fields, context)

local fieldTypeAvro = gql_type_to_avro(fieldType.kind, subSelections,
context)
-- Currently we support only 'include' and 'skip' directives. Both of them
-- affect resulting avro-schema the same way: field with directive becomes
-- nullable, if it's already not. Nullable field does not change.
--
-- If it is a 1:N connection then its 'array' field becomes 'array*'.
-- If it is an avro-schema union, then 'null' will be added to the union
-- types. If there is more than one directive on a field then all works
-- the same way as if there were only one directive. (But we still check all
-- directives to be 'include' or 'skip').
if firstField.directives ~= nil then
for _, d in ipairs(firstField.directives) do
check(d.name, "directive.name", "table")
check(d.arguments, "directive.arguments", "table")
check(d.kind, "directive.kind", "string")
assert(d.kind == "directive")
check(d.name.value, "directive.name.value", "string")
assert(d.name.value == "include" or d.name.value == "skip",
"Only 'include' and 'skip' directives are supported for now")
end
fieldTypeAvro = avro_helpers.make_avro_type_nullable(fieldTypeAvro)
end

return {
name = convert_schema_helpers.base_name(fieldName),
type = fieldTypeAvro,
Expand Down
Loading