From 589778a521dba5e066b318bd1b312bfb674a0a62 Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Mon, 19 Feb 2018 19:46:56 +0300 Subject: [PATCH 01/26] add comments on gql queries assert and compile functions --- graphql/tarantool_graphql.lua | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/graphql/tarantool_graphql.lua b/graphql/tarantool_graphql.lua index 2805bc7..5e01ca3 100644 --- a/graphql/tarantool_graphql.lua +++ b/graphql/tarantool_graphql.lua @@ -414,6 +414,8 @@ local function parse_cfg(cfg) return state end +--- The function checks if given query has an appropriate type +--- (mutations are not supported yet) local function assert_gql_query_ast(func_name, ast) assert(#ast.definitions == 1, func_name .. ': expected an one query') @@ -442,6 +444,8 @@ local function gql_execute(qstate, variables) operation_name) end +--- The function parses a raw query string, validate the resulting query +--- and return it ready for execution local function gql_compile(state, query) assert(type(state) == 'table' and type(query) == 'string', 'use :validate(...) instead of .validate(...)') From 417c83e70c783e156196a55a3c73cadfe7844497 Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Tue, 20 Feb 2018 16:32:02 +0300 Subject: [PATCH 02/26] do small refactor; add comments --- graphql/tarantool_graphql.lua | 71 +++++++++++++++++++++++++++-------- 1 file changed, 55 insertions(+), 16 deletions(-) diff --git a/graphql/tarantool_graphql.lua b/graphql/tarantool_graphql.lua index 5e01ca3..3bf7ee4 100644 --- a/graphql/tarantool_graphql.lua +++ b/graphql/tarantool_graphql.lua @@ -30,6 +30,14 @@ local function avro_type(avro_schema) return 'record*' elseif utils.is_array(avro_schema) then return 'union' + elseif avro_schema.type == 'array' then + return 'array' + elseif avro_schema.type == 'array*' then + return 'array*' + elseif avro_schema.type == 'map' then + return 'map' + elseif avro_schema.type == 'map*' then + return 'map*' end elseif type(avro_schema) == 'string' then if avro_schema == 'int' then @@ -105,6 +113,8 @@ end --- Non-recursive version of the @{gql_type} function that returns --- InputObject instead of Object. +--- An error will be raised in case of fields that are not scalar types +--- as there are no sense in non scalar arguments local function gql_argument_type(state, avro_schema) assert(type(state) == 'table', 'state must be a table, got ' .. type(state)) @@ -115,17 +125,23 @@ local function gql_argument_type(state, avro_schema) assert(type(avro_schema.name) == 'string', ('avro_schema.name must be a string, got %s (avro_schema %s)') :format(type(avro_schema.name), json.encode(avro_schema))) + assert(type(avro_schema.fields) == 'table', ('avro_schema.fields must be a table, got %s (avro_schema %s)') :format(type(avro_schema.fields), json.encode(avro_schema))) + ---@tfixme iteration over null table + --- maybe avro.scheme was meant? 
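+    -- Each scalar field below is converted with convert_scalar_type and then
+    -- wrapped into a non-null GraphQL argument. A rough sketch (the field
+    -- names here are hypothetical, not taken from this patch):
+    --   {name = 'user_id', type = 'string'} -> types.nonNull(types.string)
+    --   {name = 'age', type = 'int'}        -> types.nonNull(types.int)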
local fields = {} - for _, field in ipairs(fields) do + for _, field in ipairs(avro_schema.fields) do + assert(type(field.name) == 'string', ('field.name must be a string, got %s (schema %s)') :format(type(field.name), json.encode(field))) + local gql_field_type = convert_scalar_type( field.type, {raise = true}) + fields[field.name] = { name = field.name, kind = types.nonNull(gql_field_type), @@ -140,6 +156,8 @@ local function gql_argument_type(state, avro_schema) })) return res + --- elseif avro_type(avro_schema) == 'array' then + else local res = convert_scalar_type(avro_schema, {raise = false}) if res == nil then @@ -149,20 +167,25 @@ local function gql_argument_type(state, avro_schema) end end + +--- Returns table of record's arguments +--- all arguments are nullable local function convert_record_fields_to_args(state, fields) local args = {} for _, field in ipairs(fields) do + assert(type(field.name) == 'string', ('field.name must be a string, got %s (schema %s)') :format(type(field.name), json.encode(field))) + local gql_class = gql_argument_type(state, field.type) args[field.name] = nullable(gql_class) end return args end ---- Convert each field of an avro-schema to a graphql type and corresponding ---- argument for an upper graphql type. +--- Recursively convert each field of an avro-schema to a graphql type and +--- corresponding argument for an upper graphql type. --- --- @tparam table state for read state.accessor and previously filled --- state.types @@ -170,10 +193,12 @@ end local function convert_record_fields(state, fields) local res = {} local object_args = {} + for _, field in ipairs(fields) do assert(type(field.name) == 'string', ('field.name must be a string, got %s (schema %s)') :format(type(field.name), json.encode(field))) + res[field.name] = { name = field.name, kind = gql_type(state, field.type), @@ -186,7 +211,7 @@ end --- The function recursively converts passed avro-schema to a graphql type. --- --- @tparam table state for read state.accessor and previously filled ---- state.types +--- state.types (state.types are gql types) --- @tparam table avro_schema input avro-schema --- @tparam[opt] table collection table with schema_name, connections fields --- described a collection (e.g. tarantool's spaces) @@ -230,6 +255,7 @@ gql_type = function(state, avro_schema, collection, collection_name) local fields, _ = convert_record_fields(state, avro_schema.fields) + -- if collection param is passed for _, c in ipairs((collection or {}).connections or {}) do assert(type(c.type) == 'string', 'connection.type must be a string, got ' .. type(c.type)) @@ -333,9 +359,12 @@ gql_type = function(state, avro_schema, collection, collection_name) error('unrecognized avro-schema type: ' .. json.encode(avro_schema)) end return res + end + end + local function parse_cfg(cfg) local state = {} state.types = utils.gen_booking_table({}) @@ -356,10 +385,11 @@ local function parse_cfg(cfg) local fields = {} - for name, collection in pairs(state.collections) do - collection.name = name + for collection_name, collection in pairs(state.collections) do + collection.name = collection_name assert(collection.schema_name ~= nil, 'collection.schema_name must not be nil') + local schema = cfg.schemas[collection.schema_name] assert(schema ~= nil, ('cfg.schemas[%s] must not be nil'):format( tostring(collection.schema_name))) @@ -367,21 +397,25 @@ local function parse_cfg(cfg) ('top-level schema name does not match the name in ' .. 
'the schema itself: "%s" vs "%s"'):format(collection.schema_name, schema.name)) - state.types[name] = gql_type(state, schema, collection, name) + -- recursively converts all avro types into gql types in the given schema + state.types[collection_name] = gql_type(state, schema, collection, collection_name) + + -- prepare arguments local _, object_args = convert_record_fields(state, schema.fields) local list_args = convert_record_fields_to_args( - state, accessor:list_args(name)) + state, accessor:list_args(collection_name)) local args = utils.merge_tables(object_args, list_args) - state.object_arguments[name] = object_args - state.list_arguments[name] = list_args - state.all_arguments[name] = args + + state.object_arguments[collection_name] = object_args + state.list_arguments[collection_name] = list_args + state.all_arguments[collection_name] = args -- create entry points from collection names - fields[name] = { - kind = types.nonNull(types.list(state.types[name])), - arguments = state.all_arguments[name], + fields[collection_name] = { + kind = types.nonNull(types.list(state.types[collection_name])), + arguments = state.all_arguments[collection_name], resolve = function(rootValue, args_instance, info) local object_args_instance = {} -- passed to 'filter' local list_args_instance = {} -- passed to 'args' @@ -397,7 +431,7 @@ local function parse_cfg(cfg) end end local from = nil - return accessor:select(rootValue, name, from, + return accessor:select(rootValue, collection_name, from, object_args_instance, list_args_instance) end, } @@ -414,7 +448,7 @@ local function parse_cfg(cfg) return state end ---- The function checks if given query has an appropriate type +--- The function checks if given query has an appropriate type 'query' --- (mutations are not supported yet) local function assert_gql_query_ast(func_name, ast) assert(#ast.definitions == 1, @@ -427,6 +461,7 @@ local function assert_gql_query_ast(func_name, ast) type(operation_name)) end +--- The function just makes some assertions and then call graphql-lua execute local function gql_execute(qstate, variables) assert(qstate.state) local state = qstate.state @@ -446,6 +481,9 @@ end --- The function parses a raw query string, validate the resulting query --- and return it ready for execution +--- @tparam table state current state of graphql-lib, including +--- schemas, collections and accessor +--- @tparam string query raw query string local function gql_compile(state, query) assert(type(state) == 'table' and type(query) == 'string', 'use :validate(...) 
instead of .validate(...)') @@ -462,6 +500,7 @@ local function gql_compile(state, query) ast = ast, operation_name = operation_name, } + local gql_query = setmetatable(qstate, { __index = { execute = gql_execute, From 24ec6c4bfe129087c5d0cb0699dfabb31c997f2a Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Wed, 21 Feb 2018 21:16:31 +0300 Subject: [PATCH 03/26] add support for array avro type (only with scalars); change args generation behavior - now args are NOT generated from arrays; do small refactor; add test for array avro type usage --- graphql/tarantool_graphql.lua | 95 ++++++++++++++++++++++++++++++----- 1 file changed, 82 insertions(+), 13 deletions(-) diff --git a/graphql/tarantool_graphql.lua b/graphql/tarantool_graphql.lua index 3bf7ee4..be1e19c 100644 --- a/graphql/tarantool_graphql.lua +++ b/graphql/tarantool_graphql.lua @@ -22,6 +22,7 @@ local tarantool_graphql = {} -- forward declarations local gql_type +--- Returns type of the top element in the avro schema local function avro_type(avro_schema) if type(avro_schema) == 'table' then if avro_schema.type == 'record' then @@ -104,10 +105,18 @@ local function convert_scalar_type(avro_schema, opts) elseif avro_t == 'string*' then return types.string end + + if opts.is_items_type then + error('avro array items must have know scalar type, not: ' .. + json.encode(avro_schema)) + end + if raise then error('unrecognized avro-schema scalar type: ' .. - json.encode(avro_schema)) + json.encode(avro_schema)) end + + return nil end @@ -130,8 +139,6 @@ local function gql_argument_type(state, avro_schema) ('avro_schema.fields must be a table, got %s (avro_schema %s)') :format(type(avro_schema.fields), json.encode(avro_schema))) - ---@tfixme iteration over null table - --- maybe avro.scheme was meant? local fields = {} for _, field in ipairs(avro_schema.fields) do @@ -170,8 +177,15 @@ end --- Returns table of record's arguments --- all arguments are nullable -local function convert_record_fields_to_args(state, fields) +--- +--- @tparam table state +--- @tparam table fields +--- @tparam table opts include is_for_args flag to specify +--- case when the function is used to collect arguments +local function convert_record_fields_to_args(state, fields, opts) local args = {} + local is_for_args = opts and opts.is_for_args or false + for _, field in ipairs(fields) do assert(type(field.name) == 'string', @@ -179,7 +193,15 @@ local function convert_record_fields_to_args(state, fields) :format(type(field.name), json.encode(field))) local gql_class = gql_argument_type(state, field.type) - args[field.name] = nullable(gql_class) + + -- arrays (gql lists) and maps can't be arguments + -- so these kinds are to be skipped + + ---@todo consider case when gql_class is wrapper nonNull around List + --- or Map + if not(is_for_args and (gql_class == 'List' or gql_class == 'Map')) then + args[field.name] = nullable(gql_class) + end end return args end @@ -190,9 +212,12 @@ end --- @tparam table state for read state.accessor and previously filled --- state.types --- @tparam table fields fields part from an avro-schema -local function convert_record_fields(state, fields) +--- @tparam table opts include is_for_args flag to specify +--- case when the function is used to collect arguments +local function convert_record_fields(state, fields, opts) local res = {} local object_args = {} + local is_for_args = opts and opts.is_for_args or false for _, field in ipairs(fields) do assert(type(field.name) == 'string', @@ -203,12 +228,22 @@ local function convert_record_fields(state, 
fields) name = field.name, kind = gql_type(state, field.type), } - object_args[field.name] = nullable(res[field.name].kind) + + + -- arrays (gql lists) and maps can't be arguments + -- so these kinds are to be skipped + + ---@todo consider case when gql_class is wrapper nonNull around List + --- or Map + if not (is_for_args and (res[field.name].kind == 'List' + or res[field.name].kind == 'Map')) then + object_args[field.name] = nullable(res[field.name].kind) + end end return res, object_args end ---- The function recursively converts passed avro-schema to a graphql type. +--- The function recursively converts passed avro-schema to a graphql type (kind) --- --- @tparam table state for read state.accessor and previously filled --- state.types (state.types are gql types) @@ -237,12 +272,14 @@ gql_type = function(state, avro_schema, collection, collection_name) ('collection and collection_name must be nils or ' .. 'non-nils simultaneously, got: %s and %s'):format(type(collection), type(collection_name))) + local accessor = state.accessor assert(accessor ~= nil, 'state.accessor must not be nil') assert(accessor.select ~= nil, 'state.accessor.select must not be nil') assert(accessor.list_args ~= nil, 'state.accessor.list_args must not be nil') + -- type of the top element in the avro schema local avro_t = avro_type(avro_schema) if avro_t == 'record' or avro_t == 'record*' then @@ -255,7 +292,7 @@ gql_type = function(state, avro_schema, collection, collection_name) local fields, _ = convert_record_fields(state, avro_schema.fields) - -- if collection param is passed + -- if collection param is passed then go over all connections for _, c in ipairs((collection or {}).connections or {}) do assert(type(c.type) == 'string', 'connection.type must be a string, got ' .. type(c.type)) @@ -269,6 +306,7 @@ gql_type = function(state, avro_schema, collection, collection_name) assert(type(c.parts) == 'table', 'connection.parts must be a string, got ' .. type(c.parts)) + -- gql type of connection field local destination_type = state.types[c.destination_collection] assert(destination_type ~= nil, @@ -287,6 +325,7 @@ gql_type = function(state, avro_schema, collection, collection_name) local c_list_args = state.list_arguments[c.destination_collection] + -- change fields that are represented by connections fields[c.name] = { name = c.name, kind = destination_type, @@ -294,13 +333,16 @@ gql_type = function(state, avro_schema, collection, collection_name) resolve = function(parent, args_instance, info) local destination_args_names = {} local destination_args_values = {} + for _, part in ipairs(c.parts) do + assert(type(part.source_field) == 'string', 'part.source_field must be a string, got ' .. type(part.destination_field)) assert(type(part.destination_field) == 'string', 'part.destination_field must be a string, got ' .. type(part.destination_field)) + destination_args_names[#destination_args_names + 1] = part.destination_field destination_args_values[#destination_args_values + 1] = @@ -344,6 +386,7 @@ gql_type = function(state, avro_schema, collection, collection_name) } end + -- create gql schema local res = types.object({ name = collection ~= nil and collection.name or avro_schema.name, description = 'generated from avro-schema for ' .. 
@@ -351,8 +394,30 @@ gql_type = function(state, avro_schema, collection, collection_name) fields = fields, }) return avro_t == 'enum' and types.nonNull(res) or res + elseif avro_t == 'enum' then error('enums not implemented yet') -- XXX + + elseif avro_t == 'array' or avro_t == 'array*' then + + assert(avro_schema.items ~= nil, + 'items field must not be nil in array avro schema') + assert(type(avro_schema.items) == 'string', + 'avro_schema.items must be a string, got ' .. type(avro_schema.item)) + + local gql_items_type = convert_scalar_type(avro_schema.items, + {is_items_type=true, raise=true}) + + local gql_array = types.list(gql_items_type) + + if avro_t == 'array*' then + return gql_array + end + + if avro_t == 'array' then + return types.nonNull(gql_array) + end + else local res = convert_scalar_type(avro_schema, {raise = false}) if res == nil then @@ -399,15 +464,19 @@ local function parse_cfg(cfg) schema.name)) -- recursively converts all avro types into gql types in the given schema + assert(schema.type == 'record', + 'top-level schema must have record avro type, not' .. schema.type) state.types[collection_name] = gql_type(state, schema, collection, collection_name) - -- prepare arguments + -- prepare arguments (their kinds) local _, object_args = convert_record_fields(state, - schema.fields) + schema.fields, {is_for_args=true}) local list_args = convert_record_fields_to_args( - state, accessor:list_args(collection_name)) + state, accessor:list_args(collection_name), {is_for_args=true}) local args = utils.merge_tables(object_args, list_args) + -- list and map (avro array and map) can't be arguments + state.object_arguments[collection_name] = object_args state.list_arguments[collection_name] = list_args state.all_arguments[collection_name] = args @@ -580,4 +649,4 @@ function tarantool_graphql.new(cfg) }) end -return tarantool_graphql +return tarantool_graphql \ No newline at end of file From 9a43096ee2e503aa10a8e66374084546fbd6d0b3 Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Thu, 22 Feb 2018 14:45:40 +0300 Subject: [PATCH 04/26] simplify conditional statements --- graphql/tarantool_graphql.lua | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/graphql/tarantool_graphql.lua b/graphql/tarantool_graphql.lua index be1e19c..4fbb0fe 100644 --- a/graphql/tarantool_graphql.lua +++ b/graphql/tarantool_graphql.lua @@ -199,7 +199,7 @@ local function convert_record_fields_to_args(state, fields, opts) ---@todo consider case when gql_class is wrapper nonNull around List --- or Map - if not(is_for_args and (gql_class == 'List' or gql_class == 'Map')) then + if (not is_for_args) or (gql_class ~= 'List' and gql_class ~= 'Map') then args[field.name] = nullable(gql_class) end end @@ -235,8 +235,8 @@ local function convert_record_fields(state, fields, opts) ---@todo consider case when gql_class is wrapper nonNull around List --- or Map - if not (is_for_args and (res[field.name].kind == 'List' - or res[field.name].kind == 'Map')) then + if (not is_for_args) or (res[field.name].kind ~= 'List' + and res[field.name].kind ~= 'Map') then object_args[field.name] = nullable(res[field.name].kind) end end @@ -257,6 +257,9 @@ end --- automatically generate corresponding decucible fields. --- 2. The collection name will be used as the resulting graphql type name --- instead of the avro-schema name. +--- +--- Resulting object will look like this: +--- gql_type = function(state, avro_schema, collection, collection_name) assert(type(state) == 'table', 'state must be a table, got ' .. 
type(state)) @@ -450,6 +453,7 @@ local function parse_cfg(cfg) local fields = {} + -- create fields (from collections) for top-level gql type 'query) for collection_name, collection in pairs(state.collections) do collection.name = collection_name assert(collection.schema_name ~= nil, From 6bd1ca21f1aa3b56787166551e8b429de0dbdd2c Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Thu, 22 Feb 2018 15:22:56 +0300 Subject: [PATCH 05/26] add mised simple test for array support --- test/local/simple_array.result | 7 +++ test/local/simple_array.test.lua | 95 ++++++++++++++++++++++++++++++++ 2 files changed, 102 insertions(+) create mode 100644 test/local/simple_array.result create mode 100644 test/local/simple_array.test.lua diff --git a/test/local/simple_array.result b/test/local/simple_array.result new file mode 100644 index 0000000..a3f6c68 --- /dev/null +++ b/test/local/simple_array.result @@ -0,0 +1,7 @@ +RESULT +--- +user_collection: +- user_id: def + favs: + - potato + - fruit \ No newline at end of file diff --git a/test/local/simple_array.test.lua b/test/local/simple_array.test.lua new file mode 100644 index 0000000..8a1ee82 --- /dev/null +++ b/test/local/simple_array.test.lua @@ -0,0 +1,95 @@ +#!/usr/bin/env tarantool + +local fio = require('fio') + +-- require in-repo version of graphql/ sources despite current working directory +package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") + :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. + package.path + +local json = require('json') +local yaml = require('yaml') +local graphql = require('graphql') +local utils = require('graphql.utils') + +local schemas = json.decode([[{ + "user": { + "name": "user", + "type": "record", + "fields": [ + { "name": "id", "type": "string" }, + { "name": "favs", "type": { "type": "array", "items": "string" } } + ] + } +}]]) + +local collections = json.decode([[{ + "user_collection": { + "schema_name": "user" +}]]) + +local function access_function(parent, collection_name, filter, args) + --[[ + print('DEBUG: collection_name: ' .. collection_name) + print('DEBUG: filter: ' .. json.encode(filter)) + print('DEBUG: args: ' .. json.encode(args)) + print('DEBUG: --------') + --]] + local obj + if collection_name == 'user_collection' then + obj = { + user_id = 'def', + favs = {'potato', 'fruit'} + } + else + error('NIY: ' .. 
collection_name) + end + if not utils.is_subtable(obj, filter) then + return {} + end + return { obj } +end + +local accessor = setmetatable({}, { + __index = { + select = function(self, parent, collection_name, connection_name, + filter, args) + return access_function(parent, collection_name, filter, args) + end, + list_args = function(self, connection_type) + if connection_type == '1:1' then + return {} + end + return { + { name = 'limit', type = 'int' }, + { name = 'offset', type = 'long' }, + -- {name = 'filter', type = ...}, + } + end, + } +}) + +local gql_wrapper = graphql.new({ +-- class_name:class mapping + schemas = schemas, +-- collection_{schema_name=..., connections=...} mapping + collections = collections, +-- :select() and :list_args() provider + accessor = accessor, +}) + +local query_1 = [[ + query obtainUserFavs($user_id: String) { + user_collection(user_id: $user_id, type: "type 1", size: 2) { + id + favs + } + } +]] + +utils.show_trace(function() + local variables_1 = { user_id = 'def' } + local gql_query_1 = gql_wrapper:compile(query_1) + local result = gql_query_1:execute(variables_1) + print(('RESULT\n%s'):format(yaml.encode(result))) +end) From 31897d613082ce0f0b4678c551c091843f27701b Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Fri, 23 Feb 2018 20:05:22 +0300 Subject: [PATCH 06/26] add simple test for array avro type usage --- test/local/array_and_map.result | 18 ++++ test/local/array_and_map.test.lua | 131 ++++++++++++++++++++++++++++++ 2 files changed, 149 insertions(+) create mode 100644 test/local/array_and_map.result create mode 100755 test/local/array_and_map.test.lua diff --git a/test/local/array_and_map.result b/test/local/array_and_map.result new file mode 100644 index 0000000..f2614db --- /dev/null +++ b/test/local/array_and_map.result @@ -0,0 +1,18 @@ +RESULT +--- +user_collection: +- user_id: def + favorite_food: + - meat + - potato +... + +RESULT +--- +organization_collection: +- organization_id: def + organization_events: + holiday: nice holiday + new_year: hey hou +... + diff --git a/test/local/array_and_map.test.lua b/test/local/array_and_map.test.lua new file mode 100755 index 0000000..10b85e5 --- /dev/null +++ b/test/local/array_and_map.test.lua @@ -0,0 +1,131 @@ +#!/usr/bin/env tarantool + +local fio = require('fio') + +-- require in-repo version of graphql/ sources despite current working directory +package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") + :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. + package.path + +local json = require('json') +local yaml = require('yaml') +local graphql = require('graphql') +local utils = require('graphql.utils') + +local schemas = json.decode([[{ + "user": { + "name": "user", + "type": "record", + "fields": [ + { "name": "user_id", "type": "string" }, + { "name": "favorite_food", "type": {"type": "array", "items": "string"} } + ] + }, + "organization": { + "name": "organization", + "type": "record", + "fields": [ + { "name": "organization_id", "type": "string" }, + { "name": "organization_events", "type": {"type": "map", "values": "string"} } + ] + } +}]]) + +local collections = json.decode([[{ + "user_collection": { + "schema_name": "user" + }, + "organization_collection": { + "schema_name": "organization" + } +}]]) + +local function access_function(parent, collection_name, filter, args) + --[[ + print('DEBUG: collection_name: ' .. collection_name) + print('DEBUG: filter: ' .. json.encode(filter)) + print('DEBUG: args: ' .. 
json.encode(args)) + print('DEBUG: --------') + --]] + local obj + if collection_name == 'user_collection' then + obj = { + user_id = 'def', + favorite_food = { 'meat', 'potato' }, + } + elseif collection_name == 'organization_collection' then + obj = { + organization_id = 'def', + organization_events = { holiday = 'nice holiday', + new_year = 'hey hou' }, + } + else + error('NIY: ' .. collection_name) + end + if not utils.is_subtable(obj, filter) then + return {} + end + return { obj } +end + +local accessor = setmetatable({}, { + __index = { + select = function(self, parent, collection_name, connection_name, + filter, args) + return access_function(parent, collection_name, filter, args) + end, + list_args = function(self, connection_type) + if connection_type == '1:1' then + return {} + end + return { + { name = 'limit', type = 'int' }, + { name = 'offset', type = 'long' }, + -- {name = 'filter', type = ...}, + } + end, + } +}) + +local gql_wrapper = graphql.new({ +-- class_name:class mapping + schemas = schemas, +-- collection_{schema_name=..., connections=...} mapping + collections = collections, +-- :select() and :list_args() provider + accessor = accessor, +}) + +local query_with_list = [[ + query userFavs($user_id: String) { + user_collection(user_id: $user_id) { + user_id + favorite_food + } + } +]] + + +local query_with_map = [[ + query obtainOrganizationEvents($organization_id: String) { + organization_collection(organization_id: $organization_id) { + organization_id, + organization_events + } + } +]] + + +utils.show_trace(function() + local variables_2 = { user_id = 'def' } + local gql_query_2 = gql_wrapper:compile(query_with_list) + local result = gql_query_2:execute(variables_2) + print(('RESULT\n%s'):format(yaml.encode(result))) +end) + +utils.show_trace(function() + local variables_1 = {organization_id = 'def'} + local gql_query_1 = gql_wrapper:compile(query_with_map) + local result = gql_query_1:execute(variables_1) + print(('RESULT\n%s'):format(yaml.encode(result))) +end) From f0fb9f42a40dd00d187b9d6a6ea0e5e27d86c690 Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Fri, 23 Feb 2018 20:07:37 +0300 Subject: [PATCH 07/26] move nullable function to utils as it is will be necessary for tests --- graphql/utils.lua | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/graphql/utils.lua b/graphql/utils.lua index 1a472fa..e2df5db 100644 --- a/graphql/utils.lua +++ b/graphql/utils.lua @@ -132,4 +132,16 @@ function utils.gen_booking_table(data) }) end +-- XXX: recursive skip several NonNull's? +function utils.nullable(gql_class) + assert(type(gql_class) == 'table', 'gql_class must be a table, got ' .. 
+ type(gql_class)) + + if gql_class.__type ~= 'NonNull' then return gql_class end + + assert(gql_class.ofType ~= nil, 'gql_class.ofType must not be nil') + return gql_class.ofType +end + + return utils From ac9f0ed5a3dbfdf59c7a66b113a9c0350b341f5e Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Fri, 23 Feb 2018 20:26:43 +0300 Subject: [PATCH 08/26] readability refactor; clean up comments; move nullable function to utils --- graphql/tarantool_graphql.lua | 56 +++++++++++++---------------------- 1 file changed, 20 insertions(+), 36 deletions(-) diff --git a/graphql/tarantool_graphql.lua b/graphql/tarantool_graphql.lua index be1e19c..64a3365 100644 --- a/graphql/tarantool_graphql.lua +++ b/graphql/tarantool_graphql.lua @@ -17,12 +17,13 @@ local execute = require('graphql.core.execute') local utils = require('graphql.utils') +--local nullable = utils.nullable + local tarantool_graphql = {} -- forward declarations local gql_type ---- Returns type of the top element in the avro schema local function avro_type(avro_schema) if type(avro_schema) == 'table' then if avro_schema.type == 'record' then @@ -58,16 +59,7 @@ local function avro_type(avro_schema) error('unrecognized avro-schema type: ' .. json.encode(avro_schema)) end --- XXX: recursive skip several NonNull's? -local function nullable(gql_class) - assert(type(gql_class) == 'table', 'gql_class must be a table, got ' .. - type(gql_class)) - - if gql_class.__type ~= 'NonNull' then return gql_class end - - assert(gql_class.ofType ~= nil, 'gql_class.ofType must not be nil') - return gql_class.ofType -end +local nullable = utils.nullable local types_long = types.scalar({ name = 'Long', @@ -123,7 +115,7 @@ end --- Non-recursive version of the @{gql_type} function that returns --- InputObject instead of Object. --- An error will be raised in case of fields that are not scalar types ---- as there are no sense in non scalar arguments +--- as there are no sense in non scalar arguments. local function gql_argument_type(state, avro_schema) assert(type(state) == 'table', 'state must be a table, got ' .. type(state)) @@ -163,8 +155,6 @@ local function gql_argument_type(state, avro_schema) })) return res - --- elseif avro_type(avro_schema) == 'array' then - else local res = convert_scalar_type(avro_schema, {raise = false}) if res == nil then @@ -175,17 +165,13 @@ local function gql_argument_type(state, avro_schema) end ---- Returns table of record's arguments ---- all arguments are nullable +--- Recursively convert each field of an avro-schema to a graphql type and +--- corresponding argument for an upper graphql type. 
--- --- @tparam table state --- @tparam table fields ---- @tparam table opts include is_for_args flag to specify ---- case when the function is used to collect arguments -local function convert_record_fields_to_args(state, fields, opts) +local function convert_record_fields_to_args(state, fields) local args = {} - local is_for_args = opts and opts.is_for_args or false - for _, field in ipairs(fields) do assert(type(field.name) == 'string', @@ -197,9 +183,8 @@ local function convert_record_fields_to_args(state, fields, opts) -- arrays (gql lists) and maps can't be arguments -- so these kinds are to be skipped - ---@todo consider case when gql_class is wrapper nonNull around List - --- or Map - if not(is_for_args and (gql_class == 'List' or gql_class == 'Map')) then + if (nullable(gql_class) ~= 'List' + and nullable(gql_class) ~= 'Map') then args[field.name] = nullable(gql_class) end end @@ -217,7 +202,8 @@ end local function convert_record_fields(state, fields, opts) local res = {} local object_args = {} - local is_for_args = opts and opts.is_for_args or false + local opts = opts or {} + local is_for_args = opts.is_for_args or false for _, field in ipairs(fields) do assert(type(field.name) == 'string', @@ -231,19 +217,15 @@ local function convert_record_fields(state, fields, opts) -- arrays (gql lists) and maps can't be arguments - -- so these kinds are to be skipped - - ---@todo consider case when gql_class is wrapper nonNull around List - --- or Map - if not (is_for_args and (res[field.name].kind == 'List' - or res[field.name].kind == 'Map')) then + if not is_for_args or (nullable(res[field.name].kind) ~= 'List' + and nullable(res[field.name].kind) ~= 'Map') then object_args[field.name] = nullable(res[field.name].kind) end end return res, object_args end ---- The function recursively converts passed avro-schema to a graphql type (kind) +--- The function recursively converts passed avro-schema to a graphql type (kind). --- --- @tparam table state for read state.accessor and previously filled --- state.types (state.types are gql types) @@ -518,7 +500,7 @@ local function parse_cfg(cfg) end --- The function checks if given query has an appropriate type 'query' ---- (mutations are not supported yet) +--- (mutations are not supported yet). local function assert_gql_query_ast(func_name, ast) assert(#ast.definitions == 1, func_name .. ': expected an one query') @@ -530,7 +512,8 @@ local function assert_gql_query_ast(func_name, ast) type(operation_name)) end ---- The function just makes some assertions and then call graphql-lua execute +--- The function just makes some reasonable assertions on input +--- and then call graphql-lua execute. local function gql_execute(qstate, variables) assert(qstate.state) local state = qstate.state @@ -549,7 +532,8 @@ local function gql_execute(qstate, variables) end --- The function parses a raw query string, validate the resulting query ---- and return it ready for execution +--- and make it ready for execution. 
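+--- A typical call sequence, sketched from the tests in this repo
+--- (identifiers are illustrative):
+---   local gql_query = gql_wrapper:compile(query_string)
+---   local result = gql_query:execute(variables)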
+--- --- @tparam table state current state of graphql-lib, including --- schemas, collections and accessor --- @tparam string query raw query string @@ -649,4 +633,4 @@ function tarantool_graphql.new(cfg) }) end -return tarantool_graphql \ No newline at end of file +return tarantool_graphql From 8110208eeac03c6c2ca9932e1c0fb9276b974cce Mon Sep 17 00:00:00 2001 From: Ivan Koptelov Date: Fri, 23 Feb 2018 20:30:26 +0300 Subject: [PATCH 09/26] Update tarantool_graphql.lua --- graphql/tarantool_graphql.lua | 2 -- 1 file changed, 2 deletions(-) diff --git a/graphql/tarantool_graphql.lua b/graphql/tarantool_graphql.lua index 64a3365..83b0bcd 100644 --- a/graphql/tarantool_graphql.lua +++ b/graphql/tarantool_graphql.lua @@ -17,8 +17,6 @@ local execute = require('graphql.core.execute') local utils = require('graphql.utils') ---local nullable = utils.nullable - local tarantool_graphql = {} -- forward declarations From 043d6f9487bca644bb700f50096d033dd012ac9b Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Sun, 25 Feb 2018 22:41:20 +0300 Subject: [PATCH 10/26] small style fixes --- graphql/tarantool_graphql.lua | 41 +++++++++++++---------------------- 1 file changed, 15 insertions(+), 26 deletions(-) diff --git a/graphql/tarantool_graphql.lua b/graphql/tarantool_graphql.lua index 83b0bcd..e2cbbdd 100644 --- a/graphql/tarantool_graphql.lua +++ b/graphql/tarantool_graphql.lua @@ -96,24 +96,20 @@ local function convert_scalar_type(avro_schema, opts) return types.string end - if opts.is_items_type then - error('avro array items must have know scalar type, not: ' .. - json.encode(avro_schema)) - end - if raise then error('unrecognized avro-schema scalar type: ' .. json.encode(avro_schema)) end - return nil end --- Non-recursive version of the @{gql_type} function that returns --- InputObject instead of Object. ---- An error will be raised in case of fields that are not scalar types ---- as there are no sense in non scalar arguments. +--- An error will be raised if avro_schema type is 'record' +--- and its' fields are not scalar type because currently +--- triple nesting level (record with record as a field - ok, +--- record with record wich has inside another level - not ok) local function gql_argument_type(state, avro_schema) assert(type(state) == 'table', 'state must be a table, got ' .. type(state)) @@ -131,7 +127,6 @@ local function gql_argument_type(state, avro_schema) local fields = {} for _, field in ipairs(avro_schema.fields) do - assert(type(field.name) == 'string', ('field.name must be a string, got %s (schema %s)') :format(type(field.name), json.encode(field))) @@ -163,15 +158,17 @@ local function gql_argument_type(state, avro_schema) end ---- Recursively convert each field of an avro-schema to a graphql type and ---- corresponding argument for an upper graphql type. +-- Convert list of fields in the avro-schema format to list of GraphQL types +-- with intention to use it as GraphQL arguments later. +-- It uses the @{gql_argument_type} function to convert each field, then skips +-- fields of array and map types and gives the resulting list of converted fields. 
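+--
+-- For example (fields taken from the array test data in this patch series):
+--   {name = 'user_id', type = 'string'}                                  -> String argument
+--   {name = 'favorite_food', type = {type = 'array', items = 'string'}}  -> skipped (List)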
--- ---- @tparam table state ---- @tparam table fields +--- @tparam table state for read state.accessor and previously filled +--- state.types +--- @tparam table fields fields part from an avro-schema local function convert_record_fields_to_args(state, fields) local args = {} for _, field in ipairs(fields) do - assert(type(field.name) == 'string', ('field.name must be a string, got %s (schema %s)') :format(type(field.name), json.encode(field))) @@ -179,10 +176,8 @@ local function convert_record_fields_to_args(state, fields) local gql_class = gql_argument_type(state, field.type) -- arrays (gql lists) and maps can't be arguments - -- so these kinds are to be skipped - - if (nullable(gql_class) ~= 'List' - and nullable(gql_class) ~= 'Map') then + -- so these graphql types are to be skipped + if nullable(gql_class) ~= 'List' and nullable(gql_class) ~= 'Map' then args[field.name] = nullable(gql_class) end end @@ -213,7 +208,6 @@ local function convert_record_fields(state, fields, opts) kind = gql_type(state, field.type), } - -- arrays (gql lists) and maps can't be arguments if not is_for_args or (nullable(res[field.name].kind) ~= 'List' and nullable(res[field.name].kind) ~= 'Map') then @@ -223,7 +217,7 @@ local function convert_record_fields(state, fields, opts) return res, object_args end ---- The function recursively converts passed avro-schema to a graphql type (kind). +--- The function converts passed avro-schema to a GraphQL type. --- --- @tparam table state for read state.accessor and previously filled --- state.types (state.types are gql types) @@ -379,15 +373,13 @@ gql_type = function(state, avro_schema, collection, collection_name) error('enums not implemented yet') -- XXX elseif avro_t == 'array' or avro_t == 'array*' then - assert(avro_schema.items ~= nil, 'items field must not be nil in array avro schema') assert(type(avro_schema.items) == 'string', 'avro_schema.items must be a string, got ' .. type(avro_schema.item)) local gql_items_type = convert_scalar_type(avro_schema.items, - {is_items_type=true, raise=true}) - + {raise=true}) local gql_array = types.list(gql_items_type) if avro_t == 'array*' then @@ -397,16 +389,13 @@ gql_type = function(state, avro_schema, collection, collection_name) if avro_t == 'array' then return types.nonNull(gql_array) end - else local res = convert_scalar_type(avro_schema, {raise = false}) if res == nil then error('unrecognized avro-schema type: ' .. 
json.encode(avro_schema)) end return res - end - end From 7714393a4d3aae1195797c553791a01967cfe6f4 Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Mon, 26 Feb 2018 11:44:19 +0300 Subject: [PATCH 11/26] readability and comments fixes --- graphql/tarantool_graphql.lua | 66 ++++++++++++++++------------------- 1 file changed, 31 insertions(+), 35 deletions(-) diff --git a/graphql/tarantool_graphql.lua b/graphql/tarantool_graphql.lua index e2cbbdd..1874b2b 100644 --- a/graphql/tarantool_graphql.lua +++ b/graphql/tarantool_graphql.lua @@ -195,8 +195,6 @@ end local function convert_record_fields(state, fields, opts) local res = {} local object_args = {} - local opts = opts or {} - local is_for_args = opts.is_for_args or false for _, field in ipairs(fields) do assert(type(field.name) == 'string', @@ -209,8 +207,9 @@ local function convert_record_fields(state, fields, opts) } -- arrays (gql lists) and maps can't be arguments - if not is_for_args or (nullable(res[field.name].kind) ~= 'List' - and nullable(res[field.name].kind) ~= 'Map') then + local avro_t = avro_type(field.type) + if avro_t ~= 'array' and avro_t ~= 'array*' and avro_t ~= 'map' + and avro_t ~= 'map*' then object_args[field.name] = nullable(res[field.name].kind) end end @@ -231,6 +230,10 @@ end --- automatically generate corresponding decucible fields. --- 2. The collection name will be used as the resulting graphql type name --- instead of the avro-schema name. +--- +--- XXX As it is not clear now what to do with complex types inside arrays +---(just pass to results or allow to use filters), only scalar arrays +--- is allowed for now. gql_type = function(state, avro_schema, collection, collection_name) assert(type(state) == 'table', 'state must be a table, got ' .. type(state)) @@ -253,7 +256,7 @@ gql_type = function(state, avro_schema, collection, collection_name) assert(accessor.list_args ~= nil, 'state.accessor.list_args must not be nil') - -- type of the top element in the avro schema + -- type of the top element in the avro-schema local avro_t = avro_type(avro_schema) if avro_t == 'record' or avro_t == 'record*' then @@ -299,7 +302,6 @@ gql_type = function(state, avro_schema, collection, collection_name) local c_list_args = state.list_arguments[c.destination_collection] - -- change fields that are represented by connections fields[c.name] = { name = c.name, kind = destination_type, @@ -309,7 +311,6 @@ gql_type = function(state, avro_schema, collection, collection_name) local destination_args_values = {} for _, part in ipairs(c.parts) do - assert(type(part.source_field) == 'string', 'part.source_field must be a string, got ' .. type(part.destination_field)) @@ -322,6 +323,7 @@ gql_type = function(state, avro_schema, collection, collection_name) destination_args_values[#destination_args_values + 1] = parent[part.source_field] end + local from = { collection_name = collection_name, connection_name = c.name, @@ -360,7 +362,7 @@ gql_type = function(state, avro_schema, collection, collection_name) } end - -- create gql schema + -- create gql type local res = types.object({ name = collection ~= nil and collection.name or avro_schema.name, description = 'generated from avro-schema for ' .. 
@@ -368,27 +370,21 @@ gql_type = function(state, avro_schema, collection, collection_name) fields = fields, }) return avro_t == 'enum' and types.nonNull(res) or res - elseif avro_t == 'enum' then error('enums not implemented yet') -- XXX - elseif avro_t == 'array' or avro_t == 'array*' then assert(avro_schema.items ~= nil, 'items field must not be nil in array avro schema') assert(type(avro_schema.items) == 'string', - 'avro_schema.items must be a string, got ' .. type(avro_schema.item)) - - local gql_items_type = convert_scalar_type(avro_schema.items, - {raise=true}) - local gql_array = types.list(gql_items_type) + 'avro_schema.items must be a string, got ' + .. type(avro_schema.item)) - if avro_t == 'array*' then - return gql_array - end + local gql_items_type = convert_scalar_type(avro_schema.items) - if avro_t == 'array' then - return types.nonNull(gql_array) - end + assert(gql_items_type, "only scalars are supported as array items for now, + and " .. avro_type(avro_schema.items) .. " is not a scalar") + local gql_array = types.list(gql_items_type) + return avro_t == 'array' and types.nonNull(gql_array) or gql_array else local res = convert_scalar_type(avro_schema, {raise = false}) if res == nil then @@ -398,7 +394,6 @@ gql_type = function(state, avro_schema, collection, collection_name) end end - local function parse_cfg(cfg) local state = {} state.types = utils.gen_booking_table({}) @@ -434,18 +429,18 @@ local function parse_cfg(cfg) -- recursively converts all avro types into gql types in the given schema assert(schema.type == 'record', - 'top-level schema must have record avro type, not' .. schema.type) - state.types[collection_name] = gql_type(state, schema, collection, collection_name) + 'top-level schema must have record avro type, not' + .. schema.type) + state.types[collection_name] = gql_type(state, schema, collection, + collection_name) - -- prepare arguments (their kinds) + -- prepare arguments' types local _, object_args = convert_record_fields(state, - schema.fields, {is_for_args=true}) + schema.fields) local list_args = convert_record_fields_to_args( - state, accessor:list_args(collection_name), {is_for_args=true}) + state, accessor:list_args(collection_name)) local args = utils.merge_tables(object_args, list_args) - -- list and map (avro array and map) can't be arguments - state.object_arguments[collection_name] = object_args state.list_arguments[collection_name] = list_args state.all_arguments[collection_name] = args @@ -486,8 +481,9 @@ local function parse_cfg(cfg) return state end ---- The function checks if given query has an appropriate type 'query' ---- (mutations are not supported yet). +--- The function checks that one and only one GraphQL operation +--- (query/mutation/subscription) is defined in the AST and it's type +--- is 'query' as mutations and subscriptions are not supported yet local function assert_gql_query_ast(func_name, ast) assert(#ast.definitions == 1, func_name .. ': expected an one query') @@ -518,10 +514,10 @@ local function gql_execute(qstate, variables) operation_name) end ---- The function parses a raw query string, validate the resulting query ---- and make it ready for execution. 
---- ---- @tparam table state current state of graphql-lib, including +--- The function parses a query string, validate the resulting query +--- against the GraphQL schema and provides an object with the function to +--- execute the query with specific variables values +--- @tparam table state current state of graphql, including --- schemas, collections and accessor --- @tparam string query raw query string local function gql_compile(state, query) From 470e8e101be844fdb99e435ce76d4e3c2cf4ab47 Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Mon, 26 Feb 2018 11:44:57 +0300 Subject: [PATCH 12/26] remove excess test --- test/local/simple_array.result | 7 --- test/local/simple_array.test.lua | 95 -------------------------------- 2 files changed, 102 deletions(-) delete mode 100644 test/local/simple_array.result delete mode 100644 test/local/simple_array.test.lua diff --git a/test/local/simple_array.result b/test/local/simple_array.result deleted file mode 100644 index a3f6c68..0000000 --- a/test/local/simple_array.result +++ /dev/null @@ -1,7 +0,0 @@ -RESULT ---- -user_collection: -- user_id: def - favs: - - potato - - fruit \ No newline at end of file diff --git a/test/local/simple_array.test.lua b/test/local/simple_array.test.lua deleted file mode 100644 index 8a1ee82..0000000 --- a/test/local/simple_array.test.lua +++ /dev/null @@ -1,95 +0,0 @@ -#!/usr/bin/env tarantool - -local fio = require('fio') - --- require in-repo version of graphql/ sources despite current working directory -package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") - :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. - package.path - -local json = require('json') -local yaml = require('yaml') -local graphql = require('graphql') -local utils = require('graphql.utils') - -local schemas = json.decode([[{ - "user": { - "name": "user", - "type": "record", - "fields": [ - { "name": "id", "type": "string" }, - { "name": "favs", "type": { "type": "array", "items": "string" } } - ] - } -}]]) - -local collections = json.decode([[{ - "user_collection": { - "schema_name": "user" -}]]) - -local function access_function(parent, collection_name, filter, args) - --[[ - print('DEBUG: collection_name: ' .. collection_name) - print('DEBUG: filter: ' .. json.encode(filter)) - print('DEBUG: args: ' .. json.encode(args)) - print('DEBUG: --------') - --]] - local obj - if collection_name == 'user_collection' then - obj = { - user_id = 'def', - favs = {'potato', 'fruit'} - } - else - error('NIY: ' .. 
collection_name) - end - if not utils.is_subtable(obj, filter) then - return {} - end - return { obj } -end - -local accessor = setmetatable({}, { - __index = { - select = function(self, parent, collection_name, connection_name, - filter, args) - return access_function(parent, collection_name, filter, args) - end, - list_args = function(self, connection_type) - if connection_type == '1:1' then - return {} - end - return { - { name = 'limit', type = 'int' }, - { name = 'offset', type = 'long' }, - -- {name = 'filter', type = ...}, - } - end, - } -}) - -local gql_wrapper = graphql.new({ --- class_name:class mapping - schemas = schemas, --- collection_{schema_name=..., connections=...} mapping - collections = collections, --- :select() and :list_args() provider - accessor = accessor, -}) - -local query_1 = [[ - query obtainUserFavs($user_id: String) { - user_collection(user_id: $user_id, type: "type 1", size: 2) { - id - favs - } - } -]] - -utils.show_trace(function() - local variables_1 = { user_id = 'def' } - local gql_query_1 = gql_wrapper:compile(query_1) - local result = gql_query_1:execute(variables_1) - print(('RESULT\n%s'):format(yaml.encode(result))) -end) From 0160c690fb797a354533b8ba5774f591d01f7efb Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Mon, 26 Feb 2018 11:46:30 +0300 Subject: [PATCH 13/26] remove redundant 'map' test cases --- test/local/array_and_map.result | 9 -------- test/local/array_and_map.test.lua | 38 ++----------------------------- 2 files changed, 2 insertions(+), 45 deletions(-) diff --git a/test/local/array_and_map.result b/test/local/array_and_map.result index f2614db..da7035d 100644 --- a/test/local/array_and_map.result +++ b/test/local/array_and_map.result @@ -7,12 +7,3 @@ user_collection: - potato ... -RESULT ---- -organization_collection: -- organization_id: def - organization_events: - holiday: nice holiday - new_year: hey hou -... - diff --git a/test/local/array_and_map.test.lua b/test/local/array_and_map.test.lua index 10b85e5..e2bb119 100755 --- a/test/local/array_and_map.test.lua +++ b/test/local/array_and_map.test.lua @@ -20,23 +20,12 @@ local schemas = json.decode([[{ { "name": "user_id", "type": "string" }, { "name": "favorite_food", "type": {"type": "array", "items": "string"} } ] - }, - "organization": { - "name": "organization", - "type": "record", - "fields": [ - { "name": "organization_id", "type": "string" }, - { "name": "organization_events", "type": {"type": "map", "values": "string"} } - ] - } -}]]) + } + }]]) local collections = json.decode([[{ "user_collection": { "schema_name": "user" - }, - "organization_collection": { - "schema_name": "organization" } }]]) @@ -53,12 +42,6 @@ local function access_function(parent, collection_name, filter, args) user_id = 'def', favorite_food = { 'meat', 'potato' }, } - elseif collection_name == 'organization_collection' then - obj = { - organization_id = 'def', - organization_events = { holiday = 'nice holiday', - new_year = 'hey hou' }, - } else error('NIY: ' .. 
collection_name) end @@ -105,17 +88,6 @@ local query_with_list = [[ } ]] - -local query_with_map = [[ - query obtainOrganizationEvents($organization_id: String) { - organization_collection(organization_id: $organization_id) { - organization_id, - organization_events - } - } -]] - - utils.show_trace(function() local variables_2 = { user_id = 'def' } local gql_query_2 = gql_wrapper:compile(query_with_list) @@ -123,9 +95,3 @@ utils.show_trace(function() print(('RESULT\n%s'):format(yaml.encode(result))) end) -utils.show_trace(function() - local variables_1 = {organization_id = 'def'} - local gql_query_1 = gql_wrapper:compile(query_with_map) - local result = gql_query_1:execute(variables_1) - print(('RESULT\n%s'):format(yaml.encode(result))) -end) From 19b8d8b3a4c5b861509e922f9ba9aca9f51b9823 Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Mon, 26 Feb 2018 13:52:53 +0300 Subject: [PATCH 14/26] fix small error; now general convert_scalar_type accepts nil as opt --- graphql/tarantool_graphql.lua | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/graphql/tarantool_graphql.lua b/graphql/tarantool_graphql.lua index 1874b2b..a0d2485 100644 --- a/graphql/tarantool_graphql.lua +++ b/graphql/tarantool_graphql.lua @@ -78,7 +78,7 @@ local function convert_scalar_type(avro_schema, opts) assert(type(opts) == 'table', 'opts must be nil or table, got ' .. type(opts)) local raise = opts.raise or false - assert(type(opts.raise) == 'boolean', 'opts.raise must be boolean, got ' .. + assert(type(raise) == 'boolean', 'opts.raise must be boolean, got ' .. type(opts.raise)) local avro_t = avro_type(avro_schema) @@ -381,8 +381,8 @@ gql_type = function(state, avro_schema, collection, collection_name) local gql_items_type = convert_scalar_type(avro_schema.items) - assert(gql_items_type, "only scalars are supported as array items for now, - and " .. avro_type(avro_schema.items) .. " is not a scalar") + assert(gql_items_type, "only scalars are supported as array items for now " + .. avro_type(avro_schema.items) .. " is not a scalar") local gql_array = types.list(gql_items_type) return avro_t == 'array' and types.nonNull(gql_array) or gql_array else From 1377c751981e683c3ae5d38d9e42c9d1a7b5b5e5 Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Mon, 26 Feb 2018 19:34:32 +0300 Subject: [PATCH 15/26] add test for avro-array usage (with space accessor); --- test/local/avro_array.result | 9 ++ test/local/avro_array.test.lua | 57 +++++++++ ...nd_map.result => simple_avro_array.result} | 0 ...ap.test.lua => simple_avro_array.test.lua} | 15 +-- test/testdata/array_and_map_testdata.lua | 113 ++++++++++++++++++ 5 files changed, 187 insertions(+), 7 deletions(-) create mode 100644 test/local/avro_array.result create mode 100755 test/local/avro_array.test.lua rename test/local/{array_and_map.result => simple_avro_array.result} (100%) rename test/local/{array_and_map.test.lua => simple_avro_array.test.lua} (84%) create mode 100755 test/testdata/array_and_map_testdata.lua diff --git a/test/local/avro_array.result b/test/local/avro_array.result new file mode 100644 index 0000000..6b6cdbb --- /dev/null +++ b/test/local/avro_array.result @@ -0,0 +1,9 @@ +RESULT +--- +user_collection: +- user_id: user_id_1 + favorite_food: + - meat + - potato +... 
+ diff --git a/test/local/avro_array.test.lua b/test/local/avro_array.test.lua new file mode 100755 index 0000000..24f25ec --- /dev/null +++ b/test/local/avro_array.test.lua @@ -0,0 +1,57 @@ +#!/usr/bin/env tarantool + +box.cfg{background = false} +local fio = require('fio') + +-- require in-repo version of graphql/ sources despite current working directory +package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") + :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. +package.path + +local graphql = require('graphql') +local testdata = require('test.testdata.array_and_map_testdata') + +-- init box, upload test data and acquire metadata +-- ----------------------------------------------- + + +-- init box and data schema +testdata.init_spaces() + +-- upload test data +testdata.fill_test_data() + +-- acquire metadata +local metadata = testdata.get_test_metadata() +local schemas = metadata.schemas +local collections = metadata.collections +local service_fields = metadata.service_fields +local indexes = metadata.indexes + +-- build accessor and graphql schemas +-- ---------------------------------- + +local accessor = graphql.accessor_space.new({ + schemas = schemas, + collections = collections, + service_fields = service_fields, + indexes = indexes, +}) + +local gql_wrapper = graphql.new({ + schemas = schemas, + collections = collections, + accessor = accessor, +}) + +-- run queries +-- ----------- + +testdata.run_queries(gql_wrapper) + +-- clean up +-- -------- + +testdata.drop_spaces() + +os.exit() diff --git a/test/local/array_and_map.result b/test/local/simple_avro_array.result similarity index 100% rename from test/local/array_and_map.result rename to test/local/simple_avro_array.result diff --git a/test/local/array_and_map.test.lua b/test/local/simple_avro_array.test.lua similarity index 84% rename from test/local/array_and_map.test.lua rename to test/local/simple_avro_array.test.lua index e2bb119..257cc8c 100755 --- a/test/local/array_and_map.test.lua +++ b/test/local/simple_avro_array.test.lua @@ -2,11 +2,12 @@ local fio = require('fio') --- require in-repo version of graphql/ sources despite current working directory + --require in-repo version of graphql/ sources despite current working directory package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. package.path + local json = require('json') local yaml = require('yaml') local graphql = require('graphql') @@ -29,7 +30,7 @@ local collections = json.decode([[{ } }]]) -local function access_function(parent, collection_name, filter, args) +local function simple_access_function(parent, collection_name, filter, args) --[[ print('DEBUG: collection_name: ' .. collection_name) print('DEBUG: filter: ' .. 
json.encode(filter)) @@ -51,11 +52,11 @@ local function access_function(parent, collection_name, filter, args) return { obj } end -local accessor = setmetatable({}, { +local simple_accessor = setmetatable({}, { __index = { select = function(self, parent, collection_name, connection_name, filter, args) - return access_function(parent, collection_name, filter, args) + return simple_access_function(parent, collection_name, filter, args) end, list_args = function(self, connection_type) if connection_type == '1:1' then @@ -70,13 +71,13 @@ local accessor = setmetatable({}, { } }) -local gql_wrapper = graphql.new({ +local gql_wrapper_simple_accessor = graphql.new({ -- class_name:class mapping schemas = schemas, -- collection_{schema_name=..., connections=...} mapping collections = collections, -- :select() and :list_args() provider - accessor = accessor, + accessor = simple_accessor, }) local query_with_list = [[ @@ -90,7 +91,7 @@ local query_with_list = [[ utils.show_trace(function() local variables_2 = { user_id = 'def' } - local gql_query_2 = gql_wrapper:compile(query_with_list) + local gql_query_2 = gql_wrapper_simple_accessor:compile(query_with_list) local result = gql_query_2:execute(variables_2) print(('RESULT\n%s'):format(yaml.encode(result))) end) diff --git a/test/testdata/array_and_map_testdata.lua b/test/testdata/array_and_map_testdata.lua new file mode 100755 index 0000000..c264b9c --- /dev/null +++ b/test/testdata/array_and_map_testdata.lua @@ -0,0 +1,113 @@ +local json = require('json') +local yaml = require('yaml') +local utils = require('graphql.utils') + +local array_testdata = {} + +local function print_and_return(...) + print(...) + return table.concat({ ... }, ' ') .. '\n' +end + +function array_testdata.get_test_metadata() + + local schemas = json.decode([[{ + "user": { + "name": "user", + "type": "record", + "fields": [ + { "name": "user_id", "type": "string" }, + { "name": "favorite_food", "type": {"type": "array", "items": "string"} } + ] + } + }]]) + + local collections = json.decode([[{ + "user_collection": { + "schema_name": "user", + "connections": [] + } +}]]) + + local service_fields = { + user = { + { name = 'expires_on', type = 'long', default = 0 }, + }, + order = {}, + } + + local indexes = { + user_collection = { + user_id_index = { + service_fields = {}, + fields = { 'user_id' }, + index_type = 'tree', + unique = true, + primary = true, + }, + } + } + + return { + schemas = schemas, + collections = collections, + service_fields = service_fields, + indexes = indexes, + } +end + +function array_testdata.init_spaces() + -- user_collection fields + local U_USER_ID_FN = 2 + + box.once('test_space_init_spaces', function() + box.schema.create_space('user_collection') + box.space.user_collection:create_index('user_id_index', + { type = 'tree', unique = true, parts = { + U_USER_ID_FN, 'string' + } } + ) + end) +end + +function array_testdata.fill_test_data(shard) + local shard = shard or box.space + + shard.user_collection:replace( + { 1827767717, 'user_id_1', { 'meat', 'potato' } }) + shard.user_collection:replace( + { 1827767717, 'user_id_2', { 'fruit' } }) + --@todo add empty array +end + +function array_testdata.drop_spaces() + box.space._schema:delete('oncetest_space_init_spaces') + box.space.user_collection:drop() +end + +function array_testdata.run_queries(gql_wrapper) + + local results = '' + + local query_1 = [[ + query user_favorites($user_id: String) { + user_collection(user_id: $user_id) { + user_id + favorite_food + } + } + ]] + + --assert(false, 'err') + 
utils.show_trace(function() + local variables_1 = { user_id = 'user_id_1' } + local gql_query_1 = gql_wrapper:compile(query_1) + local result = gql_query_1:execute(variables_1) + results = results .. print_and_return( + ('RESULT\n%s'):format(yaml.encode(result))) + end) + + return results +end + +return array_testdata From 5af08d8fd49ef8e19bc8b95da540901feec11dea Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Mon, 26 Feb 2018 19:52:41 +0300 Subject: [PATCH 16/26] move nullable back to tarantool_graphql from utils --- graphql/tarantool_graphql.lua | 11 ++++++++++- graphql/utils.lua | 12 ------------ 2 files changed, 10 insertions(+), 13 deletions(-) mode change 100644 => 100755 graphql/utils.lua diff --git a/graphql/tarantool_graphql.lua b/graphql/tarantool_graphql.lua index a0d2485..ee2a0dd 100644 --- a/graphql/tarantool_graphql.lua +++ b/graphql/tarantool_graphql.lua @@ -57,7 +57,16 @@ local function avro_type(avro_schema) error('unrecognized avro-schema type: ' .. json.encode(avro_schema)) end -local nullable = utils.nullable +-- XXX: recursive skip several NonNull's? +local function nullable(gql_class) + assert(type(gql_class) == 'table', 'gql_class must be a table, got ' .. + type(gql_class)) + + if gql_class.__type ~= 'NonNull' then return gql_class end + + assert(gql_class.ofType ~= nil, 'gql_class.ofType must not be nil') + return gql_class.ofType +end local types_long = types.scalar({ name = 'Long', diff --git a/graphql/utils.lua b/graphql/utils.lua old mode 100644 new mode 100755 index e2df5db..1a472fa --- a/graphql/utils.lua +++ b/graphql/utils.lua @@ -132,16 +132,4 @@ function utils.gen_booking_table(data) }) end --- XXX: recursive skip several NonNull's? -function utils.nullable(gql_class) - assert(type(gql_class) == 'table', 'gql_class must be a table, got ' .. - type(gql_class)) - - if gql_class.__type ~= 'NonNull' then return gql_class end - - assert(gql_class.ofType ~= nil, 'gql_class.ofType must not be nil') - return gql_class.ofType -end - - return utils From be8457c434444344c74e9bab5f358caf1dee4cf7 Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Tue, 27 Feb 2018 11:29:46 +0300 Subject: [PATCH 17/26] add support of avro map and test for simple case with scalar map values and mock accessor --- graphql/tarantool_graphql.lua | 28 ++++++- test/local/simple_avro_map.result | 9 ++ test/local/simple_avro_map.test.lua | 102 +++++++++++++++++++++++ test/testdata/array_and_map_testdata.lua | 24 +++--- 4 files changed, 150 insertions(+), 13 deletions(-) create mode 100644 test/local/simple_avro_map.result create mode 100755 test/local/simple_avro_map.test.lua diff --git a/graphql/tarantool_graphql.lua b/graphql/tarantool_graphql.lua index ee2a0dd..1b90281 100644 --- a/graphql/tarantool_graphql.lua +++ b/graphql/tarantool_graphql.lua @@ -80,6 +80,20 @@ local types_long = types.scalar({ end }) +local types_map = types.scalar({ + name = 'Map', + description = 'Map is a dictionary with string keys and values of ' .. + 'arbitrary but same among all values type', + serialize = function(value) return value end, + parseValue = function(value)return value end, + -- node == ast + parseLiteral = function(node) + if node.kind == 'Map' then + return node.value + end + end +}) + -- XXX: boolean -- XXX: float local function convert_scalar_type(avro_schema, opts) @@ -394,6 +408,18 @@ gql_type = function(state, avro_schema, collection, collection_name) .. avro_type(avro_schema.items) .. 
" is not a scalar") local gql_array = types.list(gql_items_type) return avro_t == 'array' and types.nonNull(gql_array) or gql_array + elseif avro_t == 'map' or avro_t == 'map*' then + assert(avro_schema.values ~= nil, + 'values must not be nil in map avro schema') + assert(type(avro_schema.values) == 'table' + or type(avro_schema.values) == 'string', + ('avro_schema.values must be a table or a string,' .. + 'got %s (avro_schema %s)'):format(type(avro_schema.values), + json.encode(avro_schema))) + + convert_scalar_type(avro_schema.values, {raise = true}) + local gql_map = types_map + return avro_t == 'map' and types.nonNull(gql_map) or gql_map else local res = convert_scalar_type(avro_schema, {raise = false}) if res == nil then @@ -438,7 +464,7 @@ local function parse_cfg(cfg) -- recursively converts all avro types into gql types in the given schema assert(schema.type == 'record', - 'top-level schema must have record avro type, not' + 'top-level schema must have record avro type, not ' .. schema.type) state.types[collection_name] = gql_type(state, schema, collection, collection_name) diff --git a/test/local/simple_avro_map.result b/test/local/simple_avro_map.result new file mode 100644 index 0000000..a3982bd --- /dev/null +++ b/test/local/simple_avro_map.result @@ -0,0 +1,9 @@ +RESULT +--- +user_collection: +- user_id: def + favorite_holidays: + december: new year + march: vacation +... + diff --git a/test/local/simple_avro_map.test.lua b/test/local/simple_avro_map.test.lua new file mode 100755 index 0000000..029e6fa --- /dev/null +++ b/test/local/simple_avro_map.test.lua @@ -0,0 +1,102 @@ +#!/usr/bin/env tarantool + +local fio = require('fio') + +--require in-repo version of graphql/ sources despite current working directory +package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") + :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. + package.path + +local json = require('json') +local yaml = require('yaml') +local graphql = require('graphql') +local utils = require('graphql.utils') + +local schemas = json.decode([[{ + "user": { + "name": "user", + "type": "record", + "fields": [ + { "name": "user_id", "type": "string" }, + { "name": "favorite_holidays", "type": {"type": "map", "values": "string"} } + ] + } +}]]) + +local collections = json.decode([[{ + "user_collection": { + "schema_name": "user" + } +}]]) + +local function simple_access_function(parent, collection_name, filter, args) + --[[ + print('DEBUG: collection_name: ' .. collection_name) + print('DEBUG: filter: ' .. json.encode(filter)) + print('DEBUG: args: ' .. json.encode(args)) + print('DEBUG: --------') + --]] + local obj + if collection_name == 'user_collection' then + obj = { + user_id = 'def', + favorite_holidays = { december = 'new year', march = 'vacation' } + } + else + error('NIY: ' .. 
collection_name) + end + if not utils.is_subtable(obj, filter) then + return {} + end + return { obj } +end + +local simple_accessor = setmetatable({}, { + __index = { + select = function(self, parent, collection_name, connection_name, + filter, args) + return simple_access_function(parent, collection_name, filter, args) + end, + list_args = function(self, connection_type) + if connection_type == '1:1' then + return {} + end + return { + { name = 'limit', type = 'int' }, + { name = 'offset', type = 'long' }, + -- {name = 'filter', type = ...}, + } + end, + } +}) + +local gql_wrapper_simple_accessor +local query_with_map +utils.show_trace(function() + gql_wrapper_simple_accessor = graphql.new({ + -- class_name:class mapping + schemas = schemas, + -- collection_{schema_name=..., connections=...} mapping + collections = collections, + -- :select() and :list_args() provider + accessor = simple_accessor, + }) + + query_with_map = [[ + query userFavs($user_id: String) { + user_collection(user_id: $user_id) { + user_id + favorite_holidays + } + } + ]] + +end +) + +utils.show_trace(function() + local variables_2 = { user_id = 'def' } + local gql_query_2 = gql_wrapper_simple_accessor:compile(query_with_map) + local result = gql_query_2:execute(variables_2) + print(('RESULT\n%s'):format(yaml.encode(result))) +end) diff --git a/test/testdata/array_and_map_testdata.lua b/test/testdata/array_and_map_testdata.lua index c264b9c..eeccaf1 100755 --- a/test/testdata/array_and_map_testdata.lua +++ b/test/testdata/array_and_map_testdata.lua @@ -12,22 +12,22 @@ end function array_testdata.get_test_metadata() local schemas = json.decode([[{ - "user": { - "name": "user", - "type": "record", - "fields": [ - { "name": "user_id", "type": "string" }, - { "name": "favorite_food", "type": {"type": "array", "items": "string"} } - ] + "user": { + "name": "user", + "type": "record", + "fields": [ + { "name": "user_id", "type": "string" }, + { "name": "favorite_food", "type": {"type": "array", "items": "string"} } + ] } }]]) local collections = json.decode([[{ - "user_collection": { - "schema_name": "user", - "connections": [] - } -}]]) + "user_collection": { + "schema_name": "user", + "connections": [] + } + }]]) local service_fields = { user = { From fec3b31b087ae67b4fc0917cca4a3a6187e87327 Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Tue, 27 Feb 2018 14:22:55 +0300 Subject: [PATCH 18/26] add test for map with space accessor --- ...ray.result => array_and_map_simple.result} | 1 + ...test.lua => array_and_map_simple.test.lua} | 40 +++++++++++-------- test/testdata/array_and_map_testdata.lua | 18 ++++----- 3 files changed, 34 insertions(+), 25 deletions(-) rename test/local/{avro_array.result => array_and_map_simple.result} (57%) rename test/local/{avro_array.test.lua => array_and_map_simple.test.lua} (62%) diff --git a/test/local/avro_array.result b/test/local/array_and_map_simple.result similarity index 57% rename from test/local/avro_array.result rename to test/local/array_and_map_simple.result index 6b6cdbb..8274dd3 100644 --- a/test/local/avro_array.result +++ b/test/local/array_and_map_simple.result @@ -2,6 +2,7 @@ RESULT --- user_collection: - user_id: user_id_1 + favorite_holidays: {'december': 'new year', 'march': 'vacation'} favorite_food: - meat - potato diff --git a/test/local/avro_array.test.lua b/test/local/array_and_map_simple.test.lua similarity index 62% rename from test/local/avro_array.test.lua rename to test/local/array_and_map_simple.test.lua index 24f25ec..aa16c47 100755 --- 
a/test/local/avro_array.test.lua +++ b/test/local/array_and_map_simple.test.lua @@ -1,12 +1,11 @@ #!/usr/bin/env tarantool -box.cfg{background = false} +box.cfg { background = false } local fio = require('fio') -- require in-repo version of graphql/ sources despite current working directory package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") - :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. -package.path + :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. package.path local graphql = require('graphql') local testdata = require('test.testdata.array_and_map_testdata') @@ -27,22 +26,31 @@ local schemas = metadata.schemas local collections = metadata.collections local service_fields = metadata.service_fields local indexes = metadata.indexes +local utils = require('graphql.utils') -- build accessor and graphql schemas -- ---------------------------------- - -local accessor = graphql.accessor_space.new({ - schemas = schemas, - collections = collections, - service_fields = service_fields, - indexes = indexes, -}) - -local gql_wrapper = graphql.new({ - schemas = schemas, - collections = collections, - accessor = accessor, -}) +local accessor +utils.show_trace( +function() + accessor = graphql.accessor_space.new({ + schemas = schemas, + collections = collections, + service_fields = service_fields, + indexes = indexes, + }) +end +) + +local gql_wrapper +utils.show_trace(function() + gql_wrapper = graphql.new({ + schemas = schemas, + collections = collections, + accessor = accessor, + }) +end +) -- run queries -- ----------- diff --git a/test/testdata/array_and_map_testdata.lua b/test/testdata/array_and_map_testdata.lua index eeccaf1..092193a 100755 --- a/test/testdata/array_and_map_testdata.lua +++ b/test/testdata/array_and_map_testdata.lua @@ -17,7 +17,8 @@ function array_testdata.get_test_metadata() "type": "record", "fields": [ { "name": "user_id", "type": "string" }, - { "name": "favorite_food", "type": {"type": "array", "items": "string"} } + { "name": "favorite_food", "type": {"type": "array", "items": "string"} }, + { "name": "favorite_holidays", "type": {"type": "map", "values": "string"} } ] } }]]) @@ -74,9 +75,8 @@ function array_testdata.fill_test_data(shard) local shard = shard or box.space shard.user_collection:replace( - { 1827767717, 'user_id_1', { 'meat', 'potato' } }) - shard.user_collection:replace( - { 1827767717, 'user_id_2', { 'fruit' } }) + { 1827767717, 'user_id_1', { 'meat', 'potato' }, + { december = 'new year', march = 'vacation'} }) --@todo add empty array end @@ -89,22 +89,22 @@ function array_testdata.run_queries(gql_wrapper) local results = '' - local query_1 = [[ - query user_favorites($user_id: String) { + local query_map = [[ + query user_holidays($user_id: String) { user_collection(user_id: $user_id) { user_id favorite_food + favorite_holidays } } ]] - --assert(false, 'err') utils.show_trace(function() local variables_1 = { user_id = 'user_id_1' } - local gql_query_1 = gql_wrapper:compile(query_1) + local gql_query_1 = gql_wrapper:compile(query_map) local result = gql_query_1:execute(variables_1) results = results .. 
print_and_return( - ('RESULT\n%s'):format(yaml.encode(result))) + ('RESULT\n%s'):format(yaml.encode(result))) end) return results From 0423ec45313a52fdadd5cb1c4e786666e560d6f6 Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Wed, 28 Feb 2018 13:02:44 +0300 Subject: [PATCH 19/26] add gql support of avro map/array with records as items/values --- graphql/tarantool_graphql.lua | 14 +++---- test/local/array_and_map_simple.result | 10 ----- .../local/array_and_map_space_accessor.result | 14 +++++++ ... => array_and_map_space_accessor.test.lua} | 0 test/testdata/array_and_map_testdata.lua | 37 +++++++++++++++++-- 5 files changed, 53 insertions(+), 22 deletions(-) delete mode 100644 test/local/array_and_map_simple.result create mode 100644 test/local/array_and_map_space_accessor.result rename test/local/{array_and_map_simple.test.lua => array_and_map_space_accessor.test.lua} (100%) diff --git a/graphql/tarantool_graphql.lua b/graphql/tarantool_graphql.lua index 1b90281..3d422b0 100644 --- a/graphql/tarantool_graphql.lua +++ b/graphql/tarantool_graphql.lua @@ -398,14 +398,12 @@ gql_type = function(state, avro_schema, collection, collection_name) elseif avro_t == 'array' or avro_t == 'array*' then assert(avro_schema.items ~= nil, 'items field must not be nil in array avro schema') - assert(type(avro_schema.items) == 'string', - 'avro_schema.items must be a string, got ' - .. type(avro_schema.item)) + assert(type(avro_schema.items) == 'string' + or type(avro_schema.items) == 'table', + 'avro_schema.items must be a string or a table, got ' + .. type(avro_schema.items)) - local gql_items_type = convert_scalar_type(avro_schema.items) - - assert(gql_items_type, "only scalars are supported as array items for now " - .. avro_type(avro_schema.items) .. " is not a scalar") + local gql_items_type = gql_type(state, avro_schema.items) local gql_array = types.list(gql_items_type) return avro_t == 'array' and types.nonNull(gql_array) or gql_array elseif avro_t == 'map' or avro_t == 'map*' then @@ -417,7 +415,7 @@ gql_type = function(state, avro_schema, collection, collection_name) 'got %s (avro_schema %s)'):format(type(avro_schema.values), json.encode(avro_schema))) - convert_scalar_type(avro_schema.values, {raise = true}) + gql_type(state, avro_schema.values) local gql_map = types_map return avro_t == 'map' and types.nonNull(gql_map) or gql_map else diff --git a/test/local/array_and_map_simple.result b/test/local/array_and_map_simple.result deleted file mode 100644 index 8274dd3..0000000 --- a/test/local/array_and_map_simple.result +++ /dev/null @@ -1,10 +0,0 @@ -RESULT ---- -user_collection: -- user_id: user_id_1 - favorite_holidays: {'december': 'new year', 'march': 'vacation'} - favorite_food: - - meat - - potato -... - diff --git a/test/local/array_and_map_space_accessor.result b/test/local/array_and_map_space_accessor.result new file mode 100644 index 0000000..d293bc3 --- /dev/null +++ b/test/local/array_and_map_space_accessor.result @@ -0,0 +1,14 @@ +RESULT +--- +user_collection: +- favorite_holidays: {'december': 'new year', 'march': 'vacation'} + user_id: user_id_1 + user_balances: + - value: 33 + - value: 44 + favorite_food: + - meat + - potato + customer_balances: {'salary': {'value': 333}, 'deposit': {'value': 444}} +... 
+ diff --git a/test/local/array_and_map_simple.test.lua b/test/local/array_and_map_space_accessor.test.lua similarity index 100% rename from test/local/array_and_map_simple.test.lua rename to test/local/array_and_map_space_accessor.test.lua diff --git a/test/testdata/array_and_map_testdata.lua b/test/testdata/array_and_map_testdata.lua index 092193a..26bc40b 100755 --- a/test/testdata/array_and_map_testdata.lua +++ b/test/testdata/array_and_map_testdata.lua @@ -18,7 +18,29 @@ function array_testdata.get_test_metadata() "fields": [ { "name": "user_id", "type": "string" }, { "name": "favorite_food", "type": {"type": "array", "items": "string"} }, - { "name": "favorite_holidays", "type": {"type": "map", "values": "string"} } + { "name": "favorite_holidays", "type": {"type": "map", "values": "string"} }, + { "name": "user_balances", "type": + {"type": "array", "items": + { "name": "balance", + "type": "record", + "fields": [{ + "name": "value", + "type": "int" + }] + } + } + }, + { "name": "customer_balances", "type": + {"type": "map", "values": + { "name": "another_balance", + "type": "record", + "fields": [{ + "name": "value", + "type": "int" + }] + } + } + } ] } }]]) @@ -76,7 +98,10 @@ function array_testdata.fill_test_data(shard) shard.user_collection:replace( { 1827767717, 'user_id_1', { 'meat', 'potato' }, - { december = 'new year', march = 'vacation'} }) + { december = 'new year', march = 'vacation' }, + { { 33 }, { 44 } }, + { salary = {333}, deposit = { 444}} + }) --@todo add empty array end @@ -89,19 +114,23 @@ function array_testdata.run_queries(gql_wrapper) local results = '' - local query_map = [[ + local query = [[ query user_holidays($user_id: String) { user_collection(user_id: $user_id) { user_id favorite_food favorite_holidays + user_balances { + value + } + customer_balances } } ]] utils.show_trace(function() local variables_1 = { user_id = 'user_id_1' } - local gql_query_1 = gql_wrapper:compile(query_map) + local gql_query_1 = gql_wrapper:compile(query) local result = gql_query_1:execute(variables_1) results = results .. 
print_and_return( ('RESULT\n%s'):format(yaml.encode(result))) From a936771930f7b6ba014af31468873800e88f290f Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Wed, 28 Feb 2018 14:36:58 +0300 Subject: [PATCH 20/26] style/readability fixes after review --- graphql/core/execute.lua | 81 +++++++++++++++++++++++- graphql/tarantool_graphql.lua | 39 ++++++------ test/local/simple_avro_array.test.lua | 9 ++- test/testdata/array_and_map_testdata.lua | 8 +-- 4 files changed, 106 insertions(+), 31 deletions(-) diff --git a/graphql/core/execute.lua b/graphql/core/execute.lua index 181b0c9..e740100 100644 --- a/graphql/core/execute.lua +++ b/graphql/core/execute.lua @@ -79,6 +79,13 @@ local function mergeSelectionSets(fields) end local function defaultResolver(object, arguments, info) + --print('print from default resolver in execute 82 ') + --print(object) + --require('pl.pretty').dump(object) + --require('pl.pretty').dump(object) + --print('print from default resolver in 86') + --require('pl.pretty').dump(info.fieldASTs[1].name.value) + --require('pl.pretty').dump(object[info.fieldASTs[1].name.value]) return object[info.fieldASTs[1].name.value] end @@ -145,6 +152,25 @@ end local evaluateSelections +--- check if given object is flat and have only scalars +local function is_simple_object(object) + assert(type(object) == 'table', 'object type must be table') + + for _, v in pairs(object) do + if type(v) == 'table' or type(v) == 'function' then + return false + end + end + + for _, v in ipairs(object) do + if type(v) == 'table' or type(v) == 'function' then + return false + end + end + + return true +end + local function completeValue(fieldType, result, subSelections, context) local fieldTypeName = fieldType.__type @@ -163,6 +189,7 @@ local function completeValue(fieldType, result, subSelections, context) return nil end + if fieldTypeName == 'List' then local innerType = fieldType.ofType @@ -172,7 +199,21 @@ local function completeValue(fieldType, result, subSelections, context) local values = {} for i, value in ipairs(result) do + --print('print from List section in completeValue() 182 with i == ' .. 
i) + --print('INNER TYPE') + --require('pl.pretty').dump(innerType) + --print('VALUE') + --require('pl.pretty').dump(value) + -- + --print("is simple object?") + --print(is_simple_object(value)) + + --values[i] = is_simple_object(value) and value or + -- completeValue(innerType, value, subSelections, context) values[i] = completeValue(innerType, value, subSelections, context) + --print('result after completeValue()') + --require('pl.pretty').dump(value[i]) + end return next(values) and values or context.schema.__emptyList @@ -183,6 +224,9 @@ local function completeValue(fieldType, result, subSelections, context) end if fieldTypeName == 'Object' then + -- можно добавить условие, что если объект простой + -- и вложенностей нет + -- то вернуть его как есть и не делать evaluateSelections local fields = evaluateSelections(fieldType, result, subSelections, context) return next(fields) and fields or context.schema.__emptyObject elseif fieldTypeName == 'Interface' or fieldTypeName == 'Union' then @@ -194,10 +238,17 @@ local function completeValue(fieldType, result, subSelections, context) end local function getFieldEntry(objectType, object, fields, context) + + --print('print from 242 execute lua') + local firstField = fields[1] local fieldName = firstField.name.value local responseKey = getFieldResponseKey(firstField) local fieldType = introspection.fieldMap[fieldName] or objectType.fields[fieldName] + -- + --require('pl.pretty').dump(fieldName) + --require('pl.pretty').dump(fieldType) + if fieldType == nil then return nil @@ -227,25 +278,53 @@ local function getFieldEntry(objectType, object, fields, context) local resolvedObject = (fieldType.resolve or defaultResolver)(object, arguments, info) local subSelections = mergeSelectionSets(fields) + --print('print from 258 in execute') + --require('pl.pretty').dump(resolvedObject) + + + --print('resolvedObject') + --require('pl.pretty').dump(objectType) + + local res = completeValue(fieldType.kind, resolvedObject, subSelections, context) + --print('complete value result from 256 in execute.lua') + --require('pl.pretty').dump(res) - return completeValue(fieldType.kind, resolvedObject, subSelections, context) + return res end evaluateSelections = function(objectType, object, selections, context) local groupedFieldSet = collectFields(objectType, selections, {}, {}, context) + --print('print from 298, groupedFieldSet') + --require('pl.pretty').dump(groupedFieldSet) + --print('OBJECT') + --require('pl.pretty').dump(object) + --print('OBJECT TYPE') + --require('pl.pretty').dump(objectType) + --print('SELECTIONS') + --require('pl.pretty').dump(selections) + + + + return util.map(groupedFieldSet, function(fields) return getFieldEntry(objectType, object, fields, context) end) end return function(schema, tree, rootValue, variables, operationName) + + local context = buildContext(schema, tree, rootValue, variables, operationName) local rootType = schema[context.operation.operation] + + if not rootType then error('Unsupported operation "' .. context.operation.operation .. '"') end + + return evaluateSelections(rootType, rootValue, context.operation.selectionSet.selections, context) end diff --git a/graphql/tarantool_graphql.lua b/graphql/tarantool_graphql.lua index 3d422b0..6cb7469 100644 --- a/graphql/tarantool_graphql.lua +++ b/graphql/tarantool_graphql.lua @@ -102,7 +102,7 @@ local function convert_scalar_type(avro_schema, opts) type(opts)) local raise = opts.raise or false assert(type(raise) == 'boolean', 'opts.raise must be boolean, got ' .. 
- type(opts.raise)) + type(raise)) local avro_t = avro_type(avro_schema) if avro_t == 'int' then @@ -121,7 +121,7 @@ local function convert_scalar_type(avro_schema, opts) if raise then error('unrecognized avro-schema scalar type: ' .. - json.encode(avro_schema)) + json.encode(avro_schema)) end return nil @@ -132,7 +132,7 @@ end --- An error will be raised if avro_schema type is 'record' --- and its' fields are not scalar type because currently --- triple nesting level (record with record as a field - ok, ---- record with record wich has inside another level - not ok) +--- record with record which has inside another level - not ok). local function gql_argument_type(state, avro_schema) assert(type(state) == 'table', 'state must be a table, got ' .. type(state)) @@ -174,13 +174,13 @@ local function gql_argument_type(state, avro_schema) else local res = convert_scalar_type(avro_schema, {raise = false}) if res == nil then - error('unrecognized avro-schema type: ' .. json.encode(avro_schema)) + error('unrecognized avro-schema type: ' .. + json.encode(avro_schema)) end return res end end - -- Convert list of fields in the avro-schema format to list of GraphQL types -- with intention to use it as GraphQL arguments later. -- It uses the @{gql_argument_type} function to convert each field, then skips @@ -207,15 +207,13 @@ local function convert_record_fields_to_args(state, fields) return args end ---- Recursively convert each field of an avro-schema to a graphql type and +--- Convert each field of an avro-schema to a graphql type and --- corresponding argument for an upper graphql type. --- --- @tparam table state for read state.accessor and previously filled --- state.types --- @tparam table fields fields part from an avro-schema ---- @tparam table opts include is_for_args flag to specify ---- case when the function is used to collect arguments -local function convert_record_fields(state, fields, opts) +local function convert_record_fields(state, fields) local res = {} local object_args = {} @@ -255,7 +253,7 @@ end --- instead of the avro-schema name. --- --- XXX As it is not clear now what to do with complex types inside arrays ----(just pass to results or allow to use filters), only scalar arrays +--- (just pass to results or allow to use filters), only scalar arrays --- is allowed for now. gql_type = function(state, avro_schema, collection, collection_name) assert(type(state) == 'table', @@ -400,8 +398,8 @@ gql_type = function(state, avro_schema, collection, collection_name) 'items field must not be nil in array avro schema') assert(type(avro_schema.items) == 'string' or type(avro_schema.items) == 'table', - 'avro_schema.items must be a string or a table, got ' - .. type(avro_schema.items)) + 'avro_schema.items must be a string or a table, got ' .. + type(avro_schema.items)) local gql_items_type = gql_type(state, avro_schema.items) local gql_array = types.list(gql_items_type) @@ -421,7 +419,8 @@ gql_type = function(state, avro_schema, collection, collection_name) else local res = convert_scalar_type(avro_schema, {raise = false}) if res == nil then - error('unrecognized avro-schema type: ' .. json.encode(avro_schema)) + error('unrecognized avro-schema type: ' .. 
+ json.encode(avro_schema)) end return res end @@ -460,10 +459,11 @@ local function parse_cfg(cfg) 'the schema itself: "%s" vs "%s"'):format(collection.schema_name, schema.name)) - -- recursively converts all avro types into gql types in the given schema + -- recursively converts all avro types into GraphQL types in the given + -- schema assert(schema.type == 'record', - 'top-level schema must have record avro type, not ' - .. schema.type) + 'top-level schema must have record avro type, got ' .. + tostring(schema.type)) state.types[collection_name] = gql_type(state, schema, collection, collection_name) @@ -516,7 +516,7 @@ end --- The function checks that one and only one GraphQL operation --- (query/mutation/subscription) is defined in the AST and it's type ---- is 'query' as mutations and subscriptions are not supported yet +--- is 'query' as mutations and subscriptions are not supported yet. local function assert_gql_query_ast(func_name, ast) assert(#ast.definitions == 1, func_name .. ': expected an one query') @@ -549,10 +549,11 @@ end --- The function parses a query string, validate the resulting query --- against the GraphQL schema and provides an object with the function to ---- execute the query with specific variables values +--- execute the query with specific variables values. +--- --- @tparam table state current state of graphql, including --- schemas, collections and accessor ---- @tparam string query raw query string +--- @tparam string query query string local function gql_compile(state, query) assert(type(state) == 'table' and type(query) == 'string', 'use :validate(...) instead of .validate(...)') diff --git a/test/local/simple_avro_array.test.lua b/test/local/simple_avro_array.test.lua index 257cc8c..5657b46 100755 --- a/test/local/simple_avro_array.test.lua +++ b/test/local/simple_avro_array.test.lua @@ -4,8 +4,7 @@ local fio = require('fio') --require in-repo version of graphql/ sources despite current working directory package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") - :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. - package.path + :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. package.path local json = require('json') @@ -72,11 +71,11 @@ local simple_accessor = setmetatable({}, { }) local gql_wrapper_simple_accessor = graphql.new({ --- class_name:class mapping + -- class_name:class mapping schemas = schemas, --- collection_{schema_name=..., connections=...} mapping + -- collection_{schema_name=..., connections=...} mapping collections = collections, --- :select() and :list_args() provider + -- :select() and :list_args() provider accessor = simple_accessor, }) diff --git a/test/testdata/array_and_map_testdata.lua b/test/testdata/array_and_map_testdata.lua index 26bc40b..ae280b1 100755 --- a/test/testdata/array_and_map_testdata.lua +++ b/test/testdata/array_and_map_testdata.lua @@ -10,7 +10,6 @@ local function print_and_return(...) 
end function array_testdata.get_test_metadata() - local schemas = json.decode([[{ "user": { "name": "user", @@ -86,9 +85,7 @@ function array_testdata.init_spaces() box.once('test_space_init_spaces', function() box.schema.create_space('user_collection') box.space.user_collection:create_index('user_id_index', - { type = 'tree', unique = true, parts = { - U_USER_ID_FN, 'string' - } } + { type = 'tree', unique = true, parts = { U_USER_ID_FN, 'string' }} ) end) end @@ -100,7 +97,7 @@ function array_testdata.fill_test_data(shard) { 1827767717, 'user_id_1', { 'meat', 'potato' }, { december = 'new year', march = 'vacation' }, { { 33 }, { 44 } }, - { salary = {333}, deposit = { 444}} + { salary = { 333 }, deposit = { 444 } } }) --@todo add empty array end @@ -111,7 +108,6 @@ function array_testdata.drop_spaces() end function array_testdata.run_queries(gql_wrapper) - local results = '' local query = [[ From c8abf95ab733349777043b5a12de7c551c4ff7c2 Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Wed, 28 Feb 2018 14:39:16 +0300 Subject: [PATCH 21/26] remove executable bit --- graphql/utils.lua | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100755 => 100644 graphql/utils.lua diff --git a/graphql/utils.lua b/graphql/utils.lua old mode 100755 new mode 100644 From 951e7eab049cb90ea391b9edf056c17803969e90 Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Wed, 28 Feb 2018 16:41:02 +0300 Subject: [PATCH 22/26] add info about Map --- README.md | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/README.md b/README.md index d273bc3..c001bbb 100644 --- a/README.md +++ b/README.md @@ -47,6 +47,42 @@ GraphQL queries on data from the local Tarantool's storage called spaces. It is planned to implement another data accessor that allows to fetch objects sharded using tarantool/shard module. +### Notes on types + +User should distinguish between Object and Map types. Both of them consists of +keys and values but there are some important differences. + +While Object is a GraphQL +built-in type, Map is a scalar-based type. In case of Object-based type +all key-value pairs are set during type definition and values may have different +arbitrary types. + +In contrast, all values of Map-based type must have the same +type and specific key-value pairs are not set during type definition. + +Map-based types should be queried as a scalar type, not as a object type +(because map's keys are not part of the schema) + + +This works +``` +{ + … + map_based_type + … +} +``` + +This doesn't work +``` +{ + … + map_based_type { + key_1 + } + … +} +``` ## Run tests ``` From 91bd8bf95d242ba07bd65c6dc06c77872736293c Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Wed, 28 Feb 2018 16:42:10 +0300 Subject: [PATCH 23/26] typo fix --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index c001bbb..332f8a5 100644 --- a/README.md +++ b/README.md @@ -60,7 +60,7 @@ arbitrary types. In contrast, all values of Map-based type must have the same type and specific key-value pairs are not set during type definition. 
-Map-based types should be queried as a scalar type, not as a object type +Map-based types should be queried as a scalar type, not as an object type (because map's keys are not part of the schema) From d9a762383272da1f739811b7058b8f079dbc2206 Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Wed, 28 Feb 2018 19:42:05 +0300 Subject: [PATCH 24/26] remove whitespaces at the end of lines --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 332f8a5..f124801 100644 --- a/README.md +++ b/README.md @@ -50,17 +50,17 @@ sharded using tarantool/shard module. ### Notes on types User should distinguish between Object and Map types. Both of them consists of -keys and values but there are some important differences. +keys and values but there are some important differences. While Object is a GraphQL -built-in type, Map is a scalar-based type. In case of Object-based type +built-in type, Map is a scalar-based type. In case of Object-based type all key-value pairs are set during type definition and values may have different arbitrary types. In contrast, all values of Map-based type must have the same type and specific key-value pairs are not set during type definition. -Map-based types should be queried as a scalar type, not as an object type +Map-based types should be queried as a scalar type, not as an object type (because map's keys are not part of the schema) From 1f2583a84779534ff09e16dd74a0d1af1984948a Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Wed, 28 Feb 2018 19:53:19 +0300 Subject: [PATCH 25/26] remove all debug prints --- graphql/core/execute.lua | 81 +--------------------------------------- 1 file changed, 1 insertion(+), 80 deletions(-) diff --git a/graphql/core/execute.lua b/graphql/core/execute.lua index 5ed6714..92d5070 100644 --- a/graphql/core/execute.lua +++ b/graphql/core/execute.lua @@ -79,13 +79,6 @@ local function mergeSelectionSets(fields) end local function defaultResolver(object, arguments, info) - --print('print from default resolver in execute 82 ') - --print(object) - --require('pl.pretty').dump(object) - --require('pl.pretty').dump(object) - --print('print from default resolver in 86') - --require('pl.pretty').dump(info.fieldASTs[1].name.value) - --require('pl.pretty').dump(object[info.fieldASTs[1].name.value]) return object[info.fieldASTs[1].name.value] end @@ -155,25 +148,6 @@ end local evaluateSelections ---- check if given object is flat and have only scalars -local function is_simple_object(object) - assert(type(object) == 'table', 'object type must be table') - - for _, v in pairs(object) do - if type(v) == 'table' or type(v) == 'function' then - return false - end - end - - for _, v in ipairs(object) do - if type(v) == 'table' or type(v) == 'function' then - return false - end - end - - return true -end - local function completeValue(fieldType, result, subSelections, context) local fieldTypeName = fieldType.__type @@ -192,7 +166,6 @@ local function completeValue(fieldType, result, subSelections, context) return nil end - if fieldTypeName == 'List' then local innerType = fieldType.ofType @@ -202,21 +175,7 @@ local function completeValue(fieldType, result, subSelections, context) local values = {} for i, value in ipairs(result) do - --print('print from List section in completeValue() 182 with i == ' .. 
i) - --print('INNER TYPE') - --require('pl.pretty').dump(innerType) - --print('VALUE') - --require('pl.pretty').dump(value) - -- - --print("is simple object?") - --print(is_simple_object(value)) - - --values[i] = is_simple_object(value) and value or - -- completeValue(innerType, value, subSelections, context) values[i] = completeValue(innerType, value, subSelections, context) - --print('result after completeValue()') - --require('pl.pretty').dump(value[i]) - end return next(values) and values or context.schema.__emptyList @@ -227,9 +186,6 @@ local function completeValue(fieldType, result, subSelections, context) end if fieldTypeName == 'Object' then - -- можно добавить условие, что если объект простой - -- и вложенностей нет - -- то вернуть его как есть и не делать evaluateSelections local fields = evaluateSelections(fieldType, result, subSelections, context) return next(fields) and fields or context.schema.__emptyObject elseif fieldTypeName == 'Interface' or fieldTypeName == 'Union' then @@ -241,17 +197,10 @@ local function completeValue(fieldType, result, subSelections, context) end local function getFieldEntry(objectType, object, fields, context) - - --print('print from 242 execute lua') - local firstField = fields[1] local fieldName = firstField.name.value local responseKey = getFieldResponseKey(firstField) local fieldType = introspection.fieldMap[fieldName] or objectType.fields[fieldName] - -- - --require('pl.pretty').dump(fieldName) - --require('pl.pretty').dump(fieldType) - if fieldType == nil then return nil @@ -282,53 +231,25 @@ local function getFieldEntry(objectType, object, fields, context) local resolvedObject = (fieldType.resolve or defaultResolver)(object, arguments, info) local subSelections = mergeSelectionSets(fields) - --print('print from 258 in execute') - --require('pl.pretty').dump(resolvedObject) - - - --print('resolvedObject') - --require('pl.pretty').dump(objectType) - - local res = completeValue(fieldType.kind, resolvedObject, subSelections, context) - --print('complete value result from 256 in execute.lua') - --require('pl.pretty').dump(res) - return res + return completeValue(fieldType.kind, resolvedObject, subSelections, context) end evaluateSelections = function(objectType, object, selections, context) local groupedFieldSet = collectFields(objectType, selections, {}, {}, context) - --print('print from 298, groupedFieldSet') - --require('pl.pretty').dump(groupedFieldSet) - --print('OBJECT') - --require('pl.pretty').dump(object) - --print('OBJECT TYPE') - --require('pl.pretty').dump(objectType) - --print('SELECTIONS') - --require('pl.pretty').dump(selections) - - - - return util.map(groupedFieldSet, function(fields) return getFieldEntry(objectType, object, fields, context) end) end return function(schema, tree, rootValue, variables, operationName) - - local context = buildContext(schema, tree, rootValue, variables, operationName) local rootType = schema[context.operation.operation] - - if not rootType then error('Unsupported operation "' .. context.operation.operation .. 
'"') end - - return evaluateSelections(rootType, rootValue, context.operation.selectionSet.selections, context) end From e406a314d7d614d4c6d54986bedc7831ac89310f Mon Sep 17 00:00:00 2001 From: SudoBobo Date: Wed, 28 Feb 2018 20:08:56 +0300 Subject: [PATCH 26/26] make Map and Object explanation more specific --- README.md | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 270acbb..209047d 100644 --- a/README.md +++ b/README.md @@ -54,14 +54,15 @@ keys and values but there are some important differences. While Object is a GraphQL built-in type, Map is a scalar-based type. In case of Object-based type -all key-value pairs are set during type definition and values may have different -arbitrary types. +all key-value pairs are set during type definition and values may have +different types (as defined in the schema). -In contrast, all values of Map-based type must have the same -type and specific key-value pairs are not set during type definition. +In contrast, set of valid Map keys is not defined in the schema, any key-value +pair is valid despite name of the key while value has schema-determined type +(which is the same among all values in the map). Map-based types should be queried as a scalar type, not as an object type -(because map's keys are not part of the schema) +(because map's keys are not part of the schema). This works