diff --git a/README.md b/README.md
index b9cdf60..209047d 100644
--- a/README.md
+++ b/README.md
@@ -47,6 +47,43 @@ GraphQL queries on data from the local Tarantool's storage called spaces.
 It is planned to implement another data accessor that allows to fetch objects
 sharded using tarantool/shard module.
 
+### Notes on types
+
+Distinguish between the Object and Map types. Both consist of key-value
+pairs, but there are some important differences.
+
+Object is a GraphQL built-in type, while Map is a scalar-based type. In an
+Object-based type all key-value pairs are set during the type definition and
+the values may have different types (as defined in the schema).
+
+In contrast, the set of valid Map keys is not defined in the schema: any
+key-value pair is valid regardless of the key name, while the value has a
+schema-determined type (the same for all values in the map).
+
+Map-based types must be queried as a scalar type, not as an object type
+(because a map's keys are not part of the schema).
+
+This works:
+
+```
+{
+  …
+  map_based_type
+  …
+}
+```
+
+This doesn't work:
+
+```
+{
+  …
+  map_based_type {
+    key_1
+  }
+  …
+}
+```
 ## Run tests
 
 ```
diff --git a/graphql/tarantool_graphql.lua b/graphql/tarantool_graphql.lua
index d597525..6d8a7ba 100644
--- a/graphql/tarantool_graphql.lua
+++ b/graphql/tarantool_graphql.lua
@@ -30,6 +30,14 @@ local function avro_type(avro_schema)
             return 'record*'
         elseif utils.is_array(avro_schema) then
             return 'union'
+        elseif avro_schema.type == 'array' then
+            return 'array'
+        elseif avro_schema.type == 'array*' then
+            return 'array*'
+        elseif avro_schema.type == 'map' then
+            return 'map'
+        elseif avro_schema.type == 'map*' then
+            return 'map*'
         end
     elseif type(avro_schema) == 'string' then
         if avro_schema == 'int' then
@@ -72,6 +80,20 @@ local types_long = types.scalar({
     end
 })
 
+local types_map = types.scalar({
+    name = 'Map',
+    description = 'Map is a dictionary with string keys and values of ' ..
+        'an arbitrary type that is the same for all values',
+    serialize = function(value) return value end,
+    parseValue = function(value) return value end,
+    -- node == ast
+    parseLiteral = function(node)
+        if node.kind == 'Map' then
+            return node.value
+        end
+    end
+})
+
 -- XXX: boolean
 -- XXX: float
 local function convert_scalar_type(avro_schema, opts)
@@ -79,8 +101,8 @@
     assert(type(opts) == 'table', 'opts must be nil or table, got ' ..
         type(opts))
     local raise = opts.raise or false
-    assert(type(opts.raise) == 'boolean', 'opts.raise must be boolean, got ' ..
-        type(opts.raise))
+    assert(type(raise) == 'boolean', 'opts.raise must be boolean, got ' ..
+        type(raise))
 
     local avro_t = avro_type(avro_schema)
     if avro_t == 'int' then
@@ -96,15 +118,21 @@
     elseif avro_t == 'string*' then
         return types.string
     end
+
     if raise then
         error('unrecognized avro-schema scalar type: ' ..
            json.encode(avro_schema))
     end
+
     return nil
 end
 
 --- Non-recursive version of the @{gql_type} function that returns
 --- InputObject instead of Object.
+--- An error will be raised if the avro_schema type is 'record' and its
+--- fields are not of scalar types, because only two nesting levels are
+--- supported for now (a record with a record as a field is OK; a record
+--- whose record field contains yet another record is not).
 local function gql_argument_type(state, avro_schema)
     assert(type(state) == 'table', 'state must be a table, got ' ..
         type(state))
@@ -115,17 +143,20 @@ local function gql_argument_type(state, avro_schema)
         assert(type(avro_schema.name) == 'string',
             ('avro_schema.name must be a string, got %s (avro_schema %s)')
             :format(type(avro_schema.name), json.encode(avro_schema)))
+
         assert(type(avro_schema.fields) == 'table',
             ('avro_schema.fields must be a table, got %s (avro_schema %s)')
             :format(type(avro_schema.fields), json.encode(avro_schema)))
 
         local fields = {}
-        for _, field in ipairs(fields) do
+        for _, field in ipairs(avro_schema.fields) do
             assert(type(field.name) == 'string',
                 ('field.name must be a string, got %s (schema %s)')
                 :format(type(field.name), json.encode(field)))
+
             local gql_field_type = convert_scalar_type(
                 field.type, {raise = true})
+
             fields[field.name] = {
                 name = field.name,
                 kind = types.nonNull(gql_field_type),
@@ -143,26 +174,41 @@ local function gql_argument_type(state, avro_schema)
     else
         local res = convert_scalar_type(avro_schema, {raise = false})
         if res == nil then
-            error('unrecognized avro-schema type: ' .. json.encode(avro_schema))
+            error('unrecognized avro-schema type: ' ..
+                json.encode(avro_schema))
         end
         return res
     end
 end
 
+--- Convert a list of fields from the avro-schema format to a list of
+--- GraphQL types intended to be used later as GraphQL arguments. Each field
+--- is converted with @{gql_argument_type}; fields of array and map types
+--- are skipped, since they cannot be arguments.
+---
+--- @tparam table state for read state.accessor and previously filled
+--- state.types
+--- @tparam table fields fields part from an avro-schema
 local function convert_record_fields_to_args(state, fields)
     local args = {}
     for _, field in ipairs(fields) do
         assert(type(field.name) == 'string',
             ('field.name must be a string, got %s (schema %s)')
             :format(type(field.name), json.encode(field)))
+
         local gql_class = gql_argument_type(state, field.type)
-        args[field.name] = nullable(gql_class)
+
+        -- arrays (gql lists) and maps can't be arguments,
+        -- so these graphql types are skipped
+        if nullable(gql_class) ~= 'List' and nullable(gql_class) ~= 'Map' then
+            args[field.name] = nullable(gql_class)
+        end
     end
     return args
 end
 
---- Convert each field of an avro-schema to a graphql type and corresponding
---- argument for an upper graphql type.
+--- Convert each field of an avro-schema to a graphql type and
+--- corresponding argument for an upper graphql type.
 ---
 --- @tparam table state for read state.accessor and previously filled
 --- state.types
@@ -170,23 +216,31 @@
 local function convert_record_fields(state, fields)
     local res = {}
     local object_args = {}
+
     for _, field in ipairs(fields) do
         assert(type(field.name) == 'string',
             ('field.name must be a string, got %s (schema %s)')
             :format(type(field.name), json.encode(field)))
+
         res[field.name] = {
             name = field.name,
             kind = gql_type(state, field.type),
         }
-        object_args[field.name] = nullable(res[field.name].kind)
+
+        -- arrays (gql lists) and maps can't be arguments
+        local avro_t = avro_type(field.type)
+        if avro_t ~= 'array' and avro_t ~= 'array*' and avro_t ~= 'map'
+            and avro_t ~= 'map*' then
+            object_args[field.name] = nullable(res[field.name].kind)
+        end
     end
     return res, object_args
 end
 
---- The function recursively converts passed avro-schema to a graphql type.
+--- The function converts passed avro-schema to a GraphQL type.
 ---
 --- @tparam table state for read state.accessor and previously filled
---- state.types
+--- state.types (a mapping from collection names to GraphQL types)
 --- @tparam table avro_schema input avro-schema
 --- @tparam[opt] table collection table with schema_name, connections fields
 --- described a collection (e.g. tarantool's spaces)
@@ -197,6 +251,10 @@ end
 --- automatically generate corresponding decucible fields.
 --- 2. The collection name will be used as the resulting graphql type name
 --- instead of the avro-schema name.
+---
+--- XXX As it is not clear yet what to do with complex types inside arrays
+--- (just pass them to results or allow filtering on them), only arrays of
+--- scalars are allowed for now.
 gql_type = function(state, avro_schema, collection, collection_name)
     assert(type(state) == 'table',
         'state must be a table, got ' .. type(state))
@@ -212,12 +270,14 @@ gql_type = function(state, avro_schema, collection, collection_name)
         ('collection and collection_name must be nils or ' ..
         'non-nils simultaneously, got: %s and %s'):format(type(collection),
         type(collection_name)))
+
     local accessor = state.accessor
     assert(accessor ~= nil, 'state.accessor must not be nil')
     assert(accessor.select ~= nil, 'state.accessor.select must not be nil')
     assert(accessor.list_args ~= nil,
         'state.accessor.list_args must not be nil')
 
+    -- type of the top element in the avro-schema
     local avro_t = avro_type(avro_schema)
     if avro_t == 'record' or avro_t == 'record*' then
@@ -230,6 +290,7 @@ gql_type = function(state, avro_schema, collection, collection_name)
         local fields, _ = convert_record_fields(state, avro_schema.fields)
 
+        -- if collection param is passed then go over all connections
         for _, c in ipairs((collection or {}).connections or {}) do
             assert(type(c.type) == 'string',
                 'connection.type must be a string, got ' .. type(c.type))
@@ -243,6 +304,7 @@ gql_type = function(state, avro_schema, collection, collection_name)
             assert(type(c.parts) == 'table',
                 'connection.parts must be a string, got ' .. type(c.parts))
 
+            -- gql type of connection field
             local destination_type = state.types[c.destination_collection]
             assert(destination_type ~= nil,
@@ -268,6 +330,7 @@ gql_type = function(state, avro_schema, collection, collection_name)
                 resolve = function(parent, args_instance, info)
                     local destination_args_names = {}
                     local destination_args_values = {}
+
                     for _, part in ipairs(c.parts) do
                         assert(type(part.source_field) == 'string',
                             'part.source_field must be a string, got ' ..
@@ -275,11 +338,13 @@ gql_type = function(state, avro_schema, collection, collection_name)
                             type(part.source_field))
                         assert(type(part.destination_field) == 'string',
                             'part.destination_field must be a string, got ' ..
                             type(part.destination_field))
+
                         destination_args_names[#destination_args_names + 1] =
                             part.destination_field
                         destination_args_values[#destination_args_values + 1] =
                             parent[part.source_field]
                     end
+
                     local from = {
                         collection_name = collection_name,
                         connection_name = c.name,
@@ -320,6 +385,7 @@ gql_type = function(state, avro_schema, collection, collection_name)
             }
         end
 
+        -- create gql type
         local res = types.object({
             name = collection ~= nil and collection.name or avro_schema.name,
             description = 'generated from avro-schema for ' ..
@@ -329,10 +395,34 @@
         return avro_t == 'enum' and types.nonNull(res) or res
     elseif avro_t == 'enum' then
         error('enums not implemented yet') -- XXX
+    elseif avro_t == 'array' or avro_t == 'array*' then
+        assert(avro_schema.items ~= nil,
+            'items field must not be nil in array avro schema')
+        assert(type(avro_schema.items) == 'string'
+            or type(avro_schema.items) == 'table',
+            'avro_schema.items must be a string or a table, got ' ..
+            type(avro_schema.items))
+
+        local gql_items_type = gql_type(state, avro_schema.items)
+        local gql_array = types.list(gql_items_type)
+        return avro_t == 'array' and types.nonNull(gql_array) or gql_array
+    elseif avro_t == 'map' or avro_t == 'map*' then
+        assert(avro_schema.values ~= nil,
+            'values must not be nil in map avro schema')
+        assert(type(avro_schema.values) == 'table'
+            or type(avro_schema.values) == 'string',
+            ('avro_schema.values must be a table or a string, ' ..
+            'got %s (avro_schema %s)'):format(type(avro_schema.values),
+            json.encode(avro_schema)))
+
+        gql_type(state, avro_schema.values) -- validates the values type
+        local gql_map = types_map
+        return avro_t == 'map' and types.nonNull(gql_map) or gql_map
     else
         local res = convert_scalar_type(avro_schema, {raise = false})
         if res == nil then
-            error('unrecognized avro-schema type: ' .. json.encode(avro_schema))
+            error('unrecognized avro-schema type: ' ..
+                json.encode(avro_schema))
         end
         return res
     end
@@ -358,10 +448,11 @@ local function parse_cfg(cfg)
     local fields = {}
 
-    for name, collection in pairs(state.collections) do
-        collection.name = name
+    for collection_name, collection in pairs(state.collections) do
+        collection.name = collection_name
         assert(collection.schema_name ~= nil,
             'collection.schema_name must not be nil')
+
         local schema = cfg.schemas[collection.schema_name]
         assert(schema ~= nil, ('cfg.schemas[%s] must not be nil'):format(
             tostring(collection.schema_name)))
@@ -369,21 +460,30 @@ local function parse_cfg(cfg)
             ('top-level schema name does not match the name in ' ..
             'the schema itself: "%s" vs "%s"'):format(collection.schema_name,
             schema.name))
-        state.types[name] = gql_type(state, schema, collection, name)
+        -- recursively convert all avro types in the given schema
+        -- into GraphQL types
+        assert(schema.type == 'record',
+            'top-level schema must have record avro type, got ' ..
+            tostring(schema.type))
+        state.types[collection_name] = gql_type(state, schema, collection,
+            collection_name)
+
+        -- prepare arguments' types
         local _, object_args = convert_record_fields(state, schema.fields)
         local list_args = convert_record_fields_to_args(
-            state, accessor:list_args(name))
+            state, accessor:list_args(collection_name))
         local args = utils.merge_tables(object_args, list_args)
-        state.object_arguments[name] = object_args
-        state.list_arguments[name] = list_args
-        state.all_arguments[name] = args
+
+        state.object_arguments[collection_name] = object_args
+        state.list_arguments[collection_name] = list_args
+        state.all_arguments[collection_name] = args
 
         -- create entry points from collection names
-        fields[name] = {
-            kind = types.nonNull(types.list(state.types[name])),
-            arguments = state.all_arguments[name],
+        fields[collection_name] = {
+            kind = types.nonNull(types.list(state.types[collection_name])),
+            arguments = state.all_arguments[collection_name],
             resolve = function(rootValue, args_instance, info)
                 local object_args_instance = {} -- passed to 'filter'
                 local list_args_instance = {} -- passed to 'args'
@@ -399,6 +499,7 @@ local function parse_cfg(cfg)
                     end
                 end
                 local from = nil
+
                 local extra = {
                     qcontext = info.qcontext
                 }
@@ -419,6 +520,9 @@ local function parse_cfg(cfg)
     return state
 end
 
+--- The function checks that exactly one GraphQL operation
+--- (query/mutation/subscription) is defined in the AST and that its type
+--- is 'query', as mutations and subscriptions are not supported yet.
 local function assert_gql_query_ast(func_name, ast)
     assert(#ast.definitions == 1,
         func_name .. ': expected an one query')
@@ -430,6 +534,8 @@
         type(operation_name))
 end
 
+--- The function makes some reasonable assertions on the input and then
+--- calls the graphql-lua execute function.
 local function gql_execute(qstate, variables)
     assert(qstate.state)
     local state = qstate.state
@@ -447,6 +553,13 @@ local function gql_execute(qstate, variables)
         operation_name)
 end
 
+--- The function parses a query string, validates the resulting query
+--- against the GraphQL schema and provides an object with a function to
+--- execute the query with specific variable values.
+---
+--- @tparam table state current state of graphql, including
+--- schemas, collections and accessor
+--- @tparam string query query string
 local function gql_compile(state, query)
     assert(type(state) == 'table' and type(query) == 'string',
         'use :validate(...) instead of .validate(...)')
@@ -463,6 +576,7 @@ local function gql_compile(state, query)
         ast = ast,
         operation_name = operation_name,
     }
+
     local gql_query = setmetatable(qstate, {
         __index = {
             execute = gql_execute,
diff --git a/test/local/array_and_map_space_accessor.result b/test/local/array_and_map_space_accessor.result
new file mode 100644
index 0000000..d293bc3
--- /dev/null
+++ b/test/local/array_and_map_space_accessor.result
@@ -0,0 +1,14 @@
+RESULT
+---
+user_collection:
+- favorite_holidays: {'december': 'new year', 'march': 'vacation'}
+  user_id: user_id_1
+  user_balances:
+  - value: 33
+  - value: 44
+  favorite_food:
+  - meat
+  - potato
+  customer_balances: {'salary': {'value': 333}, 'deposit': {'value': 444}}
+...
+ diff --git a/test/local/array_and_map_space_accessor.test.lua b/test/local/array_and_map_space_accessor.test.lua new file mode 100755 index 0000000..aa16c47 --- /dev/null +++ b/test/local/array_and_map_space_accessor.test.lua @@ -0,0 +1,65 @@ +#!/usr/bin/env tarantool + +box.cfg { background = false } +local fio = require('fio') + +-- require in-repo version of graphql/ sources despite current working directory +package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") + :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. package.path + +local graphql = require('graphql') +local testdata = require('test.testdata.array_and_map_testdata') + +-- init box, upload test data and acquire metadata +-- ----------------------------------------------- + + +-- init box and data schema +testdata.init_spaces() + +-- upload test data +testdata.fill_test_data() + +-- acquire metadata +local metadata = testdata.get_test_metadata() +local schemas = metadata.schemas +local collections = metadata.collections +local service_fields = metadata.service_fields +local indexes = metadata.indexes +local utils = require('graphql.utils') + +-- build accessor and graphql schemas +-- ---------------------------------- +local accessor +utils.show_trace( +function() + accessor = graphql.accessor_space.new({ + schemas = schemas, + collections = collections, + service_fields = service_fields, + indexes = indexes, + }) +end +) + +local gql_wrapper +utils.show_trace(function() + gql_wrapper = graphql.new({ + schemas = schemas, + collections = collections, + accessor = accessor, + }) +end +) + +-- run queries +-- ----------- + +testdata.run_queries(gql_wrapper) + +-- clean up +-- -------- + +testdata.drop_spaces() + +os.exit() diff --git a/test/local/simple_avro_array.result b/test/local/simple_avro_array.result new file mode 100644 index 0000000..da7035d --- /dev/null +++ b/test/local/simple_avro_array.result @@ -0,0 +1,9 @@ +RESULT +--- +user_collection: +- user_id: def + favorite_food: + - meat + - potato +... + diff --git a/test/local/simple_avro_array.test.lua b/test/local/simple_avro_array.test.lua new file mode 100755 index 0000000..5657b46 --- /dev/null +++ b/test/local/simple_avro_array.test.lua @@ -0,0 +1,97 @@ +#!/usr/bin/env tarantool + +local fio = require('fio') + + --require in-repo version of graphql/ sources despite current working directory +package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") + :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. package.path + + +local json = require('json') +local yaml = require('yaml') +local graphql = require('graphql') +local utils = require('graphql.utils') + +local schemas = json.decode([[{ + "user": { + "name": "user", + "type": "record", + "fields": [ + { "name": "user_id", "type": "string" }, + { "name": "favorite_food", "type": {"type": "array", "items": "string"} } + ] + } + }]]) + +local collections = json.decode([[{ + "user_collection": { + "schema_name": "user" + } +}]]) + +local function simple_access_function(parent, collection_name, filter, args) + --[[ + print('DEBUG: collection_name: ' .. collection_name) + print('DEBUG: filter: ' .. json.encode(filter)) + print('DEBUG: args: ' .. json.encode(args)) + print('DEBUG: --------') + --]] + local obj + if collection_name == 'user_collection' then + obj = { + user_id = 'def', + favorite_food = { 'meat', 'potato' }, + } + else + error('NIY: ' .. 
collection_name) + end + if not utils.is_subtable(obj, filter) then + return {} + end + return { obj } +end + +local simple_accessor = setmetatable({}, { + __index = { + select = function(self, parent, collection_name, connection_name, + filter, args) + return simple_access_function(parent, collection_name, filter, args) + end, + list_args = function(self, connection_type) + if connection_type == '1:1' then + return {} + end + return { + { name = 'limit', type = 'int' }, + { name = 'offset', type = 'long' }, + -- {name = 'filter', type = ...}, + } + end, + } +}) + +local gql_wrapper_simple_accessor = graphql.new({ + -- class_name:class mapping + schemas = schemas, + -- collection_{schema_name=..., connections=...} mapping + collections = collections, + -- :select() and :list_args() provider + accessor = simple_accessor, +}) + +local query_with_list = [[ + query userFavs($user_id: String) { + user_collection(user_id: $user_id) { + user_id + favorite_food + } + } +]] + +utils.show_trace(function() + local variables_2 = { user_id = 'def' } + local gql_query_2 = gql_wrapper_simple_accessor:compile(query_with_list) + local result = gql_query_2:execute(variables_2) + print(('RESULT\n%s'):format(yaml.encode(result))) +end) + diff --git a/test/local/simple_avro_map.result b/test/local/simple_avro_map.result new file mode 100644 index 0000000..a3982bd --- /dev/null +++ b/test/local/simple_avro_map.result @@ -0,0 +1,9 @@ +RESULT +--- +user_collection: +- user_id: def + favorite_holidays: + december: new year + march: vacation +... + diff --git a/test/local/simple_avro_map.test.lua b/test/local/simple_avro_map.test.lua new file mode 100755 index 0000000..029e6fa --- /dev/null +++ b/test/local/simple_avro_map.test.lua @@ -0,0 +1,102 @@ +#!/usr/bin/env tarantool + +local fio = require('fio') + +--require in-repo version of graphql/ sources despite current working directory +package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") + :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. + package.path + +local json = require('json') +local yaml = require('yaml') +local graphql = require('graphql') +local utils = require('graphql.utils') + +local schemas = json.decode([[{ + "user": { + "name": "user", + "type": "record", + "fields": [ + { "name": "user_id", "type": "string" }, + { "name": "favorite_holidays", "type": {"type": "map", "values": "string"} } + ] + } +}]]) + +local collections = json.decode([[{ + "user_collection": { + "schema_name": "user" + } +}]]) + +local function simple_access_function(parent, collection_name, filter, args) + --[[ + print('DEBUG: collection_name: ' .. collection_name) + print('DEBUG: filter: ' .. json.encode(filter)) + print('DEBUG: args: ' .. json.encode(args)) + print('DEBUG: --------') + --]] + local obj + if collection_name == 'user_collection' then + obj = { + user_id = 'def', + favorite_holidays = { december = 'new year', march = 'vacation' } + } + else + error('NIY: ' .. 
collection_name) + end + if not utils.is_subtable(obj, filter) then + return {} + end + return { obj } +end + +local simple_accessor = setmetatable({}, { + __index = { + select = function(self, parent, collection_name, connection_name, + filter, args) + return simple_access_function(parent, collection_name, filter, args) + end, + list_args = function(self, connection_type) + if connection_type == '1:1' then + return {} + end + return { + { name = 'limit', type = 'int' }, + { name = 'offset', type = 'long' }, + -- {name = 'filter', type = ...}, + } + end, + } +}) + +local gql_wrapper_simple_accessor +local query_with_map +utils.show_trace(function() + gql_wrapper_simple_accessor = graphql.new({ + -- class_name:class mapping + schemas = schemas, + -- collection_{schema_name=..., connections=...} mapping + collections = collections, + -- :select() and :list_args() provider + accessor = simple_accessor, + }) + + query_with_map = [[ + query userFavs($user_id: String) { + user_collection(user_id: $user_id) { + user_id + favorite_holidays + } + } + ]] + +end +) + +utils.show_trace(function() + local variables_2 = { user_id = 'def' } + local gql_query_2 = gql_wrapper_simple_accessor:compile(query_with_map) + local result = gql_query_2:execute(variables_2) + print(('RESULT\n%s'):format(yaml.encode(result))) +end) diff --git a/test/testdata/array_and_map_testdata.lua b/test/testdata/array_and_map_testdata.lua new file mode 100755 index 0000000..ae280b1 --- /dev/null +++ b/test/testdata/array_and_map_testdata.lua @@ -0,0 +1,138 @@ +local json = require('json') +local yaml = require('yaml') +local utils = require('graphql.utils') + +local array_testdata = {} + +local function print_and_return(...) + print(...) + return table.concat({ ... }, ' ') .. '\n' +end + +function array_testdata.get_test_metadata() + local schemas = json.decode([[{ + "user": { + "name": "user", + "type": "record", + "fields": [ + { "name": "user_id", "type": "string" }, + { "name": "favorite_food", "type": {"type": "array", "items": "string"} }, + { "name": "favorite_holidays", "type": {"type": "map", "values": "string"} }, + { "name": "user_balances", "type": + {"type": "array", "items": + { "name": "balance", + "type": "record", + "fields": [{ + "name": "value", + "type": "int" + }] + } + } + }, + { "name": "customer_balances", "type": + {"type": "map", "values": + { "name": "another_balance", + "type": "record", + "fields": [{ + "name": "value", + "type": "int" + }] + } + } + } + ] + } + }]]) + + local collections = json.decode([[{ + "user_collection": { + "schema_name": "user", + "connections": [] + } + }]]) + + local service_fields = { + user = { + { name = 'expires_on', type = 'long', default = 0 }, + }, + order = {}, + } + + local indexes = { + user_collection = { + user_id_index = { + service_fields = {}, + fields = { 'user_id' }, + index_type = 'tree', + unique = true, + primary = true, + }, + } + } + + return { + schemas = schemas, + collections = collections, + service_fields = service_fields, + indexes = indexes, + } +end + +function array_testdata.init_spaces() + -- user_collection fields + local U_USER_ID_FN = 2 + + box.once('test_space_init_spaces', function() + box.schema.create_space('user_collection') + box.space.user_collection:create_index('user_id_index', + { type = 'tree', unique = true, parts = { U_USER_ID_FN, 'string' }} + ) + end) +end + +function array_testdata.fill_test_data(shard) + local shard = shard or box.space + + shard.user_collection:replace( + { 1827767717, 'user_id_1', { 'meat', 'potato' }, + 
{ december = 'new year', march = 'vacation' }, + { { 33 }, { 44 } }, + { salary = { 333 }, deposit = { 444 } } + }) + --@todo add empty array +end + +function array_testdata.drop_spaces() + box.space._schema:delete('oncetest_space_init_spaces') + box.space.user_collection:drop() +end + +function array_testdata.run_queries(gql_wrapper) + local results = '' + + local query = [[ + query user_holidays($user_id: String) { + user_collection(user_id: $user_id) { + user_id + favorite_food + favorite_holidays + user_balances { + value + } + customer_balances + } + } + ]] + + utils.show_trace(function() + local variables_1 = { user_id = 'user_id_1' } + local gql_query_1 = gql_wrapper:compile(query) + local result = gql_query_1:execute(variables_1) + results = results .. print_and_return( + ('RESULT\n%s'):format(yaml.encode(result))) + end) + + return results +end + +return array_testdata
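
The behaviour added by this patch is easiest to see end-to-end. The sketch below is not part of the patch; it is a minimal, self-contained variant of test/local/simple_avro_map.test.lua that combines one array field and one map field in a single record, using made-up names (user, user_collection, tags, props) and assuming graphql/ is available on package.path. The map field is requested as a scalar, with no sub-selection, exactly as the README notes above describe.

```
#!/usr/bin/env tarantool

-- Minimal array + map example, modelled on the simple_avro_* tests.
local json = require('json')
local yaml = require('yaml')
local graphql = require('graphql')
local utils = require('graphql.utils')

local schemas = json.decode([[{
    "user": {
        "name": "user",
        "type": "record",
        "fields": [
            { "name": "user_id", "type": "string" },
            { "name": "tags", "type": {"type": "array", "items": "string"} },
            { "name": "props", "type": {"type": "map", "values": "string"} }
        ]
    }
}]])

local collections = json.decode([[{
    "user_collection": { "schema_name": "user" }
}]])

-- in-memory accessor serving a single hard-coded object, no spaces involved
local accessor = setmetatable({}, {
    __index = {
        select = function(self, parent, collection_name, connection_name,
                filter, args)
            local obj = {
                user_id = 'abc',
                tags = { 'one', 'two' },
                props = { color = 'red', size = 'L' },
            }
            return utils.is_subtable(obj, filter) and { obj } or {}
        end,
        list_args = function(self, connection_type)
            return {
                { name = 'limit', type = 'int' },
                { name = 'offset', type = 'long' },
            }
        end,
    }
})

local gql_wrapper = graphql.new({
    schemas = schemas,
    collections = collections,
    accessor = accessor,
})

-- the array field is selected directly; the map field is requested as a
-- scalar (no sub-selection), since its keys are not part of the schema
local query = [[
    query user_list($user_id: String) {
        user_collection(user_id: $user_id) {
            user_id
            tags
            props
        }
    }
]]

local compiled = gql_wrapper:compile(query)
print(yaml.encode(compiled:execute({ user_id = 'abc' })))
```

If the conversion behaves as in the .result files above, the array field should come back as a YAML list and the map field as a plain dictionary.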