diff --git a/.gitignore b/.gitignore
index bd87e11..c063212 100644
--- a/.gitignore
+++ b/.gitignore
@@ -19,3 +19,6 @@ doc/apidoc
 
 # benchmarks results
 bench.*.result.txt
+
+# temporary directory for make apidoc-lint
+doc/apidoc-lint-tmp
diff --git a/.travis.yml b/.travis.yml
index 74d8602..ffad6fd 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -58,6 +58,8 @@ install:
   - cd ../graphql
   - sudo luarocks install luacheck
   - sudo pip install virtualenv
+  - sudo luarocks install ldoc
 
 script:
+  - make apidoc-lint
   - make test
diff --git a/Makefile b/Makefile
index 97c7945..b3ab03b 100644
--- a/Makefile
+++ b/Makefile
@@ -9,6 +9,8 @@ default:
 .PHONY: lint
 lint:
 	luacheck graphql/*.lua \
+		graphql/convert_schema/*.lua \
+		graphql/server/*.lua \
 		test/bench/*.lua \
 		test/space/*.lua \
 		test/testdata/*.lua \
@@ -45,6 +47,12 @@ pure-bench:
 clean:
 	rm -rf test/var
 
+.PHONY: apidoc-lint
+apidoc-lint:
+	! ldoc -d doc/apidoc-lint-tmp graphql --all -f markdown 2>&1 >/dev/null | \
+		grep -v ': no module() call found; no initial doc comment$$\|: contains no items$$'
+	rm -rf doc/apidoc-lint-tmp
+
 .PHONY: apidoc
 apidoc:
 	ldoc -d doc/apidoc graphql --all -f markdown
diff --git a/README.md b/README.md
index 01eec4f..0b28f12 100644
--- a/README.md
+++ b/README.md
@@ -394,4 +394,8 @@ make test
 Consider LICENSE file for details. In brief:
 
 * graphql/core: MIT (c) 2015 Bjorn Swenson
+* graphql/server/graphiql: Facebook dev tools & examples license (allows use,
+  copy and distribute) (c) 2015, Facebook, Inc (more: [1])
 * all other content: BSD 2-clause (c) 2018 Tarantool AUTHORS
+
+[1]: https://github.com/graphql/graphiql/issues/10
diff --git a/demo/demo.lua b/demo/demo.lua
index 4d435bf..6ea873a 100755
--- a/demo/demo.lua
+++ b/demo/demo.lua
@@ -11,7 +11,7 @@ package.path = package.path
 
 local log = require('log')
 
-local test_utils = require('test.utils')
+local test_utils = require('test.test_utils')
 
 -- e. g. nullable_1_1_conn
 local testdata_name = arg[1] or 'common'
diff --git a/graphql/accessor_general.lua b/graphql/accessor_general.lua
index 5a3029d..a80cfdb 100644
--- a/graphql/accessor_general.lua
+++ b/graphql/accessor_general.lua
@@ -10,12 +10,9 @@ local avro_schema = require('avro_schema')
 local utils = require('graphql.utils')
 local clock = require('clock')
 local bit = require('bit')
-local rex, is_pcre2 = utils.optional_require('rex_pcre2'), true
-if rex == nil then
-    -- fallback to libpcre
-    rex, is_pcre2 = utils.optional_require('rex_pcre'), false
-end
+local rex, is_pcre2 = utils.optional_require_rex()
 local avro_helpers = require('graphql.avro_helpers')
+local db_schema_helpers = require('graphql.db_schema_helpers')
 
 local check = utils.check
 
@@ -44,7 +41,7 @@ accessor_general.TIMEOUT_INFINITY = TIMEOUT_INFINITY
 --- Validate and compile set of avro schemas (with respect to service fields).
 ---
 --- @tparam table schemas map where keys are string names and values are
---- avro schemas; consider an example in @{tarantool_graphql.new}
+--- avro schemas; consider an example in @{impl.new}
 --- function description.
 --- @tparam table service_fields map where keys are string names of avro
 --- schemas (from `schemas` argument) and values are service
@@ -109,46 +106,6 @@ local function compile_schemas(schemas, service_fields)
     return models, service_fields_defaults
 end
 
---- Get user-provided meta-information about the primary index of given
---- collection.
----
---- @tparam table self the data accessor
----
---- @tparam string collection_name the name of collection to find the primary
---- index
----
---- @treturn string `index_name`
---- @treturn table `index` (meta-information, not the index itself)
-local function get_primary_index_meta(self, collection_name)
-    assert(type(self) == 'table',
-        'self must be a table, got ' .. type(self))
-    assert(type(collection_name) == 'string',
-        'collection_name must be a string, got ' ..
-        type(collection_name))
-
-    local indexes = self.indexes[collection_name]
-
-    local res_index_name
-
-    for index_name, index in pairs(indexes) do
-        if res_index_name == nil and index.primary then
-            res_index_name = index_name
-        elseif res_index_name ~= nil and index.primary then
-            error(('several indexes were marked as primary in ' ..
-                'the "%s" collection, at least "%s" and "%s"'):format(
-                collection_name, res_index_name, index_name))
-        end
-    end
-
-    if res_index_name == nil then
-        error(('cannot find primary index for collection "%s"'):format(
-            collection_name))
-    end
-
-    local res_index = indexes[res_index_name]
-    return res_index_name, res_index
-end
-
 --- Get a key to lookup index by `lookup_index_name` (part of `index_cache`).
 ---
 --- @tparam table filter filter for objects, its keys (names of fields) will
@@ -312,7 +269,7 @@ end
 
 --- Validate `from` parameter of accessor_instance:select().
 ---
---- @tparam table from see @{tarantool_graphql.new}
+--- @tparam table from see @{impl.new}
 ---
 --- Raises an error when the validation fails.
 ---
@@ -334,7 +291,7 @@ end
 --- (directly or indirectly using the `accessor_space.new` or the
 --- `accessor_shard.new` function); this function uses the
 --- `self.index_cache` prebuild table representing available indexes
-
+---
 --- @tparam string collection_name name of a collection of whose indexes the
 --- function will search through
 ---
@@ -353,7 +310,7 @@ end
 --- lookup needed index, values forms the `value_list` return value
 ---
 --- @tparam table args the `args` argument from the `self:select()` function,
---- it is the `list_args_instance` variable in terms of the `tarantool_graphql`
+--- it is the `list_args_instance` variable in terms of the `convert_schema`
 --- module; here we using only `args.offset` value
 ---
 --- @treturn boolean `full_match` is whether passing `value_list` to the index
@@ -375,7 +332,7 @@ end
 --- `nil`, or contains `value_list` field to pass to a GT (great-then) index,
 --- or contains `filter` field to use in `process_tuple` for find the pivot in
 --- a select result
-local get_index_name = function(self, collection_name, from, filter, args)
+local function get_index_name(self, collection_name, from, filter, args)
     assert(type(self) == 'table',
         'self must be a table, got ' .. type(self))
     assert(type(collection_name) == 'string',
@@ -419,7 +376,8 @@ local get_index_name = function(self, collection_name, from, filter, args)
     local pivot
 
     if args.offset ~= nil then
-        local _, index_meta = get_primary_index_meta(self, collection_name)
+        local _, index_meta = db_schema_helpers.get_primary_index_meta(self,
+            collection_name)
         local pivot_filter
         if #index_meta.fields == 1 then
             -- we use simple type in case of scalar offset
@@ -444,8 +402,8 @@
     -- corresponding offset in `pivot.value_list`, then the result will be
     -- postprocessed using `new_filter`, if necessary.
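For illustration, here is how a compound `offset` argument maps onto a pivot
over the primary key (a minimal sketch; the collection and field names are
hypothetical, the flow follows `get_index_name` above):

```lua
-- Suppose the primary index of a "user" collection covers
-- {'user_id', 'domain'}. The offset argument is then a record:
local args = {
    limit = 10,
    offset = {user_id = 42, domain = 'tarantool.org'},
}
-- get_index_name() builds a pivot from it: the select iterates from the
-- tuple matching {user_id = 42, domain = 'tarantool.org'} (GT semantics)
-- and the result is postprocessed with `new_filter` when necessary.
```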
     if args.offset ~= nil then
-        local index_name, index_meta = get_primary_index_meta(self,
-            collection_name)
+        local index_name, index_meta = db_schema_helpers.get_primary_index_meta(
+            self, collection_name)
         local full_match
         local pivot_value_list
         local new_filter = filter
@@ -741,8 +699,8 @@ end
 --- to validate
 ---
 --- @tparam table schemas map from schema names to schemas as defined in the
---- @{tarantool_graphql.new} function; this is for validate collection against
---- certain set of schemas (no 'dangling' schema names in collections)
+--- @{impl.new} function; this is to validate collections against a certain
+--- set of schemas (no 'dangling' schema names in collections)
 ---
 --- @return nil
 local function validate_collections(collections, schemas)
@@ -814,36 +772,42 @@ end
 local function match_using_re(obj, pcre)
     if pcre == nil then return true end
 
+    assert(rex ~= nil, 'we should not get past :compile() ' ..
+        'with a query that contains PCRE matching when no ' ..
+        'lrexlib-pcre (rex_pcre) module is present')
+
     for field_name, re in pairs(pcre) do
         -- skip an object with null in a string* field
         if obj[field_name] == nil then return false end
-        assert(rex ~= nil, 'we should not pass over :compile() ' ..
-            'with a query contains PCRE matching when there are '..
-            'no lrexlib-pcre (rex_pcre) module present')
-        local flags = rex.flags()
-        -- emulate behaviour of (?i) on libpcre (libpcre2 supports it)
-        local cfg = 0
-        if not is_pcre2 then
-            local cnt
-            re, cnt = re:gsub('^%(%?i%)', '')
-            if cnt > 0 then
-                cfg = bit.bor(cfg, flags.CASELESS)
-            end
-        end
-        -- enable UTF-8
-        if is_pcre2 then
-            cfg = bit.bor(cfg, flags.UTF)
-            cfg = bit.bor(cfg, flags.UCP)
+        if type(re) == 'table' then
+            local match = match_using_re(obj[field_name], re)
+            if not match then return false end
         else
-            cfg = bit.bor(cfg, flags.UTF8)
-            cfg = bit.bor(cfg, flags.UCP)
-        end
-        -- XXX: compile re once
-        local re = rex.new(re, cfg)
-        if not re:match(obj[field_name]) then
-            return false
+            local flags = rex.flags()
+            -- emulate behaviour of (?i) on libpcre (libpcre2 supports it)
+            local cfg = 0
+            if not is_pcre2 then
+                local cnt
+                re, cnt = re:gsub('^%(%?i%)', '')
+                if cnt > 0 then
+                    cfg = bit.bor(cfg, flags.CASELESS)
+                end
+            end
+            -- enable UTF-8
+            if is_pcre2 then
+                cfg = bit.bor(cfg, flags.UTF)
+                cfg = bit.bor(cfg, flags.UCP)
+            else
+                cfg = bit.bor(cfg, flags.UTF8)
+                cfg = bit.bor(cfg, flags.UCP)
+            end
+            -- XXX: compile re once
+            local re = rex.new(re, cfg)
+            if not re:match(obj[field_name]) then
+                return false
+            end
         end
     end
 
@@ -874,7 +838,7 @@ end
 --- `offset` arqument of the GraphQL query),
 --- * `resulting_object_cnt_max` (number),
 --- * `fetched_object_cnt_max` (number),
---- * `resolveField` (function) for subrequests, see @{tarantool_graphql.new}.
+--- * `resolveField` (function) for subrequests, see @{impl.new}.
 ---
 --- @return nil
 ---
@@ -950,23 +914,6 @@ local function process_tuple(state, tuple, opts)
     return true
 end
 
---- Get schema name by a collection name.
----
---- @tparam table self data accessor instance
----
---- @tparam string collection_name
----
---- @treturn string `schema_name`
-local function get_schema_name(self, collection_name)
-    local collection = self.collections[collection_name]
-    assert(collection ~= nil,
-        ('cannot find the collection "%s"'):format(collection_name))
-    local schema_name = collection.schema_name
-    assert(type(schema_name) == 'string',
-        'schema_name must be a string, got ' ..
-        type(schema_name))
-    return schema_name
-end
-
 --- Call one of accessor function: `update_tuple` or `delete_tuple` for each
 --- selected object.
 ---
@@ -988,7 +935,8 @@ local function perform_primary_key_operation(self, collection_name,
         schema_name, selected, operation, ...)
     check(operation, 'operation', 'string')
 
-    local _, primary_index_meta = get_primary_index_meta(self, collection_name)
+    local _, primary_index_meta = db_schema_helpers.get_primary_index_meta(
+        self, collection_name)
 
     local new_objects = {}
 
@@ -1019,7 +967,7 @@ end
 --- @tparam string collection_name name of collection to perform select
 ---
 --- @tparam table from collection and connection names we arrive from/by as
---- defined in the `tarantool_graphql.new` function description
+--- defined in the `impl.new` function description
 ---
 --- @tparam table filter subset of object fields with values by which we want
 --- to find full object(s)
@@ -1182,7 +1130,7 @@ local function insert_internal(self, collection_name, from, filter, args,
         extra)
     check(from.collection_name, 'from.collection_name', 'nil')
 
     -- convert object -> tuple (set default values from a schema)
-    local schema_name = get_schema_name(self, collection_name)
+    local schema_name = db_schema_helpers.get_schema_name(self, collection_name)
     local default_flatten_object = self.default_flatten_object[schema_name]
     assert(default_flatten_object ~= nil,
         ('cannot find default_flatten_object ' ..
@@ -1224,7 +1172,7 @@ local function update_internal(self, collection_name, extra, selected)
     assert(next(extra.extra_args, next(extra.extra_args)) == nil, err_msg)
 
     -- convert xobject -> update statements
-    local schema_name = get_schema_name(self, collection_name)
+    local schema_name = db_schema_helpers.get_schema_name(self, collection_name)
     local default_xflatten = self.default_xflatten[schema_name]
     assert(default_xflatten ~= nil,
         ('cannot find default_xflatten ' ..
@@ -1240,10 +1188,16 @@ end
 
 --- Delete an object.
 ---
---- Parameters are the same as for @{select_update}.
+--- Corresponding parameters are the same as for @{select_internal}.
+---
+--- @tparam table self
+---
+--- @tparam string collection_name
 ---
 --- @tparam table extra `extra.extra_args.delete` is used
 ---
+--- @tparam table selected objects to delete
+---
 --- @treturn table `new_objects` list of deleted objects (in the order of the
 --- `selected` parameter)
 local function delete_internal(self, collection_name, extra, selected)
@@ -1253,7 +1207,7 @@ local function delete_internal(self, collection_name, extra, selected)
         'arguments'
     assert(next(extra.extra_args, next(extra.extra_args)) == nil, err_msg)
 
-    local schema_name = get_schema_name(self, collection_name)
+    local schema_name = db_schema_helpers.get_schema_name(self, collection_name)
 
     return perform_primary_key_operation(self, collection_name, schema_name,
         selected, 'delete_tuple')
@@ -1312,110 +1266,6 @@ local function init_qcontext(accessor, qcontext)
         settings.timeout_ms * 1000 * 1000
 end
 
---- Get an avro-schema for a primary key by a collection name.
----
---- @tparam table self accessor_general instance
----
---- @tparam string collection_name name of a collection
----
---- @treturn string `offset_type` is a just string in case of scalar primary
---- key (and, then, offset) type
----
---- @treturn table `offset_type` is a record in case of compound (multi-part)
---- primary key
-local function get_primary_key_type(self, collection_name)
-    -- get name of field of primary key
-    local _, index_meta = get_primary_index_meta(
-        self, collection_name)
-
-    local collection = self.collections[collection_name]
-    local schema = self.schemas[collection.schema_name]
-
-    local offset_fields = {}
-
-    for _, field_name in ipairs(index_meta.fields) do
-        local field_type
-        for _, field in ipairs(schema.fields) do
-            if field.name == field_name then
-                field_type = field.type
-            end
-        end
-        assert(field_type ~= nil,
-            ('cannot find type for primary index field "%s" ' ..
-            'for collection "%s"'):format(field_name,
-            collection_name))
-        assert(type(field_type) == 'string',
-            'field type must be a string, got ' ..
-            type(field_type))
-        offset_fields[#offset_fields + 1] = {
-            name = field_name,
-            type = field_type,
-        }
-    end
-
-    local offset_type
-    assert(#offset_fields > 0,
-        'offset must contain at least one field')
-    if #offset_fields == 1 then
-        -- use a scalar type
-        offset_type = offset_fields[1].type
-    else
-        -- construct an input type
-        offset_type = {
-            name = collection_name .. '_offset',
-            type = 'record',
-            fields = offset_fields,
-        }
-    end
-
-    return offset_type
-end
-
--- XXX: add string fields of a nested record / 1:1 connection to
--- get_pcre_argument_type
-
---- Get an avro-schema for a pcre argument by a collection name.
----
---- Note: it is called from `list_args`, so applicable only for lists:
---- top-level objects and 1:N connections.
----
---- @tparam table self accessor_general instance
----
---- @tparam string collection_name name of a collection
----
---- @treturn table `pcre_type` is a record with fields per string/string* field
---- of an object of the collection
-local function get_pcre_argument_type(self, collection_name)
-    local collection = self.collections[collection_name]
-    assert(collection ~= nil, 'cannot found collection ' ..
-        tostring(collection_name))
-    local schema = self.schemas[collection.schema_name]
-    assert(schema ~= nil, 'cannot found schema ' ..
-        tostring(collection.schema_name))
-
-    assert(schema.type == 'record',
-        'top-level object expected to be a record, got ' ..
-        tostring(schema.type))
-
-    local string_fields = {}
-
-    for _, field in ipairs(schema.fields) do
-        if field.type == 'string' or field.type == 'string*' then
-            local field = table.copy(field)
-            field.type = avro_helpers.make_avro_type_nullable(
-                field.type, {raise_on_nullable = false})
-            table.insert(string_fields, field)
-        end
-    end
-
-    local pcre_type = {
-        name = collection_name .. '_pcre',
-        type = 'record',
-        fields = string_fields,
-    }
-    return pcre_type
-end
-
 --- Create default unflatten/flatten/xflatten functions, that can be called
 --- from funcs.unflatten_tuple/funcs.flatten_object/funcs.xflatten when an
 --- additional pre/postprocessing is not needed.
@@ -1470,114 +1320,6 @@ local function gen_default_object_tuple_map_funcs(models)
     }
 end
 
---- List of avro-schema fields to use as arguments of a collection field and
---- 1:N connection field.
----
---- @tparam table self the data accessor instance
----
---- @tparam string collection_name name of collection to create the fields
----
---- @treturn table list of avro-schema fields
-local function list_args(self, collection_name)
-    local offset_type = get_primary_key_type(self, collection_name)
-
-    -- add `pcre` argument only if lrexlib-pcre was found
-    local pcre_field
-    if rex ~= nil then
-        local pcre_type = get_pcre_argument_type(self, collection_name)
-        pcre_field = {name = 'pcre', type = pcre_type}
-    end
-
-    return {
-        {name = 'limit', type = 'int'},
-        {name = 'offset', type = offset_type},
-        -- {name = 'filter', type = ...},
-        pcre_field,
-    }
-end
-
---- List of avro-schema fields to use as extra arguments of a collection /
---- a connection field.
----
---- Mutation arguments (insert, update, delete) are generated here.
----
---- @tparam table self the data accessor instance
----
---- @tparam string collection_name name of collection to create the fields
----
---- @treturn table list of avro-schema fields
----
---- @treturn table map with flags to describe where generated arguments should
---- be used; the format is the following:
----
----     {
----         <field name> = {
----             add_to_mutations_only = <boolean>,
----             add_to_top_fields_only = <boolean>,
----         },
----         ...
----     }
-local function extra_args(self, collection_name)
-    if not self.settings.enable_mutations then
-        return {}, {}
-    end
-
-    local collection = self.collections[collection_name]
-    local schema_name = collection.schema_name
-
-    local schema_insert = table.copy(self.schemas[schema_name])
-    schema_insert.name = collection_name .. '_insert'
-
-    local _, primary_index_meta = get_primary_index_meta(self,
-        collection_name)
-
-    local schema_update = {
-        name = collection_name .. '_update',
-        type = 'record',
-        fields = {},
-    }
-    -- add all fields except ones whose are part of the primary key
-    for _, field in ipairs(self.schemas[schema_name].fields) do
-        assert(field.name ~= nil, 'field.name is nil')
-        local is_field_part_of_primary_key = false
-        for _, pk_field_name in ipairs(primary_index_meta.fields) do
-            if field.name == pk_field_name then
-                is_field_part_of_primary_key = true
-                break
-            end
-        end
-
-        if not is_field_part_of_primary_key then
-            local field = table.copy(field)
-            field.type = avro_helpers.make_avro_type_nullable(
-                field.type)
-            table.insert(schema_update.fields, field)
-        end
-    end
-
-    local schema_delete = 'boolean'
-
-    return {
-        {name = 'insert', type = schema_insert},
-        {name = 'update', type = schema_update},
-        {name = 'delete', type = schema_delete},
-    }, {
-        insert = {
-            add_to_mutations_only = true,
-            add_to_top_fields_only = true,
-        },
-        update = {
-            add_to_mutations_only = true,
-            add_to_top_fields_only = false,
-        },
-        delete = {
-            add_to_mutations_only = true,
-            add_to_top_fields_only = false,
-        },
-    }
-end
-
-
 --- Create a new data accessor.
 ---
 --- Provided `funcs` argument determines certain functions for retrieving
@@ -1602,7 +1344,7 @@ end
 --- and `true` for avro-schema-3*)_.
 ---
 --- For examples of `opts.schemas` and `opts.collections` consider the
---- @{tarantool_graphql.new} function description.
+--- @{impl.new} function description.
 ---
 --- Example of `opts.service_fields` item:
 ---
@@ -1641,8 +1383,8 @@ end
 --- functions description.
 ---
 --- @treturn table data accessor instance, a table with the two methods
---- (`select` and `arguments`) as described in the @{tarantool_graphql.new}
---- function description.
+--- (`select` and `arguments`) as described in the @{impl.new} function
+--- description.
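A minimal usage sketch (the option names follow the doc comment above; the
concrete `funcs` callbacks are normally supplied by `accessor_space.new` /
`accessor_shard.new`, so treat this as an assumption-laden illustration, not
the canonical API):

```lua
-- Hypothetical sketch: how accessor_space / accessor_shard wire things up.
local accessor = accessor_general.new({
    schemas = schemas,               -- map: schema name -> avro-schema
    collections = collections,       -- map: collection name -> collection
    service_fields = service_fields, -- map: schema name -> service fields
    indexes = indexes,               -- map: collection name -> indexes meta
}, funcs) -- storage-specific callbacks (select_tuple, insert_tuple, ...)

-- The instance is then used by resolve functions roughly as:
-- local objects = accessor:select(parent, collection_name, from,
--     filter, args, extra)
```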
 function accessor_general.new(opts, funcs)
     assert(type(opts) == 'table',
         'opts must be a table, got ' .. type(opts))
@@ -1751,8 +1493,6 @@ function accessor_general.new(opts, funcs)
 
                 return selected
             end,
-            list_args = list_args,
-            extra_args = extra_args,
         }
     })
 end
diff --git a/graphql/avro_helpers.lua b/graphql/avro_helpers.lua
index c481714..5dd9919 100644
--- a/graphql/avro_helpers.lua
+++ b/graphql/avro_helpers.lua
@@ -1,8 +1,11 @@
---- The module us collection of helpers to simplify avro-schema related tasks.
+--- The module is a collection of helpers to simplify avro-schema related tasks.
 
 local json = require('json')
 local avro_schema = require('avro_schema')
 
+local utils = require('graphql.utils')
+local check = utils.check
+
 local avro_helpers = {}
 
 --- The function converts avro type to the corresponding nullable type in
@@ -25,7 +28,7 @@ local avro_helpers = {}
 ---
 --- * `raise_on_nullable` (boolean) raise an error on nullable type
 ---
---- @result `result` (string or table) nullable avro type
+--- @return `result` (string or table) nullable avro type
 function avro_helpers.make_avro_type_nullable(avro, opts)
     assert(avro ~= nil, "avro must not be nil")
     local opts = opts or {}
@@ -66,7 +69,8 @@ function avro_helpers.make_avro_type_nullable(avro, opts)
         end
         return avro
     elseif value_type == 'table' and #avro == 0 then
-        return avro_helpers.make_avro_type_nullable(avro.type, opts)
+        avro.type = avro_helpers.make_avro_type_nullable(avro.type, opts)
+        return avro
     end
 
     error("avro should be a string or a table, got " .. value_type)
@@ -83,4 +87,156 @@ function avro_helpers.major_avro_schema_version()
     return model.get_types == nil and 2 or 3
 end
 
+function avro_helpers.is_scalar_type(avro_schema_type)
+    check(avro_schema_type, 'avro_schema_type', 'string')
+
+    local scalar_types = {
+        ['int'] = true,
+        ['int*'] = true,
+        ['long'] = true,
+        ['long*'] = true,
+        ['float'] = true,
+        ['float*'] = true,
+        ['double'] = true,
+        ['double*'] = true,
+        ['boolean'] = true,
+        ['boolean*'] = true,
+        ['string'] = true,
+        ['string*'] = true,
+        ['null'] = true,
+    }
+
+    return scalar_types[avro_schema_type] or false
+end
+
+function avro_helpers.is_compound_type(avro_schema_type)
+    check(avro_schema_type, 'avro_schema_type', 'string')
+
+    local compound_types = {
+        ['record'] = true,
+        ['record*'] = true,
+        ['array'] = true,
+        ['array*'] = true,
+        ['map'] = true,
+        ['map*'] = true,
+        ['union'] = true,
+    }
+
+    return compound_types[avro_schema_type] or false
+end
+
+--- Get type of an avro-schema.
+---
+--- @param avro_schema (table or string) input avro-schema
+---
+--- @tparam[opt] table opts the following options:
+---
+--- * allow_references (boolean)
+---
+--- @treturn string `avro_t` type of the avro-schema
+---
+--- @treturn boolean `is_ref` whether the avro-schema is reference to another
+--- avro-schema type
+function avro_helpers.avro_type(avro_schema, opts)
+    local opts = opts or {}
+    local allow_references = opts.allow_references or false
+
+    if type(avro_schema) == 'table' then
+        if utils.is_array(avro_schema) then
+            return 'union', false
+        elseif avro_helpers.is_compound_type(avro_schema.type) then
+            return avro_schema.type, false
+        elseif allow_references then
+            return avro_schema, true
+        end
+    elseif type(avro_schema) == 'string' then
+        if avro_helpers.is_scalar_type(avro_schema) then
+            return avro_schema, false
+        elseif allow_references then
+            return avro_schema, true
+        end
+    end
+
+    error('unrecognized avro-schema type: ' .. json.encode(avro_schema))
+end
+
+--- Expand avro-schema references.
+---
+--- @param avro_schema (table or string) input avro-schema
+---
+--- @tparam[opt] table opts the following options:
+---
+--- * definitions (table) processed avro-schemas to expand further references
+---
+--- @return generated expanded avro-schema
+function avro_helpers.expand_references(avro_schema, opts)
+    local opts = opts or {}
+    local definitions = opts.definitions or {}
+
+    local avro_t, is_ref = avro_helpers.avro_type(avro_schema,
+        {allow_references = true})
+
+    if is_ref then
+        assert(definitions[avro_t] ~= nil,
+            ('undefined reference: %s'):format(avro_t))
+        return definitions[avro_t]
+    elseif avro_t == 'union' then
+        local res = {}
+        for _, child in ipairs(avro_schema) do
+            table.insert(res, avro_helpers.expand_references(child,
+                {definitions = definitions}))
+        end
+        return res
+    elseif avro_t == 'record' or avro_t == 'record*' then
+        local res = table.copy(avro_schema)
+        res.fields = {}
+
+        local res_nonnull
+        local res_nullable
+        if avro_t == 'record' then
+            res_nonnull = res
+            res_nullable = table.copy(res)
+            res_nullable.type = 'record*'
+            res_nullable.fields = res.fields
+        else
+            res_nonnull = table.copy(res)
+            res_nonnull.type = 'record'
+            res_nonnull.fields = res.fields
+            res_nullable = res
+        end
+
+        -- Saving the type before traversing deeper allows using a reference
+        -- to it inside (our avro-schema implementation allows that for
+        -- nullable fields, union, array and map).
+        local name = avro_schema.name
+        assert(definitions[name] == nil and definitions[name .. '*'] == nil,
+            ('multiple definitions of %s'):format(name))
+        definitions[name] = res_nonnull
+        definitions[name .. '*'] = res_nullable
+
+        for _, field in ipairs(avro_schema.fields) do
+            local field = table.copy(field)
+            field.type = avro_helpers.expand_references(field.type,
+                {definitions = definitions})
+            table.insert(res.fields, field)
+        end
+
+        return res
+    elseif avro_t == 'array' or avro_t == 'array*' then
+        local res = table.copy(avro_schema)
+        res.items = avro_helpers.expand_references(avro_schema.items,
+            {definitions = definitions})
+        return res
+    elseif avro_t == 'map' or avro_t == 'map*' then
+        local res = table.copy(avro_schema)
+        res.values = avro_helpers.expand_references(avro_schema.values,
+            {definitions = definitions})
+        return res
+    elseif avro_helpers.is_scalar_type(avro_t) then
+        return avro_schema
+    end
+
+    error('unrecognized avro-schema type: ' .. json.encode(avro_schema))
+end
+
 return avro_helpers
diff --git a/graphql/config_complement.lua b/graphql/config_complement.lua
index 2c89e31..025decc 100644
--- a/graphql/config_complement.lua
+++ b/graphql/config_complement.lua
@@ -203,7 +203,7 @@ end
 --- @tparam table connections cfg.connections - user-defined collections
 --- @tparam table indexes cfg.indexes - {[collection_name] = collection_indexes, ...}
 --- @treturn table `collections` is complemented collections
-local function complement_connections(collections, connections, indexes, schemas)
+local function complement_connections(collections, connections, indexes)
     if connections == nil then
         return collections
     end
diff --git a/graphql/convert_schema/arguments.lua b/graphql/convert_schema/arguments.lua
new file mode 100644
index 0000000..5ac8da7
--- /dev/null
+++ b/graphql/convert_schema/arguments.lua
@@ -0,0 +1,162 @@
+--- Convert avro-schema fields to GraphQL arguments (scalars and InputObjects).
+
+local json = require('json')
+local core_types = require('graphql.core.types')
+local avro_helpers = require('graphql.avro_helpers')
+local scalar_types = require('graphql.convert_schema.scalar_types')
+local helpers = require('graphql.convert_schema.helpers')
+local union = require('graphql.convert_schema.union')
+
+local utils = require('graphql.utils')
+local check = utils.check
+
+local arguments = {}
+
+--- Convert avro-schema type to GraphQL scalar or InputObject.
+---
+--- @param avro_schema (table or string) avro-schema with expanded references
+---
+--- @tparam table opts the following options:
+---
+--- * type_name (string; optional) name for GraphQL type instead of one from
+---   avro-schema full name (considered for record / record*)
+---
+--- * context (table) avro-schema processing context:
+---
+---   - field_name (string; optional) name of currently parsed field
+---
+---   - path (table) point where we are in avro-schema
+---
+--- @treturn table GraphQL scalar or InputObject
+local function convert(avro_schema, opts)
+    check(avro_schema, 'avro_schema', 'table', 'string')
+    check(opts, 'opts', 'table', 'nil')
+
+    local opts = opts or {}
+    local type_name = opts.type_name
+    local context = opts.context
+
+    check(type_name, 'type_name', 'string', 'nil')
+    check(context, 'context', 'table')
+
+    local avro_t = avro_helpers.avro_type(avro_schema)
+
+    if avro_t == 'record' or avro_t == 'record*' then
+        if type(avro_schema.name) ~= 'string' then -- avoid extra json.encode()
+            assert(type(avro_schema.name) == 'string',
+                ('avro_schema.name must be a string, got %s (avro_schema %s)')
+                :format(type(avro_schema.name), json.encode(avro_schema)))
+        end
+        if type(avro_schema.fields) ~= 'table' then -- avoid extra json.encode()
+            assert(type(avro_schema.fields) == 'table',
+                ('avro_schema.fields must be a table, got %s (avro_schema %s)')
+                :format(type(avro_schema.fields), json.encode(avro_schema)))
+        end
+
+        table.insert(context.path, type_name or avro_schema.name)
+        local fields = {}
+        for _, field in ipairs(avro_schema.fields) do
+            if type(field.name) ~= 'string' then -- avoid extra json.encode()
+                assert(type(field.name) == 'string',
+                    ('field.name must be a string, got %s (schema %s)')
+                    :format(type(field.name), json.encode(field)))
+            end
+
+            table.insert(context.path, field.name)
+            context.field_name = field.name
+            local gql_field_type = convert(field.type, {context = context})
+            context.field_name = nil
+            table.remove(context.path, #context.path)
+
+            fields[field.name] = {
+                name = field.name,
+                kind = gql_field_type,
+            }
+        end
+        table.remove(context.path, #context.path)
+
+        local res = core_types.inputObject({
+            name = type_name or helpers.full_name(avro_schema.name, context),
+            description = 'generated from avro-schema for ' ..
+                avro_schema.name,
+            fields = fields,
+        })
+
+        return avro_t == 'record' and core_types.nonNull(res) or res
+    elseif avro_t == 'enum' then
+        error('enums are not implemented yet') -- XXX
+    elseif avro_t == 'array' or avro_t == 'array*' then
+        local gql_items_type = convert(avro_schema.items, {context = context})
+        local res = core_types.list(gql_items_type)
+        return avro_t == 'array' and core_types.nonNull(res) or res
+    elseif avro_t == 'map' or avro_t == 'map*' then
+        local gql_values_type = convert(avro_schema.values, {context = context})
+
+        local res = core_types.inputMap({
+            name = helpers.full_name('InputMap', context),
+            values = gql_values_type,
+        })
+        return avro_t == 'map' and core_types.nonNull(res) or res
+    elseif avro_t == 'union' then
+        return union.convert(avro_schema, {
+            convert = convert,
+            gen_argument = true,
+            context = context,
+        })
+    else
+        local res = scalar_types.convert(avro_schema, {raise = false})
+        if res == nil then
+            error('unrecognized avro-schema type: ' ..
+                json.encode(avro_schema))
+        end
+        return res
+    end
+end
+
+--- Convert each field of an avro-schema record to a scalar graphql type or an
+--- input object.
+---
+--- @tparam table fields list of fields in the avro-schema record `fields`
+--- format
+---
+--- @tparam string root_name topmost part of namespace
+---
+--- @treturn table `args` -- map with field names as keys and graphql types as
+--- values
+function arguments.convert_record_fields(fields, root_name)
+    check(fields, 'fields', 'table')
+
+    local context = {
+        field_name = nil,
+        path = {'$arguments', root_name},
+    }
+
+    local args = {}
+    for _, field in ipairs(fields) do
+        if type(field.name) ~= 'string' then -- avoid extra json.encode()
+            assert(type(field.name) == 'string',
+                ('field.name must be a string, got %s (schema %s)')
+                :format(type(field.name), json.encode(field)))
+        end
+
+        -- We preserve a type name of an uppermost InputObject that starts from
+        -- the collection name to allow using it for variables.
+        local avro_t = avro_helpers.avro_type(field.type)
+        local type_name
+        if (avro_t == 'record' or avro_t == 'record*') and
+                field.type.name:startswith(root_name) then
+            type_name = field.type.name
+        end
+
+        table.insert(context.path, field.name)
+        context.field_name = field.name
+        args[field.name] = convert(field.type, {
+            context = context,
+            type_name = type_name,
+        })
+        context.field_name = nil
+        table.remove(context.path, #context.path)
+    end
+    return args
+end
+
+return arguments
diff --git a/graphql/convert_schema/core_types_helpers.lua b/graphql/convert_schema/core_types_helpers.lua
new file mode 100644
index 0000000..8f461a7
--- /dev/null
+++ b/graphql/convert_schema/core_types_helpers.lua
@@ -0,0 +1,26 @@
+--- Collection of helpers to work with GraphQL types.
+
+local utils = require('graphql.utils')
+local check = utils.check
+
+local core_types_helpers = {}
+
+function core_types_helpers.nullable(gql_class)
+    check(gql_class, 'gql_class', 'table')
+
+    if gql_class.__type ~= 'NonNull' then return gql_class end
+
+    assert(gql_class.ofType ~= nil, 'gql_class.ofType must not be nil')
+    return core_types_helpers.nullable(gql_class.ofType)
+end
+
+function core_types_helpers.raw_gql_type(gql_class)
+    check(gql_class, 'gql_class', 'table')
+
+    if gql_class.ofType == nil then return gql_class end
+
+    assert(gql_class.ofType ~= nil, 'gql_class.ofType must not be nil')
+    return core_types_helpers.raw_gql_type(gql_class.ofType)
+end
+
+return core_types_helpers
diff --git a/graphql/convert_schema/helpers.lua b/graphql/convert_schema/helpers.lua
new file mode 100644
index 0000000..adba0e4
--- /dev/null
+++ b/graphql/convert_schema/helpers.lua
@@ -0,0 +1,36 @@
+local utils = require('graphql.utils')
+local check = utils.check
+
+local helpers = {}
+
+--- Get dot-separated name prepended with namespace.
+---
+--- @tparam string name base name
+---
+--- @tparam table context avro-schema parsing context to get namespace from
+---
+--- @treturn string full name
+function helpers.full_name(name, context)
+    check(name, 'name', 'string')
+    check(context, 'context', 'table')
+    check(context.path, 'context.path', 'table')
+
+    if next(context.path) == nil then
+        return name
+    end
+
+    local namespace = table.concat(context.path, '.')
+    return namespace .. '.' .. name
+end
+
+--- Get last part of dot-separated name.
+---
+--- @tparam string name full name
+---
+--- @treturn string base name
+function helpers.base_name(name)
+    check(name, 'name', 'string')
+    -- extra parentheses drop gsub's second return value (the match count)
+    return (name:gsub('^.*%.', ''))
+end
+
+return helpers
diff --git a/graphql/convert_schema/init.lua b/graphql/convert_schema/init.lua
new file mode 100644
index 0000000..a416ada
--- /dev/null
+++ b/graphql/convert_schema/init.lua
@@ -0,0 +1,7 @@
+local schema = require('graphql.convert_schema.schema')
+
+local convert_schema = {}
+
+convert_schema.convert = schema.convert
+
+return convert_schema
diff --git a/graphql/convert_schema/resolve.lua b/graphql/convert_schema/resolve.lua
new file mode 100644
index 0000000..d9c8be3
--- /dev/null
+++ b/graphql/convert_schema/resolve.lua
@@ -0,0 +1,223 @@
+--- Generate resolve functions to connect graphql-lua to accessor_general.
+
+local json = require('json')
+local yaml = require('yaml')
+local core_types_helpers = require('graphql.convert_schema.core_types_helpers')
+
+local utils = require('graphql.utils')
+local check = utils.check
+
+local resolve = {}
+
+local function gen_from_parameter(collection_name, parent, connection)
+    local names = {}
+    local values = {}
+
+    for _, part in ipairs(connection.parts) do
+        check(part.source_field, 'part.source_field', 'string')
+        check(part.destination_field, 'part.destination_field', 'string')
+
+        names[#names + 1] = part.destination_field
+        values[#values + 1] = parent[part.source_field]
+    end
+
+    return {
+        collection_name = collection_name,
+        connection_name = connection.name,
+        destination_args_names = names,
+        destination_args_values = values,
+    }
+end
+
+-- Check the FULL MATCH constraint before requesting destination object(s).
+-- Note that connection key parts can be a prefix of index key parts. A zero
+-- parts count is considered ok by this check.
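To make the constraint concrete, a short sketch with hypothetical field names
(the function itself follows):

```lua
-- Hypothetical two-part connection:
local parts = {
    {source_field = 'user_id', destination_field = 'id'},
    {source_field = 'user_domain', destination_field = 'domain'},
}
-- are_all_parts_null({user_id = box.NULL, user_domain = box.NULL}, parts)
--     -> true (no index lookup will be performed)
-- are_all_parts_null({user_id = 42, user_domain = 'tarantool.org'}, parts)
--     -> false (a regular index lookup follows)
-- are_all_parts_null({user_id = 42, user_domain = box.NULL}, parts)
--     -> error: FULL MATCH constraint failed
```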
+local function are_all_parts_null(parent, connection_parts)
+    local are_all_parts_null = true
+    local are_all_parts_non_null = true
+    for _, part in ipairs(connection_parts) do
+        local value = parent[part.source_field]
+
+        if value ~= nil then -- nil and box.NULL are both treated as null here
+            are_all_parts_null = false
+        else
+            are_all_parts_non_null = false
+        end
+    end
+
+    local ok = are_all_parts_null or are_all_parts_non_null
+    if not ok then -- avoid extra json.encode()
+        assert(ok,
+            'FULL MATCH constraint failed: connection ' ..
+            'key parts must be all non-nulls or all nulls; ' ..
+            'object: ' .. json.encode(parent))
+    end
+
+    return are_all_parts_null
+end
+
+local function separate_args_instance(args_instance, arguments)
+    local object_args_instance = {}
+    local list_args_instance = {}
+    local extra_args_instance = {}
+
+    for k, v in pairs(args_instance) do
+        if arguments.extra[k] ~= nil then
+            extra_args_instance[k] = v
+        elseif arguments.list[k] ~= nil then
+            list_args_instance[k] = v
+        elseif arguments.all[k] ~= nil then
+            object_args_instance[k] = v
+        else
+            error(('cannot find "%s" field ("%s" value) ' ..
+                'within allowed fields'):format(tostring(k),
+                json.encode(v)))
+        end
+    end
+
+    return {
+        object = object_args_instance,
+        list = list_args_instance,
+        extra = extra_args_instance,
+    }
+end
+
+function resolve.gen_resolve_function(collection_name, connection,
+        destination_type, arguments, accessor)
+    local c = connection
+    local raw_destination_type = core_types_helpers.raw_gql_type(
+        destination_type)
+
+    -- capture `raw_destination_type`
+    local function genResolveField(info)
+        return function(field_name, object, filter, opts)
+            assert(raw_destination_type.fields[field_name],
+                ('performing a subrequest by the non-existent ' ..
+                'field "%s" of the collection "%s"'):format(field_name,
+                c.destination_collection))
+            return raw_destination_type.fields[field_name].resolve(
+                object, filter, info, opts)
+        end
+    end
+
+    -- captures c.{type, parts, name, destination_collection}, collection_name,
+    -- genResolveField, arguments, accessor
+    return function(parent, args_instance, info, opts)
+        local opts = opts or {}
+        assert(type(opts) == 'table',
+            'opts must be nil or a table, got ' .. type(opts))
+        local dont_force_nullability =
+            opts.dont_force_nullability or false
+        assert(type(dont_force_nullability) == 'boolean',
+            'opts.dont_force_nullability ' ..
+            'must be nil or a boolean, got ' ..
+            type(dont_force_nullability))
+
+        local from = gen_from_parameter(collection_name, parent, c)
+
+        -- Avoid non-needed index lookup on a destination collection when
+        -- all connection parts are null:
+        -- * return null for 1:1* connection;
+        -- * return {} for 1:N connection (except the case when source
+        --   collection is the query or the mutation pseudo-collection).
+        if collection_name ~= nil and are_all_parts_null(parent, c.parts) then
+            if c.type ~= '1:1*' and c.type ~= '1:N' then
+                -- `if` is to avoid extra json.encode
+                assert(c.type == '1:1*' or c.type == '1:N',
+                    ('only 1:1* or 1:N connections can have ' ..
+                    'all key parts null; parent is %s from ' ..
+                    'collection "%s"'):format(json.encode(parent),
+                    tostring(collection_name)))
+            end
+            return c.type == '1:N' and {} or nil
+        end
+
+        local resolveField = genResolveField(info)
+        local extra = {
+            qcontext = info.qcontext,
+            resolveField = resolveField, -- for subrequests
+            extra_args = {},
+        }
+
+        -- object_args_instance will be passed to 'filter'
+        -- list_args_instance will be passed to 'args'
+        -- extra_args_instance will be passed to 'extra.extra_args'
+        local arguments_instance = separate_args_instance(args_instance,
+            arguments)
+        extra.extra_args = arguments_instance.extra
+
+        local objs = accessor:select(parent,
+            c.destination_collection, from,
+            arguments_instance.object, arguments_instance.list, extra)
+        assert(type(objs) == 'table',
+            'objs list received from an accessor ' ..
+            'must be a table, got ' .. type(objs))
+        if c.type == '1:1' or c.type == '1:1*' then
+            -- we expect here exactly one object even for 1:1*
+            -- connections because we processed all-parts-are-null
+            -- situation above
+            assert(#objs == 1 or dont_force_nullability,
+                'expect one matching object, got ' ..
+                tostring(#objs))
+            return objs[1]
+        else -- c.type == '1:N'
+            return objs
+        end
+    end
+end
+
+function resolve.gen_resolve_function_multihead(collection_name, connection,
+        union_types, var_num_to_box_field_name, accessor)
+    local c = connection
+
+    local determinant_keys = utils.get_keys(c.variants[1].determinant)
+
+    local function resolve_variant(parent)
+        assert(utils.do_have_keys(parent, determinant_keys),
+            ('Parent object of union object doesn\'t have determinant ' ..
+            'fields which are necessary to determine which resolving ' ..
+            'variant should be used. Union parent object:\n"%s"\n' ..
+            'Determinant keys:\n"%s"'):
+            format(yaml.encode(parent), yaml.encode(determinant_keys)))
+
+        local var_idx
+        local res_var
+        for i, var in ipairs(c.variants) do
+            local is_match = utils.is_subtable(parent, var.determinant)
+            if is_match then
+                res_var = var
+                var_idx = i
+                break
+            end
+        end
+
+        local box_field_name = var_num_to_box_field_name[var_idx]
+
+        assert(res_var, ('Variant resolving failed. ' ..
+            'Parent object: "%s"\n'):format(yaml.encode(parent)))
+        return res_var, var_idx, box_field_name
+    end
+
+    return function(parent, _, info)
+        local v, variant_num, box_field_name = resolve_variant(parent)
+        local destination_type = union_types[variant_num]
+
+        local quazi_connection = {
+            type = c.type,
+            parts = v.parts,
+            name = c.name,
+            destination_collection = v.destination_collection,
+        }
+        -- XXX: generate a function for each variant at schema generation time
+        local result = resolve.gen_resolve_function(collection_name,
+            quazi_connection, destination_type, {}, accessor)(parent, {}, info)
+
+        -- This 'wrapping' is needed because we use 'select' on 'collection'
+        -- GraphQL type and the result of the resolve function must be in
+        -- {'collection_name': {result}} format to be avro-valid.
+        return {[box_field_name] = result}, destination_type
+    end
+end
+
+return resolve
diff --git a/graphql/convert_schema/scalar_types.lua b/graphql/convert_schema/scalar_types.lua
new file mode 100644
index 0000000..a78a48a
--- /dev/null
+++ b/graphql/convert_schema/scalar_types.lua
@@ -0,0 +1,47 @@
+--- Convert scalar avro-schema types to GraphQL types.
+
+local json = require('json')
+local core_types = require('graphql.core.types')
+local avro_helpers = require('graphql.avro_helpers')
+
+local utils = require('graphql.utils')
+local check = utils.check
+
+local scalar_types = {}
+
+function scalar_types.convert(avro_schema, opts)
+    local opts = opts or {}
+    check(opts, 'opts', 'table')
+    local raise = opts.raise or false
+    check(raise, 'raise', 'boolean')
+
+    -- named so that it does not shadow the `scalar_types` module table
+    local scalar_gql_types = {
+        ['int'] = core_types.int.nonNull,
+        ['int*'] = core_types.int,
+        ['long'] = core_types.long.nonNull,
+        ['long*'] = core_types.long,
+        ['float'] = core_types.float.nonNull,
+        ['float*'] = core_types.float,
+        ['double'] = core_types.double.nonNull,
+        ['double*'] = core_types.double,
+        ['boolean'] = core_types.boolean.nonNull,
+        ['boolean*'] = core_types.boolean,
+        ['string'] = core_types.string.nonNull,
+        ['string*'] = core_types.string,
+    }
+
+    local avro_t = avro_helpers.avro_type(avro_schema)
+    local graphql_type = scalar_gql_types[avro_t]
+    if graphql_type ~= nil then
+        return graphql_type
+    end
+
+    if raise then
+        error('unrecognized avro-schema scalar type: ' ..
+            json.encode(avro_schema))
+    end
+
+    return nil
+end
+
+return scalar_types
diff --git a/graphql/convert_schema/schema.lua b/graphql/convert_schema/schema.lua
new file mode 100644
index 0000000..3896d5a
--- /dev/null
+++ b/graphql/convert_schema/schema.lua
@@ -0,0 +1,334 @@
+--- Convert extended avro-schema (collections) to GraphQL schema.
+
+local core_types = require('graphql.core.types')
+local core_schema = require('graphql.core.schema')
+local core_types_helpers = require('graphql.convert_schema.core_types_helpers')
+local gen_arguments = require('graphql.gen_arguments')
+local arguments = require('graphql.convert_schema.arguments')
+local types = require('graphql.convert_schema.types')
+local avro_helpers = require('graphql.avro_helpers')
+
+local utils = require('graphql.utils')
+local check = utils.check
+
+local schema = {}
+
+--- Add extra arguments for collection / connection fields.
+---
+--- XXX: This function is written in a hacky way. It should go away when we
+--- rewrite argument / InputObject generation in the right way. The plan is
+--- the following:
+---
+--- * DONE: Move object_args to accessor_general (or move all *_args function
+---   into a separate module); skipping float / double / ... arguments should
+---   be done here.
+--- * TBD: generate per-connection arguments in avro-schema in some way?
+--- * DONE: Move avro-schema -> GraphQL arguments translating into its own
+---   module.
+--- * DONE: Support a sub-record arguments and others (union, array, ...).
+--- * TBD: Generate arguments for cartesian product of {1:1, 1:1*, 1:N, all} x
+---   {query, mutation, all} x {top-level, nested, all} x {collections}.
+--- * TBD: Use generated arguments in GraphQL types (schema) generation.
+---
+--- @tparam table state tarantool_graphql instance
+---
+--- @tparam table root_types generated by @{create_root_collection}
+---
+--- @return nothing
+local function add_extra_arguments(state, root_types)
+    for _, what in ipairs({'Query', 'Mutation'}) do
+        -- add extra arguments to top-level fields (collections)
+        for collection_name, field in pairs(root_types[what].fields) do
+            -- Prevent exposing an argument inserted, say, into the mutation
+            -- schema subtree to the query subtree (it is needed because we
+            -- use a booking table for arguments).
+            field.arguments = table.copy(field.arguments)
+
+            local extra_args = state.extra_arguments[collection_name]
+            local extra_args_meta = state.extra_arguments_meta[collection_name]
+
+            for arg_name, arg in pairs(extra_args) do
+                local meta = extra_args_meta[arg_name]
+                check(meta, 'meta', 'table')
+                local add_arg = what == 'Mutation' or
+                    not meta.add_to_mutations_only
+                if add_arg then
+                    field.arguments[arg_name] = arg
+                end
+            end
+
+            local parent_field = field
+
+            local collection = state.collections[collection_name]
+            for _, c in ipairs(collection.connections or {}) do
+                -- XXX: support multihead connections
+                if c.destination_collection then
+                    local collection_name = c.destination_collection
+                    local field = core_types_helpers.raw_gql_type(
+                        parent_field.kind).fields[c.name]
+                    local extra_args = state.extra_arguments[collection_name]
+                    local extra_args_meta =
+                        state.extra_arguments_meta[collection_name]
+
+                    for arg_name, arg in pairs(extra_args) do
+                        local meta = extra_args_meta[arg_name]
+                        check(meta, 'meta', 'table')
+                        local add_arg = not meta.add_to_top_fields_only and
+                            (what == 'Mutation' or
+                            not meta.add_to_mutations_only)
+                        if add_arg then
+                            field.arguments[arg_name] = arg
+                        end
+                    end
+                end
+            end
+        end
+    end
+end
+
+--- Create virtual root collections `query` and `mutation`, which have
+--- connections to every collection.
+---
+--- Actually, each GQL query starts its execution from the `query` or
+--- `mutation` collection. That is why it should contain connections to every
+--- collection.
+---
+--- @tparam table state dictionary which contains all information about the
+--- schema, arguments, types...
+local function create_root_collection(state)
+    local root_connections = {}
+    -- The fake connections have 1:N mechanics.
+    -- Create one connection for each collection.
+    for collection_name, collection in pairs(state.collections) do
+        table.insert(root_connections, {
+            parts = {},
+            name = collection_name,
+            destination_collection = collection_name,
+            type = "1:N"
+        })
+    end
+
+    local root_types = {}
+
+    for _, what in ipairs({'Query', 'Mutation'}) do
+        local root_schema = {
+            type = "record",
+            name = what,
+            -- The fake root has no fields.
+            fields = {}
+        }
+        local root_collection = {
+            name = nil, -- skip are_all_parts_null check, see resolve.lua
+            connections = root_connections
+        }
+
+        -- `convert` is designed to create GQL type corresponding to a real
+        -- schema and connections. However it also works with the fake schema.
+        -- Query/Mutation type must be the Object, so it cannot be nonNull.
+        root_types[what] = core_types_helpers.nullable(
+            types.convert(state, root_schema, {
+                collection = root_collection,
+            }))
+    end
+
+    add_extra_arguments(state, root_types)
+
+    state.schema = core_schema.create({
+        query = root_types['Query'],
+        mutation = root_types['Mutation'],
+    })
+end
+
+--- Execute a function for each connection of each collection (optionally
+--- filtered by connection type).
+---
+--- @tparam table state tarantool_graphql instance
+---
+--- @tparam[opt] table connection_types list of connection types on which to
+--- call `func`; nil/box.NULL means all connections w/o filtering
+---
+--- @tparam function func a function with the following parameters:
+---
+--- * source collection name (string);
+--- * connection (table).
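A usage sketch (it mirrors how `add_connection_arguments` below calls the
function defined next):

```lua
-- Collect 1:1 / 1:1* connection names per source collection (sketch):
local names = {}
for_each_connection(state, {'1:1', '1:1*'}, function(collection_name, c)
    names[collection_name] = names[collection_name] or {}
    table.insert(names[collection_name], c.name)
end)
```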
+local function for_each_connection(state, connection_types, func)
+    for collection_name, collection in pairs(state.collections) do
+        for _, c in ipairs(collection.connections or {}) do
+            if connection_types == nil or utils.value_in(c.type,
+                    connection_types) then
+                func(collection_name, c)
+            end
+        end
+    end
+end
+
+--- Add arguments corresponding to 1:1 and 1:1* connections (nested filters).
+---
+--- @tparam table state tarantool_graphql instance
+local function add_connection_arguments(state)
+    -- map destination collection to list of input objects
+    local input_objects = {}
+    -- map source collection and connection name to an input object
+    local lookup_input_objects = {}
+
+    -- create InputObjects for each 1:1 or 1:1* connection of each collection
+    for_each_connection(state, {'1:1', '1:1*'}, function(collection_name, c)
+        -- XXX: support multihead connections
+        if c.variants ~= nil then return end
+
+        local object = core_types.inputObject({
+            name = c.name,
+            description = ('generated from the connection "%s" ' ..
+                'of collection "%s" using collection "%s"'):format(
+                c.name, collection_name, c.destination_collection),
+            fields = state.object_arguments[c.destination_collection],
+        })
+
+        if input_objects[c.destination_collection] == nil then
+            input_objects[c.destination_collection] = {}
+        end
+        table.insert(input_objects[c.destination_collection], object)
+
+        if lookup_input_objects[collection_name] == nil then
+            lookup_input_objects[collection_name] = {}
+        end
+        lookup_input_objects[collection_name][c.name] = object
+    end)
+
+    -- update fields of collection arguments and input objects with other input
+    -- objects
+    for_each_connection(state, {'1:1', '1:1*'}, function(collection_name, c)
+        -- XXX: support multihead connections
+        if c.variants ~= nil then return end
+
+        local new_object = lookup_input_objects[collection_name][c.name]
+        -- collection arguments
+        local fields = state.object_arguments[collection_name]
+        assert(fields[c.name] == nil,
+            'we must not add an input object twice to the same collection ' ..
+            'arguments list')
+        fields[c.name] = new_object
+        -- input objects
+        for _, input_object in ipairs(input_objects[collection_name] or {}) do
+            local fields = input_object.fields
+            assert(fields[c.name] == nil,
+                'we must not add an input object twice to the same input ' ..
+                'object')
+            fields[c.name] = {
+                name = c.name,
+                kind = new_object,
+            }
+        end
+    end)
+end
+
+function schema.convert(state, cfg)
+    -- collection type is always record, so always non-null; we can lazily
+    -- evaluate non-null type from nullable type, but not vice versa, so we
+    -- collect nullable types here and evaluate non-null ones where needed
+    state.nullable_collection_types = utils.gen_booking_table({})
+
+    state.object_arguments = utils.gen_booking_table({})
+    state.list_arguments = utils.gen_booking_table({})
+    state.all_arguments = utils.gen_booking_table({})
+
+    -- A booking table is used here for one reason: inside a resolve function
+    -- we need to determine that a user-provided argument is an extra
+    -- argument. We capture extra_arguments[collection_name] into the resolve
+    -- function and must be sure it exists and will not be changed.
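For example, given the argument classes above, a hypothetical user-provided
argument set is split inside a resolve function (see `separate_args_instance`
in resolve.lua) roughly as follows:

```lua
-- args_instance = {limit = 10, user_id = 42, update = {...}}
-- with extra = {update = ...}, list = {limit = ..., offset = ...} and
-- all = {user_id = ..., limit = ..., offset = ...}, the split yields:
--     extra  = {update = {...}} -- goes to extra.extra_args
--     list   = {limit = 10}     -- goes to the accessor `args`
--     object = {user_id = 42}   -- goes to the accessor `filter`
```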
+ state.extra_arguments = utils.gen_booking_table({}) + state.extra_arguments_meta = {} + + local accessor = cfg.accessor + assert(accessor ~= nil, 'cfg.accessor must not be nil') + assert(accessor.select ~= nil, 'cfg.accessor.select must not be nil') + state.accessor = accessor + + assert(cfg.collections ~= nil, 'cfg.collections must not be nil') + local collections = table.copy(cfg.collections) + state.collections = collections + + -- add schemas with expanded references + cfg.e_schemas = {} + + -- Prepare types which represents: + -- - Avro schemas (collections) + -- - scalar field arguments (used to filter objects by value stored in it's + -- field) + -- - list arguments (offset, limit...) + for collection_name, collection in pairs(state.collections) do + -- add name field into each collection + collection.name = collection_name + check(collection.name, 'collection.name', 'string') + + assert(collection.schema_name ~= nil, + 'collection.schema_name must not be nil') + + local schema = cfg.schemas[collection.schema_name] + assert(schema ~= nil, ('cfg.schemas[%s] must not be nil'):format( + tostring(collection.schema_name))) + assert(schema.name == nil or schema.name == collection.schema_name, + ('top-level schema name does not match the name in ' .. + 'the schema itself: "%s" vs "%s"'):format(collection.schema_name, + schema.name)) + + assert(schema.type == 'record', + 'top-level schema must have record avro type, got ' .. + tostring(schema.type)) + + -- fill schema with expanded references + local e_schema = avro_helpers.expand_references(schema) + cfg.e_schemas[collection.schema_name] = e_schema + + -- recursively converts all avro types into GraphQL types in the given + -- schema + local collection_type = types.convert(state, e_schema, { + collection = collection, + type_name = collection_name, + }) + -- we utilize the fact that collection type is always non-null and + -- don't store this information; see comment above for + -- `nullable_collection_types` variable definition + assert(collection_type.__type == 'NonNull', + 'collection must always has non-null type') + state.nullable_collection_types[collection_name] = + core_types_helpers.nullable(collection_type) + + -- prepare arguments' types + local object_args_avro = gen_arguments.object_args(cfg, collection_name) + local list_args_avro = gen_arguments.list_args(cfg, collection_name) + local extra_args_opts = { + enable_mutations = accessor.settings.enable_mutations, + } + local extra_args_avro, extra_args_meta = gen_arguments.extra_args(cfg, + collection_name, extra_args_opts) + check(extra_args_meta, 'extra_args_meta', 'table') + + local object_args = arguments.convert_record_fields(object_args_avro, + collection_name) + local list_args = arguments.convert_record_fields(list_args_avro, + collection_name) + local extra_args = arguments.convert_record_fields(extra_args_avro, + collection_name) + + state.object_arguments[collection_name] = object_args + state.list_arguments[collection_name] = list_args + state.extra_arguments[collection_name] = extra_args + state.extra_arguments_meta[collection_name] = extra_args_meta + end + + add_connection_arguments(state) + + -- fill all_arguments with object_arguments + list_arguments + for collection_name, collection in pairs(state.collections) do + local object_args = state.object_arguments[collection_name] + local list_args = state.list_arguments[collection_name] + + local args = utils.merge_tables(object_args, list_args) + state.all_arguments[collection_name] = args + end + + -- create 
fake root for the `query` and the `mutation` collection + create_root_collection(state) +end + +return schema diff --git a/graphql/convert_schema/types.lua b/graphql/convert_schema/types.lua new file mode 100644 index 0000000..dd4484f --- /dev/null +++ b/graphql/convert_schema/types.lua @@ -0,0 +1,477 @@ +--- Convert avro-schema types to GraphQL types and set resolve functions. + +local json = require('json') +local core_types = require('graphql.core.types') +local avro_helpers = require('graphql.avro_helpers') +local scalar_types = require('graphql.convert_schema.scalar_types') +local resolve = require('graphql.convert_schema.resolve') +local helpers = require('graphql.convert_schema.helpers') +local union = require('graphql.convert_schema.union') + +local utils = require('graphql.utils') +local check = utils.check + +local types = {} + +--- Convert each field of an avro-schema to a graphql type. +--- +--- @tparam table state for read state.accessor and previously filled +--- state.nullable_collection_types +--- +--- @tparam table fields fields part from an avro-schema +--- +--- @tparam table context as described in @{types.convert}; not used here +--- explicitly, but `path` and `field_name` are *updated* and the `context` is +--- passed deeper within the @{types.convert} call +--- +--- @treturn table `res` -- map with type names as keys and graphql types as +--- values +local function convert_record_fields(state, fields, context) + local res = {} + for _, field in ipairs(fields) do + assert(type(field.name) == 'string', + ('field.name must be a string, got %s (schema %s)') + :format(type(field.name), json.encode(field))) + + table.insert(context.path, field.name) + context.field_name = field.name + res[field.name] = { + name = field.name, + kind = types.convert(state, field.type, {context = context}), + } + table.remove(context.path, #context.path) + context.field_name = nil + end + return res +end + +local function args_from_destination_collection(state, collection, + connection_type) + if connection_type == '1:1' then + return state.object_arguments[collection] + elseif connection_type == '1:1*' then + return state.object_arguments[collection] + elseif connection_type == '1:N' then + return state.all_arguments[collection] + else + error('unknown connection type: ' .. tostring(connection_type)) + end +end + +local function specify_destination_type(destination_type, connection_type) + if connection_type == '1:1' then + return core_types.nonNull(destination_type) + elseif connection_type == '1:1*' then + return destination_type + elseif connection_type == '1:N' then + return core_types.nonNull(core_types.list(core_types.nonNull( + destination_type))) + else + error('unknown connection type: ' .. tostring(connection_type)) + end +end + +--- The function 'boxes' given collection type. +--- +--- Why the 'boxing' of collection types is needed and how it is done is +--- described in comments to @{convert_multihead_connection}. 
+--- +--- @tparam table type_to_box GraphQL Object type (which represents a collection) +--- @tparam string connection_type of given collection (1:1, 1:1* or 1:N) +--- @tparam string type_to_box_name name of given 'type_to_box' (It can not +--- be taken from 'type_to_box' because at the time of function execution +--- 'type_to_box' refers to an empty table, which later will be filled with +--- actual type table) +--- @treturn table GraphQL Object type representing 'boxed' collection +--- @treturn string name of the single field in the box GraphQL Object +local function box_collection_type(type_to_box, connection_type, + type_to_box_name) + check(type_to_box, 'type_to_box', 'table') + check(connection_type, 'connection_type', 'string') + check(type_to_box_name, 'type_to_box_name', 'string') + + local box_type_name + local box_type_description + + if connection_type == '1:1' then + box_type_name = 'box_' .. type_to_box_name + box_type_description = 'Box around 1:1 multi-head variant' + elseif connection_type == '1:1*' then + box_type_name = 'box_' .. type_to_box_name + box_type_description = 'Box around 1:1* multi-head variant' + elseif connection_type == '1:N' then + box_type_name = 'box_array_' .. type_to_box_name + box_type_description = 'Box around 1:N multi-head variant' + else + error('unknown connection type: ' .. tostring(connection_type)) + end + + -- box_type_name seen in 'on' clause, so we cannot use full name here. + -- XXX: Ideally we should deduplicate it using defined names set (graphql + -- schema / db_schema local) and autorenaming with ..._2, ..._3. + local field_name = type_to_box_name + local box_field = { + [field_name] = { + name = field_name, + kind = type_to_box, + } + } + local box_type = core_types.object({ + name = box_type_name, + description = box_type_description, + fields = box_field + }) + + return box_type, field_name +end + +--- The function converts passed simple connection to a field of GraphQL type. +--- +--- @tparam table state for read state.accessor and previously filled +--- state.nullable_collection_types (those are gql types) +--- +--- @tparam table connection simple connection to create field +--- +--- @tparam table collection_name name of the collection which has given +--- connection +--- +--- @treturn table generated field +local function convert_simple_connection(state, connection, collection_name) + local c = connection + + check(c.destination_collection, 'connection.destination_collection', 'string') + check(c.parts, 'connection.parts', 'table') + + -- gql type of connection field + local destination_type = + state.nullable_collection_types[c.destination_collection] + assert(destination_type ~= nil, + ('destination_type (named %s) must not be nil'):format( + c.destination_collection)) + + destination_type = specify_destination_type(destination_type, c.type) + + local c_args = args_from_destination_collection(state, + c.destination_collection, c.type) + local c_list_args = state.list_arguments[c.destination_collection] + local e_args = state.extra_arguments[c.destination_collection] + + local arguments = { + all = c_args, + list = c_list_args, + extra = e_args, + } + + local resolve_function = resolve.gen_resolve_function(collection_name, c, + destination_type, arguments, state.accessor) + + local field = { + name = c.name, + kind = destination_type, + arguments = c_args, + resolve = resolve_function, + } + + return field +end + +--- The function converts passed multi-head connection to GraphQL Union type. 
+--- +--- Destination collections of passed multi-head connection are turned into +--- variants of resulting GraphQL Union type. Note that GraphQL types which +--- represent destination collections are wrapped with 'box' types. Here is 'how' +--- and 'why' it is done. +--- +--- How: +--- Let's consider multi-head connection with two destination collections: +--- "human": { +--- "name": "human", +--- "type": "record", +--- "fields": [ +--- { "name": "hero_id", "type": "string" }, +--- { "name": "name", "type": "string" } +--- ] +--- } +--- +--- "starship": { +--- "name": "starship", +--- "type": "record", +--- "fields": [ +--- { "name": "hero_id", "type": "string" }, +--- { "name": "model", "type": "string" } +--- ] +--- } +--- +--- In case of 1:1 multi-head connection the resulting field can be accessed as +--- follows: +--- hero_connection { +--- ... on box_human_collection { +--- human_collection { +--- name +--- } +--- } +--- ... on box_starship_collection { +--- starship_collection { +--- model +--- } +--- } +--- } +--- +--- In case of 1:N multi-head connection: +--- hero_connection { +--- ... on box_array_human_collection { +--- human_collection { +--- name +--- } +--- } +--- ... on box_array_starship_collection { +--- starship_collection { +--- model +--- } +--- } +--- } +--- +--- Why: +--- There are two reasons for 'boxing'. +--- 1) In case of 1:N connections, destination collections are represented by +--- GraphQL Lists (of Objects). But according to the GraphQL specification only +--- Objects can be variants of Union. So we need to 'box' Lists (into Objects +--- with single field) to use them as Union variants. +--- 2) GraphQL responses, received from tarantool graphql, must be avro-valid. +--- On every incoming GraphQL query a corresponding avro-schema can be generated. +--- Response to this query is 'avro-valid' if it can be successfully validated with +--- this generated (from incoming query) avro-schema. In case of multi-head +--- connections it means that value of multi-head connection field must have +--- the following format: SomeDestinationCollectionType: {...} where {...} +--- indicates the YAML encoding of a SomeDestinationCollectionType instance. +--- In case of 1:N {...} indicates a list of instances. Using of 'boxing' +--- provides the needed format. 
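+---
+--- To make the format concrete, here is a sketch (with the same example
+--- collections as above) of a response to the 1:1 query from the 'How'
+--- section:
+---
+---     hero_connection:
+---         human_collection:
+---             name: <...>
+---
+--- and in the 1:N case `human_collection` holds a list of such instances.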
+---
+--- @tparam table state for collection types
+---
+--- @tparam table connection multi-head connection to create the field
+---
+--- @tparam string collection_name name of the collection which has the given
+--- connection
+---
+--- @tparam table context avro-schema parsing context as described in
+--- @{types.convert}
+---
+--- @treturn table generated field
+local function convert_multihead_connection(state, connection, collection_name,
+        context)
+    local c = connection
+    local union_types = {}
+    local var_num_to_box_field_name = {}
+
+    for _, v in ipairs(c.variants) do
+        assert(v.determinant, 'each variant should have a determinant')
+        check(v.determinant, 'variant\'s determinant', 'table')
+        check(v.destination_collection, 'variant.destination_collection', 'string')
+        check(v.parts, 'variant.parts', 'table')
+
+        local destination_type =
+            state.nullable_collection_types[v.destination_collection]
+        assert(destination_type ~= nil,
+            ('destination_type (named %s) must not be nil'):format(
+            v.destination_collection))
+        destination_type = specify_destination_type(destination_type, c.type)
+
+        local variant_type, box_field_name = box_collection_type(destination_type,
+            c.type, v.destination_collection)
+        var_num_to_box_field_name[#union_types + 1] = box_field_name
+        union_types[#union_types + 1] = variant_type
+    end
+
+    local resolve_function = resolve.gen_resolve_function_multihead(
+        collection_name, c, union_types, var_num_to_box_field_name,
+        state.accessor)
+
+    local field = {
+        name = c.name,
+        kind = core_types.union({
+            name = helpers.full_name(c.name, context),
+            types = union_types,
+        }),
+        arguments = nil, -- see Border cases/Unions at the top of the
+                         -- graphql/init.lua module description
+        resolve = resolve_function,
+    }
+    return field
+end
+
+--- The function converts the passed connection to a field of a GraphQL type.
+---
+--- @tparam table state for reading state.accessor and the previously filled
+--- state.types (state.types are gql types)
+---
+--- @tparam table connection connection to create the field
+---
+--- @tparam string collection_name name of the collection which has the given
+--- connection
+---
+--- @tparam table context avro-schema parsing context as described in
+--- @{types.convert}
+---
+--- @treturn table generated field
+local convert_connection_to_field = function(state, connection, collection_name,
+        context)
+    check(connection.type, 'connection.type', 'string')
+    assert(connection.type == '1:1' or connection.type == '1:1*' or
+        connection.type == '1:N', 'connection.type must be 1:1, 1:1* or 1:N, '..
+        'got ' .. connection.type)
+    check(connection.name, 'connection.name', 'string')
+    assert(connection.destination_collection or connection.variants,
+        'connection must have either a destination_collection or a variants field')
+    check(context, 'context', 'table')
+
+    if connection.destination_collection then
+        return convert_simple_connection(state, connection, collection_name)
+    end
+
+    if connection.variants then
+        return convert_multihead_connection(state, connection, collection_name,
+            context)
+    end
+end
+
+--- The function converts the passed avro-schema to a GraphQL type.
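+---
+--- A few illustrative input/output pairs (a sketch; the exhaustive rules are
+--- in the code below):
+---
+---     "string"                           -> String!
+---     "string*"                          -> String
+---     {"type": "array", "items": "int"}  -> [Int!]!
+---     {"type": "map", "values": "long"}  -> Map!
+---     record / record*                   -> Object! / Object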
+--- +--- @tparam table state for read state.accessor and previously filled +--- state.nullable_collection_types (those are gql types) +--- +--- @tparam table avro_schema input avro-schema +--- +--- @tparam[opt] table opts the following options: +--- +--- * `collection` (table; optional) when passed it will be used to generate +--- fields for connections +--- +--- * `type_name` (string; optional) when passed it will be used to generate +--- name of the GraphQL type instead of one from avro_schema (considered +--- only for record / record*) +--- +--- * context (table; optional) current context of parsing the avro_schema, +--- consists the following fields: +--- +--- - `field_name` (string; optional) it is only for an union generation, +--- because avro-schema union has no name in it and specific name is +--- necessary for GraphQL union +--- +--- - `path` (table) path to our position in avro-schema tree; used in +--- GraphQL types names generation +--- +--- Note: map is considered scalar. This means that particular fields cannot be +--- requested using GraphQL, only the entire map or nothing. +function types.convert(state, avro_schema, opts) + check(state, 'state', 'table') + check(avro_schema, 'avro_schema', 'table', 'string') + check(opts, 'opts', 'table', 'nil') + + local opts = opts or {} + local collection = opts.collection + local type_name = opts.type_name + local context = opts.context + if context == nil then + context = { + field_name = nil, + path = {}, + } + end + + check(collection, 'collection', 'table', 'nil') + check(type_name, 'type_name', 'string', 'nil') + check(context, 'context', 'table') + + local field_name = context.field_name + local path = context.path + + check(field_name, 'field_name', 'string', 'nil') + check(path, 'path', 'table') + + local accessor = state.accessor + check(accessor, 'accessor', 'table') + check(accessor.select, 'accessor.select', 'function') + + local avro_t = avro_helpers.avro_type(avro_schema) + + if avro_t == 'record' or avro_t == 'record*' then + if type(avro_schema.name) ~= 'string' then -- avoid extra json.encode() + assert(type(avro_schema.name) == 'string', + ('avro_schema.name must be a string, got %s (avro_schema %s)') + :format(type(avro_schema.name), json.encode(avro_schema))) + end + if type(avro_schema.fields) ~= 'table' then -- avoid extra json.encode() + assert(type(avro_schema.fields) == 'table', + ('avro_schema.fields must be a table, got %s (avro_schema %s)') + :format(type(avro_schema.fields), json.encode(avro_schema))) + end + + local type_name = type_name or avro_schema.name + + table.insert(context.path, type_name) + local fields = convert_record_fields(state, avro_schema.fields, context) + table.remove(context.path, #context.path) + + -- if collection param is passed then go over all connections + for _, c in ipairs((collection or {}).connections or {}) do + fields[c.name] = convert_connection_to_field(state, c, + collection.name, context) + end + + -- create GraphQL type + local res = core_types.object({ + name = helpers.full_name(type_name, context), + description = 'generated from avro-schema for ' .. 
+ avro_schema.name, + fields = fields, + }) + return avro_t == 'record' and core_types.nonNull(res) or res + elseif avro_t == 'enum' then + error('enums do not implemented yet') -- XXX + elseif avro_t == 'array' or avro_t == 'array*' then + assert(avro_schema.items ~= nil, + 'items field must not be nil in array avro schema') + assert(type(avro_schema.items) == 'string' + or type(avro_schema.items) == 'table', + 'avro_schema.items must be a string or a table, got ' .. + type(avro_schema.items)) + + local gql_items_type = types.convert(state, avro_schema.items, + {context = context}) + local res = core_types.list(gql_items_type) + return avro_t == 'array' and core_types.nonNull(res) or res + elseif avro_t == 'map' or avro_t == 'map*' then + assert(avro_schema.values ~= nil, + 'values must not be nil in map avro schema') + assert(type(avro_schema.values) == 'table' + or type(avro_schema.values) == 'string', + ('avro_schema.values must be a table or a string, ' .. + 'got %s (avro_schema %s)'):format(type(avro_schema.values), + json.encode(avro_schema))) + + -- validate avro schema format inside 'values' + types.convert(state, avro_schema.values, {context = context}) + + local res = core_types.map + return avro_t == 'map' and core_types.nonNull(res) or res + elseif avro_t == 'union' then + return union.convert(avro_schema, { + -- captures state variable + convert = function(avro_schema, opts) + return types.convert(state, avro_schema, opts) + end, + gen_argument = false, + context = context, + }) + else + local res = scalar_types.convert(avro_schema, {raise = false}) + if res == nil then + error('unrecognized avro-schema type: ' .. + json.encode(avro_schema)) + end + return res + end +end + +return types diff --git a/graphql/convert_schema/union.lua b/graphql/convert_schema/union.lua new file mode 100644 index 0000000..505380d --- /dev/null +++ b/graphql/convert_schema/union.lua @@ -0,0 +1,335 @@ +local yaml = require('yaml') +local core_types = require('graphql.core.types') +local core_types_helpers = require('graphql.convert_schema.core_types_helpers') +local avro_helpers = require('graphql.avro_helpers') +local helpers = require('graphql.convert_schema.helpers') + +local utils = require('graphql.utils') +local check = utils.check + +local union = {} + +--- The function 'boxes' given GraphQL type into GraphQL Object 'box' type. +--- +--- @tparam table type_to_box GraphQL type to be boxed +--- +--- @tparam string box_field_name name of the single box field +--- +--- @tparam table opts the following options: +--- +--- * gen_argument (boolean) whether resulting type will be used in result +--- types (false) or argument types (true) +--- +--- * context (table) avro-schema parsing context as described in +--- @{types.convert} and @{arguments.convert} +--- +--- @treturn table GraphQL Object +local function box_type(type_to_box, box_field_name, opts) + check(type_to_box, 'type_to_box', 'table') + check(box_field_name, 'box_field_name', 'string') + + local gen_argument = opts.gen_argument + local context = opts.context + + local gql_true_type = core_types_helpers.nullable(type_to_box) + + -- Use bare name for the result type (to use in 'on' clause) and full name + -- for the argument type to avoid 'Encountered multiple types' error. See + -- also the comment in @{types.box_collection_type}. + local box_name = (gql_true_type.name or gql_true_type.__type) .. 
'_box' + if gen_argument then + box_name = helpers.full_name(box_name, context) + else + box_name = helpers.base_name(box_name) + end + + local box_fields = { + [box_field_name] = { + name = box_field_name, + kind = type_to_box, + } + } + + local box_object_type = gen_argument and core_types.inputObject or + core_types.object + + return box_object_type({ + name = box_name, + description = 'Box (wrapper) around union variant', + fields = box_fields, + }) +end + +--- The functions creates table of GraphQL types from avro-schema union type. +--- +--- @tparam table avro_schema +--- +--- @tparam table opts the following options: +--- +--- * convert (function) @{types.convert} or @{arguments.convert} +--- +--- * gen_argument (boolean) whether resulting type will be used in result +--- types (false) or argument types (true) +--- +--- * context (table) as described in @{types.convert} and +--- @{arguments.convert}; not used here explicitly, but passed deeper within +--- the @{types.convert} or @{arguments.convert} call +--- +--- @treturn table union_types +--- +--- @treturn table determinant_to_type +--- +--- @treturn boolean is_nullable +local function create_union_types(avro_schema, opts) + check(avro_schema, 'avro_schema', 'table') + assert(utils.is_array(avro_schema), 'union avro-schema must be an array ' .. + ', got\n' .. yaml.encode(avro_schema)) + + local convert = opts.convert + local gen_argument = opts.gen_argument + local context = opts.context + + local union_types = {} + local determinant_to_type = {} + local is_nullable = false + + for _, type in ipairs(avro_schema) do + -- If there is a 'null' type among 'union' types (in avro-schema union) + -- then resulting GraphQL Union type will be nullable + if type == 'null' then + is_nullable = true + else + local variant_type = convert(type, {context = context}) + local box_field_name = type.name or avro_helpers.avro_type(type) + union_types[#union_types + 1] = box_type(variant_type, + box_field_name, { + gen_argument = gen_argument, + context = context, + }) + local determinant = type.name or type.type or type + determinant_to_type[determinant] = union_types[#union_types] + end + end + + return union_types, determinant_to_type, is_nullable +end + +--- The function creates GraphQL Union type from given avro-schema union type. +--- There are two problems with GraphQL Union types, which we solve with specific +--- format of generated Unions. These problems are: +--- +--- 1) GraphQL Unions represent an object that could be one of a list of +--- GraphQL Object types. So Scalars and Lists can not be one of Union +--- types. +--- +--- 2) GraphQL responses, received from tarantool graphql, must be avro-valid. +--- On every incoming GraphQL query a corresponding avro-schema can be +--- generated. Response to this query is 'avro-valid' if it can be +--- successfully validated with this generated (from incoming query) +--- avro-schema. +--- +--- Specific format of generated Unions include the following: +--- +--- Avro scalar types (e.g. int, string) are converted into GraphQL Object or +--- InputObject types. Avro scalar converted to GraphQL Scalar (string -> +--- String) and then name of GraphQL type is concatenated with '_box' +--- ('String_box'). Resulting name is a name of created GraphQL Object / +--- InputObject. This object has only one field with GraphQL type +--- corresponding to avro scalar type (String type in our example). Avro type's +--- name is taken as a name for this single field. +--- +--- [..., "string", ...] 
+--- +--- turned into +--- MyUnion { +--- ... +--- ... on String_box { +--- string +--- ... +--- } +--- +--- Avro arrays and maps are converted into GraphQL Object or InputObject +--- types. The name of the resulting GraphQL Object is 'List_box' or 'Map_box' +--- respectively. This object has only one field with GraphQL type +--- corresponding to 'items' / 'values' avro type. 'array' or 'map' +--- (respectively) is taken as a name of this single field. +--- +--- [..., {"type": "array", "items": "int"}, ...] +--- +--- turned into +--- MyUnion { +--- ... +--- ... on List_box { +--- array +--- ... +--- } +--- +--- Avro records are converted into GraphQL Object or InputObject types. The +--- name of the resulting GraphQL Object is concatenation of record's name and +--- '_box'. This Object has only one field. The name of this field is record's +--- name. The type of this field is GraphQL Object / InputObject generated from +--- avro record schema in a usual way (see @{types.convert} and +--- @{arguments.convert}) +--- +--- { "type": "record", "name": "Foo", "fields":[ +--- { "name": "foo1", "type": "string" }, +--- { "name": "foo2", "type": "string" } +--- ]} +--- +--- turned into +--- MyUnion { +--- ... +--- ... on Foo_box { +--- Foo { +--- foo1 +--- foo2 +--- } +--- ... +--- } +--- +--- Please consider full example below. +--- +--- @tparam table avro_schema avro-schema union type +--- +--- @tparam table opts the following options: +--- +--- * convert (function) @{types.convert} or @{arguments.convert} +--- +--- * gen_argument (boolean) whether resulting type will be used in result +--- types (false) or argument types (true) +--- +--- * context (table) as described in @{types.convert} and +--- @{arguments.convert}; only `context.field_name` is used here (as the name +--- of the generated GraphQL union); `path` is *updated* (with the field +--- name) and the `context` is passed deeper within the @{create_union_types} +--- call (which calls @{types.convert} or @{arguments.convert} inside) +--- +--- @treturn table GraphQL Union type. Consider the following example: +--- +--- Avro-schema (inside a record): +--- +--- ... +--- "name": "MyUnion", "type": [ +--- "null", +--- "string", +--- { "type": "array", "items": "int" }, +--- { "type": "record", "name": "Foo", "fields":[ +--- { "name": "foo1", "type": "string" }, +--- { "name": "foo2", "type": "string" } +--- ]} +--- ] +--- ... +--- +--- GraphQL Union type (It will be nullable as avro-schema has 'null' variant): +--- +--- MyUnion { +--- ... on String_box { +--- string +--- } +--- +--- ... on List_box { +--- array +--- } +--- +--- ... on Foo_box { +--- Foo { +--- foo1 +--- foo2 +--- } +--- } +function union.convert(avro_schema, opts) + check(avro_schema, 'avro_schema', 'table') + assert(utils.is_array(avro_schema), 'union avro-schema must be an ' .. + 'array, got:\n' .. 
yaml.encode(avro_schema))
+
+    local opts = opts or {}
+    check(opts, 'opts', 'table')
+
+    local convert = opts.convert
+    local gen_argument = opts.gen_argument or false
+    local context = opts.context
+
+    check(convert, 'convert', 'function')
+    check(gen_argument, 'gen_argument', 'boolean')
+    check(context, 'context', 'table')
+
+    local union_name = context.field_name
+    check(union_name, 'field_name', 'string')
+
+    -- check avro-schema constraints
+    for i, type in ipairs(avro_schema) do
+        assert(avro_helpers.avro_type(type) ~= 'union',
+            'unions must not immediately contain other unions')
+
+        if type.name ~= nil then
+            for j, another_type in ipairs(avro_schema) do
+                if i ~= j then
+                    if another_type.name ~= nil then
+                        assert(type.name:gsub('%*$', '') ~=
+                            another_type.name:gsub('%*$', ''),
+                            'Unions may not contain more than one schema ' ..
+                            'with the same name')
+                    end
+                end
+            end
+        else
+            for j, another_type in ipairs(avro_schema) do
+                if i ~= j then
+                    assert(avro_helpers.avro_type(type) ~=
+                        avro_helpers.avro_type(another_type),
+                        'Unions may not contain more than one schema with ' ..
+                        'the same type except for the named types: ' ..
+                        'record, fixed and enum')
+                end
+            end
+        end
+    end
+
+    -- create the GraphQL union
+    table.insert(context.path, union_name)
+    local union_types, determinant_to_type, is_nullable =
+        create_union_types(avro_schema, {
+            convert = convert,
+            gen_argument = gen_argument,
+            context = context,
+        })
+    table.remove(context.path, #context.path)
+
+    local union_constructor = gen_argument and core_types.inputUnion or
+        core_types.union
+
+    local union_type = union_constructor({
+        types = union_types,
+        name = helpers.full_name(union_name, context),
+        resolveType = function(result)
+            for determinant, type in pairs(determinant_to_type) do
+                if result[determinant] ~= nil then
+                    return type
+                end
+            end
+            error(('result object has no determinant field matching ' ..
+                'determinants for this union\nresult object:\n%s' ..
+                'determinants:\n%s'):format(yaml.encode(result),
+                yaml.encode(determinant_to_type)))
+        end,
+        resolveNodeType = function(node)
+            assert(#node.values == 1,
+                ('box object with more than one field: %d'):format(
+                #node.values))
+            local determinant = node.values[1].name
+            check(determinant, 'determinant', 'string')
+            local res = determinant_to_type[determinant]
+            assert(res ~= nil,
+                ('the union has no "%s" field'):format(determinant))
+            return res
+        end,
+    })
+
+    if not is_nullable then
+        union_type = core_types.nonNull(union_type)
+    end
+
+    return union_type
+end
+
+return union
diff --git a/graphql/core/types.lua b/graphql/core/types.lua
index bca7787..68ba406 100644
--- a/graphql/core/types.lua
+++ b/graphql/core/types.lua
@@ -171,6 +171,18 @@ function types.union(config)
   return instance
 end
+types.map = types.scalar({
+  name = 'Map',
+  description = 'Map is a dictionary with string keys and values of ' ..
+    'an arbitrary type that is the same for all values',
+  serialize = function(value) return value end,
+  parseValue = function(value) return value end,
+  parseLiteral = function(node)
+    error('Literal parsing is implemented in util.coerceValue; ' ..
+ 'we should not go here') + end, +}) + function types.inputObject(config) assert(type(config.name) == 'string', 'type name must be provided as a string') @@ -193,6 +205,42 @@ function types.inputObject(config) return instance end +function types.inputMap(config) + local instance = { + __type = 'InputMap', + name = config.name, + serialize = function(value) return value end, + parseValue = function(value) return value end, + parseLiteral = function(node) + error('Literal parsing is implemented in util.coerceValue; ' .. + 'we should not go here') + end, + values = config.values, + } + + instance.nonNull = types.nonNull(instance) + + return instance +end + +function types.inputUnion(config) + local instance = { + __type = 'InputUnion', + name = config.name, + serialize = function(value) return value end, + parseValue = function(value) return value end, + parseLiteral = function(node) + error('Literal parsing is implemented in util.coerceValue; ' .. + 'we should not go here') + end, + resolveNodeType = config.resolveNodeType, + } + + instance.nonNull = types.nonNull(instance) + + return instance +end + local coerceInt = function(value) value = tonumber(value) @@ -215,6 +263,19 @@ types.int = types.scalar({ end }) +types.long = types.scalar({ + name = 'Long', + description = 'Long is non-bounded integral type', + serialize = function(value) return tonumber(value) end, + parseValue = function(value) return tonumber(value) end, + parseLiteral = function(node) + -- 'int' is name of the immediate value type + if node.kind == 'int' then + return tonumber(node.value) + end + end +}) + types.float = types.scalar({ name = 'Float', serialize = tonumber, @@ -226,6 +287,18 @@ types.float = types.scalar({ end }) +types.double = types.scalar({ + name = 'Double', + serialize = tonumber, + parseValue = tonumber, + parseLiteral = function(node) + -- 'float' and 'int' are names of immediate value types + if node.kind == 'float' or node.kind == 'int' then + return tonumber(node.value) + end + end +}) + types.string = types.scalar({ name = 'String', description = "The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.", diff --git a/graphql/core/util.lua b/graphql/core/util.lua index 5a9438d..a1435aa 100644 --- a/graphql/core/util.lua +++ b/graphql/core/util.lua @@ -70,19 +70,22 @@ function util.coerceValue(node, schemaType, variables) end) end - if schemaType.__type == 'InputObject' then + local isInputObject = schemaType.__type == 'InputObject' + if isInputObject or schemaType.__type == 'InputMap' then if node.kind ~= 'inputObject' then error('Expected an input object') end local inputObjectValue = {} for _, field in pairs(node.values) do - if not schemaType.fields[field.name] then + if isInputObject and not schemaType.fields[field.name] then error('Unknown input object field "' .. field.name .. 
'"') end - inputObjectValue[field.name] = util.coerceValue( - field.value, schemaType.fields[field.name].kind, variables) + local child_type = isInputObject and schemaType.fields[field.name].kind or + schemaType.values + inputObjectValue[field.name] = util.coerceValue(field.value, child_type, + variables) end return inputObjectValue end @@ -106,6 +109,11 @@ function util.coerceValue(node, schemaType, variables) return schemaType.parseLiteral(node) end + + if schemaType.__type == 'InputUnion' then + local child_type = schemaType.resolveNodeType(node) + return util.coerceValue(node, child_type, variables) + end end return util diff --git a/graphql/db_schema_helpers.lua b/graphql/db_schema_helpers.lua new file mode 100644 index 0000000..b2553a7 --- /dev/null +++ b/graphql/db_schema_helpers.lua @@ -0,0 +1,62 @@ +local utils = require('graphql.utils') +local check = utils.check + +local db_schema_helpers = {} + +--- Get user-provided meta-information about the primary index of given +--- collection. +--- +--- @tparam table db_schema `schemas`, `collections`, `service_fields`, +--- `indexes` +--- +--- @tparam string collection_name the name of collection to find the primary +--- index +--- +--- @treturn string `index_name` +--- +--- @treturn table `index` (meta-information, not the index itself) +function db_schema_helpers.get_primary_index_meta(db_schema, collection_name) + check(db_schema, 'db_schema', 'table') + check(collection_name, 'collection_name', 'string') + + local indexes = db_schema.indexes[collection_name] + + local res_index_name + + for index_name, index in pairs(indexes) do + if res_index_name == nil and index.primary then + res_index_name = index_name + elseif res_index_name ~= nil and index.primary then + error(('several indexes were marked as primary in ' .. + 'the "%s" collection, at least "%s" and "%s"'):format( + collection_name, res_index_name, index_name)) + end + end + + if res_index_name == nil then + error(('cannot find primary index for collection "%s"'):format( + collection_name)) + end + + local res_index = indexes[res_index_name] + return res_index_name, res_index +end + +--- Get schema name by a collection name. +--- +--- @tparam table db_schema `schemas`, `collections`, `service_fields`, +--- `indexes` +--- +--- @tparam string collection_name +--- +--- @treturn string `schema_name` +function db_schema_helpers.get_schema_name(db_schema, collection_name) + local collection = db_schema.collections[collection_name] + assert(collection ~= nil, + ('cannot find the collection "%s"'):format(collection_name)) + local schema_name = collection.schema_name + check(schema_name, 'schema_name', 'string') + return schema_name +end + +return db_schema_helpers diff --git a/graphql/gen_arguments.lua b/graphql/gen_arguments.lua new file mode 100644 index 0000000..e6f1f5d --- /dev/null +++ b/graphql/gen_arguments.lua @@ -0,0 +1,350 @@ +--- Generate avro-schema for arguments by given database schema. + +local json = require('json') +local utils = require('graphql.utils') +local rex, _ = utils.optional_require_rex() +local avro_helpers = require('graphql.avro_helpers') +local db_schema_helpers = require('graphql.db_schema_helpers') + +local check = utils.check + +local gen_arguments = {} + +--- Get an avro-schema for a primary key by a collection name. 
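+---
+--- For example (the collection and field names are hypothetical), a compound
+--- primary index over the string fields `user_id` and `order_id` of an
+--- `order_collection` collection yields the following offset type:
+---
+---     {
+---         "name": "order_collection_offset",
+---         "type": "record",
+---         "fields": [
+---             {"name": "user_id", "type": "string"},
+---             {"name": "order_id", "type": "string"}
+---         ]
+---     }
+---
+--- while a single-part primary index yields just the name of its scalar
+--- type, e.g. "string".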
+---
+--- @tparam table db_schema `e_schemas`, `schemas`, `collections`,
+--- `service_fields`, `indexes`
+---
+--- @tparam string collection_name name of a collection
+---
+--- @treturn string `offset_type` is just a string in case of a scalar primary
+--- key (and, hence, offset) type
+---
+--- @treturn table `offset_type` is a record in case of a compound
+--- (multi-part) primary key
+local function get_primary_key_type(db_schema, collection_name)
+    -- get names of the primary key fields
+    local _, index_meta = db_schema_helpers.get_primary_index_meta(
+        db_schema, collection_name)
+
+    local schema_name = db_schema_helpers.get_schema_name(db_schema,
+        collection_name)
+    local e_schema = db_schema.e_schemas[schema_name]
+
+    local offset_fields = {}
+
+    for _, field_name in ipairs(index_meta.fields) do
+        local field_type
+        for _, field in ipairs(e_schema.fields) do
+            if field.name == field_name then
+                field_type = field.type
+            end
+        end
+        assert(field_type ~= nil,
+            ('cannot find type for primary index field "%s" ' ..
+            'for collection "%s"'):format(field_name,
+            collection_name))
+        assert(type(field_type) == 'string',
+            'field type must be a string, got ' ..
+            type(field_type))
+        offset_fields[#offset_fields + 1] = {
+            name = field_name,
+            type = field_type,
+        }
+    end
+
+    local offset_type
+    assert(#offset_fields > 0,
+        'offset must contain at least one field')
+    if #offset_fields == 1 then
+        -- use a scalar type
+        offset_type = offset_fields[1].type
+    else
+        -- construct an input type
+        offset_type = {
+            name = collection_name .. '_offset',
+            type = 'record',
+            fields = offset_fields,
+        }
+    end
+
+    return offset_type
+end
+
+--- Make schema types deeply nullable down to a scalar, union, array or map
+--- (matches the xflatten input syntax).
+---
+--- @param e_schema (table or string) avro-schema with expanded references
+---
+--- @tparam[opt] function skip_cond
+---
+--- @return transformed avro-schema or nil (when matched by skip_cond)
+local function recursive_nullable(e_schema, skip_cond)
+    local avro_t = avro_helpers.avro_type(e_schema)
+
+    if skip_cond ~= nil and skip_cond(avro_t) then return nil end
+
+    if avro_helpers.is_scalar_type(avro_t) then
+        return avro_helpers.make_avro_type_nullable(e_schema,
+            {raise_on_nullable = false})
+    elseif avro_t == 'record' or avro_t == 'record*' then
+        local res = table.copy(e_schema)
+        res.type = 'record*' -- make the record nullable
+        res.fields = {}
+
+        for _, field in ipairs(e_schema.fields) do
+            local new_type = recursive_nullable(field.type, skip_cond)
+            if new_type ~= nil then
+                local field = table.copy(field)
+                field.type = new_type
+                table.insert(res.fields, field)
+            end
+        end
+
+        return res
+    elseif avro_t == 'union' or
+            avro_t == 'array' or avro_t == 'array*' or
+            avro_t == 'map' or avro_t == 'map*' then
+        e_schema = table.copy(e_schema)
+        return avro_helpers.make_avro_type_nullable(e_schema,
+            {raise_on_nullable = false})
+    end
+
+    error('unrecognized avro-schema type: ' .. json.encode(e_schema))
+end
+
+--- Whether we can compare the type for equality.
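+---
+--- Usage sketch; note that float and double are deliberately absent from the
+--- list below (presumably because exact equality of binary floating point
+--- values is unreliable):
+---
+---     is_comparable_scalar_type('int')    -- -> true
+---     is_comparable_scalar_type('float')  -- -> false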
+--- +--- @tparam string avro_schema_type +--- +--- @treturn boolean +local function is_comparable_scalar_type(avro_schema_type) + check(avro_schema_type, 'avro_schema_type', 'string') + + local scalar_types = { + ['int'] = true, + ['int*'] = true, + ['long'] = true, + ['long*'] = true, + ['boolean'] = true, + ['boolean*'] = true, + ['string'] = true, + ['string*'] = true, + ['null'] = true, + } + + return scalar_types[avro_schema_type] or false +end + +-- XXX: add string fields of 1:1 connection to get_pcre_argument_type + +--- Get an avro-schema for a pcre argument by a collection name. +--- +--- Note: it is called from `list_args`, so applicable only for lists: +--- top-level objects and 1:N connections. +--- +--- @tparam table db_schema `e_schemas`, `schemas`, `collections`, +--- `service_fields`, `indexes` +--- +--- @tparam string collection_name name of a collection +--- +--- @treturn table record with fields per string/string* field of an object +--- of the collection +local function get_pcre_argument_type(db_schema, collection_name) + local schema_name = db_schema_helpers.get_schema_name(db_schema, + collection_name) + local e_schema = db_schema.e_schemas[schema_name] + assert(e_schema ~= nil, 'cannot find expanded schema ' .. + tostring(schema_name)) + + assert(e_schema.type == 'record', + 'top-level object expected to be a record, got ' .. + tostring(e_schema.type)) + + local res = recursive_nullable(e_schema, function(avro_t) + -- skip non-comparable scalars (float, double), union, array, map + local is_non_string_scalar = avro_helpers.is_scalar_type(avro_t) and + (avro_t ~= 'string' and avro_t ~= 'string*') + local is_non_record_compound = avro_helpers.is_compound_type(avro_t) + and (avro_t ~= 'record' and avro_t ~= 'record*') + return is_non_string_scalar or is_non_record_compound + + end) + res.name = collection_name .. '_pcre' + return res +end + +--- Get avro-schema for update argument. +--- +--- @tparam table db_schema `e_schemas`, `schemas`, `collections`, +--- `service_fields`, `indexes` +--- +--- @tparam string collection_name name of a collection +--- +--- @treturn table generated avro-schema +local function get_update_argument_type(db_schema, collection_name) + local schema_name = db_schema_helpers.get_schema_name(db_schema, + collection_name) + local e_schema = db_schema.e_schemas[schema_name] + assert(e_schema ~= nil, 'cannot find expanded schema ' .. + tostring(schema_name)) + + assert(e_schema.type == 'record', + 'top-level object expected to be a record, got ' .. + tostring(e_schema.type)) + + local _, primary_index_meta = db_schema_helpers.get_primary_index_meta( + db_schema, collection_name) + + local schema_update = { + name = collection_name .. '_update', + type = 'record*', + fields = {}, + } + -- add all fields except ones whose are part of the primary key + for _, field in ipairs(e_schema.fields) do + assert(field.name ~= nil, 'field.name is nil') + local is_field_part_of_primary_key = false + for _, pk_field_name in ipairs(primary_index_meta.fields) do + if field.name == pk_field_name then + is_field_part_of_primary_key = true + break + end + end + + if not is_field_part_of_primary_key then + local field = table.copy(field) + field.type = recursive_nullable(field.type) + table.insert(schema_update.fields, field) + end + end + + return schema_update +end + +--- List of avro-schema fields to use as arguments of a collection field and a +--- connection field (with any connection type). 
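+---
+--- A sketch with a hypothetical schema: for a record with the fields
+--- `user_id` (string), `age` (double) and `meta` (map) the function returns
+--- only the nullable comparable fields:
+---
+---     {
+---         {name = 'user_id', type = 'string*'},
+---     }
+---
+--- `age` is skipped as a non-comparable scalar and `meta` as a non-record
+--- compound type.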
+--- +--- @tparam table db_schema `e_schemas`, `schemas`, `collections`, +--- `service_fields`, `indexes` +--- +--- @tparam string collection_name name of collection to create the fields +--- +--- @treturn table list of avro-schema fields +function gen_arguments.object_args(db_schema, collection_name) + local schema_name = db_schema_helpers.get_schema_name(db_schema, + collection_name) + local e_schema = db_schema.e_schemas[schema_name] + assert(e_schema ~= nil, 'cannot find expanded schema ' .. + tostring(schema_name)) + + assert(e_schema.type == 'record', + 'top-level object expected to be a record, got ' .. + tostring(e_schema.type)) + + local res = recursive_nullable(e_schema, function(avro_t) + -- skip non-comparable scalars (float, double), union, array, map + local is_non_comparable_scalar = avro_helpers.is_scalar_type(avro_t) and + not is_comparable_scalar_type(avro_t) + local is_non_record_compound = avro_helpers.is_compound_type(avro_t) + and (avro_t ~= 'record' and avro_t ~= 'record*') + return is_non_comparable_scalar or is_non_record_compound + end) + return res.fields +end + +--- List of avro-schema fields to use as arguments of a collection field and +--- 1:N connection field. +--- +--- @tparam table db_schema `e_schemas`, `schemas`, `collections`, +--- `service_fields`, `indexes` +--- +--- @tparam string collection_name name of collection to create the fields +--- +--- @treturn table list of avro-schema fields +function gen_arguments.list_args(db_schema, collection_name) + local offset_type = get_primary_key_type(db_schema, collection_name) + offset_type = avro_helpers.make_avro_type_nullable(offset_type, + {raise_on_nullable = false}) + + -- add `pcre` argument only if lrexlib-pcre was found + local pcre_field + if rex ~= nil then + local pcre_type = get_pcre_argument_type(db_schema, collection_name) + pcre_field = {name = 'pcre', type = pcre_type} + end + + return { + {name = 'limit', type = 'int*'}, + {name = 'offset', type = offset_type}, + -- {name = 'filter', type = ...}, + pcre_field, + } +end + +--- List of avro-schema fields to use as extra arguments of a collection / +--- a connection field. +--- +--- Mutation arguments (insert, update, delete) are generated here. +--- +--- @tparam table db_schema `e_schemas`, `schemas`, `collections`, +--- `service_fields`, `indexes` +--- +--- @tparam string collection_name name of collection to create the fields +--- +--- @tparam table opts +--- +--- * enable_mutations (boolean) +--- +--- @treturn table list of avro-schema fields +--- +--- @treturn table map with flags to describe where generated arguments should +--- be used; the format is the following: +--- +--- { +--- = { +--- add_to_mutations_only = , +--- add_to_top_fields_only = , +--- }, +--- ... +--- } +function gen_arguments.extra_args(db_schema, collection_name, opts) + local opts = opts or {} + local enable_mutations = opts.enable_mutations or false + + if not enable_mutations then + return {}, {} + end + + local schema_name = db_schema_helpers.get_schema_name(db_schema, + collection_name) + local e_schema = db_schema.e_schemas[schema_name] + + local schema_insert = table.copy(e_schema) + schema_insert.name = collection_name .. 
'_insert' + schema_insert.type = 'record*' -- make the record nullable + + local schema_update = get_update_argument_type(db_schema, collection_name) + local schema_delete = 'boolean*' + + return { + {name = 'insert', type = schema_insert}, + {name = 'update', type = schema_update}, + {name = 'delete', type = schema_delete}, + }, { + insert = { + add_to_mutations_only = true, + add_to_top_fields_only = true, + }, + update = { + add_to_mutations_only = true, + add_to_top_fields_only = false, + }, + delete = { + add_to_mutations_only = true, + add_to_top_fields_only = false, + }, + } +end + +return gen_arguments diff --git a/graphql/impl.lua b/graphql/impl.lua new file mode 100644 index 0000000..f97c110 --- /dev/null +++ b/graphql/impl.lua @@ -0,0 +1,297 @@ +--- Implementation of module-level functions and functions of instances of the +--- graphql library and a compiled query. + +local accessor_space = require('graphql.accessor_space') +local accessor_shard = require('graphql.accessor_shard') +local parse = require('graphql.core.parse') +local validate = require('graphql.core.validate') +local execute = require('graphql.core.execute') +local query_to_avro = require('graphql.query_to_avro') +local simple_config = require('graphql.simple_config') +local config_complement = require('graphql.config_complement') +local server = require('graphql.server.server') +local convert_schema = require('graphql.convert_schema') + +local utils = require('graphql.utils') +local check = utils.check + +local impl = {} + +-- Instance of the library to provide graphql:compile() and graphql:execute() +-- method (with creating zero configuration graphql instance under hood when +-- calling compile() for the first time). +local default_instance + +--- Execute an operation from compiled query. +--- +--- @tparam table qstate compiled query +--- +--- @tparam table variables variables to pass to the query +--- +--- @tparam[opt] string operation_name optional operation name +--- +--- @treturn table result of the operation +local function gql_execute(qstate, variables, operation_name) + assert(qstate.state) + local state = qstate.state + assert(state.schema) + + check(variables, 'variables', 'table') + check(operation_name, 'operation_name', 'string', 'nil') + + local root_value = {} + + return execute(state.schema, qstate.ast, root_value, variables, + operation_name) +end + +--- Compile a query and execute an operation. +--- +--- See @{gql_compile} and @{gql_execute} for parameters description. +--- +--- @treturn table result of the operation +local function compile_and_execute(state, query, variables, operation_name) + assert(type(state) == 'table', 'use :gql_execute(...) instead of ' .. + '.execute(...)') + assert(state.schema ~= nil, 'have not compiled schema') + check(query, 'query', 'string') + check(variables, 'variables', 'table', 'nil') + check(operation_name, 'operation_name', 'string', 'nil') + + local compiled_query = state:compile(query) + return compiled_query:execute(variables, operation_name) +end + +--- Parse GraphQL query string, validate against the GraphQL schema and +--- provide an object with the function to execute an operation from the +--- request with specific variables values. +--- +--- @tparam table state the library instance +--- +--- @tparam string query text of a GraphQL query +--- +--- @treturn table compiled query with `execute` and `avro_schema` functions +local function gql_compile(state, query) + assert(type(state) == 'table' and type(query) == 'string', + 'use :validate(...) 
instead of .validate(...)') + assert(state.schema ~= nil, 'have not compiled schema') + check(query, 'query', 'string') + + local ast = parse(query) + validate(state.schema, ast) + + local qstate = { + state = state, + ast = ast, + } + + local gql_query = setmetatable(qstate, { + __index = { + execute = gql_execute, + avro_schema = query_to_avro.convert + } + }) + return gql_query +end + +local function start_server(gql, host, port) + assert(type(gql) == 'table', + 'use :start_server(...) instead of .start_server(...)') + + check(host, 'host', 'nil', 'string') + check(port, 'port', 'nil', 'number') + + gql.server = server.init(gql, host, port) + gql.server:start() + + return ('The GraphQL server started at http://%s:%s'):format( + gql.server.host, gql.server.port + ) +end + +local function stop_server(gql) + assert(type(gql) == 'table', + 'use :stop_server(...) instead of .stop_server(...)') + assert(gql.server, 'no running server to stop') + + gql.server:stop() + + return ('The GraphQL server stopped at http://%s:%s'):format( + gql.server.host, gql.server.port) +end + +--- The function creates an accessor of desired type with default configuration. +--- +--- @tparam table cfg schemas, collections, service_fields, indexes and so on +--- +--- @treturn table `accessor` created accessor instance +local function create_default_accessor(cfg) + check(cfg.accessor, 'cfg.accessor', 'string') + assert(cfg.accessor == 'space' or cfg.accessor == 'shard', + 'accessor_type must be shard or space, got ' .. cfg.accessor) + check(cfg.service_fields, 'cfg.service_fields', 'table') + check(cfg.indexes, 'cfg.indexes', 'table') + check(cfg.collection_use_tomap, 'cfg.collection_use_tomap', 'table', 'nil') + check(cfg.accessor_funcs, 'cfg.accessor_funcs', 'table', 'nil') + + local accessor_cfg = { + schemas = cfg.schemas, + collections = cfg.collections, + service_fields = cfg.service_fields, + indexes = cfg.indexes, + collection_use_tomap = cfg.collection_use_tomap, + resulting_object_cnt_max = cfg.resulting_object_cnt_max, + fetched_object_cnt_max = cfg.fetched_object_cnt_max, + timeout_ms = cfg.timeout_ms, + enable_mutations = cfg.enable_mutations, + } + + if cfg.accessor == 'space' then + return accessor_space.new(accessor_cfg, cfg.accessor_funcs) + end + + if cfg.accessor == 'shard' then + return accessor_shard.new(accessor_cfg, cfg.accessor_funcs) + end +end + +function impl.compile(query) + if default_instance == nil then + default_instance = impl.new() + end + return default_instance:compile(query) +end + +function impl.execute(query, variables, operation_name) + if default_instance == nil then + default_instance = impl.new() + end + return default_instance:execute(query, variables, operation_name) +end + +function impl.start_server() + if default_instance == nil then + default_instance = impl.new() + end + + return default_instance:start_server() +end + +function impl.stop_server() + if default_instance ~= nil and default_instance.server ~= nil then + return default_instance:stop_server() + end + return 'there is no active server in default Tarantool graphql instance' +end + +--- Create the library instance. +--- +--- Usage: +--- +--- ... = graphql.new({ +--- schemas = { +--- schema_name_foo = { // the value is avro-schema (esp., a record) +--- name = 'schema_name_foo, +--- type = 'record', +--- fields = { +--- ... +--- } +--- }, +--- ... 
+--- }, +--- collections = { +--- collections_name_foo = { +--- schema_name = 'schema_name_foo', +--- connections = { // the optional field +--- { +--- name = 'connection_name_bar', +--- destination_collection = 'collection_baz', +--- parts = { +--- { +--- source_field = 'field_name_source_1', +--- destination_field = 'field_name_destination_1' +--- }, +--- ... +--- }, +--- index_name = 'index_name' -- is is for an accessor, +--- -- ignored in the graphql +--- -- part +--- }, +--- ... +--- }, +--- }, +--- ... +--- }, +--- accessor = setmetatable({}, { +--- __index = { +--- select = function(self, parent, collection_name, from, +--- object_args_instance, list_args_instance, extra) +--- -- * from has the following structure: +--- -- +--- -- { +--- -- collection_name = <...>, +--- -- connection_name = <...>, +--- -- destination_args_names = <...>, +--- -- destination_args_values = <...>, +--- -- } +--- -- +--- -- from.collection_name is nil for a top-level collection. +--- -- +--- -- `extra` is a table which contains additional data for +--- -- the query: +--- -- +--- -- * `qcontext` (table) can be used by an accessor to store +--- -- any query-related data; +--- -- * `resolveField(field_name, object, filter, opts)` +--- -- (function) for performing a subrequest on a fields +--- -- connected using a 1:1 or 1:1* connection. +--- -- +--- return ... +--- end, +--- } +--- }), +--- }) +function impl.new(cfg) + local cfg = cfg or {} + cfg = table.deepcopy(cfg) -- prevent change of user's data + + -- auto config case + if not next(cfg) or utils.has_only(cfg, 'connections') then + local generated_cfg = simple_config.graphql_cfg_from_tarantool() + generated_cfg.accessor = 'space' + generated_cfg.connections = cfg.connections or {} + cfg = generated_cfg + cfg = config_complement.complement_cfg(cfg) + end + + check(cfg.accessor, 'cfg.accessor', 'string', 'table') + if type(cfg.accessor) == 'string' then + cfg.accessor = create_default_accessor(cfg) + end + + -- to use `cfg` as db_schema + if cfg.service_fields == nil then + cfg.service_fields = cfg.accessor.service_fields + end + + -- to use `cfg` as db_schema + if cfg.indexes == nil then + cfg.indexes = cfg.accessor.indexes + end + + local state = {} + convert_schema.convert(state, cfg) + return setmetatable(state, { + __index = { + compile = gql_compile, + execute = compile_and_execute, + start_server = start_server, + stop_server = stop_server, + internal = { -- for unit testing + cfg = cfg, + } + } + }) +end + +return impl diff --git a/graphql/init.lua b/graphql/init.lua index ccf5a5c..9042ee3 100644 --- a/graphql/init.lua +++ b/graphql/init.lua @@ -1,19 +1,55 @@ +--- Abstraction layer between a data collections (e.g. tarantool's spaces) and +--- the GraphQL query language. +--- +--- Random notes: +--- +--- * GraphQL top level statement must be a collection name. Arguments for this +--- statement match non-deducible field names of corresponding object and +--- passed to an accessor function in the filter argument. +--- +--- Border cases: +--- +--- * Unions: as GraphQL specification says "...no fields may be queried on +--- Union type without the use of typed fragments." Tarantool_graphql +--- behaves this way. So 'common fields' are not supported. This does NOT +--- work: +--- +--- hero { +--- hero_id -- common field; does NOT work +--- ... on human { +--- name +--- } +--- ... 
on droid { +--- model +--- } +--- } +--- +--- +--- +--- (GraphQL spec: http://facebook.github.io/graphql/October2016/#sec-Unions) +--- Also, no arguments are currently allowed for fragments. +--- See issue about this (https://github.com/facebook/graphql/issues/204) + local accessor_general = require('graphql.accessor_general') local accessor_space = require('graphql.accessor_space') local accessor_shard = require('graphql.accessor_shard') -local tarantool_graphql = require('graphql.tarantool_graphql') +local impl = require('graphql.impl') local graphql = {} +-- constants +graphql.TIMEOUT_INFINITY = accessor_general.TIMEOUT_INFINITY + +-- for backward compatibility graphql.accessor_general = accessor_general graphql.accessor_space = accessor_space graphql.accessor_shard = accessor_shard -graphql.new = tarantool_graphql.new -graphql.compile = tarantool_graphql.compile -graphql.execute = tarantool_graphql.execute -graphql.start_server = tarantool_graphql.start_server -graphql.stop_server = tarantool_graphql.stop_server -graphql.TIMEOUT_INFINITY = accessor_general.TIMEOUT_INFINITY +-- functions +graphql.new = impl.new +graphql.compile = impl.compile +graphql.execute = impl.execute +graphql.start_server = impl.start_server +graphql.stop_server = impl.stop_server return graphql diff --git a/graphql/query_to_avro.lua b/graphql/query_to_avro.lua index ff68c30..a219bbd 100644 --- a/graphql/query_to_avro.lua +++ b/graphql/query_to_avro.lua @@ -9,6 +9,7 @@ local path = "graphql.core" local introspection = require(path .. '.introspection') local query_util = require(path .. '.query_util') local avro_helpers = require('graphql.avro_helpers') +local convert_schema_helpers = require('graphql.convert_schema.helpers') -- module functions local query_to_avro = {} @@ -44,7 +45,7 @@ end --- @tparam table context current traversal context, here it just falls to the --- called functions (internal graphql-lua format) --- ---- @tresult table `result` is the resulting avro-schema +--- @treturn table `result` is the resulting avro-schema local function gql_type_to_avro(fieldType, subSelections, context) local fieldTypeName = fieldType.__type local isNonNull = false @@ -97,7 +98,7 @@ local function field_to_avro(object_type, fields, context) local fieldTypeAvro = gql_type_to_avro(fieldType.kind, subSelections, context) return { - name = fieldName, + name = convert_schema_helpers.base_name(fieldName), type = fieldTypeAvro, } end @@ -119,7 +120,7 @@ object_to_avro = function(object_type, selections, context) {}, {}, context) local result = { type = 'record', - name = object_type.name, + name = convert_schema_helpers.base_name(object_type.name), fields = {} } if #context.namespace_parts ~= 0 then diff --git a/graphql/server/graphiql/LICENSE b/graphql/server/graphiql/LICENSE new file mode 100644 index 0000000..ad274db --- /dev/null +++ b/graphql/server/graphiql/LICENSE @@ -0,0 +1,26 @@ +LICENSE AGREEMENT For GraphiQL software + +Facebook, Inc. (“Facebook”) owns all right, title and interest, including all +intellectual property and other proprietary rights, in and to the GraphiQL +software. Subject to your compliance with these terms, you are hereby granted a +non-exclusive, worldwide, royalty-free copyright license to (1) use and copy the +GraphiQL software; and (2) reproduce and distribute the GraphiQL software as +part of your own software (“Your Software”). Facebook reserves all rights not +expressly granted to you in this license agreement. 
+ +THE SOFTWARE AND DOCUMENTATION, IF ANY, ARE PROVIDED "AS IS" AND ANY EXPRESS OR +IMPLIED WARRANTIES (INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE) ARE DISCLAIMED. IN NO +EVENT SHALL FACEBOOK OR ITS AFFILIATES, OFFICES, DIRECTORS OR EMPLOYEES BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE +GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF +THE USE OF THE SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +You will include in Your Software (e.g., in the file(s), documentation or other +materials accompanying your software): (1) the disclaimer set forth above; (2) +this sentence; and (3) the following copyright notice: + +Copyright (c) 2015, Facebook, Inc. All rights reserved. diff --git a/graphql/server/static/css/graphiql.css b/graphql/server/graphiql/static/css/graphiql.css similarity index 100% rename from graphql/server/static/css/graphiql.css rename to graphql/server/graphiql/static/css/graphiql.css diff --git a/graphql/server/static/js/graphiql.js b/graphql/server/graphiql/static/js/graphiql.js similarity index 100% rename from graphql/server/static/js/graphiql.js rename to graphql/server/graphiql/static/js/graphiql.js diff --git a/graphql/server/server.lua b/graphql/server/server.lua index ae58b25..0cf42b3 100644 --- a/graphql/server/server.lua +++ b/graphql/server/server.lua @@ -1,7 +1,6 @@ local fio = require('fio') local utils = require('graphql.server.utils') local json = require('json') -local check = require('graphql.utils').check local server = {} @@ -26,6 +25,8 @@ local function static_handler(req) if path == '/' then path = fio.pathjoin('graphiql', 'index.html') + else + path = fio.pathjoin('graphiql', path) end local lib_dir = utils.script_path() diff --git a/graphql/simple_config.lua b/graphql/simple_config.lua index 32baa5d..6199f00 100644 --- a/graphql/simple_config.lua +++ b/graphql/simple_config.lua @@ -148,7 +148,7 @@ end --- The function returns formats of all fully defined spaces. --- Spaces are taken from the tarantool instance in which --- tarantool graphql is launched. For definition of fully ---- defined spaces see @{is_fully_defined}. +--- defined spaces see `is_fully_defined`. --- --- @treturn table spaces_formats {[space_name] = {space_format}, ...} --- where space_format is {{first_field_format}, {second_field_format}, ...} diff --git a/graphql/tarantool_graphql.lua b/graphql/tarantool_graphql.lua deleted file mode 100644 index 2465d77..0000000 --- a/graphql/tarantool_graphql.lua +++ /dev/null @@ -1,1911 +0,0 @@ ---- Abstraction layer between a data collections (e.g. tarantool's spaces) and ---- the GraphQL query language. ---- ---- Random notes: ---- ---- * GraphQL top level statement must be a collection name. Arguments for this ---- statement match non-deducible field names of corresponding object and ---- passed to an accessor function in the filter argument. ---- ---- Border cases: ---- ---- * Unions: as GraphQL specification says "...no fields may be queried on ---- Union type without the use of typed fragments." Tarantool_graphql ---- behaves this way. So 'common fields' are not supported. This does NOT ---- work: ---- ---- hero { ---- hero_id -- common field; does NOT work ---- ... 
on human { ---- name ---- } ---- ... on droid { ---- model ---- } ---- } ---- ---- ---- ---- (GraphQL spec: http://facebook.github.io/graphql/October2016/#sec-Unions) ---- Also, no arguments are currently allowed for fragments. ---- See issue about this (https://github.com/facebook/graphql/issues/204) - -local json = require('json') -local yaml = require('yaml') -local log = require('log') - -local accessor_space = require('graphql.accessor_space') -local accessor_shard = require('graphql.accessor_shard') -local parse = require('graphql.core.parse') -local schema = require('graphql.core.schema') -local types = require('graphql.core.types') -local validate = require('graphql.core.validate') -local execute = require('graphql.core.execute') -local query_to_avro = require('graphql.query_to_avro') -local simple_config = require('graphql.simple_config') -local config_complement = require('graphql.config_complement') -local server = require('graphql.server.server') - -local utils = require('graphql.utils') -local check = utils.check - -local tarantool_graphql = {} --- instance of tarantool graphql to provide graphql:compile() and --- graphql:execute() method (with creating zero configuration graphql instance --- under hood when calling compile() for the first time) -local default_instance - --- forward declarations -local gql_type - -local function is_scalar_type(avro_schema_type) - check(avro_schema_type, 'avro_schema_type', 'string') - - local scalar_types = { - ['int'] = true, - ['int*'] = true, - ['long'] = true, - ['long*'] = true, - ['float'] = true, - ['float*'] = true, - ['double'] = true, - ['double*'] = true, - ['boolean'] = true, - ['boolean*'] = true, - ['string'] = true, - ['string*'] = true, - ['null'] = true, - } - - return scalar_types[avro_schema_type] or false -end - -local function is_comparable_scalar_type(avro_schema_type) - check(avro_schema_type, 'avro_schema_type', 'string') - - local scalar_types = { - ['int'] = true, - ['int*'] = true, - ['long'] = true, - ['long*'] = true, - ['boolean'] = true, - ['boolean*'] = true, - ['string'] = true, - ['string*'] = true, - ['null'] = true, - } - - return scalar_types[avro_schema_type] or false -end - -local function is_compound_type(avro_schema_type) - check(avro_schema_type, 'avro_schema_type', 'string') - - local compound_types = { - ['record'] = true, - ['record*'] = true, - ['array'] = true, - ['array*'] = true, - ['map'] = true, - ['map*'] = true, - } - - return compound_types[avro_schema_type] or false -end - -local function avro_type(avro_schema, opts) - local opts = opts or {} - local allow_references = opts.allow_references or false - - if type(avro_schema) == 'table' then - if utils.is_array(avro_schema) then - return 'union' - elseif is_compound_type(avro_schema.type) then - return avro_schema.type - elseif allow_references then - return avro_schema - end - elseif type(avro_schema) == 'string' then - if is_scalar_type(avro_schema) then - return avro_schema - elseif allow_references then - return avro_schema - end - end - error('unrecognized avro-schema type: ' .. json.encode(avro_schema)) -end - -local function nullable(gql_class) - assert(type(gql_class) == 'table', 'gql_class must be a table, got ' .. - type(gql_class)) - - if gql_class.__type ~= 'NonNull' then return gql_class end - - assert(gql_class.ofType ~= nil, 'gql_class.ofType must not be nil') - return nullable(gql_class.ofType) -end - -local function raw_gql_type(gql_class) - assert(type(gql_class) == 'table', 'gql_class must be a table, got ' .. 
- type(gql_class)) - - while gql_class.ofType ~= nil do - gql_class = gql_class.ofType - end - - return gql_class -end - -local types_long = types.scalar({ - name = 'Long', - description = 'Long is non-bounded integral type', - serialize = function(value) return tonumber(value) end, - parseValue = function(value) return tonumber(value) end, - parseLiteral = function(node) - -- 'int' is name of the immediate value type - if node.kind == 'int' then - return tonumber(node.value) - end - end -}) - -local types_double = types.scalar({ - name = 'Double', - serialize = tonumber, - parseValue = tonumber, - parseLiteral = function(node) - -- 'float' and 'int' are names of immediate value types - if node.kind == 'float' or node.kind == 'int' then - return tonumber(node.value) - end - end -}) - -local types_map = types.scalar({ - name = 'Map', - description = 'Map is a dictionary with string keys and values of ' .. - 'arbitrary but same among all values type', - serialize = function(value) return value end, - parseValue = function(value) return value end, - parseLiteral = function(node) - if node.kind == 'Map' then - return node.value - end - end -}) - -local function convert_scalar_type(avro_schema, opts) - local opts = opts or {} - assert(type(opts) == 'table', 'opts must be nil or table, got ' .. - type(opts)) - local raise = opts.raise or false - assert(type(raise) == 'boolean', 'opts.raise must be boolean, got ' .. - type(raise)) - - local scalar_types = { - ['int'] = types.int.nonNull, - ['int*'] = types.int, - ['long'] = types_long.nonNull, - ['long*'] = types_long, - ['float'] = types.float.nonNull, - ['float*'] = types.float, - ['double'] = types_double.nonNull, - ['double*'] = types_double, - ['boolean'] = types.boolean.nonNull, - ['boolean*'] = types.boolean, - ['string'] = types.string.nonNull, - ['string*'] = types.string, - } - - local avro_t = avro_type(avro_schema) - local graphql_type = scalar_types[avro_t] - if graphql_type ~= nil then - return graphql_type - end - - if raise then - error('unrecognized avro-schema scalar type: ' .. - json.encode(avro_schema)) - end - - return nil -end - ---- Non-recursive version of the @{gql_type} function that returns ---- InputObject instead of Object. ---- ---- An error will be raised if avro_schema type is 'record' ---- and its' fields have non-scalar types. So triple nesting level is not ---- supported (record with record as a field - ok, record with record which ---- has inside an another level - not ok). -local function gql_argument_type(avro_schema) - assert(avro_schema ~= nil, - 'avro_schema must not be nil') - - if avro_type(avro_schema) == 'record' then - assert(type(avro_schema.name) == 'string', - ('avro_schema.name must be a string, got %s (avro_schema %s)') - :format(type(avro_schema.name), json.encode(avro_schema))) - - assert(type(avro_schema.fields) == 'table', - ('avro_schema.fields must be a table, got %s (avro_schema %s)') - :format(type(avro_schema.fields), json.encode(avro_schema))) - - local fields = {} - for _, field in ipairs(avro_schema.fields) do - assert(type(field.name) == 'string', - ('field.name must be a string, got %s (schema %s)') - :format(type(field.name), json.encode(field))) - - local gql_field_type = convert_scalar_type( - field.type, {raise = true}) - - fields[field.name] = { - name = field.name, - kind = gql_field_type, - } - end - - local res = types.nonNull(types.inputObject({ - name = avro_schema.name, - description = 'generated from avro-schema for ' .. 
- avro_schema.name, - fields = fields, - })) - - return res - else - local res = convert_scalar_type(avro_schema, {raise = false}) - if res == nil then - error('unrecognized avro-schema type: ' .. - json.encode(avro_schema)) - end - return res - end -end - ---- Convert each field of an avro-schema to a scalar graphql type or an input ---- object. ---- ---- It uses the @{gql_argument_type} function to convert each field, then skips ---- fields of record, array and map types and gives the resulting list of ---- converted fields. ---- ---- @tparam table fields list of fields of the avro-schema record fields format ---- ---- @tparam[opt] table opts optional options: ---- ---- * `skip_compound` -- do not add fields of record type to the arguments; ---- default: false. ---- ---- * `dont_skip` -- do not skip any fields; default: false. ---- ---- @treturn table `args` -- map with type names as keys and graphql types as ---- values -local function convert_record_fields_to_args(fields, opts) - assert(type(fields) == 'table', - 'fields must be a table, got ' .. type(fields)) - - local opts = opts or {} - assert(type(opts) == 'table', - 'opts must be a table, got ' .. type(opts)) - - local skip_compound = opts.skip_compound or false - assert(type(skip_compound) == 'boolean', - 'skip_compound must be a boolean, got ' .. type(skip_compound)) - - local dont_skip = opts.dont_skip or false - check(dont_skip, 'dont_skip', 'boolean') - - local args = {} - for _, field in ipairs(fields) do - assert(type(field.name) == 'string', - ('field.name must be a string, got %s (schema %s)') - :format(type(field.name), json.encode(field))) - -- records, arrays (gql lists), maps and unions can't be arguments, so - -- these graphql types are to be skipped; - -- skip_compound == false is the trick for accessor_general-provided - -- record; we don't expect map, array or union here as well as we don't - -- expect avro-schema reference. - local avro_t = avro_type(field.type, {allow_references = true}) - local add_field = dont_skip or is_comparable_scalar_type(avro_t) or - (not skip_compound and not is_scalar_type(avro_t)) - if add_field then - local ok, gql_class = pcall(gql_argument_type, field.type) - -- XXX: we need better avro-schema -> graphql types converter to - -- handle the following cases: - -- * scalar arguments that can be checked for equality (object - -- args): skip any other - -- * pcre / limit / offset (nothing special here I guess) - -- * auxiliary schemas for insert / update: don't skip anything - if ok then - args[field.name] = nullable(gql_class) - else - log.warn(('Cannot add argument "%s": %s'):format( - field.name, tostring(gql_class))) - end - end - end - return args -end - ---- Convert each field of an avro-schema to a graphql type. 
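For illustration, the argument-filtering rule above (comparable scalars always become arguments, floats and doubles are dropped, compound types pass only when `skip_compound` is false, and `dont_skip` disables filtering) can be condensed into a self-contained sketch. The helpers below are trimmed stand-ins for `is_scalar_type` and `is_comparable_scalar_type` defined earlier in this file, and the sample calls are hypothetical:

```lua
-- Trimmed stand-in for is_comparable_scalar_type from this module.
local function is_comparable_scalar_type(avro_t)
    local comparable = {
        ['int'] = true, ['int*'] = true, ['long'] = true, ['long*'] = true,
        ['boolean'] = true, ['boolean*'] = true,
        ['string'] = true, ['string*'] = true, ['null'] = true,
    }
    return comparable[avro_t] or false
end

-- Trimmed stand-in for is_scalar_type: comparable scalars plus floats.
local function is_scalar_type(avro_t)
    return is_comparable_scalar_type(avro_t) or avro_t == 'float' or
        avro_t == 'float*' or avro_t == 'double' or avro_t == 'double*'
end

-- The add_field predicate used in convert_record_fields_to_args.
local function add_field(avro_t, opts)
    return opts.dont_skip or is_comparable_scalar_type(avro_t) or
        (not opts.skip_compound and not is_scalar_type(avro_t))
end

print(add_field('string', {skip_compound = true})) -- true: comparable scalar
print(add_field('double', {skip_compound = true})) -- false: not comparable
print(add_field('record', {skip_compound = true})) -- false: compound skipped
print(add_field('record', {dont_skip = true}))     -- true: filtering disabled
```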
---- ---- @tparam table state for read state.accessor and previously filled ---- state.nullable_collection_types ---- ---- @tparam table fields fields part from an avro-schema ---- ---- @tparam table context as described in @{gql_type}; not used here ---- explicitly, but `path` and `field_name` are *updated* and the `context` is ---- passed deeper within the @{gql_type} call ---- ---- @treturn table `res` -- map with type names as keys and graphql types as ---- values -local function convert_record_fields(state, fields, context) - local res = {} - for _, field in ipairs(fields) do - assert(type(field.name) == 'string', - ('field.name must be a string, got %s (schema %s)') - :format(type(field.name), json.encode(field))) - - table.insert(context.path, field.name) - context.field_name = field.name - res[field.name] = { - name = field.name, - kind = gql_type(state, field.type, context), - } - table.remove(context.path, #context.path) - context.field_name = nil - end - return res -end - -local function args_from_destination_collection(state, collection, - connection_type) - if connection_type == '1:1' then - return state.object_arguments[collection] - elseif connection_type == '1:1*' then - return state.object_arguments[collection] - elseif connection_type == '1:N' then - return state.all_arguments[collection] - else - error('unknown connection type: ' .. tostring(connection_type)) - end -end - -local function specify_destination_type(destination_type, connection_type) - if connection_type == '1:1' then - return types.nonNull(destination_type) - elseif connection_type == '1:1*' then - return destination_type - elseif connection_type == '1:N' then - return types.nonNull(types.list(types.nonNull(destination_type))) - else - error('unknown connection type: ' .. tostring(connection_type)) - end -end - ---- The function 'boxes' given collection type. ---- ---- Why the 'boxing' of collection types is needed and how it is done is ---- described in comments to @{convert_multihead_connection}. ---- ---- @tparam table type_to_box GraphQL Object type (which represents a collection) ---- @tparam string connection_type of given collection (1:1, 1:1* or 1:N) ---- @tparam string type_to_box_name name of given 'type_to_box' (It can not ---- be taken from 'type_to_box' because at the time of function execution ---- 'type_to_box' refers to an empty table, which later will be filled with ---- actual type table) ---- @treturn table GraphQL Object type representing 'boxed' collection ---- @treturn string name of the single field in the box GraphQL Object -local function box_collection_type(type_to_box, connection_type, type_to_box_name) - check(type_to_box, 'type_to_box', 'table') - check(connection_type, 'connection_type', 'string') - check(type_to_box_name, 'type_to_box_name', 'string') - - local box_type_name - local box_type_description - - if connection_type == '1:1' then - box_type_name = 'box_' .. type_to_box_name - box_type_description = 'Box around 1:1 multi-head variant' - elseif connection_type == '1:1*' then - box_type_name = 'box_' .. type_to_box_name - box_type_description = 'Box around 1:1* multi-head variant' - elseif connection_type == '1:N' then - box_type_name = 'box_array_' .. type_to_box_name - box_type_description = 'Box around 1:N multi-head variant' - else - error('unknown connection type: ' .. 
tostring(connection_type)) - end - - local field_name = type_to_box_name - local box_field = {[field_name] = {name = field_name, kind = type_to_box}} - local box_type = types.object({ - name = box_type_name, - description = box_type_description, - fields = box_field - }) - - return box_type, field_name -end - -local function parent_args_values(parent, connection_parts) - local destination_args_names = {} - local destination_args_values = {} - for _, part in ipairs(connection_parts) do - assert(type(part.source_field) == 'string', - 'part.source_field must be a string, got ' .. - type(part.destination_field)) - assert(type(part.destination_field) == 'string', - 'part.destination_field must be a string, got ' .. - type(part.destination_field)) - - destination_args_names[#destination_args_names + 1] = - part.destination_field - local value = parent[part.source_field] - destination_args_values[#destination_args_values + 1] = value - end - - return destination_args_names, destination_args_values -end - --- Check FULL match constraint before request of --- destination object(s). Note that connection key parts --- can be prefix of index key parts. Zero parts count --- considered as ok by this check. -local function are_all_parts_null(parent, connection_parts) - local are_all_parts_null = true - local are_all_parts_non_null = true - for _, part in ipairs(connection_parts) do - local value = parent[part.source_field] - - if value ~= nil then -- nil or box.NULL - are_all_parts_null = false - else - are_all_parts_non_null = false - end - end - - local ok = are_all_parts_null or are_all_parts_non_null - if not ok then -- avoid extra json.encode() - assert(ok, - 'FULL MATCH constraint was failed: connection ' .. - 'key parts must be all non-nulls or all nulls; ' .. - 'object: ' .. json.encode(parent)) - end - - return are_all_parts_null -end - -local function separate_args_instance(args_instance, connection_args, - connection_list_args, extra_args) - local object_args_instance = {} - local list_args_instance = {} - local extra_args_instance = {} - - for k, v in pairs(args_instance) do - if extra_args[k] ~= nil then - extra_args_instance[k] = v - elseif connection_list_args[k] ~= nil then - list_args_instance[k] = v - elseif connection_args[k] ~= nil then - object_args_instance[k] = v - else - error(('cannot found "%s" field ("%s" value) ' .. - 'within allowed fields'):format(tostring(k), - json.encode(v))) - end - end - return object_args_instance, list_args_instance, extra_args_instance -end - ---- The function converts passed simple connection to a field of GraphQL type. ---- ---- @tparam table state for read state.accessor and previously filled ---- state.nullable_collection_types (those are gql types) ---- @tparam table avro_schema input avro-schema ---- @tparam[opt] table collection table with schema_name, connections fields ---- described a collection (e.g. 
tarantool's spaces) ---- ---- @tparam table state for for collection types ---- @tparam table connection simple connection to create field on ---- @tparam table collection_name name of the collection which has given ---- connection -local function convert_simple_connection(state, connection, collection_name) - local c = connection - - check(c.destination_collection, 'connection.destination_collection', 'string') - check(c.parts, 'connection.parts', 'table') - - -- gql type of connection field - local destination_type = - state.nullable_collection_types[c.destination_collection] - assert(destination_type ~= nil, - ('destination_type (named %s) must not be nil'):format( - c.destination_collection)) - - local raw_destination_type = destination_type - destination_type = specify_destination_type(destination_type, c.type) - - -- capture `raw_destination_type` - local function genResolveField(info) - return function(field_name, object, filter, opts) - assert(raw_destination_type.fields[field_name], - ('performing a subrequest by the non-existent ' .. - 'field "%s" of the collection "%s"'):format(field_name, - c.destination_collection)) - return raw_destination_type.fields[field_name].resolve( - object, filter, info, opts) - end - end - - local c_args = args_from_destination_collection(state, - c.destination_collection, c.type) - local c_list_args = state.list_arguments[c.destination_collection] - local e_args = state.extra_arguments[c.destination_collection] - - local field = { - name = c.name, - kind = destination_type, - arguments = c_args, - -- captures c.{parts, name, destination_collection}, collection_name, - -- genResolveField, c_args, c_list_args. - resolve = function(parent, args_instance, info, opts) - local opts = opts or {} - assert(type(opts) == 'table', - 'opts must be nil or a table, got ' .. type(opts)) - local dont_force_nullability = - opts.dont_force_nullability or false - assert(type(dont_force_nullability) == 'boolean', - 'opts.dont_force_nullability ' .. - 'must be nil or a boolean, got ' .. - type(dont_force_nullability)) - - local destination_args_names, destination_args_values = - parent_args_values(parent, c.parts) - - -- Avoid non-needed index lookup on a destination collection when - -- all connection parts are null: - -- * return null for 1:1* connection; - -- * return {} for 1:N connection (except the case when source - -- collection is the query or the mutation pseudo-collection). - if collection_name ~= nil and are_all_parts_null(parent, c.parts) - then - if c.type ~= '1:1*' and c.type ~= '1:N' then - -- `if` is to avoid extra json.encode - assert(c.type == '1:1*' or c.type == '1:N', - ('only 1:1* or 1:N connections can have ' .. - 'all key parts null; parent is %s from ' .. 
- 'collection "%s"'):format(json.encode(parent), - tostring(collection_name))) - end - return c.type == '1:N' and {} or nil - end - - local from = { - collection_name = collection_name, - connection_name = c.name, - destination_args_names = destination_args_names, - destination_args_values = destination_args_values, - } - local resolveField = genResolveField(info) - local extra = { - qcontext = info.qcontext, - resolveField = resolveField, -- for subrequests - extra_args = {}, - } - - -- object_args_instance will be passed to 'filter' - -- list_args_instance will be passed to 'args' - -- extra_args_instance will be passed to 'extra.extra_args' - local object_args_instance, list_args_instance, - extra_args_instance = separate_args_instance(args_instance, - c_args, c_list_args, e_args) - extra.extra_args = extra_args_instance - - local objs = state.accessor:select(parent, - c.destination_collection, from, - object_args_instance, list_args_instance, extra) - assert(type(objs) == 'table', - 'objs list received from an accessor ' .. - 'must be a table, got ' .. type(objs)) - if c.type == '1:1' or c.type == '1:1*' then - -- we expect here exactly one object even for 1:1* - -- connections because we processed all-parts-are-null - -- situation above - assert(#objs == 1 or dont_force_nullability, - 'expect one matching object, got ' .. - tostring(#objs)) - return objs[1] - else -- c.type == '1:N' - return objs - end - end, - } - - return field -end - ---- The function converts passed multi-head connection to GraphQL Union type. ---- ---- Destination collections of passed multi-head connection are turned into ---- variants of resulting GraphQL Union type. Note that GraphQL types which ---- represent destination collections are wrapped with 'box' types. Here is 'how' ---- and 'why' it is done. ---- ---- How: ---- Let's consider multi-head connection with two destination collections: ---- "human": { ---- "name": "human", ---- "type": "record", ---- "fields": [ ---- { "name": "hero_id", "type": "string" }, ---- { "name": "name", "type": "string" } ---- ] ---- } ---- ---- "starship": { ---- "name": "starship", ---- "type": "record", ---- "fields": [ ---- { "name": "hero_id", "type": "string" }, ---- { "name": "model", "type": "string" } ---- ] ---- } ---- ---- In case of 1:1 multi-head connection the resulting field can be accessed as ---- follows: ---- hero_connection { ---- ... on box_human_collection { ---- human_collection { ---- name ---- } ---- } ---- ... on box_starship_collection { ---- starship_collection { ---- model ---- } ---- } ---- } ---- ---- In case of 1:N multi-head connection: ---- hero_connection { ---- ... on box_array_human_collection { ---- human_collection { ---- name ---- } ---- } ---- ... on box_array_starship_collection { ---- starship_collection { ---- model ---- } ---- } ---- } ---- ---- Why: ---- There are two reasons for 'boxing'. ---- 1) In case of 1:N connections, destination collections are represented by ---- GraphQL Lists (of Objects). But according to the GraphQL specification only ---- Objects can be variants of Union. So we need to 'box' Lists (into Objects ---- with single field) to use them as Union variants. ---- 2) GraphQL responses, received from tarantool graphql, must be avro-valid. ---- On every incoming GraphQL query a corresponding avro-schema can be generated. ---- Response to this query is 'avro-valid' if it can be successfully validated with ---- this generated (from incoming query) avro-schema. 
In case of multi-head ---- connections it means that value of multi-head connection field must have ---- the following format: SomeDestinationCollectionType: {...} where {...} ---- indicates the YAML encoding of a SomeDestinationCollectionType instance. ---- In case of 1:N {...} indicates a list of instances. Using of 'boxing' ---- provides the needed format. ---- ---- @tparam table state for collection types ---- @tparam table connection multi-head connection to create GraphQL Union on ---- @tparam table collection_name name of the collection which has given ---- connection ---- @treturn table GraphQL Union type -local function convert_multihead_connection(state, connection, collection_name) - local c = connection - local union_types = {} - local collection_to_arguments = {} - local collection_to_list_arguments = {} - local collection_to_extra_arguments = {} - local var_num_to_box_field_name = {} - - for _, v in ipairs(c.variants) do - assert(v.determinant, 'each variant should have a determinant') - check(v.determinant, 'variant\'s determinant', 'table') - check(v.destination_collection, 'variant.destination_collection', 'string') - check(v.parts, 'variant.parts', 'table') - - local destination_type = - state.nullable_collection_types[v.destination_collection] - assert(destination_type ~= nil, - ('destination_type (named %s) must not be nil'):format( - v.destination_collection)) - destination_type = specify_destination_type(destination_type, c.type) - - local variant_type, box_field_name = box_collection_type(destination_type, - c.type, v.destination_collection) - var_num_to_box_field_name[#union_types + 1] = box_field_name - union_types[#union_types + 1] = variant_type - - local v_args = args_from_destination_collection(state, - v.destination_collection, c.type) - - local v_list_args = state.list_arguments[v.destination_collection] - local v_extra_args = state.extra_arguments[v.destination_collection] - - collection_to_arguments[v.destination_collection] = v_args - collection_to_list_arguments[v.destination_collection] = v_list_args - collection_to_extra_arguments[v.destination_collection] = v_extra_args - end - - local determinant_keys = utils.get_keys(c.variants[1].determinant) - - local resolve_variant = function (parent) - assert(utils.do_have_keys(parent, determinant_keys), - ('Parent object of union object doesn\'t have determinant ' .. - 'fields which are necessary to determine which resolving ' .. - 'variant should be used. Union parent object:\n"%s"\n' .. - 'Determinant keys:\n"%s"'): - format(yaml.encode(parent), yaml.encode(determinant_keys))) - - local var_idx - local res_var - for i, var in ipairs(c.variants) do - local is_match = utils.is_subtable(parent, var.determinant) - if is_match then - res_var = var - var_idx = i - break - end - end - - local box_field_name = var_num_to_box_field_name[var_idx] - - assert(res_var, ('Variant resolving failed.'.. 
- 'Parent object: "%s"\n'):format(yaml.encode(parent))) - return res_var, var_idx, box_field_name - end - - local field = { - name = c.name, - kind = types.union({ - name = c.name, - types = union_types, - }), - arguments = nil, -- see Border cases/Unions at the top of the file - resolve = function(parent, args_instance, info) - local v, variant_num, box_field_name = resolve_variant(parent) - local destination_type = union_types[variant_num] - - local destination_collection = - state.nullable_collection_types[v.destination_collection] - local destination_args_names, destination_args_values = - parent_args_values(parent, v.parts) - - -- Avoid non-needed index lookup on a destination collection when - -- all connection parts are null: - -- * return null for 1:1* connection; - -- * return {} for 1:N connection (except the case when source - -- collection is the query or the mutation pseudo-collection). - if collection_name ~= nil and are_all_parts_null(parent, v.parts) - then - if c.type ~= '1:1*' and c.type ~= '1:N' then - -- `if` is to avoid extra json.encode - assert(c.type == '1:1*' or c.type == '1:N', - ('only 1:1* or 1:N connections can have ' .. - 'all key parts null; parent is %s from ' .. - 'collection "%s"'):format(json.encode(parent), - tostring(collection_name))) - end - return c.type == '1:N' and {} or nil, destination_type - end - - local from = { - collection_name = collection_name, - connection_name = c.name, - destination_args_names = destination_args_names, - destination_args_values = destination_args_values, - } - local extra = { - qcontext = info.qcontext, - extra_args = {}, - } - - local c_args = collection_to_arguments[destination_collection] - local c_list_args = - collection_to_list_arguments[destination_collection] - local e_args = collection_to_extra_arguments[destination_collection] - - -- object_args_instance will be passed to 'filter' - -- list_args_instance will be passed to 'args' - -- extra_args_instance will be passed to 'extra.extra_args' - local object_args_instance, list_args_instance, - extra_args_instance = separate_args_instance(args_instance, - c_args, c_list_args, e_args) - extra.extra_args = extra_args_instance - - local objs = state.accessor:select(parent, - v.destination_collection, from, - object_args_instance, list_args_instance, extra) - assert(type(objs) == 'table', - 'objs list received from an accessor ' .. - 'must be a table, got ' .. type(objs)) - if c.type == '1:1' or c.type == '1:1*' then - -- we expect here exactly one object even for 1:1* - -- connections because we processed all-parts-are-null - -- situation above - assert(#objs == 1, 'expect one matching object, got ' .. - tostring(#objs)) - - -- this 'wrapping' is needed because we use 'select' on - -- 'collection' GraphQL type and the result of the resolve function - -- must be in {'collection_name': {result}} format to - -- be avro-valid - local formatted_obj = {[box_field_name] = objs[1]} - return formatted_obj, destination_type - else -- c.type == '1:N' - local formatted_objs = {[box_field_name] = objs} - return formatted_objs, destination_type - end - end - } - return field -end - ---- The function converts passed connection to a field of GraphQL type. 
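Both connection flavors this converter dispatches on can be sketched as plain tables; the shapes follow the checks in the simple and multi-head converters above, while the collection and field values are made up:

```lua
-- Sketches of the two connection flavors; shapes follow the checks in
-- convert_simple_connection / convert_multihead_connection, values are
-- hypothetical.
local simple_connection = {
    name = 'user_orders',
    type = '1:N',
    destination_collection = 'order_collection',
    parts = {
        {source_field = 'user_id', destination_field = 'user_id'},
    },
}

local multihead_connection = {
    name = 'hero_connection',
    type = '1:1',
    variants = {
        {
            determinant = {hero_type = 'human'},
            destination_collection = 'human_collection',
            parts = {
                {source_field = 'hero_id', destination_field = 'hero_id'},
            },
        },
        {
            determinant = {hero_type = 'starship'},
            destination_collection = 'starship_collection',
            parts = {
                {source_field = 'hero_id', destination_field = 'hero_id'},
            },
        },
    },
}
```

The presence of `destination_collection` selects the simple path and `variants` selects the multi-head one; at resolve time the parent object's fields are matched against each `determinant` (via `utils.is_subtable`) to pick the variant, which is why the parent must carry the determinant keys.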
---- ---- @tparam table state for read state.accessor and previously filled ---- state.types (state.types are gql types) ---- @tparam table connection connection to create field on ---- @tparam table collection_name name of the collection which have given ---- connection ---- @treturn table simple and union connection depending on the type of ---- input connection -local convert_connection_to_field = function(state, connection, collection_name) - assert(type(connection.type) == 'string', - 'connection.type must be a string, got ' .. type(connection.type)) - assert(connection.type == '1:1' or connection.type == '1:1*' or - connection.type == '1:N', 'connection.type must be 1:1, 1:1* or 1:N, '.. - 'got ' .. connection.type) - assert(type(connection.name) == 'string', - 'connection.name must be a string, got ' .. type(connection.name)) - assert(connection.destination_collection or connection.variants, - 'connection must either destination_collection or variants field') - - if connection.destination_collection then - return convert_simple_connection(state, connection, collection_name) - end - - if connection.variants then - return convert_multihead_connection(state, connection, collection_name) - end -end - ---- The function 'boxes' given GraphQL type into GraphQL Object 'box' type. ---- ---- @tparam table type_to_box GraphQL type to be boxed ---- @tparam string box_field_name name of the single box field ---- @treturn table GraphQL Object -local function box_type(type_to_box, box_field_name) - check(type_to_box, 'type_to_box', 'table') - check(box_field_name, 'box_field_name', 'string') - - local gql_true_type = nullable(type_to_box) - - local box_name = gql_true_type.name or gql_true_type.__type - box_name = box_name .. '_box' - - local box_fields = {[box_field_name] = {name = box_field_name, - kind = type_to_box }} - - return types.object({ - name = box_name, - description = 'Box (wrapper) around union variant', - fields = box_fields, - }) -end - ---- The functions creates table of GraphQL types from avro-schema union type. ---- ---- @tparam table avro-schema ---- ---- @tparam table state tarantool_graphql instance ---- ---- @tparam table context as described in @{gql_type}; not used here ---- explicitly, but passed deeper within the @{gql_type} call ---- ---- @treturn table union_types ---- ---- @treturn table determinant_to_type ---- ---- @treturn boolean is_nullable -local function create_union_types(avro_schema, state, context) - check(avro_schema, 'avro_schema', 'table') - assert(utils.is_array(avro_schema), 'union avro-schema must be an array ' .. - ', got\n' .. yaml.encode(avro_schema)) - - local union_types = {} - local determinant_to_type = {} - local is_nullable = false - - for _, type in ipairs(avro_schema) do - -- If there is a 'null' type among 'union' types (in avro-schema union) - -- then resulting GraphQL Union type will be nullable - if type == 'null' then - is_nullable = true - else - local variant_type = gql_type(state, type, context) - local box_field_name = type.name or avro_type(type) - union_types[#union_types + 1] = box_type(variant_type, box_field_name) - local determinant = type.name or type.type or type - determinant_to_type[determinant] = union_types[#union_types] - end - end - - return union_types, determinant_to_type, is_nullable -end - ---- The function creates GraphQL Union type from given avro-schema union type. ---- There are two problems with GraphQL Union types, which we solve with specific ---- format of generated Unions. 
These problems are: ---- ---- 1) GraphQL Unions represent an object that could be one of a list of ---- GraphQL Object types. So Scalars and Lists can not be one of Union ---- types. ---- ---- 2) GraphQL responses, received from tarantool graphql, must be avro-valid. ---- On every incoming GraphQL query a corresponding avro-schema can be ---- generated. Response to this query is 'avro-valid' if it can be ---- successfully validated with this generated (from incoming query) ---- avro-schema. ---- ---- Specific format of generated Unions include the following: ---- ---- Avro scalar types (e.g. int, string) are converted into GraphQL Object types. ---- Avro scalar converted to GraphQL Scalar (string -> String) and then name of ---- GraphQL type is concatenated with '_box' ('String_box'). Resulting name is a name ---- of created GraphQL Object. This object has only one field with GraphQL type ---- corresponding to avro scalar type (String type in our example). Avro type's ---- name is taken as a name for this single field. ---- ---- [..., "string", ...] ---- ---- turned into ---- MyUnion { ---- ... ---- ... on String_box { ---- string ---- ... ---- } ---- ---- Avro arrays and maps are converted into GraphQL Object types. The name of ---- the resulting GraphQL Object is 'List_box' or 'Map_box' respectively. This ---- object has only one field with GraphQL type corresponding to 'items'/'values' ---- avro type. 'array' or 'map' (respectively) is taken as a name of this ---- single field. ---- ---- [..., {"type": "array", "items": "int"}, ...] ---- ---- turned into ---- MyUnion { ---- ... ---- ... on List_box { ---- array ---- ... ---- } ---- ---- Avro records are converted into GraphQL Object types. The name of the resulting ---- GraphQL Object is concatenation of record's name and '_box'. This Object ---- has only one field. The name of this field is record's name. The type of this ---- field is GraphQL Object generated from avro record schema in a usual way ---- (see @{gql_type}) ---- ---- { "type": "record", "name": "Foo", "fields":[ ---- { "name": "foo1", "type": "string" }, ---- { "name": "foo2", "type": "string" } ---- ]} ---- ---- turned into ---- MyUnion { ---- ... ---- ... on Foo_box { ---- Foo { ---- foo1 ---- foo2 ---- } ---- ... ---- } ---- ---- Please consider full example below. ---- ---- @tparam table state tarantool_graphql instance ---- ---- @tparam table avro_schema avro-schema union type ---- ---- @tparam table context as described in @{gql_type}; only ---- `context.field_name` is used here (as the name of the generated GraphQL ---- union); `path` is *updated* (with the field name) and the `context` is ---- passed deeper within the @{create_union_types} call (which calls ---- @{gql_type} inside) ---- ---- @treturn table GraphQL Union type. Consider the following example: ---- ---- Avro-schema (inside a record): ---- ---- ... ---- "name": "MyUnion", "type": [ ---- "null", ---- "string", ---- { "type": "array", "items": "int" }, ---- { "type": "record", "name": "Foo", "fields":[ ---- { "name": "foo1", "type": "string" }, ---- { "name": "foo2", "type": "string" } ---- ]} ---- ] ---- ... ---- ---- GraphQL Union type (It will be nullable as avro-schema has 'null' variant): ---- ---- MyUnion { ---- ... on String_box { ---- string ---- } ---- ---- ... on List_box { ---- array ---- } ---- ---- ... 
on Foo_box { ---- Foo { ---- foo1 ---- foo2 ---- } ---- } -local function create_gql_union(state, avro_schema, context) - check(avro_schema, 'avro_schema', 'table') - assert(utils.is_array(avro_schema), 'union avro-schema must be an ' .. - 'array, got:\n' .. yaml.encode(avro_schema)) - - local union_name = context.field_name - check(union_name, 'field_name', 'string') - - -- check avro-schema constraints - for i, type in ipairs(avro_schema) do - assert(avro_type(type) ~= 'union', 'unions must not immediately ' .. - 'contain other unions') - - if type.name ~= nil then - for j, another_type in ipairs(avro_schema) do - if i ~= j then - if another_type.name ~= nil then - assert(type.name:gsub('%*$', '') ~= - another_type.name:gsub('%*$', ''), - 'Unions may not contain more than one schema ' .. - 'with the same name') - end - end - end - else - for j, another_type in ipairs(avro_schema) do - if i ~= j then - assert(avro_type(type) ~= avro_type(another_type), - 'Unions may not contain more than one schema with ' .. - 'the same type except for the named types: ' .. - 'record, fixed and enum') - end - end - end - end - - -- create GraphQL union - table.insert(context.path, union_name) - local union_types, determinant_to_type, is_nullable = - create_union_types(avro_schema, state, context) - table.remove(context.path, #context.path) - - local union_type = types.union({ - types = union_types, - name = union_name, - resolveType = function(result) - for determinant, type in pairs(determinant_to_type) do - if result[determinant] ~= nil then - return type - end - end - error(('result object has no determinant field matching ' .. - 'determinants for this union\nresult object:\n%s' .. - 'determinants:\n%s'):format(yaml.encode(result), - yaml.encode(determinant_to_type))) - end - }) - - if not is_nullable then - union_type = types.nonNull(union_type) - end - - return union_type -end - ---- The function converts passed avro-schema to a GraphQL type. ---- ---- @tparam table state for read state.accessor and previously filled ---- state.nullable_collection_types (those are gql types) ---- ---- @tparam table avro_schema input avro-schema ---- ---- @tparam table context current context of parsing the avro_schema, consists ---- the following fields: ---- ---- * `collection` (table; optional) is a table with `schema_name` and ---- `connections` fields describes a collection (e.g. local tarantool spaces ---- or sharded spaces) ---- ---- * `collection_name` (string; optional) name of the collection ---- ---- * `definitions` (table) map from currently parsed avro-schema names to ---- generated GraphQL types; it allows reusing the same types w/o creation a ---- new same-named type, that considered as an error by graphql-lua when ---- creating type map for introspection ---- ---- * `field_name` (string; optional) it is only for an union generation, ---- because avro-schema union has no name in it and specific name is ---- necessary for GraphQL union ---- ---- * `path` (table) path to our position in avro-schema tree; it is used now ---- only to determine whether we are on the upmost level or on a nested one ---- ---- If collection is passed connections from the collection will be taken into ---- account to automatically generate corresponding decucible fields. ---- ---- If collection_name is passed it will be used as the resulting graphql type ---- name instead of the avro-schema name. 
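As a reference point, the `context` described above is built once per top-level avro-schema in `parse_cfg` (see below); a minimal, runnable illustration of its shape, with invented collection data, is:

```lua
-- Shape of the gql_type context; see parse_cfg for the real construction.
-- The collection / collection_name values here are illustrative.
local context = {
    collection = {
        schema_name = 'user',
        connections = {},
    },
    collection_name = 'user_collection',
    definitions = {}, -- avro-schema name -> already generated GraphQL type
    field_name = nil, -- set only while a union needs a name
    path = {},        -- position in the avro-schema tree
}

-- An empty path means we are at the top level, so collection_name (not
-- the avro-schema name) becomes the GraphQL type name.
assert(next(context.path) == nil)
```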
---- ---- XXX As it is not clear now what to do with complex types inside arrays ---- (just pass to results or allow to use filters), only scalar arrays ---- is allowed for now. Note: map is considered scalar. -gql_type = function(state, avro_schema, context) - check(state, 'state', 'table') - assert(avro_schema ~= nil, 'avro_schema must not be nil') - check(context, 'context', 'table') - - local collection = context.collection - local collection_name = context.collection_name - local definitions = context.definitions - local field_name = context.field_name - local path = context.path - - check(collection, 'collection', 'table', 'nil') - check(collection_name, 'collection_name', 'string', 'nil') - check(field_name, 'field_name', 'string', 'nil') - check(definitions, 'definitions', 'table') - check(path, 'path', 'table') - - local accessor = state.accessor - assert(accessor ~= nil, 'state.accessor must not be nil') - assert(accessor.select ~= nil, 'state.accessor.select must not be nil') - assert(accessor.list_args ~= nil, - 'state.accessor.list_args must not be nil') - - -- type of the top element in the avro-schema - local avro_t = avro_type(avro_schema, {allow_references = true}) - - if avro_t == 'record' or avro_t == 'record*' then - assert(type(avro_schema.name) == 'string', - ('avro_schema.name must be a string, got %s (avro_schema %s)') - :format(type(avro_schema.name), json.encode(avro_schema))) - assert(type(avro_schema.fields) == 'table', - ('avro_schema.fields must be a table, got %s (avro_schema %s)') - :format(type(avro_schema.fields), json.encode(avro_schema))) - - local graphql_type_name = next(path) == nil and collection_name or - avro_schema.name - local def = definitions[graphql_type_name .. (avro_t:endswith('*') - and '*' or '')] - if def ~= nil then - return def - end - - local fields = convert_record_fields(state, avro_schema.fields, context) - - -- if collection param is passed then go over all connections - for _, c in ipairs((collection or {}).connections or {}) do - fields[c.name] = convert_connection_to_field(state, c, collection_name) - end - - -- create gql type - local res = types.object({ - name = graphql_type_name, - description = 'generated from avro-schema for ' .. - avro_schema.name, - fields = fields, - }) - assert(definitions[graphql_type_name] == nil and - definitions[graphql_type_name .. '*'] == nil, - 'multiple definitions of ' .. graphql_type_name) - definitions[graphql_type_name] = types.nonNull(res) - definitions[graphql_type_name .. '*'] = res - return avro_t == 'record' and types.nonNull(res) or res - elseif avro_t == 'enum' then - error('enums not implemented yet') -- XXX - elseif avro_t == 'array' or avro_t == 'array*' then - assert(avro_schema.items ~= nil, - 'items field must not be nil in array avro schema') - assert(type(avro_schema.items) == 'string' - or type(avro_schema.items) == 'table', - 'avro_schema.items must be a string or a table, got ' .. - type(avro_schema.items)) - - local gql_items_type = gql_type(state, avro_schema.items, context) - local gql_array = types.list(gql_items_type) - return avro_t == 'array' and types.nonNull(gql_array) or gql_array - elseif avro_t == 'map' or avro_t == 'map*' then - assert(avro_schema.values ~= nil, - 'values must not be nil in map avro schema') - assert(type(avro_schema.values) == 'table' - or type(avro_schema.values) == 'string', - ('avro_schema.values must be a table or a string, ' .. 
- 'got %s (avro_schema %s)'):format(type(avro_schema.values), - json.encode(avro_schema))) - - -- validate avro schema format inside 'values' - gql_type(state, avro_schema.values, context) - - local gql_map = types_map - return avro_t == 'map' and types.nonNull(gql_map) or gql_map - elseif avro_t == 'union' then - return create_gql_union(state, avro_schema, context) - else - if type(avro_schema) == 'string' then - if definitions[avro_schema] ~= nil then - return definitions[avro_schema] - end - end - - local res = convert_scalar_type(avro_schema, {raise = false}) - if res == nil then - error('unrecognized avro-schema type: ' .. - json.encode(avro_schema)) - end - return res - end -end - ---- Add extra arguments for collection / connection fields. ---- ---- XXX: This function is written in the hacky way. The function should gone ---- when we'll rewrite argument / InputObject generation in the right way. The ---- plan is the following: ---- ---- * Move object_args to accessor_general (or move all *_args function into a ---- separate module); skipping float / double / ... arguments should be done ---- here. ---- * TBD: generate per-connection arguments in avro-schema in some way? ---- * Move avro-schema -> GraphQL arguments translating into its own module. ---- * Support a sub-record arguments and others (union, array, ...). ---- * Generate arguments for cartesian product of {1:1, 1:1*, 1:N, all} x ---- {query, mutation, all} x {top-level, nested, all} x {collections}. ---- * Use generated arguments in GraphQL types (schema) generation. ---- ---- @tparam table state tarantool_graphql instance ---- ---- @tparam table root_types generated by @{create_root_collection} ---- ---- @return nothing -local function add_extra_arguments(state, root_types) - for _, what in ipairs({'Query', 'Mutation'}) do - -- add extra arguments to top-level fields (collections) - for collection_name, field in pairs(root_types[what].fields) do - -- Prevent exposing an argument inserted, say, into the mutation schema - -- subtree to the query subtree (it is needed because we use a booking - -- table for arguments). - field.arguments = table.copy(field.arguments) - - local extra_args = state.extra_arguments[collection_name] - local extra_args_meta = state.extra_arguments_meta[collection_name] - - for arg_name, arg in pairs(extra_args) do - local meta = extra_args_meta[arg_name] - check(meta, 'meta', 'table') - local add_arg = what == 'Mutation' or - not meta.add_to_mutations_only - if add_arg then - field.arguments[arg_name] = arg - end - end - - local parent_field = field - - local collection = state.collections[collection_name] - for _, c in ipairs(collection.connections or {}) do - -- XXX: support multihead connections - if c.destination_collection then - local collection_name = c.destination_collection - local field = raw_gql_type(parent_field.kind).fields[c.name] - local extra_args = state.extra_arguments[collection_name] - local extra_args_meta = - state.extra_arguments_meta[collection_name] - - for arg_name, arg in pairs(extra_args) do - local meta = extra_args_meta[arg_name] - check(meta, 'meta', 'table') - local add_arg = not meta.add_to_top_fields_only and - (what == 'Mutation' or - not meta.add_to_mutations_only) - if add_arg then - field.arguments[arg_name] = arg - end - end - end - end - end - end -end - ---- Create virtual root collections `query` and `mutation`, which has ---- connections to any collection. 
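Since every collection is wired to these roots through a generated 1:N connection, each collection name becomes a top-level field; a query like the following (the collection, argument and fields are hypothetical) is then resolvable:

```lua
-- A top-level GraphQL statement is simply a collection name; arguments
-- correspond to the collection's non-deducible fields. Names are made up.
local query = [[
    { user_collection(user_id: "user_id_1") { user_id name } }
]]
-- local compiled = gql_lib:compile(query)
-- local result = compiled:execute({})
```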
---- ---- Actually, each GQL query starts its execution from the `query` or ---- `mutation` collection. That is why it shoult contain connections to any ---- collection. ---- ---- @tparam table state dictionary which contains all information about the ---- schema, arguments, types... -local function create_root_collection(state) - local root_connections = {} - -- The fake connections have 1:N mechanics. - -- Create one connection for each collection. - for collection_name, collection in pairs(state.collections) do - table.insert(root_connections, { - parts = {}, - name = collection_name, - destination_collection = collection_name, - type = "1:N" - }) - end - - local root_types = {} - - for _, what in ipairs({'Query', 'Mutation'}) do - local root_schema = { - type = "record", - name = what, - -- The fake root has no fields. - fields = {} - } - local root_collection = { - name = what, - connections = root_connections - } - local context = { - collection = root_collection, - collection_name = nil, - definitions = {}, - field_name = nil, - path = {}, - } - - -- `gql_type` is designed to create GQL type corresponding to a real - -- schema and connections. However it also works with the fake schema. - -- Query/Mutation type must be the Object, so it cannot be nonNull. - root_types[what] = nullable(gql_type(state, root_schema, context)) - end - - add_extra_arguments(state, root_types) - - state.schema = schema.create({ - query = root_types['Query'], - mutation = root_types['Mutation'], - }) -end - ---- Execute a function for each 1:1 or 1:1* connection of each collection. ---- ---- @tparam table state tarantool_graphql instance ---- ---- @tparam table[opt] connection_types list of connection types to call `func` ---- on it; nil/box.NULL means all connections w/o filtering ---- ---- @tparam function func a function with the following parameters: ---- ---- * source collection name (string); ---- * connection (table). -local function for_each_connection(state, connection_types, func) - for collection_name, collection in pairs(state.collections) do - for _, c in ipairs(collection.connections or {}) do - if connection_types == nil or utils.value_in(c.type, - connection_types) then - func(collection_name, c) - end - end - end -end - ---- Add arguments corresponding to 1:1 and 1:1* connections (nested filters). ---- ---- @tparam table state graphql_tarantool instance -local function add_connection_arguments(state) - -- map destination collection to list of input objects - local input_objects = {} - -- map source collection and connection name to an input object - local lookup_input_objects = {} - - -- create InputObjects for each 1:1 or 1:1* connection of each collection - for_each_connection(state, {'1:1', '1:1*'}, function(collection_name, c) - -- XXX: support multihead connections - if c.variants ~= nil then return end - - local object = types.inputObject({ - name = c.name, - description = ('generated from the connection "%s" ' .. 
- 'of collection "%s" using collection "%s"'):format( - c.name, collection_name, c.destination_collection), - fields = state.object_arguments[c.destination_collection], - }) - - if input_objects[c.destination_collection] == nil then - input_objects[c.destination_collection] = {} - end - table.insert(input_objects[c.destination_collection], object) - - if lookup_input_objects[collection_name] == nil then - lookup_input_objects[collection_name] = {} - end - lookup_input_objects[collection_name][c.name] = object - end) - - -- update fields of collection arguments and input objects with other input - -- objects - for_each_connection(state, {'1:1', '1:1*'}, function(collection_name, c) - -- XXX: support multihead connections - if c.variants ~= nil then return end - - local new_object = lookup_input_objects[collection_name][c.name] - -- collection arguments - local fields = state.object_arguments[collection_name] - assert(fields[c.name] == nil, - 'we must not add an input object twice to the same collection ' .. - 'arguments list') - fields[c.name] = new_object - -- input objects - for _, input_object in ipairs(input_objects[collection_name] or {}) do - local fields = input_object.fields - assert(fields[c.name] == nil, - 'we must not add an input object twice to the same input ' .. - 'object') - fields[c.name] = { - name = c.name, - kind = new_object, - } - end - end) -end - -local function parse_cfg(cfg) - local state = {} - - -- collection type is always record, so always non-null; we can lazily - -- evaluate non-null type from nullable type, but not vice versa, so we - -- collect nullable types here and evaluate non-null ones where needed - state.nullable_collection_types = utils.gen_booking_table({}) - - state.object_arguments = utils.gen_booking_table({}) - state.list_arguments = utils.gen_booking_table({}) - state.all_arguments = utils.gen_booking_table({}) - - -- Booking table used here because of the one reason: inside a resolve - -- function we need to determine that a user-provided argument is an extra - -- argument. We capture extra_arguments[collection_name] into the resolve - -- function and sure it exists and will not be changed. - state.extra_arguments = utils.gen_booking_table({}) - state.extra_arguments_meta = {} - - local context = {} - - local accessor = cfg.accessor - assert(accessor ~= nil, 'cfg.accessor must not be nil') - assert(accessor.select ~= nil, 'cfg.accessor.select must not be nil') - assert(accessor.list_args ~= nil, - 'state.accessor.list_args must not be nil') - state.accessor = accessor - - assert(cfg.collections ~= nil, 'cfg.collections must not be nil') - local collections = table.copy(cfg.collections) - state.collections = collections - - -- Prepare types which represents: - -- - Avro schemas (collections) - -- - scalar field arguments (used to filter objects by value stored in it's - -- field) - -- - list arguments (offset, limit...) - for collection_name, collection in pairs(state.collections) do - collection.name = collection_name - assert(collection.schema_name ~= nil, - 'collection.schema_name must not be nil') - - local schema = cfg.schemas[collection.schema_name] - assert(schema ~= nil, ('cfg.schemas[%s] must not be nil'):format( - tostring(collection.schema_name))) - assert(schema.name == nil or schema.name == collection.schema_name, - ('top-level schema name does not match the name in ' .. 
- 'the schema itself: "%s" vs "%s"'):format(collection.schema_name, - schema.name)) - - assert(schema.type == 'record', - 'top-level schema must have record avro type, got ' .. - tostring(schema.type)) - - -- collection, collection_name are local for collection, definitions - -- are local for top-level avro-schema - if context[schema.name] == nil then - context[schema.name] = { - -- map from avro-schema names to graphql types - definitions = {}, - } - end - context[schema.name].collection = collection - context[schema.name].collection_name = collection_name - context[schema.name].field_name = nil - context[schema.name].path = {} - - -- recursively converts all avro types into GraphQL types in the given - -- schema - local collection_type = - gql_type(state, schema, context[schema.name]) - -- we utilize the fact that collection type is always non-null and - -- don't store this information; see comment above for - -- `nullable_collection_types` variable definition - assert(collection_type.__type == 'NonNull', - 'collection must always has non-null type') - state.nullable_collection_types[collection_name] = - nullable(collection_type) - - -- prepare arguments' types - local object_args = convert_record_fields_to_args(schema.fields, - {skip_compound = true}) - local list_args = convert_record_fields_to_args( - accessor:list_args(collection_name)) - local extra_args_avro, extra_args_meta = accessor:extra_args( - collection_name) - check(extra_args_meta, 'extra_args_meta', 'table') - local extra_args = convert_record_fields_to_args(extra_args_avro, - {dont_skip = true}) - - state.object_arguments[collection_name] = object_args - state.list_arguments[collection_name] = list_args - state.extra_arguments[collection_name] = extra_args - state.extra_arguments_meta[collection_name] = extra_args_meta - end - - add_connection_arguments(state) - - -- fill all_arguments with object_arguments + list_arguments - for collection_name, collection in pairs(state.collections) do - local object_args = state.object_arguments[collection_name] - local list_args = state.list_arguments[collection_name] - - local args = utils.merge_tables(object_args, list_args) - state.all_arguments[collection_name] = args - end - - -- create fake root for the `query` and the `mutation` collection - create_root_collection(state) - - return state -end - ---- Execute an operation from compiled query. ---- ---- @tparam qstate compiled query ---- ---- @tparam variables variables to pass to the query ---- ---- @tparam[opt] string operation_name optional operation name ---- ---- @treturn table result of the operation -local function gql_execute(qstate, variables, operation_name) - assert(qstate.state) - local state = qstate.state - assert(state.schema) - - check(variables, 'variables', 'table') - check(operation_name, 'operation_name', 'string', 'nil') - - local root_value = {} - - return execute(state.schema, qstate.ast, root_value, variables, - operation_name) -end - ---- Compile a query and execute an operation. ---- ---- See @{gql_compile} and @{gql_execute} for parameters description. ---- ---- @treturn table result of the operation -local function compile_and_execute(state, query, variables, operation_name) - assert(type(state) == 'table', 'use :gql_execute(...) instead of ' .. 
- '.execute(...)') - assert(state.schema ~= nil, 'have not compiled schema') - check(query, 'query', 'string') - check(variables, 'variables', 'table', 'nil') - check(operation_name, 'operation_name', 'string', 'nil') - - local compiled_query = state:compile(query) - return compiled_query:execute(variables, operation_name) -end - ---- Parse GraphQL query string, validate against the GraphQL schema and ---- provide an object with the function to execute an operation from the ---- request with specific variables values. ---- ---- @tparam table state a tarantool_graphql instance ---- ---- @tparam string query text of a GraphQL query ---- ---- @treturn table compiled query with `execute` and `avro_schema` functions -local function gql_compile(state, query) - assert(type(state) == 'table' and type(query) == 'string', - 'use :validate(...) instead of .validate(...)') - assert(state.schema ~= nil, 'have not compiled schema') - check(query, 'query', 'string') - - local ast = parse(query) - validate(state.schema, ast) - - local qstate = { - state = state, - ast = ast, - } - - local gql_query = setmetatable(qstate, { - __index = { - execute = gql_execute, - avro_schema = query_to_avro.convert - } - }) - return gql_query -end - -local function start_server(gql, host, port) - assert(type(gql) == 'table', - 'use :start_server(...) instead of .start_server(...)') - - check(host, 'host', 'nil', 'string') - check(port, 'port', 'nil', 'number') - - gql.server = server.init(gql, host, port) - gql.server:start() - - return ('The GraphQL server started at http://%s:%s'):format( - gql.server.host, gql.server.port - ) -end - -local function stop_server(gql) - assert(type(gql) == 'table', - 'use :stop_server(...) instead of .stop_server(...)') - assert(gql.server, 'no running server to stop') - - gql.server:stop() - - return ('The GraphQL server stopped at http://%s:%s'):format( - gql.server.host, gql.server.port) -end - -function tarantool_graphql.compile(query) - if default_instance == nil then - default_instance = tarantool_graphql.new() - end - return default_instance:compile(query) -end - -function tarantool_graphql.execute(query, variables, operation_name) - if default_instance == nil then - default_instance = tarantool_graphql.new() - end - return default_instance:execute(query, variables, operation_name) -end - -function tarantool_graphql.start_server() - if default_instance == nil then - default_instance = tarantool_graphql.new() - end - - return default_instance:start_server() -end - -function tarantool_graphql.stop_server() - if default_instance ~= nil and default_instance.server ~= nil then - return default_instance:stop_server() - end - return 'there is no active server in default Tarantool graphql instance' -end - ---- The function creates an accessor of desired type with default configuration. ---- ---- @tparam table cfg general tarantool_graphql config (contains schemas, ---- collections, service_fields and indexes) ---- @tparam string accessor type of desired accessor (space or shard) ---- @tparam table accessor_funcs set of functions to overwrite accessor ---- inner functions (`is_collection_exists`, `get_index`, `get_primary_index`, ---- `unflatten_tuple`, For more detailed description see @{accessor_general.new}) ---- These function allow this abstract data accessor behaves in the certain way. 
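A configuration exercising this default-accessor path might look as follows. This is only a sketch: the schema, collection and index definitions are invented, and the index options mirror the usual accessor_general metainformation format (note the `primary` flag, which the primary-index lookup relies on):

```lua
-- Hypothetical cfg for create_default_accessor with a space accessor.
local cfg = {
    accessor = 'space',
    schemas = {
        user = {
            type = 'record',
            name = 'user',
            fields = {
                {name = 'user_id', type = 'string'},
                {name = 'name', type = 'string'},
            },
        },
    },
    collections = {
        user_collection = {schema_name = 'user', connections = {}},
    },
    service_fields = {user = {}},
    indexes = {
        user_collection = {
            user_id_index = {
                service_fields = {},
                fields = {'user_id'},
                index_type = 'tree',
                unique = true,
                primary = true,
            },
        },
    },
}
-- local accessor = create_default_accessor(cfg)
```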
---- The function creates an accessor of the desired type with the default
---- configuration.
----
---- @tparam table cfg general tarantool_graphql config (contains schemas,
---- collections, service_fields and indexes)
---- @tparam string accessor type of the desired accessor (space or shard)
---- @tparam table accessor_funcs set of functions to overwrite accessor
---- inner functions (`is_collection_exists`, `get_index`, `get_primary_index`,
---- `unflatten_tuple`; for a more detailed description see
---- @{accessor_general.new}). These functions allow this abstract data
---- accessor to behave in a certain way. Note that accessor_space and
---- accessor_shard have their own sets of these functions and the
---- accessor_funcs argument (if passed) will be used to overwrite them.
-local function create_default_accessor(cfg)
-    check(cfg.accessor, 'cfg.accessor', 'string')
-    assert(cfg.accessor == 'space' or cfg.accessor == 'shard',
-        'accessor_type must be shard or space, got ' .. cfg.accessor)
-    check(cfg.service_fields, 'cfg.service_fields', 'table')
-    check(cfg.indexes, 'cfg.indexes', 'table')
-    check(cfg.collection_use_tomap, 'cfg.collection_use_tomap', 'table', 'nil')
-    check(cfg.accessor_funcs, 'cfg.accessor_funcs', 'table', 'nil')
-
-    if cfg.accessor == 'space' then
-        return accessor_space.new({
-            schemas = cfg.schemas,
-            collections = cfg.collections,
-            service_fields = cfg.service_fields,
-            indexes = cfg.indexes,
-            collection_use_tomap = cfg.collection_use_tomap,
-            resulting_object_cnt_max = cfg.resulting_object_cnt_max,
-            fetched_object_cnt_max = cfg.fetched_object_cnt_max,
-            timeout_ms = cfg.timeout_ms,
-            enable_mutations = cfg.enable_mutations,
-        }, cfg.accessor_funcs)
-    end
-
-    if cfg.accessor == 'shard' then
-        return accessor_shard.new({
-            schemas = cfg.schemas,
-            collections = cfg.collections,
-            service_fields = cfg.service_fields,
-            indexes = cfg.indexes,
-            collection_use_tomap = cfg.collection_use_tomap,
-            resulting_object_cnt_max = cfg.resulting_object_cnt_max,
-            fetched_object_cnt_max = cfg.fetched_object_cnt_max,
-            timeout_ms = cfg.timeout_ms,
-            enable_mutations = cfg.enable_mutations,
-        }, cfg.accessor_funcs)
-    end
-end
-
---- Create a tarantool_graphql library instance.
----
---- Usage:
----
---- ... = tarantool_graphql.new({
----     schemas = {
----         schema_name_foo = { -- the value is an avro-schema (esp., a record)
----             name = 'schema_name_foo',
----             type = 'record',
----             fields = {
----                 ...
----             }
----         },
----         ...
----     },
----     collections = {
----         collections_name_foo = {
----             schema_name = 'schema_name_foo',
----             connections = { -- the optional field
----                 {
----                     name = 'connection_name_bar',
----                     destination_collection = 'collection_baz',
----                     parts = {
----                         {
----                             source_field = 'field_name_source_1',
----                             destination_field = 'field_name_destination_1'
----                         },
----                         ...
----                     },
----                     index_name = 'index_name' -- it is for an accessor,
----                                               -- ignored in the graphql
----                                               -- part
----                 },
----                 ...
----             },
----         },
----         ...
----     },
----     accessor = setmetatable({}, {
----         __index = {
----             select = function(self, parent, collection_name, from,
----                     object_args_instance, list_args_instance, extra)
----                 -- * from has the following structure:
----                 --
----                 -- {
----                 --     collection_name = <...>,
----                 --     connection_name = <...>,
----                 --     destination_args_names = <...>,
----                 --     destination_args_values = <...>,
----                 -- }
----                 --
----                 -- from.collection_name is nil for a top-level collection.
----                 --
----                 -- `extra` is a table which contains additional data for
----                 -- the query:
----                 --
----                 -- * `qcontext` (table) can be used by an accessor to store
----                 --   any query-related data;
----                 -- * `resolveField(field_name, object, filter, opts)`
----                 --   (function) for performing a subrequest on fields
----                 --   connected using a 1:1 or 1:1* connection.
----                 --
----                 return ...
----             end,
----             list_args = function(self, collection_name)
----                 return {
----                     {name = 'limit', type = 'int'},
----                     {name = 'offset', type = <...>}, -- type of a primary key
----                     {name = 'pcre', type = <...>},
----                 }
----             end,
----             extra_args = function(self, collection_name)
----                 ...
---- local args_meta = { ---- arg_name = { ---- add_to_mutations_only = true / false, ---- add_to_top_fields_only = true / false, ---- } ---- } ---- return schemas_list, args_meta ---- end ---- } ---- }), ---- }) -function tarantool_graphql.new(cfg) - local cfg = cfg or {} - - -- auto config case - if not next(cfg) or utils.has_only(cfg, 'connections') then - local generated_cfg = simple_config.graphql_cfg_from_tarantool() - generated_cfg.accessor = 'space' - generated_cfg.connections = cfg.connections or {} - cfg = generated_cfg - cfg = config_complement.complement_cfg(cfg) - end - - check(cfg.accessor, 'cfg.accessor', 'string', 'table') - if type(cfg.accessor) == 'string' then - cfg.accessor = create_default_accessor(cfg) - end - - local state = parse_cfg(cfg) - return setmetatable(state, { - __index = { - compile = gql_compile, - execute = compile_and_execute, - start_server = start_server, - stop_server = stop_server, - internal = { -- for unit testing - cfg = cfg, - } - } - }) -end - -return tarantool_graphql diff --git a/graphql/utils.lua b/graphql/utils.lua index 2dee7da..e07fd0b 100644 --- a/graphql/utils.lua +++ b/graphql/utils.lua @@ -4,19 +4,6 @@ local log = require('log') local utils = {} ---- Log an error and the corresponding backtrace in case of the `func` function ---- call raises the error. -function utils.show_trace(func, ...) - local args = {...} - return select(2, xpcall( - function() return func(unpack(args)) end, - function(err) - log.info('ERROR: ' .. tostring(err)) - log.info(debug.traceback()) - end - )) -end - --- Recursively checks whether `sub` fields values are match `t` ones. function utils.is_subtable(t, sub) for k, v in pairs(sub) do @@ -170,9 +157,17 @@ function utils.do_have_keys(table, keys) return true end ---- Check if passed obj has one of passed types. ---- @tparam table obj to check ---- @tparam {type_1, type_2} ... possible types +--- Check whether passed value has one of listed types. +--- +--- @param obj value to check +--- +--- @tparam string obj_name name of the value to form an error +--- +--- @tparam string type_1 +--- @tparam[opt] string type_2 +--- @tparam[opt] string type_3 +--- +--- @return nothing function utils.check(obj, obj_name, type_1, type_2, type_3) if type(obj) == type_1 or type(obj) == type_2 or type(obj) == type_3 then return @@ -213,4 +208,13 @@ function utils.value_in(value, array) return false end +function utils.optional_require_rex() + local rex, is_pcre2 = utils.optional_require('rex_pcre2'), true + if rex == nil then + -- fallback to libpcre + rex, is_pcre2 = utils.optional_require('rex_pcre'), false + end + return rex, is_pcre2 +end + return utils diff --git a/test/bench/bench.lua b/test/bench/bench.lua index f633818..8f0a246 100644 --- a/test/bench/bench.lua +++ b/test/bench/bench.lua @@ -15,7 +15,7 @@ local digest = require('digest') local multirunner = require('test.common.multirunner') local graphql = require('graphql') local utils = require('graphql.utils') -local test_utils = require('test.utils') +local test_utils = require('test.test_utils') local test_run = utils.optional_require('test_run') test_run = test_run and test_run.new() diff --git a/test/common/array_and_map.test.lua b/test/common/array_and_map.test.lua index 6ae23e7..b26409a 100755 --- a/test/common/array_and_map.test.lua +++ b/test/common/array_and_map.test.lua @@ -6,7 +6,7 @@ local fio = require('fio') package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. 
package.path -local test_utils = require('test.utils') +local test_utils = require('test.test_utils') local testdata = require('test.testdata.array_and_map_testdata') box.cfg({}) diff --git a/test/common/avro_refs.test.lua b/test/common/avro_refs.test.lua index 7bdbdb4..e6b2622 100755 --- a/test/common/avro_refs.test.lua +++ b/test/common/avro_refs.test.lua @@ -6,7 +6,7 @@ local fio = require('fio') package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. package.path -local test_utils = require('test.utils') +local test_utils = require('test.test_utils') local testdata = require('test.testdata.avro_refs_testdata') box.cfg({}) diff --git a/test/common/common.test.lua b/test/common/common.test.lua index 507fbda..c0f76b1 100755 --- a/test/common/common.test.lua +++ b/test/common/common.test.lua @@ -6,7 +6,7 @@ local fio = require('fio') package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. package.path -local test_utils = require('test.utils') +local test_utils = require('test.test_utils') local testdata = require('test.testdata.common_testdata') box.cfg({}) diff --git a/test/common/compound_index.test.lua b/test/common/compound_index.test.lua index 46e1f2e..dcb40e1 100755 --- a/test/common/compound_index.test.lua +++ b/test/common/compound_index.test.lua @@ -6,7 +6,7 @@ local fio = require('fio') package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. package.path -local test_utils = require('test.utils') +local test_utils = require('test.test_utils') local testdata = require('test.testdata.compound_index_testdata') box.cfg({}) diff --git a/test/common/directives.test.lua b/test/common/directives.test.lua index 120df41..35a4d87 100755 --- a/test/common/directives.test.lua +++ b/test/common/directives.test.lua @@ -8,8 +8,7 @@ package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") local tap = require('tap') local yaml = require('yaml') -local utils = require('graphql.utils') -local test_utils = require('test.utils') +local test_utils = require('test.test_utils') local testdata = require('test.testdata.common_testdata') local function run_queries(gql_wrapper) @@ -32,7 +31,7 @@ local function run_queries(gql_wrapper) } ]] - local gql_query_1 = utils.show_trace(function() + local gql_query_1 = test_utils.show_trace(function() return gql_wrapper:compile(query_1) end) @@ -45,7 +44,7 @@ local function run_queries(gql_wrapper) } -- should match 1 user - local result_1_1 = utils.show_trace(function() + local result_1_1 = test_utils.show_trace(function() return gql_query_1:execute(variables_1_1) end) @@ -71,7 +70,7 @@ local function run_queries(gql_wrapper) include = false } - local result_1_2 = utils.show_trace(function() + local result_1_2 = test_utils.show_trace(function() return gql_query_1:execute(variables_1_2) end) @@ -100,7 +99,7 @@ local function run_queries(gql_wrapper) } ]] - local gql_query_2 = utils.show_trace(function() + local gql_query_2 = test_utils.show_trace(function() return gql_wrapper:compile(query_2) end) @@ -112,7 +111,7 @@ local function run_queries(gql_wrapper) skip = true } - local result_2_1 = utils.show_trace(function() + local result_2_1 = test_utils.show_trace(function() return gql_query_2:execute(variables_2_1) end) @@ -134,7 +133,7 @@ local function run_queries(gql_wrapper) skip = false } - local result_2_2 = 
utils.show_trace(function() + local result_2_2 = test_utils.show_trace(function() return gql_query_2:execute(variables_2_2) end) diff --git a/test/common/limit_result.test.lua b/test/common/limit_result.test.lua index 3eebba3..8b00feb 100755 --- a/test/common/limit_result.test.lua +++ b/test/common/limit_result.test.lua @@ -7,7 +7,7 @@ package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. package.path local tap = require('tap') -local test_utils = require('test.utils') +local test_utils = require('test.test_utils') local testdata = require('test.testdata.user_order_item_testdata') local function run_queries(gql_wrapper) diff --git a/test/common/multihead_conn.test.lua b/test/common/multihead_conn.test.lua index 2875ef5..afb9839 100755 --- a/test/common/multihead_conn.test.lua +++ b/test/common/multihead_conn.test.lua @@ -6,7 +6,7 @@ local fio = require('fio') package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. package.path -local test_utils = require('test.utils') +local test_utils = require('test.test_utils') local testdata = require('test.testdata.multihead_conn_testdata') box.cfg({}) diff --git a/test/common/mutation.test.lua b/test/common/mutation.test.lua index fe63fd8..77d2e91 100755 --- a/test/common/mutation.test.lua +++ b/test/common/mutation.test.lua @@ -8,8 +8,7 @@ package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") local tap = require('tap') local yaml = require('yaml') -local utils = require('graphql.utils') -local test_utils = require('test.utils') +local test_utils = require('test.test_utils') local testdata = require('test.testdata.common_testdata') box.cfg({}) @@ -45,15 +44,8 @@ local function replace_tuple(virtbox, collection_name, key, tuple) if get_tuple(virtbox, collection_name, key) == nil then virtbox[collection_name]:insert(tuple) else - for _, zone in ipairs(virtbox.shards) do - for _, node in ipairs(zone) do - virtbox:space_call(collection_name, node, function(space_obj) - if space_obj:get(key) ~= nil then - space_obj:replace(tuple) - end - end) - end - end + delete_tuple(virtbox, collection_name, key) + virtbox[collection_name]:insert(tuple) end end @@ -76,7 +68,7 @@ local function check_insert(test, gql_wrapper, virtbox, mutation_insert, local dont_pass_variables = opts.dont_pass_variables or false local meta = opts.meta - utils.show_trace(function() + test_utils.show_trace(function() test:plan(7) local user_id = 'user_id_new_1' local order_id = 'order_id_new_1' @@ -124,6 +116,61 @@ local function check_insert(test, gql_wrapper, virtbox, mutation_insert, end) end +local function check_insert_order_metainfo(test, gql_wrapper, virtbox, + mutation_insert, exp_result_insert, opts) + local opts = opts or {} + local variables = opts.variables or {} + + test_utils.show_trace(function() + test:plan(5) + + local order_metainfo_id = 'order_metainfo_id_4000' + + -- check the tuple was not inserted before + local tuple = get_tuple(virtbox, 'order_metainfo_collection', + {order_metainfo_id}) + test:ok(tuple == nil, 'tuple was not inserted before') + + -- check mutation result + local gql_mutation_insert = gql_wrapper:compile(mutation_insert) + local result = gql_mutation_insert:execute(variables) + test:is_deeply(result, exp_result_insert, 'insert result') + + -- check inserted tuple + local EXTERNAL_ID_STRING = 1 -- 0 is for int + local tuple = get_tuple(virtbox, 'order_metainfo_collection', + 
{order_metainfo_id}) + test:ok(tuple ~= nil, 'inserted tuple exists') + local exp_tuple = { + 'order metainfo 4000', + 'order_metainfo_id_4000', + 'order_id_4000', + 'store 4000', + 'street 4000', + 'city 4000', + 'state 4000', + 'zip 4000', + 'second street 4000', + 'second city 4000', + 'second state 4000', + 'second zip 4000', + EXTERNAL_ID_STRING, + 'eid_4000', + {'slow'}, + {size = 'small'}, + } + test:is_deeply(tuple:totable(), exp_tuple, 'inserted tuple is correct') + + -- delete inserted tuple & check + delete_tuple(virtbox, 'order_metainfo_collection', + {order_metainfo_id}) + local tuple = get_tuple(virtbox, 'order_metainfo_collection', + {order_metainfo_id}) + test:ok(tuple == nil, 'inserted tuple was deleted') + assert(test:check(), 'check plan') + end) +end + local function check_update(test, gql_wrapper, virtbox, mutation_update, exp_result_update, opts) local opts = opts or {} @@ -132,7 +179,7 @@ local function check_update(test, gql_wrapper, virtbox, mutation_update, local extra_xorder = opts.extra_xorder or {} local extra_xuser = opts.extra_xuser or {} - utils.show_trace(function() + test_utils.show_trace(function() test:plan(7) local user_id = 'user_id_1' local order_id = 'order_id_1' @@ -206,12 +253,69 @@ local function check_update(test, gql_wrapper, virtbox, mutation_update, end) end +local function check_update_order_metainfo(test, gql_wrapper, virtbox, + mutation_update, exp_result_update, opts) + local opts = opts or {} + local variables = opts.variables or {} + + test_utils.show_trace(function() + test:plan(5) + + -- check the original tuple + local EXTERNAL_ID_INT = 0 + local EXTERNAL_ID_STRING = 1 + local order_metainfo_id = 'order_metainfo_id_1' + local orig_tuple = get_tuple(virtbox, 'order_metainfo_collection', + {order_metainfo_id}) + local exp_orig_tuple = { + 'order metainfo 1', order_metainfo_id, 'order_id_1', 'store 1', + 'street 1', 'city 1', 'state 1', 'zip 1', 'second street 1', + 'second city 1', 'second state 1', 'second zip 1', + EXTERNAL_ID_INT, 1, {'fast', 'new'}, { + size = 'medium', + since = '2018-01-01', + }, + } + test:is_deeply(orig_tuple:totable(), exp_orig_tuple, + 'original tuple is the one that expected') + + -- check mutation result + local gql_mutation_update = gql_wrapper:compile(mutation_update) + local result = gql_mutation_update:execute(variables) + test:is_deeply(result, exp_result_update, 'update result') + + -- check updated tuple + local tuple = get_tuple(virtbox, 'order_metainfo_collection', + {order_metainfo_id}) + test:ok(tuple ~= nil, 'updated tuple exists') + local exp_tuple = table.copy(exp_orig_tuple) + exp_tuple[1] = 'changed' + exp_tuple[6] = 'changed city' + exp_tuple[10] = 'second changed city' + exp_tuple[13] = EXTERNAL_ID_STRING + exp_tuple[14] = 'eid changed' + exp_tuple[15] = {'slow'} + exp_tuple[16] = {size = 'small'} + test:is_deeply(tuple:totable(), exp_tuple, 'updated tuple is correct') + + -- replace back updated tuples & check + replace_tuple(virtbox, 'order_metainfo_collection', {order_metainfo_id}, + orig_tuple) + local tuple = get_tuple(virtbox, 'order_metainfo_collection', + {order_metainfo_id}) + test:is_deeply(tuple:totable(), orig_tuple:totable(), + 'updated tuple was replaced back') + + assert(test:check(), 'check plan') + end) +end + local function check_delete(test, gql_wrapper, virtbox, mutation_delete, exp_result_delete, opts) local opts = opts or {} local dont_pass_variables = opts.dont_pass_variables or false - utils.show_trace(function() + test_utils.show_trace(function() test:plan(5) local 
user_id = 'user_id_1' local order_id = 'order_id_1' @@ -250,7 +354,7 @@ end local function run_queries(gql_wrapper, virtbox, meta) local test = tap.test('mutation') - test:plan(19) + test:plan(22) -- {{{ insert @@ -412,7 +516,149 @@ local function run_queries(gql_wrapper, virtbox, meta) test:is_deeply({ok, test_utils.strip_error(err)}, {false, err_exp}, '"insert" argument is forbidden with other filters (extra arguments)') - -- XXX: test inserting an object into a collection with subrecords + -- test inserting an object into a collection with subrecord, union, array + -- and map + local mutation_insert_6 = [[ + mutation insert_order_metainfo { + order_metainfo_collection(insert: { + metainfo: "order metainfo 4000" + order_metainfo_id: "order_metainfo_id_4000" + order_id: "order_id_4000" + store: { + name: "store 4000" + address: { + street: "street 4000" + city: "city 4000" + state: "state 4000" + zip: "zip 4000" + } + second_address: { + street: "second street 4000" + city: "second city 4000" + state: "second state 4000" + zip: "second zip 4000" + } + external_id: {string: "eid_4000"} + tags: ["slow"] + parametrized_tags: { + size: "small" + } + } + }) { + metainfo + order_metainfo_id + order_id + store { + name + address { + city + } + second_address { + city + } + external_id { + ... on String_box { + string + } + ... on Int_box { + int + } + } + tags + parametrized_tags + } + } + } + ]] + + local exp_result_insert_6 = yaml.decode(([[ + --- + order_metainfo_collection: + - metainfo: order metainfo 4000 + order_metainfo_id: order_metainfo_id_4000 + order_id: order_id_4000 + store: + name: store 4000 + address: + city: city 4000 + second_address: + city: second city 4000 + external_id: + string: eid_4000 + tags: + - slow + parametrized_tags: + size: small + ]]):strip()) + + check_insert_order_metainfo(test:test( + 'insert an object with subrecords (immediate argument)'), + gql_wrapper, virtbox, mutation_insert_6, exp_result_insert_6) + + -- the same with a variable instead of immediate argument + local mutation_insert_6v = [[ + mutation insert_order_metainfo( + $order_metainfo: order_metainfo_collection_insert + ) { + order_metainfo_collection(insert: $order_metainfo) { + metainfo + order_metainfo_id + order_id + store { + name + address { + city + } + second_address { + city + } + external_id { + ... on String_box { + string + } + ... 
on Int_box { + int + } + } + tags + parametrized_tags + } + } + } + ]] + + check_insert_order_metainfo(test:test( + 'insert an object with subrecords (variable argument)'), + gql_wrapper, virtbox, mutation_insert_6v, exp_result_insert_6, { + variables = { + order_metainfo = { + metainfo = 'order metainfo 4000', + order_metainfo_id = 'order_metainfo_id_4000', + order_id = 'order_id_4000', + store = { + name = 'store 4000', + address = { + street = 'street 4000', + city = 'city 4000', + state = 'state 4000', + zip = 'zip 4000', + }, + second_address = { + street = 'second street 4000', + city = 'second city 4000', + state = 'second state 4000', + zip = 'second zip 4000', + }, + external_id = {string = 'eid_4000'}, + tags = {'slow'}, + parametrized_tags = { + size = 'small', + } + } + } + } + } + ) -- }}} @@ -735,61 +981,135 @@ local function run_queries(gql_wrapper, virtbox, meta) '"update" argument is forbidden in a query') -- test updating of a field by which a shard key is calculated (it is the - -- first field: tuple[1]) - test:test('update 1st tuple field', function(test) - test:plan(5) - local mutation_update = [[ - mutation update_order_metainfo { - order_metainfo_collection( - order_metainfo_id: "order_metainfo_id_1" - update: { - metainfo: "changed" + -- first field: tuple[1]); + -- here we also check updating inside subrecords; + -- and also check updating of array, map and union + local mutation_update_subrecord = [[ + mutation update_order_metainfo { + order_metainfo_collection( + order_metainfo_id: "order_metainfo_id_1" + update: { + metainfo: "changed" + store: { + address: { + city: "changed city" + } + second_address: { + city: "second changed city" + } + external_id: {string: "eid changed"} + tags: ["slow"] + parametrized_tags: { + size: "small" + } } - ) { - metainfo - order_metainfo_id - order_id + } + ) { + metainfo + order_metainfo_id + order_id + store { + address { + city + } + second_address { + city + } + external_id { + ... on String_box { + string + } + ... 
on Int_box { + int + } + } + tags + parametrized_tags } } - ]] - local exp_result_update = yaml.decode(([[ - --- - order_metainfo_collection: - - metainfo: changed - order_metainfo_id: order_metainfo_id_1 - order_id: order_id_1 - ]]):strip()) + } + ]] - -- check the original tuple - local order_metainfo_id = 'order_metainfo_id_1' - local orig_tuple = get_tuple(virtbox, 'order_metainfo_collection', - {order_metainfo_id}) - local exp_orig_tuple = - {'order metainfo 1', order_metainfo_id, 'order_id_1'} - test:is_deeply(orig_tuple:totable(), exp_orig_tuple, - 'original tuple is the one that expected') + local exp_result_update_subrecord = yaml.decode(([[ + --- + order_metainfo_collection: + - metainfo: changed + order_metainfo_id: order_metainfo_id_1 + order_id: order_id_1 + store: + address: + city: changed city + second_address: + city: second changed city + external_id: + string: eid changed + tags: + - slow + parametrized_tags: + size: small + ]]):strip()) - -- check mutation result - local gql_mutation_update = gql_wrapper:compile(mutation_update) - local result = gql_mutation_update:execute({}) - test:is_deeply(result, exp_result_update, 'update result') + check_update_order_metainfo( + test:test('update 1st tuple field (immediate argument)'), gql_wrapper, + virtbox, mutation_update_subrecord, exp_result_update_subrecord) - -- check updated tuple - local tuple = get_tuple(virtbox, 'order_metainfo_collection', - {order_metainfo_id}) - test:ok(tuple ~= nil, 'updated tuple exists') - local exp_tuple = - {'changed', order_metainfo_id, 'order_id_1'} - test:is_deeply(tuple:totable(), exp_tuple, 'updated tuple is correct') + -- the same with a variable argument + local mutation_update_subrecord_v = [[ + mutation update_order_metainfo( + $xorder_metainfo: order_metainfo_collection_update + ) { + order_metainfo_collection( + order_metainfo_id: "order_metainfo_id_1" + update: $xorder_metainfo + ) { + metainfo + order_metainfo_id + order_id + store { + address { + city + } + second_address { + city + } + external_id { + ... on String_box { + string + } + ... on Int_box { + int + } + } + tags + parametrized_tags + } + } + } + ]] - -- replace back updated tuples & check - replace_tuple(virtbox, 'order_metainfo_collection', {order_metainfo_id}, - orig_tuple) - local tuple = get_tuple(virtbox, 'order_metainfo_collection', - {order_metainfo_id}) - test:is_deeply(tuple:totable(), orig_tuple:totable(), - 'updated tuple was replaced back') - end) + check_update_order_metainfo( + test:test('update 1st tuple field (variable argument)'), gql_wrapper, + virtbox, mutation_update_subrecord_v, exp_result_update_subrecord, { + variables = { + xorder_metainfo = { + metainfo = 'changed', + store = { + address = { + city = 'changed city', + }, + second_address = { + city = 'second changed city', + }, + external_id = {string = 'eid changed'}, + tags = {'slow'}, + parametrized_tags = { + size = 'small', + } + } + } + } + } + ) -- Test updating of a field of a primary key when: -- 1. 
it is NOT shard key field (tuple[1]); @@ -846,8 +1166,6 @@ local function run_queries(gql_wrapper, virtbox, meta) test:is_deeply({ok, test_utils.strip_error(err)}, {false, err_exp}, 'updating of a field of a primary key when it is shard key field') - -- XXX: test updating an object in a collection with subrecords - -- }}} -- {{{ delete diff --git a/test/common/nested_record.test.lua b/test/common/nested_record.test.lua index 9b5e337..194ce1b 100755 --- a/test/common/nested_record.test.lua +++ b/test/common/nested_record.test.lua @@ -6,7 +6,7 @@ local fio = require('fio') package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. package.path -local test_utils = require('test.utils') +local test_utils = require('test.test_utils') local testdata = require('test.testdata.nested_record_testdata') box.cfg({}) diff --git a/test/common/nullable_1_1_conn.test.lua b/test/common/nullable_1_1_conn.test.lua index d337acb..cfbe5c4 100755 --- a/test/common/nullable_1_1_conn.test.lua +++ b/test/common/nullable_1_1_conn.test.lua @@ -6,7 +6,7 @@ local fio = require('fio') package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. package.path -local test_utils = require('test.utils') +local test_utils = require('test.test_utils') local testdata = require('test.testdata.nullable_1_1_conn_testdata') box.cfg({}) diff --git a/test/common/nullable_index.test.lua b/test/common/nullable_index.test.lua index d062d64..cfe3827 100755 --- a/test/common/nullable_index.test.lua +++ b/test/common/nullable_index.test.lua @@ -6,7 +6,7 @@ local fio = require('fio') package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. 
package.path -local test_utils = require('test.utils') +local test_utils = require('test.test_utils') local testdata = require('test.testdata.nullable_index_testdata') box.cfg({}) diff --git a/test/common/pcre.test.lua b/test/common/pcre.test.lua index dfc50fd..04ca6b8 100755 --- a/test/common/pcre.test.lua +++ b/test/common/pcre.test.lua @@ -8,13 +8,12 @@ package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") local tap = require('tap') local yaml = require('yaml') -local utils = require('graphql.utils') -local test_utils = require('test.utils') +local test_utils = require('test.test_utils') local testdata = require('test.testdata.common_testdata') local function run_queries(gql_wrapper) local test = tap.test('pcre') - test:plan(4) + test:plan(5) local query_1 = [[ query users($offset: String, $first_name_re: String, @@ -28,7 +27,7 @@ local function run_queries(gql_wrapper) } ]] - local gql_query_1 = utils.show_trace(function() + local gql_query_1 = test_utils.show_trace(function() return gql_wrapper:compile(query_1) end) @@ -39,7 +38,7 @@ local function run_queries(gql_wrapper) middle_name_re = 'ich$', } - local result_1_1 = utils.show_trace(function() + local result_1_1 = test_utils.show_trace(function() return gql_query_1:execute(variables_1_1) end) @@ -61,7 +60,7 @@ local function run_queries(gql_wrapper) first_name_re = '^V', } - local result_1_2 = utils.show_trace(function() + local result_1_2 = test_utils.show_trace(function() return gql_query_1:execute(variables_1_2) end) @@ -82,7 +81,7 @@ local function run_queries(gql_wrapper) middle_name_re = 'ич$', } - local result_1_3 = utils.show_trace(function() + local result_1_3 = test_utils.show_trace(function() return gql_query_1:execute(variables_1_3) end) @@ -113,11 +112,11 @@ local function run_queries(gql_wrapper) } ]] - local gql_query_1i = utils.show_trace(function() + local gql_query_1i = test_utils.show_trace(function() return gql_wrapper:compile(query_1i) end) - local result_1i_1 = utils.show_trace(function() + local result_1i_1 = test_utils.show_trace(function() return gql_query_1i:execute({}) end) @@ -125,6 +124,40 @@ local function run_queries(gql_wrapper) -- }}} + -- {{{ regexp match by a subrecord field + + local query_2 = [[ + { + order_metainfo_collection(pcre: { + store: { + address: { + city: "3000$" + } + } + }) { + order_metainfo_id + } + } + ]] + + local exp_result_2 = yaml.decode(([[ + --- + order_metainfo_collection: + - order_metainfo_id: order_metainfo_id_3000 + ]]):strip()) + + local gql_query_2 = test_utils.show_trace(function() + return gql_wrapper:compile(query_2) + end) + + local result_2 = test_utils.show_trace(function() + return gql_query_2:execute({}) + end) + + test:is_deeply(result_2, exp_result_2, 'regexp match by a subrecord field') + + -- }}} + assert(test:check(), 'check plan') end diff --git a/test/common/query_timeout.test.lua b/test/common/query_timeout.test.lua index f8315bf..e41b20b 100755 --- a/test/common/query_timeout.test.lua +++ b/test/common/query_timeout.test.lua @@ -7,7 +7,7 @@ package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. 
package.path local tap = require('tap') -local utils = require('test.utils') +local utils = require('test.test_utils') local testdata = require('test.testdata.user_order_item_testdata') local function run_queries(gql_wrapper) diff --git a/test/common/union.test.lua b/test/common/union.test.lua index 4ab11f1..db5e97b 100755 --- a/test/common/union.test.lua +++ b/test/common/union.test.lua @@ -6,7 +6,7 @@ local fio = require('fio') package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. package.path -local utils = require('test.utils') +local utils = require('test.test_utils') local testdata = require('test.testdata.union_testdata') box.cfg({}) diff --git a/test/space/complemented_config.test.lua b/test/space/complemented_config.test.lua index ad11ac6..59f6ad2 100755 --- a/test/space/complemented_config.test.lua +++ b/test/space/complemented_config.test.lua @@ -2,8 +2,8 @@ local tap = require('tap') local yaml = require('yaml') -local utils = require('graphql.utils') local graphql = require('graphql') +local test_utils = require('test.test_utils') local connections = { { @@ -72,12 +72,12 @@ local function run_queries(gql_wrapper) } ]] - local gql_query_1 = utils.show_trace(function() + local gql_query_1 = test_utils.show_trace(function() return gql_wrapper:compile(query_1) end) local variables_1_1 = {user_id = 'user_id_1'} - local result_1_1 = utils.show_trace(function() + local result_1_1 = test_utils.show_trace(function() return gql_query_1:execute(variables_1_1) end) local exp_result_1_1 = yaml.decode(([[ @@ -94,6 +94,7 @@ local function run_queries(gql_wrapper) local cfg = gql_wrapper.internal.cfg cfg.accessor = nil + cfg.e_schemas = nil local result_1_2 = cfg local exp_result_1_2 = yaml.decode(([[ --- @@ -176,7 +177,7 @@ local function run_queries(gql_wrapper) assert(test:check(), 'check plan') end -utils.show_trace(function() +test_utils.show_trace(function() box.cfg { background = false } init_spaces() fill_test_data() diff --git a/test/space/default_instance.test.lua b/test/space/default_instance.test.lua index f1cd808..843e6a5 100755 --- a/test/space/default_instance.test.lua +++ b/test/space/default_instance.test.lua @@ -1,11 +1,11 @@ #!/usr/bin/env tarantool local tap = require('tap') -local utils = require('graphql.utils') local yaml = require('yaml') local json = require('json') local fio = require('fio') local http = require('http.client').new() +local test_utils = require('test.test_utils') package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. 
package.path @@ -42,7 +42,7 @@ local test = tap.test('default_instance') test:plan(5) -- test require('graphql').compile(query) -utils.show_trace(function() +test_utils.show_trace(function() local variables_1 = {user_id = 'user_id_1'} local compiled_query = gql_lib.compile(query) local result = compiled_query:execute(variables_1) @@ -56,7 +56,7 @@ utils.show_trace(function() end) -- test require('graphql').execute(query) -utils.show_trace(function() +test_utils.show_trace(function() local variables_2 = {user_id = 'user_id_2'} local result = gql_lib.execute(query, variables_2) local exp_result = yaml.decode(([[ @@ -69,7 +69,7 @@ utils.show_trace(function() end) -- test server -utils.show_trace(function() +test_utils.show_trace(function() local res = gql_lib.start_server() local exp_res_start = 'The GraphQL server started at http://127.0.0.1:8080' test:is(res, exp_res_start, 'start_server') diff --git a/test/space/init_fail.test.lua b/test/space/init_fail.test.lua index 173e1c3..8b9d616 100755 --- a/test/space/init_fail.test.lua +++ b/test/space/init_fail.test.lua @@ -10,7 +10,7 @@ package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") local tap = require('tap') local graphql = require('graphql') local testdata = require('test.testdata.compound_index_testdata') -local test_utils = require('test.utils') +local test_utils = require('test.test_utils') -- init box, upload test data and acquire metadata -- ----------------------------------------------- @@ -81,8 +81,8 @@ metadata.indexes.user_collection.user_str_index = { local ok, err = pcall(create_gql_wrapper, metadata) local err_exp = 'several indexes were marked as primary in the ' .. - '"user_collection" collection, at least "user_str_index" and ' .. - '"user_str_num_index"' + '"user_collection" collection, at least "user_str_num_index" and ' .. 
+ '"user_str_index"' test:is_deeply({ok, test_utils.strip_error(err)}, {false, err_exp}, 'multiple primary indexes') diff --git a/test/space/nested_args.test.lua b/test/space/nested_args.test.lua index 883bd63..3f1329a 100755 --- a/test/space/nested_args.test.lua +++ b/test/space/nested_args.test.lua @@ -10,8 +10,7 @@ package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") local tap = require('tap') local yaml = require('yaml') local graphql = require('graphql') -local utils = require('graphql.utils') -local test_utils = require('test.utils') +local test_utils = require('test.test_utils') local common_testdata = require('test.testdata.common_testdata') local emails_testdata = require('test.testdata.nullable_1_1_conn_testdata') @@ -97,12 +96,12 @@ local function run_common_queries(gql_wrapper) } ]] - local gql_query_1 = utils.show_trace(function() + local gql_query_1 = test_utils.show_trace(function() return gql_wrapper:compile(query_1) end) local variables_1 = {user_id = 'user_id_1'} - local result_1 = utils.show_trace(function() + local result_1 = test_utils.show_trace(function() return gql_query_1:execute(variables_1) end) local exp_result_1 = yaml.decode(([[ @@ -142,12 +141,12 @@ local function run_emails_queries(gql_wrapper) } ]] - local gql_query_upside = utils.show_trace(function() + local gql_query_upside = test_utils.show_trace(function() return gql_wrapper:compile(query_upside) end) local variables_upside = {upside_body = 'a'} - local result_upside = utils.show_trace(function() + local result_upside = test_utils.show_trace(function() return gql_query_upside:execute(variables_upside) end) local exp_result_upside = yaml.decode(([[ diff --git a/test/space/server.test.lua b/test/space/server.test.lua index 22a7f6f..9f0ba67 100755 --- a/test/space/server.test.lua +++ b/test/space/server.test.lua @@ -6,11 +6,11 @@ package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") :gsub('/./', '/'):gsub('/+$', '')) .. '/../../?.lua' .. ';' .. 
package.path local tap = require('tap') -local utils = require('graphql.utils') local yaml = require('yaml') local json = require('json') local http = require('http.client').new() local graphql = require('graphql') +local test_utils = require('test.test_utils') local testdata = require('test.testdata.common_testdata') box.cfg{background = false} @@ -48,7 +48,7 @@ local test = tap.test('server') test:plan(6) -- test server -utils.show_trace(function() +test_utils.show_trace(function() local res = gql_wrapper:start_server() local exp_res_start = 'The GraphQL server started at http://127.0.0.1:8080' test:is(res, exp_res_start, 'start_server') diff --git a/test/space/unflatten_tuple.test.lua b/test/space/unflatten_tuple.test.lua index d62cc0e..29a1bd8 100755 --- a/test/space/unflatten_tuple.test.lua +++ b/test/space/unflatten_tuple.test.lua @@ -11,7 +11,7 @@ local tap = require('tap') local yaml = require('yaml') local avro = require('avro_schema') local graphql = require('graphql') -local utils = require('graphql.utils') +local test_utils = require('test.test_utils') local testdata = require('test.testdata.common_testdata') -- init box, upload test data and acquire metadata @@ -109,7 +109,7 @@ local function run_queries(gql_wrapper) first_name: Ivan$ ]]):strip()) - local result = utils.show_trace(function() + local result = test_utils.show_trace(function() local variables_1 = {order_id = 'order_id_1'} local gql_query_1 = gql_wrapper:compile(query_1) return gql_query_1:execute(variables_1) diff --git a/test/space/zero_config.test.lua b/test/space/zero_config.test.lua index cbafc09..a2f67b4 100755 --- a/test/space/zero_config.test.lua +++ b/test/space/zero_config.test.lua @@ -2,8 +2,8 @@ local tap = require('tap') local yaml = require('yaml') -local utils = require('graphql.utils') local graphql = require('graphql') +local test_utils = require('test.test_utils') local function init_spaces() local U_USER_ID_FN = 1 @@ -51,11 +51,11 @@ local function run_queries(gql_wrapper) } ]] - local gql_query_1 = utils.show_trace(function() + local gql_query_1 = test_utils.show_trace(function() return gql_wrapper:compile(query_1) end) - local result_1_1 = utils.show_trace(function() + local result_1_1 = test_utils.show_trace(function() local variables_1_1 = {user_id = 'user_id_1'} return gql_query_1:execute(variables_1_1) end) @@ -69,9 +69,10 @@ local function run_queries(gql_wrapper) ]]):strip()) test:is_deeply(result_1_1, exp_result_1_1, '1_1') - local result_1_2 = utils.show_trace(function() + local result_1_2 = test_utils.show_trace(function() local cfg = gql_wrapper.internal.cfg cfg.accessor = nil + cfg.e_schemas = nil return cfg end) @@ -113,7 +114,7 @@ local function run_queries(gql_wrapper) assert(test:check(), 'check plan') end -utils.show_trace(function() +test_utils.show_trace(function() box.cfg { background = false } init_spaces() fill_test_data() diff --git a/test/utils.lua b/test/test_utils.lua similarity index 72% rename from test/utils.lua rename to test/test_utils.lua index 8a0672f..a72bf99 100644 --- a/test/utils.lua +++ b/test/test_utils.lua @@ -6,20 +6,21 @@ local fio = require('fio') package.path = fio.abspath(debug.getinfo(1).source:match("@?(.*/)") :gsub('/./', '/'):gsub('/+$', '')) .. '/../?.lua' .. ';' .. 
package.path +local log = require('log') local avro_schema = require('avro_schema') local graphql = require('graphql') local multirunner = require('test.common.multirunner') -local graphql_utils = require('graphql.utils') -local test_run = graphql_utils.optional_require('test_run') +local utils = require('graphql.utils') +local test_run = utils.optional_require('test_run') test_run = test_run and test_run.new() -local utils = {} +local test_utils = {} -- module-local variables local models_cache -- simplified version of the same named function from accessor_general.lua -function utils.compile_schemas(schemas, service_fields) +function test_utils.compile_schemas(schemas, service_fields) local service_fields_types = {} for name, service_fields_list in pairs(service_fields) do local sf_types = {} @@ -48,7 +49,7 @@ local function get_model(meta, collection_name) local schema_name = meta.collections[collection_name].schema_name assert(schema_name ~= nil) if models_cache == nil then - models_cache = utils.compile_schemas(meta.schemas, + models_cache = test_utils.compile_schemas(meta.schemas, meta.service_fields) end local model = models_cache[schema_name] @@ -56,7 +57,7 @@ local function get_model(meta, collection_name) -- same-named schemas within the one test; the result will be incorrect in -- the case. if model == nil then - models_cache = utils.compile_schemas(meta.schemas, + models_cache = test_utils.compile_schemas(meta.schemas, meta.service_fields) model = models_cache[schema_name] end @@ -64,11 +65,11 @@ local function get_model(meta, collection_name) return model end -function utils.clear_models_cache() +function test_utils.clear_models_cache() models_cache = nil end -function utils.flatten_object(meta, collection_name, object, +function test_utils.flatten_object(meta, collection_name, object, service_field_values) local model = get_model(meta, collection_name) local ok, tuple = model.flatten(object, unpack(service_field_values or {})) @@ -76,21 +77,21 @@ function utils.flatten_object(meta, collection_name, object, return tuple end -function utils.unflatten_tuple(meta, collection_name, tuple) +function test_utils.unflatten_tuple(meta, collection_name, tuple) local model = get_model(meta, collection_name) local ok, object = model.unflatten(tuple) assert(ok, tostring(object)) return object end -function utils.replace_object(virtbox, meta, collection_name, object, +function test_utils.replace_object(virtbox, meta, collection_name, object, service_field_values) - local tuple = utils.flatten_object(meta, collection_name, object, + local tuple = test_utils.flatten_object(meta, collection_name, object, service_field_values) virtbox[collection_name]:replace(tuple) end -function utils.major_avro_schema_version() +function test_utils.major_avro_schema_version() local ok, handle = avro_schema.create('boolean') assert(ok) local ok, model = avro_schema.compile(handle) @@ -99,12 +100,12 @@ function utils.major_avro_schema_version() end -- return an error w/o file name and line number -function utils.strip_error(err) +function test_utils.strip_error(err) local res = tostring(err):gsub('^.-:.-: (.*)$', '%1') return res end -function utils.graphql_from_testdata(testdata, shard, graphql_opts) +function test_utils.graphql_from_testdata(testdata, shard, graphql_opts) local graphql_opts = graphql_opts or {} local meta = testdata.meta or testdata.get_test_metadata() @@ -116,13 +117,13 @@ function utils.graphql_from_testdata(testdata, shard, graphql_opts) accessor = shard and 'shard' or 'space', } - local 
gql_wrapper = graphql.new(graphql_utils.merge_tables( + local gql_wrapper = graphql.new(utils.merge_tables( default_graphql_opts, graphql_opts)) return gql_wrapper end -function utils.run_testdata(testdata, opts) +function test_utils.run_testdata(testdata, opts) local opts = opts or {} local run_queries = opts.run_queries or testdata.run_queries -- custom workload for, say, test different options on several graphql @@ -135,7 +136,7 @@ function utils.run_testdata(testdata, opts) multirunner.run_conf(conf_name, { test_run = test_run, init_function = testdata.init_spaces, - init_function_params = {utils.major_avro_schema_version()}, + init_function_params = {test_utils.major_avro_schema_version()}, cleanup_function = testdata.drop_spaces, workload = function(conf_name, shard) if workload then @@ -144,15 +145,28 @@ function utils.run_testdata(testdata, opts) local virtbox = shard or box.space local meta = testdata.meta or testdata.get_test_metadata() testdata.fill_test_data(virtbox, meta) - local gql_wrapper = utils.graphql_from_testdata(testdata, shard, - opts.graphql_opts) + local gql_wrapper = test_utils.graphql_from_testdata(testdata, + shard, opts.graphql_opts) run_queries(gql_wrapper, virtbox, meta) end - utils.clear_models_cache() + test_utils.clear_models_cache() end, servers = {'shard1', 'shard2', 'shard3', 'shard4'}, use_tcp = false, }) end -return utils +--- Log an error and the corresponding backtrace in case of the `func` function +--- call raises the error. +function test_utils.show_trace(func, ...) + local args = {...} + return select(2, xpcall( + function() return func(unpack(args)) end, + function(err) + log.info('ERROR: ' .. tostring(err)) + log.info(debug.traceback()) + end + )) +end + +return test_utils diff --git a/test/testdata/array_and_map_testdata.lua b/test/testdata/array_and_map_testdata.lua index 9ce116b..bcc2ef2 100644 --- a/test/testdata/array_and_map_testdata.lua +++ b/test/testdata/array_and_map_testdata.lua @@ -1,7 +1,7 @@ local tap = require('tap') local json = require('json') local yaml = require('yaml') -local utils = require('graphql.utils') +local test_utils = require('test.test_utils') local array_testdata = {} @@ -121,11 +121,11 @@ function array_testdata.run_queries(gql_wrapper) } ]] - local gql_query_1 = utils.show_trace(function() + local gql_query_1 = test_utils.show_trace(function() return gql_wrapper:compile(query_1) end) local variables_1 = { user_id = 'user_id_1' } - local result_1 = utils.show_trace(function() + local result_1 = test_utils.show_trace(function() return gql_query_1:execute(variables_1) end) diff --git a/test/testdata/avro_refs_testdata.lua b/test/testdata/avro_refs_testdata.lua index 712cf11..ff3b0af 100644 --- a/test/testdata/avro_refs_testdata.lua +++ b/test/testdata/avro_refs_testdata.lua @@ -7,8 +7,7 @@ local tap = require('tap') local json = require('json') local yaml = require('yaml') -local utils = require('graphql.utils') -local test_utils = require('test.utils') +local test_utils = require('test.test_utils') local testdata = {} @@ -209,18 +208,18 @@ function testdata.run_queries(gql_wrapper) ]] local query_1_p = query_1:gsub('foo', 'foo_2') - local gql_query_1 = utils.show_trace(function() + local gql_query_1 = test_utils.show_trace(function() return gql_wrapper:compile(query_1) end) - local gql_query_1_p = utils.show_trace(function() + local gql_query_1_p = test_utils.show_trace(function() return gql_wrapper:compile(query_1_p) end) local variables_1_1 = {id = 1} - local result_1_1 = utils.show_trace(function() + local 
result_1_1 = test_utils.show_trace(function() return gql_query_1:execute(variables_1_1) end) - local result_1_1_p = utils.show_trace(function() + local result_1_1_p = test_utils.show_trace(function() return gql_query_1_p:execute(variables_1_1) end) @@ -253,10 +252,10 @@ function testdata.run_queries(gql_wrapper) test:is_deeply(result_1_1_p, exp_result_1_1_p, '1_1_p') local variables_1_2 = {id = 2} - local result_1_2 = utils.show_trace(function() + local result_1_2 = test_utils.show_trace(function() return gql_query_1:execute(variables_1_2) end) - local result_1_2_p = utils.show_trace(function() + local result_1_2_p = test_utils.show_trace(function() return gql_query_1_p:execute(variables_1_2) end) diff --git a/test/testdata/common_testdata.lua b/test/testdata/common_testdata.lua index 88a952c..bb1a878 100644 --- a/test/testdata/common_testdata.lua +++ b/test/testdata/common_testdata.lua @@ -1,8 +1,7 @@ local tap = require('tap') local json = require('json') local yaml = require('yaml') -local utils = require('graphql.utils') -local test_utils = require('test.utils') +local test_utils = require('test.test_utils') local common_testdata = {} @@ -52,7 +51,34 @@ function common_testdata.get_test_metadata() "fields": [ { "name": "metainfo", "type": "string" }, { "name": "order_metainfo_id", "type": "string" }, - { "name": "order_id", "type": "string" } + { "name": "order_id", "type": "string" }, + { "name": "store", "type": { + "type": "record", + "name": "store", + "fields": [ + { "name": "name", "type": "string" }, + { "name": "address", "type": { + "type": "record", + "name": "address", + "fields": [ + { "name": "street", "type": "string" }, + { "name": "city", "type": "string" }, + { "name": "state", "type": "string" }, + { "name": "zip", "type": "string" } + ] + }}, + { "name": "second_address", "type": "address" }, + { "name": "external_id", "type": ["int", "string"]}, + { "name": "tags", "type": { + "type": "array", + "items": "string" + }}, + { "name": "parametrized_tags", "type": { + "type": "map", + "values": "string" + }} + ] + }} ] } }]]) @@ -298,6 +324,28 @@ function common_testdata.fill_test_data(virtbox, meta) metainfo = 'order metainfo ' .. s, order_metainfo_id = 'order_metainfo_id_' .. s, order_id = 'order_id_' .. s, + store = { + name = 'store ' .. s, + address = { + street = 'street ' .. s, + city = 'city ' .. s, + state = 'state ' .. s, + zip = 'zip ' .. s, + }, + second_address = { + street = 'second street ' .. s, + city = 'second city ' .. s, + state = 'second state ' .. s, + zip = 'second zip ' .. s, + }, + external_id = i % 2 == 1 and {int = i} or + {string = 'eid_' .. 
s}, + tags = {'fast', 'new'}, + parametrized_tags = { + size = 'medium', + since = '2018-01-01' + }, + } }) end end @@ -340,7 +388,7 @@ function common_testdata.run_queries(gql_wrapper) local variables_1 = {order_id = 'order_id_1'} - utils.show_trace(function() + test_utils.show_trace(function() local gql_query_1 = gql_wrapper:compile(query_1) local result = gql_query_1:execute(variables_1) test:is_deeply(result, exp_result_1, '1') @@ -360,7 +408,7 @@ function common_testdata.run_queries(gql_wrapper) } ]] - utils.show_trace(function() + test_utils.show_trace(function() local gql_query_1n = gql_wrapper:compile(query_1n) local result = gql_query_1n:execute(variables_1) test:is_deeply(result, exp_result_1, '1n') @@ -380,7 +428,7 @@ function common_testdata.run_queries(gql_wrapper) } ]] - utils.show_trace(function() + test_utils.show_trace(function() local gql_query_1inn = gql_wrapper:compile(query_1inn) local result = gql_query_1inn:execute({}) test:is_deeply(result, exp_result_1, '1inn') @@ -427,7 +475,7 @@ function common_testdata.run_queries(gql_wrapper) } ]] - local gql_query_1t = utils.show_trace(function() + local gql_query_1t = test_utils.show_trace(function() return gql_wrapper:compile(query_1t) end) @@ -443,7 +491,7 @@ function common_testdata.run_queries(gql_wrapper) test:is_deeply({ok, test_utils.strip_error(err)}, {false, err_exp}, 'wrong operation name should give an error') - utils.show_trace(function() + test_utils.show_trace(function() local result = gql_query_1t:execute({}, 'user_by_order') test:is_deeply(result, exp_result_1, 'execute an operation by name') end) @@ -463,7 +511,7 @@ function common_testdata.run_queries(gql_wrapper) } ]] - local gql_query_2 = utils.show_trace(function() + local gql_query_2 = test_utils.show_trace(function() return gql_wrapper:compile(query_2) end) @@ -480,7 +528,7 @@ function common_testdata.run_queries(gql_wrapper) description: second order of Ivan ]]):strip()) - utils.show_trace(function() + test_utils.show_trace(function() local variables_2_1 = {user_id = 'user_id_1'} local result = gql_query_2:execute(variables_2_1) test:is_deeply(result, exp_result_2_1, '2_1') @@ -515,7 +563,7 @@ function common_testdata.run_queries(gql_wrapper) description: order of user 42 ]]):strip()) - utils.show_trace(function() + test_utils.show_trace(function() local variables_2_2 = { user_id = 'user_id_42', limit = 10, @@ -538,7 +586,7 @@ function common_testdata.run_queries(gql_wrapper) description: order of user 42 ]]):strip()) - utils.show_trace(function() + test_utils.show_trace(function() local variables_2_3 = { user_id = 'user_id_42', limit = 10, @@ -559,7 +607,7 @@ function common_testdata.run_queries(gql_wrapper) description: order of user 42 ]]):strip()) - utils.show_trace(function() + test_utils.show_trace(function() local variables_2_4 = { first_name = 'first name 42', limit = 3, @@ -659,7 +707,7 @@ function common_testdata.run_queries(gql_wrapper) ]]):strip()) -- no limit, no offset - utils.show_trace(function() + test_utils.show_trace(function() local variables_2_5 = {user_id = 'user_id_42'} local result = gql_query_2:execute(variables_2_5) test:is_deeply(result, exp_result_2_5, '2_5') @@ -710,7 +758,7 @@ function common_testdata.run_queries(gql_wrapper) first_name: first name 62 ]]):strip()) - utils.show_trace(function() + test_utils.show_trace(function() local variables_3 = { limit = 10, offset = 'user_id_53', -- 50th (alphabetical sort) @@ -737,7 +785,7 @@ function common_testdata.run_queries(gql_wrapper) } ]] - local gql_query_4 = 
utils.show_trace(function()
+    local gql_query_4 = test_utils.show_trace(function()
         return gql_wrapper:compile(query_4)
     end)
 
@@ -753,7 +801,7 @@ function common_testdata.run_queries(gql_wrapper)
     ]]):strip())
 
     -- should match 1 order
-    utils.show_trace(function()
+    test_utils.show_trace(function()
         local variables_4_1 = {
             first_name = 'Ivan',
             description = 'first order of Ivan',
@@ -772,7 +820,7 @@ function common_testdata.run_queries(gql_wrapper)
     ]]):strip())
 
     -- should match no orders
-    utils.show_trace(function()
+    test_utils.show_trace(function()
         local variables_4_2 = {
             first_name = 'Ivan',
             description = 'non-existent order',
@@ -798,7 +846,7 @@ function common_testdata.run_queries(gql_wrapper)
         }
     ]]
 
-    local gql_query_5 = utils.show_trace(function()
+    local gql_query_5 = test_utils.show_trace(function()
         return gql_wrapper:compile(query_5)
     end)
 
@@ -814,7 +862,7 @@ function common_testdata.run_queries(gql_wrapper)
     ]]):strip())
 
     -- should match 1 user
-    utils.show_trace(function()
+    test_utils.show_trace(function()
         local variables_5_1 = {
             first_name = 'Ivan',
             description = 'first order of Ivan',
@@ -829,7 +877,7 @@ function common_testdata.run_queries(gql_wrapper)
     ]]):strip())
 
     -- should match no users (or give an error?)
-    utils.show_trace(function()
+    test_utils.show_trace(function()
         local variables_5_2 = {
             first_name = 'non-existent user',
             description = 'first order of Ivan',
@@ -880,13 +928,13 @@ function common_testdata.run_queries(gql_wrapper)
         }
     ]]
 
-    local gql_query_6 = utils.show_trace(function()
+    local gql_query_6 = test_utils.show_trace(function()
         return gql_wrapper:compile(query_6)
     end)
-    local gql_query_6_i_true = utils.show_trace(function()
+    local gql_query_6_i_true = test_utils.show_trace(function()
         return gql_wrapper:compile(query_6_i_true)
     end)
-    local gql_query_6_i_false = utils.show_trace(function()
+    local gql_query_6_i_false = test_utils.show_trace(function()
         return gql_wrapper:compile(query_6_i_false)
     end)
 
@@ -945,7 +993,7 @@ function common_testdata.run_queries(gql_wrapper)
         discount: 363.33334350586
     ]]):strip())
 
-    utils.show_trace(function()
+    test_utils.show_trace(function()
         local variables_6_1 = {limit = 10}
         local result = gql_query_6:execute(variables_6_1)
         local exp_result_6_1 = deeply_number_tostring(exp_result_6_1)
@@ -1008,7 +1056,7 @@ function common_testdata.run_queries(gql_wrapper)
         discount: 366.33334350586
     ]]):strip())
 
-    utils.show_trace(function()
+    test_utils.show_trace(function()
         local exp_result_6_2 = deeply_number_tostring(exp_result_6_2)
 
         local variables_6_2 = {limit = 10, in_stock = true}
@@ -1077,7 +1125,7 @@ function common_testdata.run_queries(gql_wrapper)
         discount: 366
     ]]):strip())
 
-    utils.show_trace(function()
+    test_utils.show_trace(function()
         local exp_result_6_3 = deeply_number_tostring(exp_result_6_3)
 
         local variables_6_3 = {limit = 10, in_stock = false}
diff --git a/test/testdata/compound_index_testdata.lua b/test/testdata/compound_index_testdata.lua
index 79bd753..6919d67 100644
--- a/test/testdata/compound_index_testdata.lua
+++ b/test/testdata/compound_index_testdata.lua
@@ -1,8 +1,7 @@
 local tap = require('tap')
 local json = require('json')
 local yaml = require('yaml')
-local utils = require('graphql.utils')
-local test_utils = require('test.utils')
+local test_utils = require('test.test_utils')
 
 local compound_index_testdata = {}
 
@@ -205,11 +204,11 @@ function compound_index_testdata.run_queries(gql_wrapper)
         }
     ]]
 
-    local gql_query_1 = utils.show_trace(function()
+    local gql_query_1 = test_utils.show_trace(function()
         return gql_wrapper:compile(query_1)
     end)
 
-    local result_1_1 = utils.show_trace(function()
+    local result_1_1 = test_utils.show_trace(function()
         local variables_1_1 = {user_str = 'user_str_b', user_num = 12}
         return gql_query_1:execute(variables_1_1)
     end)
@@ -228,7 +227,7 @@ function compound_index_testdata.run_queries(gql_wrapper)
     -- }}}
 
     -- {{{ get a top-level object by a full compound primary key plus filter
-    local result_1_2 = utils.show_trace(function()
+    local result_1_2 = test_utils.show_trace(function()
         local variables_1_2 = {
             user_str = 'user_str_b',
             user_num = 12,
@@ -248,7 +247,7 @@ function compound_index_testdata.run_queries(gql_wrapper)
 
     -- {{{ select top-level objects by a partial compound primary key (or maybe
     -- use fullscan)
-    local result_1_3 = utils.show_trace(function()
+    local result_1_3 = test_utils.show_trace(function()
         local variables_1_3 = {user_num = 12}
         return gql_query_1:execute(variables_1_3)
     end)
@@ -280,7 +279,7 @@ function compound_index_testdata.run_queries(gql_wrapper)
 
     test:is_deeply(result_1_3, exp_result_1_3, '1_3')
 
-    local result_1_4 = utils.show_trace(function()
+    local result_1_4 = test_utils.show_trace(function()
         local variables_1_4 = {user_str = 'user_str_b'}
         return gql_query_1:execute(variables_1_4)
     end)
@@ -376,7 +375,7 @@ function compound_index_testdata.run_queries(gql_wrapper)
 
     -- {{{ select top-level objects by a partial compound primary key plus
     -- filter (or maybe use fullscan)
-    local result_1_5 = utils.show_trace(function()
+    local result_1_5 = test_utils.show_trace(function()
         local variables_1_5 = {
             user_num = 12,
             first_name = 'non-existent'
@@ -391,7 +390,7 @@ function compound_index_testdata.run_queries(gql_wrapper)
 
     test:is_deeply(result_1_5, exp_result_1_5, '1_5')
 
-    local result_1_6 = utils.show_trace(function()
+    local result_1_6 = test_utils.show_trace(function()
         local variables_1_6 = {
             user_str = 'user_str_b',
             first_name = 'non-existent'
@@ -425,11 +424,11 @@ function compound_index_testdata.run_queries(gql_wrapper)
         }
     ]]
 
-    local gql_query_2 = utils.show_trace(function()
+    local gql_query_2 = test_utils.show_trace(function()
         return gql_wrapper:compile(query_2)
     end)
 
-    local result_2_1 = utils.show_trace(function()
+    local result_2_1 = test_utils.show_trace(function()
         local variables_2_1 = {user_str = 'user_str_b', user_num = 12}
         return gql_query_2:execute(variables_2_1)
     end)
@@ -479,7 +478,7 @@ function compound_index_testdata.run_queries(gql_wrapper)
     -- }}}
 
     -- {{{ select objects by a connection by a full compound index plus filter
-    local result_2_2 = utils.show_trace(function()
+    local result_2_2 = test_utils.show_trace(function()
         local variables_2_2 = {
             user_str = 'user_str_b',
             user_num = 12,
@@ -519,7 +518,7 @@ function compound_index_testdata.run_queries(gql_wrapper)
         }
     ]]
 
-    local result_3 = utils.show_trace(function()
+    local result_3 = test_utils.show_trace(function()
         local gql_query_3 = gql_wrapper:compile(query_3)
         local variables_3 = {user_str = 'user_str_b', user_num = 12}
         return gql_query_3:execute(variables_3)
@@ -1158,11 +1157,11 @@ function compound_index_testdata.run_queries(gql_wrapper)
         }
     ]]
 
-    local gql_query_4 = utils.show_trace(function()
+    local gql_query_4 = test_utils.show_trace(function()
         return gql_wrapper:compile(query_4)
     end)
 
-    local result_4_1 = utils.show_trace(function()
+    local result_4_1 = test_utils.show_trace(function()
         local variables_4_1 = {
             limit = 10,
             offset = {
@@ -1264,11 +1263,11 @@ function compound_index_testdata.run_queries(gql_wrapper)
         }
     ]]
 
-    local gql_query_5 = utils.show_trace(function()
+    local gql_query_5 = test_utils.show_trace(function()
         return gql_wrapper:compile(query_5)
     end)
 
-    local result_5_1 = utils.show_trace(function()
+    local result_5_1 = test_utils.show_trace(function()
         local variables_5_1 = {
             user_str = 'user_str_b',
             user_num = 12,
@@ -1347,7 +1346,7 @@ function compound_index_testdata.run_queries(gql_wrapper)
         }
     ]]
 
-    local result_6 = utils.show_trace(function()
+    local result_6 = test_utils.show_trace(function()
         local gql_query_6 = gql_wrapper:compile(query_6)
         local variables_6 = {
             limit = 10,
diff --git a/test/testdata/multihead_conn_testdata.lua b/test/testdata/multihead_conn_testdata.lua
index eba75f6..6680b95 100644
--- a/test/testdata/multihead_conn_testdata.lua
+++ b/test/testdata/multihead_conn_testdata.lua
@@ -1,7 +1,7 @@
 local tap = require('tap')
 local json = require('json')
 local yaml = require('yaml')
-local utils = require('graphql.utils')
+local test_utils = require('test.test_utils')
 
 local multihead_conn_testdata = {}
 
@@ -359,12 +359,12 @@ function multihead_conn_testdata.run_queries(gql_wrapper)
         }
     ]]
 
-    local gql_query_1 = utils.show_trace(function()
+    local gql_query_1 = test_utils.show_trace(function()
         return gql_wrapper:compile(query)
     end)
 
     local variables_1_1 = {hero_id = 'hero_id_1'}
-    local result_1_1 = utils.show_trace(function()
+    local result_1_1 = test_utils.show_trace(function()
         return gql_query_1:execute(variables_1_1)
     end)
     local exp_result_1_1 = yaml.decode(([[
@@ -387,7 +387,7 @@ function multihead_conn_testdata.run_queries(gql_wrapper)
     test:is_deeply(result_1_1, exp_result_1_1, '1_1')
 
     local variables_1_2 = {hero_id = 'hero_id_2'}
-    local result_1_2 = utils.show_trace(function()
+    local result_1_2 = test_utils.show_trace(function()
         return gql_query_1:execute(variables_1_2)
     end)
     local exp_result_1_2 = yaml.decode(([[
diff --git a/test/testdata/nested_record_testdata.lua b/test/testdata/nested_record_testdata.lua
index c13dae6..5ad89ff 100644
--- a/test/testdata/nested_record_testdata.lua
+++ b/test/testdata/nested_record_testdata.lua
@@ -5,8 +5,7 @@
 local tap = require('tap')
 local json = require('json')
 local yaml = require('yaml')
-local utils = require('graphql.utils')
-local test_utils = require('test.utils')
+local test_utils = require('test.test_utils')
 
 local testdata = {}
 
@@ -90,7 +89,7 @@ end
 
 function testdata.run_queries(gql_wrapper)
     local test = tap.test('nested_record')
-    test:plan(1)
+    test:plan(2)
 
     local query_1 = [[
         query getUserByUid($uid: Long) {
@@ -107,7 +106,7 @@ function testdata.run_queries(gql_wrapper)
     ]]
 
     local variables_1 = {uid = 5}
-    local result_1 = utils.show_trace(function()
+    local result_1 = test_utils.show_trace(function()
         local gql_query_1 = gql_wrapper:compile(query_1)
         return gql_query_1:execute(variables_1)
     end)
@@ -125,8 +124,6 @@ function testdata.run_queries(gql_wrapper)
 
     test:is_deeply(result_1, exp_result_1, '1')
 
-    -- XXX: uncomment when arguments for nested records will be supported
-    --[=[
     local query_2 = [[
         query getUserByX($x: Long) {
             user(nested: {x: $x}) {
@@ -142,7 +139,7 @@ function testdata.run_queries(gql_wrapper)
     ]]
 
     local variables_2 = {x = 1005}
-    local result_2 = utils.show_trace(function()
+    local result_2 = test_utils.show_trace(function()
         local gql_query_2 = gql_wrapper:compile(query_2)
         return gql_query_2:execute(variables_2)
     end)
@@ -159,7 +156,6 @@ function testdata.run_queries(gql_wrapper)
     ]]):strip())
 
     test:is_deeply(result_2, exp_result_2, '2')
-    ]=]--
 
     assert(test:check(), 'check plan')
 end
diff --git a/test/testdata/nullable_1_1_conn_testdata.lua b/test/testdata/nullable_1_1_conn_testdata.lua
index c8d9849..b3e4f02 100644
--- a/test/testdata/nullable_1_1_conn_testdata.lua
+++ b/test/testdata/nullable_1_1_conn_testdata.lua
@@ -8,8 +8,7 @@
 local tap = require('tap')
 local json = require('json')
 local yaml = require('yaml')
-local utils = require('graphql.utils')
-local test_utils = require('test.utils')
+local test_utils = require('test.test_utils')
 
 local nullable_1_1_conn_testdata = {}
 
@@ -286,11 +285,11 @@ function nullable_1_1_conn_testdata.run_queries(gql_wrapper)
         }
     ]]
 
-    local gql_query_downside = utils.show_trace(function()
+    local gql_query_downside = test_utils.show_trace(function()
         return gql_wrapper:compile(query_downside)
     end)
 
-    local result = utils.show_trace(function()
+    local result = test_utils.show_trace(function()
         local variables_downside_a = {body = 'a'}
         return gql_query_downside:execute(variables_downside_a)
     end)
@@ -316,7 +315,7 @@ function nullable_1_1_conn_testdata.run_queries(gql_wrapper)
 
     test:is_deeply(result, exp_result, 'downside_a')
 
-    local result = utils.show_trace(function()
+    local result = test_utils.show_trace(function()
         local variables_downside_h = {body = 'h'}
         return gql_query_downside:execute(variables_downside_h)
     end)
@@ -358,7 +357,7 @@ function nullable_1_1_conn_testdata.run_queries(gql_wrapper)
 
     local gql_query_upside = gql_wrapper:compile(query_upside)
 
-    local result = utils.show_trace(function()
+    local result = test_utils.show_trace(function()
         local variables_upside = {body = 'f'}
         return gql_query_upside:execute(variables_upside)
     end)
diff --git a/test/testdata/nullable_index_testdata.lua b/test/testdata/nullable_index_testdata.lua
index e6910d0..9bd2275 100644
--- a/test/testdata/nullable_index_testdata.lua
+++ b/test/testdata/nullable_index_testdata.lua
@@ -6,8 +6,7 @@
 local tap = require('tap')
 local json = require('json')
 local yaml = require('yaml')
-local utils = require('graphql.utils')
-local test_utils = require('test.utils')
+local test_utils = require('test.test_utils')
 
 local nullable_index_testdata = {}
 
@@ -346,12 +345,12 @@ function nullable_index_testdata.run_queries(gql_wrapper)
         }
     ]]
 
-    local gql_query_2 = utils.show_trace(function()
+    local gql_query_2 = test_utils.show_trace(function()
         return gql_wrapper:compile(query_2)
    end)
 
     -- fullscan; expected to see objects with ID > 100
-    local result = utils.show_trace(function()
+    local result = test_utils.show_trace(function()
         local variables_2_1 = {}
         return gql_query_2:execute(variables_2_1)
     end)
@@ -418,7 +417,7 @@ function nullable_index_testdata.run_queries(gql_wrapper)
     test:is_deeply(result, exp_result, '2_1')
 
     -- lookup by the unique index; expected to see only the object with ID 42
-    local result = utils.show_trace(function()
+    local result = test_utils.show_trace(function()
         local variables_2_2 = {
             id_or_null_1 = '42',
             id_or_null_2 = '42',
@@ -437,7 +436,7 @@ function nullable_index_testdata.run_queries(gql_wrapper)
 
     -- lookup by the non-unique index; expected to see only the object with ID
     -- 42
-    local result = utils.show_trace(function()
+    local result = test_utils.show_trace(function()
         local variables_2_3 = {
             id_or_null_2 = '42',
             id_or_null_3 = '42',
@@ -472,12 +471,12 @@ function nullable_index_testdata.run_queries(gql_wrapper)
         }
     ]]
 
-    local gql_query_3 = utils.show_trace(function()
+    local gql_query_3 = test_utils.show_trace(function()
         return gql_wrapper:compile(query_3)
     end)
 
     local variables_3_1 = {id = '42'}
-    local result_3_1 = utils.show_trace(function()
+    local result_3_1 = test_utils.show_trace(function()
         return gql_query_3:execute(variables_3_1)
     end)
     local exp_result_3_1 = yaml.decode(([[
@@ -492,7 +491,7 @@ function nullable_index_testdata.run_queries(gql_wrapper)
     test:is_deeply(result_3_1, exp_result_3_1, '3_1')
 
     local variables_3_2 = {id = '103'}
-    local result_3_2 = utils.show_trace(function()
+    local result_3_2 = test_utils.show_trace(function()
         return gql_query_3:execute(variables_3_2)
     end)
     local exp_result_3_2 = yaml.decode(([[
diff --git a/test/testdata/union_testdata.lua b/test/testdata/union_testdata.lua
index 702c22b..9637318 100644
--- a/test/testdata/union_testdata.lua
+++ b/test/testdata/union_testdata.lua
@@ -2,7 +2,7 @@ local tap = require('tap')
 local json = require('json')
 local yaml = require('yaml')
 local avro = require('avro_schema')
-local utils = require('graphql.utils')
+local test_utils = require('test.test_utils')
 
 local union_testdata = {}
 
@@ -149,13 +149,13 @@ function union_testdata.run_queries(gql_wrapper)
         }
     ]]
 
-    local gql_query_1 = utils.show_trace(function()
+    local gql_query_1 = test_utils.show_trace(function()
         return gql_wrapper:compile(query_1)
     end)
 
     local variables_1 = {}
 
-    local result_1 = utils.show_trace(function()
+    local result_1 = test_utils.show_trace(function()
         return gql_query_1:execute(variables_1)
     end)