230
230
--- Convert each field of an avro-schema to a graphql type.
231
231
---
232
232
--- @tparam table state for read state.accessor and previously filled
233
- --- state.types
233
+ --- state.nullable_collection_types
234
234
--- @tparam table fields fields part from an avro-schema
235
235
---
236
236
--- @treturn table `res` -- map with type names as keys and graphql types as
253
253
--- The function converts passed avro-schema to a GraphQL type.
254
254
---
255
255
--- @tparam table state for read state.accessor and previously filled
256
- --- state.types (state.types are gql types)
256
+ --- state.nullable_collection_types (those are gql types)
257
257
--- @tparam table avro_schema input avro-schema
258
258
--- @tparam [opt] table collection table with schema_name, connections fields
259
259
--- described a collection (e.g. tarantool's spaces)
@@ -307,8 +307,8 @@ gql_type = function(state, avro_schema, collection, collection_name)
307
307
for _ , c in ipairs ((collection or {}).connections or {}) do
308
308
assert (type (c .type ) == ' string' ,
309
309
' connection.type must be a string, got ' .. type (c .type ))
310
- assert(c.type == '1:1' or c.type == '1:N',
311
-     'connection.type must be 1:1 or 1:N, got ' .. c.type)
310
+ assert(c.type == '1:1' or c.type == '1:1*' or c.type == '1:N',
311
+     'connection.type must be 1:1, 1:1* or 1:N, got ' .. c.type)
312
312
assert (type (c .name ) == ' string' ,
313
313
' connection.name must be a string, got ' .. type (c .name ))
314
314
assert (type (c .destination_collection ) == ' string' ,
@@ -319,16 +319,20 @@ gql_type = function(state, avro_schema, collection, collection_name)
319
319
320
320
-- gql type of connection field
321
321
local destination_type =
322
-     state.types[c.destination_collection]
322
+     state.nullable_collection_types[c.destination_collection]
323
323
assert (destination_type ~= nil ,
324
324
(' destination_type (named %s) must not be nil' ):format (
325
325
c .destination_collection ))
326
326
327
327
local c_args
328
328
if c.type == '1:1' then
329
+     destination_type = types.nonNull(destination_type)
330
+     c_args = state.object_arguments[c.destination_collection]
331
+ elseif c.type == '1:1*' then
329
332
      c_args = state.object_arguments[c.destination_collection]
330
333
  elseif c.type == '1:N' then
331
-     destination_type = types.nonNull(types.list(destination_type))
334
+     destination_type = types.nonNull(types.list(types.nonNull(
335
+         destination_type)))
332
336
c_args = state .all_arguments [c .destination_collection ]
333
337
else
334
338
error (' unknown connection type: ' .. tostring (c .type ))
@@ -343,6 +347,8 @@ gql_type = function(state, avro_schema, collection, collection_name)
343
347
resolve = function (parent , args_instance , info )
344
348
local destination_args_names = {}
345
349
local destination_args_values = {}
350
+ local are_all_parts_non_null = true
351
+ local are_all_parts_null = true
346
352
347
353
for _ , part in ipairs (c .parts ) do
348
354
assert (type (part .source_field ) == ' string' ,
@@ -354,8 +360,45 @@ gql_type = function(state, avro_schema, collection, collection_name)
354
360
355
361
destination_args_names [# destination_args_names + 1 ] =
356
362
part .destination_field
363
+
364
+         local value = parent[part.source_field]
357
365
          destination_args_values[#destination_args_values + 1] =
358
-             parent[part.source_field]
366
+             value
367
+
368
+         if value ~= nil then -- nil or box.NULL
369
+             are_all_parts_null = false
370
+         else
371
+             are_all_parts_non_null = false
372
+         end
373
+     end
374
+
375
+     -- Check FULL match constraint before request of
376
+     -- destination object(s). Note that connection key parts
377
+     -- can be prefix of index key parts. Zero parts count
378
+     -- considered as ok by this check.
379
+     local ok = are_all_parts_null or are_all_parts_non_null
380
+     if not ok then -- avoid extra json.encode()
381
+         assert(ok,
382
+             'FULL MATCH constraint was failed: connection ' ..
383
+             'key parts must be all non-nulls or all nulls; ' ..
384
+             'object: ' .. json.encode(parent))
385
+     end
386
+
387
+     -- Avoid non-needed index lookup on a destination
388
+     -- collection when all connection parts are null:
389
+     -- * return null for 1:1* connection;
390
+     -- * return {} for 1:N connection (except the case when
391
+     --   source collection is the Query pseudo-collection).
392
+     if collection_name ~= 'Query' and are_all_parts_null then
393
+         if c.type ~= '1:1*' and c.type ~= '1:N' then
394
+             -- `if` is to avoid extra json.encode
395
+             assert(c.type == '1:1*' or c.type == '1:N',
396
+                 ('only 1:1* or 1:N connections can have ' ..
397
+                 'all key parts null; parent is %s from ' ..
398
+                 'collection "%s"'):format(json.encode(parent),
399
+                 tostring(collection_name)))
400
+         end
401
+         return c.type == '1:N' and {} or nil
359
402
end
360
403
361
404
local from = {
@@ -386,7 +429,10 @@ gql_type = function(state, avro_schema, collection, collection_name)
386
429
assert (type (objs ) == ' table' ,
387
430
' objs list received from an accessor ' ..
388
431
' must be a table, got ' .. type (objs ))
389
- if c.type == '1:1' then
432
+ if c.type == '1:1' or c.type == '1:1*' then
433
+     -- we expect here exactly one object even for 1:1*
434
+     -- connections because we processed all-parts-are-null
435
+     -- situation above
390
436
assert (# objs == 1 ,
391
437
' expect one matching object, got ' ..
392
438
tostring (# objs ))
@@ -405,7 +451,7 @@ gql_type = function(state, avro_schema, collection, collection_name)
405
451
avro_schema .name ,
406
452
fields = fields ,
407
453
})
408
- return avro_t == 'enum' and types.nonNull(res) or res
454
+ return avro_t == 'record' and types.nonNull(res) or res
409
455
elseif avro_t == ' enum' then
410
456
error (' enums not implemented yet' ) -- XXX
411
457
elseif avro_t == ' array' or avro_t == ' array*' then
@@ -476,15 +522,21 @@ local function create_root_collection(state)
476
522
477
523
-- `gql_type` is designed to create GQL type corresponding to a real schema
478
524
-- and connections. However it also works with the fake schema.
525
+ -- Query type must be the Object, so it cannot be nonNull.
479
526
local root_type = gql_type (state , root_schema , root_collection , " Query" )
480
527
state .schema = schema .create ({
481
-     query = root_type
528
+     query = nullable(root_type),
482
529
})
483
530
end
484
531
485
532
local function parse_cfg (cfg )
486
533
local state = {}
487
- state.types = utils.gen_booking_table({})
534
+
535
+ -- collection type is always record, so always non-null; we can lazily
536
+ -- evaluate non-null type from nullable type, but not vice versa, so we
537
+ -- collect nullable types here and evaluate non-null ones where needed
538
+ state.nullable_collection_types = utils.gen_booking_table({})
539
+
488
540
state .object_arguments = utils .gen_booking_table ({})
489
541
state .list_arguments = utils .gen_booking_table ({})
490
542
state .all_arguments = utils .gen_booking_table ({})
@@ -523,8 +575,15 @@ local function parse_cfg(cfg)
523
575
assert (schema .type == ' record' ,
524
576
' top-level schema must have record avro type, got ' ..
525
577
tostring (schema .type ))
526
- state.types[collection_name] = gql_type(state, schema, collection,
527
-     collection_name)
578
+ local collection_type =
579
+     gql_type(state, schema, collection, collection_name)
580
+ -- we utilize the fact that collection type is always non-null and
581
+ -- don't store this information; see comment above for
582
+ -- `nullable_collection_types` variable definition
583
+ assert(collection_type.__type == 'NonNull',
584
+     'collection must always has non-null type')
585
+ state.nullable_collection_types[collection_name] =
586
+     nullable(collection_type)
528
587
529
588
-- prepare arguments' types
530
589
local object_args = convert_record_fields_to_args (schema .fields ,
@@ -536,7 +595,6 @@ local function parse_cfg(cfg)
536
595
state .object_arguments [collection_name ] = object_args
537
596
state .list_arguments [collection_name ] = list_args
538
597
state .all_arguments [collection_name ] = args
539
-
540
598
end
541
599
-- create fake root `Query` collection
542
600
create_root_collection (state )
0 commit comments