 UNSIGNED_INT64_LENGTH = 8
 
 
+# Type tags used by MySQL 5.7's binary JSON (JSONB) format: the first
+# byte of a serialized document says how to decode what follows.
+JSONB_TYPE_SMALL_OBJECT = 0x0
+JSONB_TYPE_LARGE_OBJECT = 0x1
+JSONB_TYPE_SMALL_ARRAY = 0x2
+JSONB_TYPE_LARGE_ARRAY = 0x3
+JSONB_TYPE_LITERAL = 0x4
+JSONB_TYPE_INT16 = 0x5
+JSONB_TYPE_UINT16 = 0x6
+JSONB_TYPE_INT32 = 0x7
+JSONB_TYPE_UINT32 = 0x8
+JSONB_TYPE_INT64 = 0x9
+JSONB_TYPE_UINT64 = 0xA
+JSONB_TYPE_DOUBLE = 0xB
+JSONB_TYPE_STRING = 0xC
+JSONB_TYPE_OPAQUE = 0xF
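+# Worked illustration (editorial note, not part of the original patch):
+# the JSONB image for '{"a": "b"}' begins with tag 0x0 (small object),
+# while a bare top-level string such as '"hi"' begins with tag 0xC.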
+
+
+
 class BinLogPacketWrapper(object):
     """
     Bin Log Packet Wrapper. It uses an existing packet object, and wraps
@@ -281,3 +298,63 @@ def unpack_int32(self, n):
             + (struct.unpack('B', n[3])[0] << 24)
         except TypeError:
             return n[0] + (n[1] << 8) + (n[2] << 16) + (n[3] << 24)
+
+    def read_binary_json(self, size):
+        # The row image stores the column as an unsigned length (its byte
+        # width comes from the table's column metadata) followed by the
+        # serialized document: a 1-byte type tag, then the value data.
+        length = self.read_uint_by_size(size)
+        t = self.read_uint8()
+
+        return self.read_binary_json_type(t, length)
+
+    def read_binary_json_type(self, t, length):
+        if t in (JSONB_TYPE_SMALL_OBJECT, JSONB_TYPE_LARGE_OBJECT):
+            return self.read_binary_json_object(
+                length - 1, large=(t == JSONB_TYPE_LARGE_OBJECT))
+        elif t in (JSONB_TYPE_SMALL_ARRAY, JSONB_TYPE_LARGE_ARRAY):
+            return self.read_binary_json_array(
+                length - 1, large=(t == JSONB_TYPE_LARGE_ARRAY))
+        elif t == JSONB_TYPE_STRING:
+            return self.read_length_coded_pascal_string(1)
+
+        # Literals, integers, doubles and opaque values are not decoded
+        # yet; fail loudly rather than silently returning None.
+        raise ValueError('Json type %d is not handled' % t)
+
+    def read_binary_json_object(self, length, large):
+        # Object header: element count, then the total size of the object
+        # value; both fields are 4 bytes in the large format, 2 otherwise.
+        if large:
+            elements = self.read_uint32()
+            size = self.read_uint32()
+        else:
+            elements = self.read_uint16()
+            size = self.read_uint16()
+
+        if size > length:
+            raise ValueError('Json length is larger than packet length')
+
+        # One (offset, length) entry per key, then one (type, offset)
+        # entry per value. Key lengths are always 2 bytes; offsets are
+        # 4 bytes in the large format and 2 bytes in the small one.
+        if large:
+            key_offset_lengths = [(
+                self.read_uint32(),  # offset (we don't actually need that)
+                self.read_uint16()   # size of the key
+            ) for _ in range(elements)]
+
+            value_type_lengths = [(
+                self.read_uint8(),   # type
+                self.read_uint32()   # offset
+            ) for _ in range(elements)]
+        else:
+            key_offset_lengths = [(
+                self.read_uint16(),  # offset (we don't actually need that)
+                self.read_uint16()   # size of the key
+            ) for _ in range(elements)]
+
+            value_type_lengths = [(
+                self.read_uint8(),   # type
+                self.read_uint16()   # offset
+            ) for _ in range(elements)]
+
+        keys = [self.read(x[1]) for x in key_offset_lengths]
+
+        # Values are laid out in order after the keys, so they are read
+        # sequentially and the recorded offsets are ignored. Note MySQL
+        # inlines small scalars in the offset field; those types are not
+        # decoded yet (see read_binary_json_type).
+        out = {}
+        for i in range(elements):
+            t = value_type_lengths[i][0]
+            data = self.read_binary_json_type(t, length)
+            out[keys[i]] = data
+
+        return out
+
+
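Note: the hunk above calls self.read_binary_json_array but never defines it, so it presumably lands in a follow-up commit. A minimal sketch of what it might look like, mirroring the object reader (an illustration under that assumption, not the committed implementation):

    def read_binary_json_array(self, length, large):
        # The array header mirrors the object header: element count, then
        # the total size of the array value.
        if large:
            elements = self.read_uint32()
            size = self.read_uint32()
        else:
            elements = self.read_uint16()
            size = self.read_uint16()

        if size > length:
            raise ValueError('Json length is larger than packet length')

        # One (type, offset) entry per element, followed by the element
        # data in order, so values can be read sequentially as above.
        if large:
            value_type_lengths = [(
                self.read_uint8(),   # type
                self.read_uint32()   # offset
            ) for _ in range(elements)]
        else:
            value_type_lengths = [(
                self.read_uint8(),   # type
                self.read_uint16()   # offset
            ) for _ in range(elements)]

        return [self.read_binary_json_type(t, length)
                for t, _ in value_type_lengths]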
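For orientation, here is what read_binary_json consumes once the length prefix has been stripped: a hand-assembled JSONB image for {"a": "b"}, decoded with plain struct calls. The byte values are worked out from the small-object layout described above and are illustrative, not taken from the patch:

    import struct

    payload = bytes([
        0x00,        # type tag: JSONB_TYPE_SMALL_OBJECT
        0x01, 0x00,  # element count (uint16): one key/value pair
        0x0E, 0x00,  # total size of the object value (uint16): 14 bytes
        0x0B, 0x00,  # key entry: offset of the key bytes within the value
        0x01, 0x00,  # key entry: key length
        0x0C,        # value entry: type tag JSONB_TYPE_STRING
        0x0C, 0x00,  # value entry: offset of the string data
        0x61,        # key bytes: "a"
        0x01, 0x62,  # string value: 1-byte length, then "b"
    ])

    tag = payload[0]                                        # 0x0: small object
    elements, size = struct.unpack_from('<HH', payload, 1)  # (1, 14)
    key_offset, key_length = struct.unpack_from('<HH', payload, 5)
    value_type, value_offset = struct.unpack_from('<BH', payload, 9)

    # Offsets are relative to the start of the object value (payload[1:]).
    key = payload[1 + key_offset:1 + key_offset + key_length]
    str_length = payload[1 + value_offset]
    value = payload[2 + value_offset:2 + value_offset + str_length]

    print({key.decode(): value.decode()})  # {'a': 'b'}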