11 | 11 | from ._version_store_utils import checksum, pickle_compat_load, version_base_or_id
12 | 12 | from .._compression import decompress, compress_array
13 | 13 | from ..exceptions import UnsupportedPickleStoreVersion
   | 14 | +from .._config import SKIP_BSON_ENCODE_PICKLE_STORE
   | 15 | +
14 | 16 |
15 | 17 | # new versions of chunked pickled objects MUST begin with __chunked__
16 | 18 | _MAGIC_CHUNKED = '__chunked__'
@@ -75,15 +77,19 @@ def read(self, mongoose_lib, version, symbol, **kwargs):
75 | 77 |     def read_options():
76 | 78 |         return []
77 | 79 |
78 |    | -    def write(self, arctic_lib, version, symbol, item, previous_version):
79 |    | -        try:
80 |    | -            # If it's encodeable, then ship it
81 |    | -            b = bson.BSON.encode({'data': item})
82 |    | -            if len(b) < _MAX_BSON_ENCODE:
83 |    | -                version['data'] = item
84 |    | -                return
85 |    | -        except InvalidDocument:
86 |    | -            pass
   | 80 | +    def write(self, arctic_lib, version, symbol, item, _previous_version):
   | 81 | +        # Currently we try to BSON-encode the data if it is smaller than a given size and store it in
   | 82 | +        # the version collection, but pickling might be preferable if the data contains characters that
   | 83 | +        # don't play well with the BSON encoder, or if you always want your data in the data collection.
   | 84 | +        if not SKIP_BSON_ENCODE_PICKLE_STORE:
   | 85 | +            try:
   | 86 | +                # If it's encodeable, then ship it
   | 87 | +                b = bson.BSON.encode({'data': item})
   | 88 | +                if len(b) < _MAX_BSON_ENCODE:
   | 89 | +                    version['data'] = item
   | 90 | +                    return
   | 91 | +            except InvalidDocument:
   | 92 | +                pass
87 | 93 |
88 | 94 |         # Pickle, chunk and store the data
89 | 95 |         collection = arctic_lib.get_top_level_collection()
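
For context, `SKIP_BSON_ENCODE_PICKLE_STORE` is imported from `.._config`, but its definition is not part of this diff. A minimal sketch of how such a module-level flag could be wired up, assuming it is driven by an environment variable of the same name (the variable name and default are assumptions, not shown here):

```python
# Hypothetical sketch of the flag in _config.py -- its real definition is not in this diff.
import os

# When set to anything non-empty, PickleStore.write() bypasses the BSON-encode
# fast path and always pickles into the data collection.
SKIP_BSON_ENCODE_PICKLE_STORE = bool(os.environ.get('SKIP_BSON_ENCODE_PICKLE_STORE'))
```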
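The fast path itself is a size gate built on pymongo's `bson` package: encode the candidate document, keep it inline in the version document if the encoded form is small enough, and fall back to pickling if encoding fails or the blob is too big. A standalone sketch of the same pattern (the threshold value here is illustrative, not necessarily the store's `_MAX_BSON_ENCODE`):

```python
import bson
from bson.errors import InvalidDocument

_MAX_BSON_ENCODE = 256 * 1024  # illustrative threshold, not necessarily the store's value

def fits_inline(item):
    """Return True if `item` BSON-encodes and the result is small enough to inline."""
    try:
        encoded = bson.BSON.encode({'data': item})
    except InvalidDocument:
        # e.g. a value of a type the BSON encoder cannot handle
        return False
    return len(encoded) < _MAX_BSON_ENCODE

print(fits_inline({'a': 1}))       # True: tiny, encodeable document
print(fits_inline({'a': {1, 2}}))  # False: a Python set is not BSON-encodeable
```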
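When the inline path is skipped or fails, control falls through to the existing pickle path ("Pickle, chunk and store the data"), which writes into the collection returned by `arctic_lib.get_top_level_collection()` and, per the `_MAGIC_CHUNKED` constant at the top of the file, marks chunked blobs with a magic prefix so `read` can recognize them. A rough sketch of the chunking idea only; the segment size, helper name, and return shape are assumptions (the real store also compresses segments and tracks checksums):

```python
import pickle

_MAGIC_CHUNKED = '__chunked__'
_SEGMENT_SIZE = 15 * 1024 * 1024  # illustrative: keep each segment under MongoDB's 16 MB document cap

def to_segments(item):
    """Pickle `item` and split the blob into ordered, segment-sized chunks."""
    blob = pickle.dumps(item, protocol=pickle.HIGHEST_PROTOCOL)
    segments = [blob[i:i + _SEGMENT_SIZE] for i in range(0, len(blob), _SEGMENT_SIZE)]
    # The version document would record the magic marker so read() takes the chunked path.
    return _MAGIC_CHUNKED, segments
```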