2 files changed: +25 −3 lines changed

src/databricks/__init__.py (modified):
-# https://packaging.python.org/guides/packaging-namespace-packages/#pkgutil-style-namespace-packages
-# This file should only contain the following line. Otherwise other sub-packages databricks.* namespace
-# may not be importable.
+# See: https://packaging.python.org/guides/packaging-namespace-packages/#pkgutil-style-namespace-packages
+#
+# This file must only contain the following line, or other packages in the databricks.* namespace
+# may not be importable. The contents of this file must be byte-for-byte equivalent across all packages.
+# If they are not, parallel package installation may lead to clobbered and invalid files.
+# Also see https://github.com/databricks/databricks-sdk-py/issues/343.
 __path__ = __import__("pkgutil").extend_path(__path__, __name__)
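
For context on why the comment insists on a byte-for-byte identical `__init__.py`, here is a minimal, self-contained sketch of pkgutil-style namespace packages. Everything in it is hypothetical and invented for illustration: the `dist_a`/`dist_b` directories stand in for two independently installed distributions, and `a_pkg`/`b_pkg` stand in for their sub-packages. Each distribution ships the same one-line `databricks/__init__.py`, and `pkgutil.extend_path()` merges their `databricks` directories, so every sub-package stays importable no matter which copy of the file ends up on disk.

```python
# Hypothetical layout: dist_a/databricks/a_pkg and dist_b/databricks/b_pkg,
# each distribution shipping an identical one-line databricks/__init__.py.
import os
import sys
import tempfile

INIT_LINE = '__path__ = __import__("pkgutil").extend_path(__path__, __name__)\n'

root = tempfile.mkdtemp()
for dist, sub in (("dist_a", "a_pkg"), ("dist_b", "b_pkg")):
    sub_dir = os.path.join(root, dist, "databricks", sub)
    os.makedirs(sub_dir)
    with open(os.path.join(root, dist, "databricks", "__init__.py"), "w") as f:
        f.write(INIT_LINE)  # the shared, identical __init__.py
    with open(os.path.join(sub_dir, "__init__.py"), "w") as f:
        f.write("")  # empty sub-package

# Simulate both distributions being installed side by side.
sys.path[:0] = [os.path.join(root, "dist_a"), os.path.join(root, "dist_b")]

import databricks.a_pkg  # resolved from dist_a/databricks
import databricks.b_pkg  # resolved from dist_b/databricks via the merged __path__
print(databricks.__path__)  # lists both .../dist_a/databricks and .../dist_b/databricks
```

If the two copies of `__init__.py` differed, whichever one was written last during a parallel installation would silently win for every installed databricks.* package, which is exactly the clobbering the comment above and the test below guard against.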
New file (a regression test for src/databricks/__init__.py):
+import hashlib
+
+
+class TestInitFile:
+    """
+    Micro test to confirm the contents of `databricks/__init__.py` do not change.
+
+    Also see https://github.com/databricks/databricks-sdk-py/issues/343#issuecomment-1866029118.
+    """
+
+    def test_init_file_contents(self):
+        with open("src/databricks/__init__.py") as f:
+            init_file_contents = f.read()
+
+        # This hash is the expected hash of the contents of `src/databricks/__init__.py`.
+        # It must not change, or else parallel package installation may lead to clobbered and invalid files.
+        expected_sha1 = "2772edbf52e517542acf8c039479c4b57b6ca2cd"
+        actual_sha1 = hashlib.sha1(init_file_contents.encode("utf-8")).hexdigest()
+        assert expected_sha1 == actual_sha1
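
If `src/databricks/__init__.py` ever does have to change deliberately, the expected hash can be recomputed with a snippet like the one below. This is a sketch, not part of the PR; it assumes it is run from the repository root, the same working directory the test itself assumes.

```python
import hashlib

# Recompute the SHA-1 the same way the test does: read as text, encode as UTF-8.
with open("src/databricks/__init__.py") as f:
    print(hashlib.sha1(f.read().encode("utf-8")).hexdigest())
```

The printed value would then replace `expected_sha1` above, although per the comments in the diff that should essentially never be necessary.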