Skip to content

Commit 1c721e0

Browse files
jprakash-db and varun-edachali-dbx
authored and committed
Forward porting all changes into 4.x.x, up until v3.7.3 (#529)
* Base changes * Black formatter * Cache version fix * Added the changed test_retry.py file * retry_test_mixins changes Signed-off-by: varun-edachali-dbx <[email protected]>
1 parent e1d7f71 commit 1c721e0

File tree

6 files changed

+32
-7
lines changed

6 files changed

+32
-7
lines changed

.github/workflows/code-quality-checks.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -93,7 +93,7 @@ jobs:
9393
#----------------------------------------------
9494
- name: Load cached venv
9595
id: cached-poetry-dependencies
96-
uses: actions/cache@v2
96+
uses: actions/cache@v4
9797
with:
9898
path: .venv-pyarrow
9999
key: venv-pyarrow-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ github.event.repository.name }}-${{ hashFiles('**/poetry.lock') }}

CHANGELOG.md

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,16 @@
55
- Split the connector into two separate packages: `databricks-sql-connector` and `databricks-sqlalchemy`. The `databricks-sql-connector` package contains the core functionality of the connector, while the `databricks-sqlalchemy` package contains the SQLAlchemy dialect for the connector.
66
- Pyarrow dependency is now optional in `databricks-sql-connector`. Users needing arrow are supposed to explicitly install pyarrow
77

8+
# 3.7.3 (2025-03-28)
9+
10+
- Fix: Unable to poll small results in execute_async function (databricks/databricks-sql-python#515 by @jprakash-db)
11+
- Updated log messages to show the status code and error messages of requests (databricks/databricks-sql-python#511 by @jprakash-db)
12+
- Fix: Incorrect metadata was fetched in case of queries with the same alias (databricks/databricks-sql-python#505 by @jprakash-db)
13+
14+
# 3.7.2 (2025-01-31)
15+
16+
- Updated the retry_delay_max and retry_timeout (databricks/databricks-sql-python#497 by @jprakash-db)
17+
818
# 3.7.1 (2025-01-07)
919

1020
- Relaxed the number of Http retry attempts (databricks/databricks-sql-python#486 by @jprakash-db)

src/databricks/sql/auth/retry.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -408,8 +408,9 @@ def should_retry(self, method: str, status_code: int) -> Tuple[bool, str]:
408408
and status_code not in self.status_forcelist
409409
and status_code not in self.force_dangerous_codes
410410
):
411-
raise UnsafeToRetryError(
412-
"ExecuteStatement command can only be retried for codes 429 and 503. Received code: {status_code}"
411+
return (
412+
False,
413+
"ExecuteStatement command can only be retried for codes 429 and 503",
413414
)
414415

415416
# Request failed with a dangerous code, was an ExecuteStatement, but user forced retries for this

src/databricks/sql/backend/thrift_backend.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@
6868
# - 900s attempts-duration lines up w ODBC/JDBC drivers (for cluster startup > 10 mins)
6969
_retry_policy = { # (type, default, min, max)
7070
"_retry_delay_min": (float, 1, 0.1, 60),
71-
"_retry_delay_max": (float, 30, 5, 3600),
71+
"_retry_delay_max": (float, 60, 5, 3600),
7272
"_retry_stop_after_attempts_count": (int, 30, 1, 60),
7373
"_retry_stop_after_attempts_duration": (float, 900, 1, 86400),
7474
"_retry_delay_default": (float, 5, 1, 60),

src/databricks/sql/client.py

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -789,6 +789,14 @@ def execute(
789789
async_op=False,
790790
enforce_embedded_schema_correctness=enforce_embedded_schema_correctness,
791791
)
792+
self.active_result_set = ResultSet(
793+
self.connection,
794+
execute_response,
795+
self.thrift_backend,
796+
self.buffer_size_bytes,
797+
self.arraysize,
798+
self.connection.use_cloud_fetch,
799+
)
792800

793801
if self.active_result_set.is_staging_operation:
794802
self._handle_staging_operation(

tests/e2e/test_driver.py

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -179,14 +179,20 @@ def test_cloud_fetch(self):
179179

180180

181181
class TestPySQLAsyncQueriesSuite(PySQLPytestTestCase):
182+
def isExecuting(self, operation_state):
183+
return not operation_state or operation_state in [
184+
ttypes.TOperationState.RUNNING_STATE,
185+
ttypes.TOperationState.PENDING_STATE,
186+
]
187+
182188
def test_execute_async__long_running(self):
183189

184190
long_running_query = "SELECT COUNT(*) FROM RANGE(10000 * 16) x JOIN RANGE(10000) y ON FROM_UNIXTIME(x.id * y.id, 'yyyy-MM-dd') LIKE '%not%a%date%'"
185191
with self.cursor() as cursor:
186192
cursor.execute_async(long_running_query)
187193

188194
## Polling after every POLLING_INTERVAL seconds
189-
while cursor.is_query_pending():
195+
while self.isExecuting(cursor.get_query_state()):
190196
time.sleep(self.POLLING_INTERVAL)
191197
log.info("Polling the status in test_execute_async")
192198

@@ -205,7 +211,7 @@ def test_execute_async__small_result(self):
205211
time.sleep(5)
206212

207213
## Polling after every POLLING_INTERVAL seconds
208-
while cursor.is_query_pending():
214+
while self.isExecuting(cursor.get_query_state()):
209215
time.sleep(self.POLLING_INTERVAL)
210216
log.info("Polling the status in test_execute_async")
211217

@@ -235,7 +241,7 @@ def test_execute_async__large_result(self):
235241
time.sleep(5)
236242

237243
## Polling after every POLLING_INTERVAL seconds
238-
while cursor.is_query_pending():
244+
while self.isExecuting(cursor.get_query_state()):
239245
time.sleep(self.POLLING_INTERVAL)
240246
log.info("Polling the status in test_execute_async")
241247

0 commit comments

Comments
 (0)