@@ -25,7 +25,7 @@ def __init__(self,
                  server_hostname: str,
                  http_path: str,
                  access_token: str,
-                 metadata: Optional[List[Tuple[str, str]]] = None,
+                 http_headers: Optional[List[Tuple[str, str]]] = None,
                  session_configuration: Dict[str, Any] = None,
                  catalog: Optional[str] = None,
                  schema: Optional[str] = None,
@@ -37,7 +37,7 @@ def __init__(self,
         :param http_path: Http path either to a DBSQL endpoint (e.g. /sql/1.0/endpoints/1234567890abcdef)
           or to a DBR interactive cluster (e.g. /sql/protocolv1/o/1234567890123456/1234-123456-slid123)
         :param access_token: Http Bearer access token, e.g. Databricks Personal Access Token.
-        :param http_headers: An optional list of (k, v) pairs that will be set as Http headers on every request
+        :param http_headers: An optional list of (k, v) pairs that will be set as Http headers on every request
         :param session_configuration: An optional dictionary of Spark session parameters. Defaults to None.
           Execute the SQL command `SET -v` to get a full list of available commands.
         :param catalog: An optional initial catalog to use. Requires DBR version 9.0+
@@ -105,7 +105,7 @@ def __init__(self,

         base_headers = [("User-Agent", useragent_header)] + authorization_header
         self.thrift_backend = ThriftBackend(self.host, self.port, http_path,
-                                             (metadata or []) + base_headers, **kwargs)
+                                             (http_headers or []) + base_headers, **kwargs)

         self._session_handle = self.thrift_backend.open_session(session_configuration, catalog,
                                                                  schema)
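
For context, a minimal usage sketch of the renamed parameter (illustrative, not part of this diff): it assumes the package exposes a top-level `databricks.sql.connect()` helper that forwards keyword arguments to `Connection`, and all connection values below are placeholders.

```python
# Sketch only: assumes databricks.sql.connect() forwards kwargs to Connection.
# Hostname, HTTP path, and token are placeholder values.
from databricks import sql

connection = sql.connect(
    server_hostname="example.cloud.databricks.com",   # placeholder hostname
    http_path="/sql/1.0/endpoints/1234567890abcdef",  # placeholder DBSQL endpoint
    access_token="dapi-placeholder-token",            # placeholder personal access token
    # Renamed parameter: (key, value) pairs set as HTTP headers on every
    # request sent by the Thrift backend.
    http_headers=[("X-Custom-Header", "custom-value")],
)

cursor = connection.cursor()
cursor.execute("SELECT 1")
print(cursor.fetchall())
cursor.close()
connection.close()
```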