Commit f751c01

deprecate: boto3 resources (#2097)
* Deprecate boto3 resources
* Types
* Un-deprecate create_athena_bucket
* Revert DynamoDB changes
1 parent fb1d228 commit f751c01

3 files changed: +7 / -11 lines


awswrangler/athena/_utils.py

Lines changed: 4 additions & 5 deletions
@@ -354,17 +354,16 @@ def create_athena_bucket(boto3_session: Optional[boto3.Session] = None) -> str:
     region_name: str = _utils.get_region_from_session(boto3_session=boto3_session).lower()
     bucket_name = f"aws-athena-query-results-{account_id}-{region_name}"
     path = f"s3://{bucket_name}/"
-    resource = _utils.resource(service_name="s3", session=boto3_session)
-    bucket = resource.Bucket(bucket_name)
+    client_s3 = _utils.client(service_name="s3", session=boto3_session)
     args = {} if region_name == "us-east-1" else {"CreateBucketConfiguration": {"LocationConstraint": region_name}}
     try:
-        bucket.create(**args)
-    except resource.meta.client.exceptions.BucketAlreadyOwnedByYou as err:
+        client_s3.create_bucket(Bucket=bucket_name, **args)
+    except (client_s3.exceptions.BucketAlreadyExists, client_s3.exceptions.BucketAlreadyOwnedByYou) as err:
         _logger.debug("Bucket %s already exists.", err.response["Error"]["BucketName"])
     except botocore.exceptions.ClientError as err:
         if err.response["Error"]["Code"] == "OperationAborted":
             _logger.debug("A conflicting conditional operation is currently in progress against this resource.")
-    bucket.wait_until_exists()
+    client_s3.get_waiter("bucket_exists").wait(Bucket=bucket_name)
     return path
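For reference, the client-based calls introduced here can be exercised on their own with plain boto3. The following is a minimal sketch of the create-bucket-and-wait pattern; the bucket name and region are placeholders, not values taken from this commit.

import boto3
import botocore.exceptions

# Placeholder values -- not taken from the commit.
bucket_name = "example-athena-query-results"
region_name = "eu-west-1"

client_s3 = boto3.client("s3", region_name=region_name)
# us-east-1 rejects an explicit LocationConstraint, hence the conditional args.
args = {} if region_name == "us-east-1" else {"CreateBucketConfiguration": {"LocationConstraint": region_name}}
try:
    client_s3.create_bucket(Bucket=bucket_name, **args)
except (client_s3.exceptions.BucketAlreadyExists, client_s3.exceptions.BucketAlreadyOwnedByYou):
    pass  # bucket already exists
except botocore.exceptions.ClientError as err:
    if err.response["Error"]["Code"] != "OperationAborted":
        raise
# Block until the bucket is visible, mirroring the resource-based wait_until_exists().
client_s3.get_waiter("bucket_exists").wait(Bucket=bucket_name)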

awswrangler/redshift.py

Lines changed: 2 additions & 3 deletions
@@ -80,10 +80,9 @@ def _does_table_exist(cursor: redshift_connector.Cursor, schema: Optional[str],


 def _get_paths_from_manifest(path: str, boto3_session: Optional[boto3.Session] = None) -> List[str]:
-    resource_s3: boto3.resource = _utils.resource(service_name="s3", session=boto3_session)
+    client_s3 = _utils.client(service_name="s3", session=boto3_session)
     bucket, key = _utils.parse_path(path)
-    content_object = resource_s3.Object(bucket, key)
-    manifest_content = json.loads(content_object.get()["Body"].read().decode("utf-8"))
+    manifest_content = json.loads(client_s3.get_object(Bucket=bucket, Key=key)["Body"].read().decode("utf-8"))
     return [path["url"] for path in manifest_content["entries"]]
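The same get_object pattern works outside awswrangler as well. A minimal sketch follows; the bucket and key are hypothetical and the manifest format (a JSON object with an "entries" list of {"url": ...} items) matches what the function above expects.

import json
import boto3

client_s3 = boto3.client("s3")

# Hypothetical manifest location -- not part of this commit.
bucket, key = "example-bucket", "unload/manifest.json"

# get_object returns a streaming body; read and decode it before parsing as JSON.
body = client_s3.get_object(Bucket=bucket, Key=key)["Body"].read().decode("utf-8")
manifest = json.loads(body)
urls = [entry["url"] for entry in manifest["entries"]]
print(urls)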

awswrangler/s3/_copy.py

Lines changed: 1 addition & 3 deletions
@@ -22,7 +22,6 @@ def _copy_objects(
 ) -> None:
     _logger.debug("len(batch): %s", len(batch))
     client_s3: boto3.client = _utils.client(service_name="s3", session=boto3_session)
-    resource_s3: boto3.resource = _utils.resource(service_name="s3", session=boto3_session)
     if s3_additional_kwargs is None:
         boto3_kwargs: Optional[Dict[str, Any]] = None
     else:
@@ -31,11 +30,10 @@ def _copy_objects(
         source_bucket, source_key = _utils.parse_path(path=source)
         copy_source: Dict[str, str] = {"Bucket": source_bucket, "Key": source_key}
         target_bucket, target_key = _utils.parse_path(path=target)
-        resource_s3.meta.client.copy(
+        client_s3.copy(
             CopySource=copy_source,
             Bucket=target_bucket,
             Key=target_key,
-            SourceClient=client_s3,
             ExtraArgs=boto3_kwargs,
             Config=TransferConfig(num_download_attempts=10, use_threads=use_threads),
         )
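The managed copy used above is also available directly on a bare client, so no resource object or SourceClient argument is needed. A minimal sketch with placeholder bucket and key names, not taken from this commit:

import boto3
from boto3.s3.transfer import TransferConfig

client_s3 = boto3.client("s3")

# Placeholder source and target -- not values from the commit.
copy_source = {"Bucket": "source-bucket", "Key": "data/part-0000.parquet"}

# client.copy() performs a managed, multipart-aware copy under the hood.
client_s3.copy(
    CopySource=copy_source,
    Bucket="target-bucket",
    Key="data/part-0000.parquet",
    Config=TransferConfig(num_download_attempts=10, use_threads=True),
)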
