
Commit 29b94d1

Brannon Imamura authored: Update redshift.py (#1319)
1 parent 7a3ae7b commit 29b94d1

File tree: 1 file changed, +6 −0 lines

awswrangler/redshift.py

Lines changed: 6 additions & 0 deletions
@@ -1458,6 +1458,7 @@ def copy(  # pylint: disable=too-many-arguments
     boto3_session: Optional[boto3.Session] = None,
     s3_additional_kwargs: Optional[Dict[str, str]] = None,
     max_rows_by_file: Optional[int] = 10_000_000,
+    precombine_key: Optional[str] = None,
 ) -> None:
     """Load Pandas DataFrame as a Table on Amazon Redshift using parquet files on S3 as stage.
 
@@ -1556,6 +1557,10 @@ def copy(  # pylint: disable=too-many-arguments
         Max number of rows in each file.
         Default is None i.e. dont split the files.
         (e.g. 33554432, 268435456)
+    precombine_key : str, optional
+        When there is a primary_key match during upsert, this column will change the upsert method,
+        comparing the values of the specified column from source and target, and keeping the
+        larger of the two. Will only work when mode = upsert.
 
     Returns
     -------
@@ -1623,6 +1628,7 @@ def copy(  # pylint: disable=too-many-arguments
             boto3_session=session,
             s3_additional_kwargs=s3_additional_kwargs,
             sql_copy_extra_params=sql_copy_extra_params,
+            precombine_key=precombine_key,
         )
     finally:
         if keep_files is False:
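
For illustration, a minimal sketch of how the new argument could be used once this change is released. Only precombine_key and its upsert behaviour come from the diff above; the other arguments (df, path, con, table, schema, mode, primary_keys), the connection name, and the S3 path follow the usual awswrangler redshift API and are assumptions for this example, not part of the commit.

import pandas as pd
import awswrangler as wr

# Hypothetical input data; "updated_at" is the column used as the precombine key.
df = pd.DataFrame(
    {
        "id": [1, 2],
        "value": ["a", "b"],
        "updated_at": pd.to_datetime(["2022-01-01", "2022-01-02"]),
    }
)

# Assumed Glue Catalog connection name; any valid Redshift connection works here.
con = wr.redshift.connect("my-redshift-connection")
try:
    wr.redshift.copy(
        df=df,
        path="s3://my-bucket/staging/",   # hypothetical S3 staging prefix
        con=con,
        table="my_table",
        schema="public",
        mode="upsert",                    # precombine_key only takes effect with mode="upsert"
        primary_keys=["id"],
        precombine_key="updated_at",      # on a primary-key match, the row with the larger value wins
    )
finally:
    con.close()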
