Commit 9d5bfd4

Yannik Sacherer authored and committed
Added S3 region for COPY INTO job, related to issue dlt-hub#2349
1 parent 3190722 commit 9d5bfd4

File tree

1 file changed: +6 −7 lines

dlt/destinations/impl/redshift/redshift.py

@@ -65,28 +65,30 @@ def __init__(
         file_path: str,
         staging_credentials: Optional[CredentialsConfiguration] = None,
         staging_iam_role: str = None,
+        s3_region: str = "us-east-1",  # Add region as a parameter
     ) -> None:
         super().__init__(file_path, staging_credentials)
         self._staging_iam_role = staging_iam_role
+        self._s3_region = s3_region  # Store region
         self._job_client: "RedshiftClient" = None
 
     def run(self) -> None:
         self._sql_client = self._job_client.sql_client
-        # we assume s3 credentials where provided for the staging
+        # Assume S3 credentials were provided for the staging
         credentials = ""
         if self._staging_iam_role:
-            credentials = f"IAM_ROLE '{self._staging_iam_role}'"
+            credentials = f"IAM_ROLE '{self._staging_iam_role}' REGION '{self._s3_region}'"
         elif self._staging_credentials and isinstance(
             self._staging_credentials, AwsCredentialsWithoutDefaults
         ):
             aws_access_key = self._staging_credentials.aws_access_key_id
             aws_secret_key = self._staging_credentials.aws_secret_access_key
             credentials = (
                 "CREDENTIALS"
-                f" 'aws_access_key_id={aws_access_key};aws_secret_access_key={aws_secret_key}'"
+                f" 'aws_access_key_id={aws_access_key};aws_secret_access_key={aws_secret_key};region={self._s3_region}'"
             )
 
-        # get format
+        # Get format
         ext = os.path.splitext(self._bucket_path)[1][1:]
         file_type = ""
         dateformat = ""
@@ -97,15 +99,12 @@ def run(self) -> None:
             compression = "" if is_compression_disabled() else "GZIP"
         elif ext == "parquet":
             file_type = "PARQUET"
-            # if table contains json types then SUPER field will be used.
-            # https://docs.aws.amazon.com/redshift/latest/dg/ingest-super.html
             if table_schema_has_type(self._load_table, "json"):
                 file_type += " SERIALIZETOJSON"
         else:
             raise ValueError(f"Unsupported file type {ext} for Redshift.")
 
         with self._sql_client.begin_transaction():
-            # TODO: if we ever support csv here remember to add column names to COPY
             self._sql_client.execute_sql(f"""
                 COPY {self._sql_client.make_qualified_table_name(self.load_table_name)}
                 FROM '{self._bucket_path}'
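
For context, a minimal sketch (not part of the commit) of the COPY statement run() would now assemble on the IAM-role branch; the role ARN, region, table, and bucket path below are hypothetical placeholders:

# Hypothetical values for illustration only; none of these come from the commit.
staging_iam_role = "arn:aws:iam::123456789012:role/redshift-copy"
s3_region = "eu-central-1"  # the patch defaults this parameter to "us-east-1"

# Mirrors the patched f-string: REGION is appended after IAM_ROLE.
credentials = f"IAM_ROLE '{staging_iam_role}' REGION '{s3_region}'"
copy_sql = f"""
    COPY my_schema.my_table
    FROM 's3://my-bucket/staging/data.parquet'
    {credentials}
    FORMAT AS PARQUET
"""
print(copy_sql)

Redshift's COPY otherwise assumes the source bucket is in the cluster's own region, so an explicit REGION clause is what enables cross-region loads; the patch threads the same region value through both the IAM-role and key-based credential branches.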
