Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

2469 add columns comments to iceberg #2479

Closed
wants to merge 4 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 12 additions & 1 deletion awswrangler/athena/_write_iceberg.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,12 +35,18 @@ def _create_iceberg_table(
kms_key: Optional[str] = None,
boto3_session: Optional[boto3.Session] = None,
dtype: Optional[Dict[str, str]] = None,
columns_comments: Optional[Dict[str, str]] = None,
) -> None:
if not path:
raise exceptions.InvalidArgumentValue("Must specify table location to create the table.")

columns_types, _ = catalog.extract_athena_types(df=df, index=index, dtype=dtype)
cols_str: str = ", ".join([f"{k} {v}" for k, v in columns_types.items()])
cols_str: str = ", ".join(
[
f"{k} {v}" if columns_comments.get(k) is None else f"{k} {v} COMMENT \'{columns_comments.get(k)}\'"
for k, v in columns_types.items()
]
)
partition_cols_str: str = f"PARTITIONED BY ({', '.join([col for col in partition_cols])})" if partition_cols else ""
table_properties_str: str = (
", " + ", ".join([f"'{key}'='{value}'" for key, value in additional_table_properties.items()])
Expand Down Expand Up @@ -196,6 +202,7 @@ def to_iceberg(
dtype: Optional[Dict[str, str]] = None,
catalog_id: Optional[str] = None,
schema_evolution: bool = False,
columns_comments: Optional[Dict[str, str]] = None,
) -> None:
"""
Insert into Athena Iceberg table using INSERT INTO ... SELECT. Will create Iceberg table if it does not exist.
Expand Down Expand Up @@ -252,6 +259,8 @@ def to_iceberg(
If none is provided, the AWS account ID is used by default
schema_evolution: bool
If True allows schema evolution for new columns or changes in column types.
columns_comments: Optional[Dict[str, str]]
Glue/Athena catalog: Column names and their related comments (e.g. {'col0': 'Column 0.', 'col1': 'Column 1.', 'col2': 'Partition.'})

Returns
-------
Expand Down Expand Up @@ -314,6 +323,7 @@ def to_iceberg(
kms_key=kms_key,
boto3_session=boto3_session,
dtype=dtype,
columns_comments=columns_comments,
)
else:
schema_differences = _determine_differences(
Expand Down Expand Up @@ -352,6 +362,7 @@ def to_iceberg(
s3_additional_kwargs=s3_additional_kwargs,
dtype=dtype,
catalog_id=catalog_id,
glue_table_settings={"columns_comments": columns_comments}
)

# Insert into iceberg table
Expand Down
25 changes: 14 additions & 11 deletions awswrangler/data_api/redshift.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,8 +93,12 @@ def _validate_redshift_target(self) -> None:
raise ValueError("Either `cluster_id` or `workgroup_name`(Redshift Serverless) must be set for connection")

def _validate_auth_method(self) -> None:
if not self.workgroup_name and not self.secret_arn and not self.db_user:
raise ValueError("Either `secret_arn` or `db_user` must be set for authentication")
if not self.workgroup_name and not self.secret_arn and not self.db_user and not self.cluster_id:
raise exceptions.InvalidArgumentCombination(
"Either `secret_arn`, `workgroup_name`, `db_user`, or `cluster_id` must be set for authentication."
)
if self.db_user and self.secret_arn:
raise exceptions.InvalidArgumentCombination("Only one of `secret_arn` or `db_user` is allowed.")

def _execute_statement(
self,
Expand All @@ -110,26 +114,25 @@ def _execute_statement(

self._validate_redshift_target()
self._validate_auth_method()
credentials = {}
args = {}
if self.secret_arn:
credentials = {"SecretArn": self.secret_arn}
elif self.db_user:
credentials = {"DbUser": self.db_user}
args["SecretArn"] = self.secret_arn
if self.db_user:
args["DbUser"] = self.db_user

if database is None:
database = self.database

if self.cluster_id:
redshift_target = {"ClusterIdentifier": self.cluster_id}
elif self.workgroup_name:
redshift_target = {"WorkgroupName": self.workgroup_name}
args["ClusterIdentifier"] = self.cluster_id
if self.workgroup_name:
args["WorkgroupName"] = self.workgroup_name

_logger.debug("Executing %s", sql)
response = self.client.execute_statement(
**redshift_target, # type: ignore[arg-type]
Database=database,
Sql=sql,
**credentials, # type: ignore[arg-type]
**args, # type: ignore[arg-type]
)
return response["Id"]

Expand Down
12 changes: 12 additions & 0 deletions docs/source/layers.rst
Original file line number Diff line number Diff line change
Expand Up @@ -408,3 +408,15 @@ Version 3.4.0
+----------------+--------+-------+-----------------------------------------------------------------------------------+
| me-south-1 | 3.9 | arm64 | arn:aws:lambda:me-south-1:938046470361:layer:AWSSDKPandas-Python39-Arm64:1 |
+----------------+--------+-------+-----------------------------------------------------------------------------------+
| cn-north-1 | 3.10 | x86_64| arn:aws-cn:lambda:cn-north-1:406640652441:layer:AWSSDKPandas-Python310:1 |
+----------------+--------+-------+-----------------------------------------------------------------------------------+
| cn-north-1 | 3.8 | x86_64| arn:aws-cn:lambda:cn-north-1:406640652441:layer:AWSSDKPandas-Python38:1 |
+----------------+--------+-------+-----------------------------------------------------------------------------------+
| cn-north-1 | 3.9 | x86_64| arn:aws-cn:lambda:cn-north-1:406640652441:layer:AWSSDKPandas-Python39:1 |
+----------------+--------+-------+-----------------------------------------------------------------------------------+
| cn-northwest-1 | 3.10 | x86_64| arn:aws-cn:lambda:cn-northwest-1:406640652441:layer:AWSSDKPandas-Python310:1 |
+----------------+--------+-------+-----------------------------------------------------------------------------------+
| cn-northwest-1 | 3.8 | x86_64| arn:aws-cn:lambda:cn-northwest-1:406640652441:layer:AWSSDKPandas-Python38:1 |
+----------------+--------+-------+-----------------------------------------------------------------------------------+
| cn-northwest-1 | 3.9 | x86_64| arn:aws-cn:lambda:cn-northwest-1:406640652441:layer:AWSSDKPandas-Python39:1 |
+----------------+--------+-------+-----------------------------------------------------------------------------------+
17 changes: 17 additions & 0 deletions tests/unit/test_data_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,23 @@ def test_connect_redshift_serverless_iam_role(databases_parameters: Dict[str, An
assert df.shape == (1, 1)


def test_connect_redshift_cluster_iam_role(databases_parameters: Dict[str, Any]) -> None:
    """Connect to a provisioned Redshift cluster via the Data API using IAM-role auth and run a trivial query."""
    redshift_params = databases_parameters["redshift"]
    con = wr.data_api.redshift.connect(
        cluster_id=redshift_params["identifier"],
        database=redshift_params["database"],
        boto3_session=None,
    )
    result = wr.data_api.redshift.read_sql_query("SELECT 1", con=con)
    # A single scalar result: one row, one column.
    assert result.shape == (1, 1)


def test_connect_redshift_cluster_db_user(databases_parameters: Dict[str, Any]) -> None:
    """Connect to a provisioned Redshift cluster via the Data API as an explicit database user and run a trivial query."""
    redshift_params = databases_parameters["redshift"]
    con = wr.data_api.redshift.connect(
        cluster_id=redshift_params["identifier"],
        database=redshift_params["database"],
        db_user=databases_parameters["user"],
        boto3_session=None,
    )
    result = wr.data_api.redshift.read_sql_query("SELECT 1", con=con)
    # A single scalar result: one row, one column.
    assert result.shape == (1, 1)


def test_connect_redshift_serverless_secrets_manager(databases_parameters: Dict[str, Any]) -> None:
workgroup_name = databases_parameters["redshift_serverless"]["workgroup"]
database = databases_parameters["redshift_serverless"]["database"]
Expand Down
2 changes: 1 addition & 1 deletion tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ passenv =
AWS_SECRET_ACCESS_KEY
AWS_SESSION_TOKEN
setenv =
COV_FAIL_UNDER = 82.00
COV_FAIL_UNDER = 87.00
allowlist_externals = poetry
commands_pre =
poetry install --no-root --sync --extras "deltalake gremlin mysql opencypher opensearch oracle postgres redshift sparql sqlserver geopandas"
Expand Down