From c034b3184a05d1dd9bd4d8d6f2b2e9336ce94941 Mon Sep 17 00:00:00 2001
From: Dominik Hoffmann
Date: Sat, 9 Nov 2024 16:01:49 +0100
Subject: [PATCH] #22: Fixed uncaught exception

Signed-off-by: Dominik Hoffmann
---
 .../data_wranglers/spark/data_quality/interval_filtering.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/src/sdk/python/rtdip_sdk/pipelines/data_wranglers/spark/data_quality/interval_filtering.py b/src/sdk/python/rtdip_sdk/pipelines/data_wranglers/spark/data_quality/interval_filtering.py
index a480a4753..18caf3eb1 100644
--- a/src/sdk/python/rtdip_sdk/pipelines/data_wranglers/spark/data_quality/interval_filtering.py
+++ b/src/sdk/python/rtdip_sdk/pipelines/data_wranglers/spark/data_quality/interval_filtering.py
@@ -87,7 +87,10 @@ def get_time_delta(self) -> timedelta:
             raise ValueError("interval_unit must be either 'seconds' or 'milliseconds'")
 
     def format_date_time_to_string(self, time_stamp: pd.Timestamp) -> str:
-        return time_stamp.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]
+        try:
+            return time_stamp.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]
+        except Exception as e:
+            raise ValueError(f"Error converting timestamp to string: {e}") from e
 
     def filter(self) -> DataFrame:
         """