feat: Added testcommon.etl function to write 'when' files to storage (#84)

Co-authored-by: Jakob Stricker Nielsen <[email protected]>
Co-authored-by: Henrik Tornbjerg Carøe <[email protected]>

1 parent cf9e538, commit 1d72b1e
Showing 5 changed files with 67 additions and 11 deletions.
@@ -0,0 +1,39 @@
import os

from pyspark.sql import types as T
from pyspark.sql import SparkSession

from testcommon.dataframes import read_csv


def write_when_files_to_delta(
    spark: SparkSession,
    scenario_path: str,
    files: list[tuple[str, T.StructType]],
) -> None:
    """
    Writes a list of CSV files to Delta tables, using the filenames (without the file extension) as table names.
    If a Delta table does not exist, the function creates it; otherwise the existing table's content is overwritten.

    Args:
        spark (SparkSession): The Spark session.
        scenario_path (str): The path to the scenario folder; files are read from its "when" subfolder.
        files (list[tuple[str, T.StructType]]): A list of tuples containing filenames and their corresponding schemas.
    """
    for file_name, schema in files:
        file_path = f"{scenario_path}/when/{file_name}"
        # Skip 'when' files that are not present in this scenario.
        if not os.path.exists(file_path):
            continue
        df = read_csv(
            spark,
            file_path,
            schema,
        )

        # Overwrite destination table with DataFrame
        try:
            df.write.mode("overwrite").saveAsTable(file_name.removesuffix(".csv"))
        except Exception as e:
            print(f"Error executing overwrite on table {file_name}: {str(e)}")