add module for parking_cycle_hangar_allocation_wait (#1511)
barnesm707 authored Nov 22, 2023
1 parent b5b65e6 commit 6b98556
Showing 1 changed file with 23 additions and 0 deletions.
terraform/etl/38-aws-glue-job-parking.tf

@@ -1349,3 +1349,26 @@ module "parking_correspondence_performance_records_with_pcn_downtime_gds" {
"--conf" = "spark.sql.legacy.timeParserPolicy=LEGACY --conf spark.sql.legacy.parquet.int96RebaseModeInRead=LEGACY --conf spark.sql.legacy.parquet.int96RebaseModeInWrite=LEGACY --conf spark.sql.legacy.parquet.datetimeRebaseModeInRead=LEGACY --conf spark.sql.legacy.parquet.datetimeRebaseModeInWrite=LEGACY"
}
}
# MRB 22-11-2023 Job created
module "parking_cycle_hangar_allocation_wait" {
source = "../modules/aws-glue-job"
is_live_environment = local.is_live_environment
is_production_environment = local.is_production_environment
department = module.department_parking_data_source
job_name = "${local.short_identifier_prefix}parking_cycle_hangar_allocation_wait"
helper_module_key = data.aws_s3_object.helpers.key
pydeequ_zip_key = data.aws_s3_object.pydeequ.key
spark_ui_output_storage_id = module.spark_ui_output_storage_data_source.bucket_id
script_name = "parking_cycle_hangar_allocation_wait"
triggered_by_job = "${local.short_identifier_prefix}Copy parking Liberator landing zone to raw"
job_description = "Identify how many spaces that have been used/free in Cycle Hangars and indicate numbers on the waiting list, etc."
workflow_name = "${local.short_identifier_prefix}parking-liberator-data-workflow"
trigger_enabled = local.is_production_environment
number_of_workers_for_glue_job = 10
glue_job_worker_type = "G.1X"
glue_version = "4.0"
job_parameters = {
"--job-bookmark-option" = "job-bookmark-disable"
"--environment" = var.environment
}
}
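
For context, the sketch below shows one common way a module such as `../modules/aws-glue-job` can turn the `triggered_by_job` and `trigger_enabled` inputs into a conditional Glue trigger. The module's internals are not part of this diff, so the resource layout, variable names, and job resource name here are assumptions rather than the repository's actual implementation.

# Hypothetical sketch only: ../modules/aws-glue-job is not shown in this commit.
# Illustrates wiring triggered_by_job / trigger_enabled to an aws_glue_trigger.
resource "aws_glue_trigger" "conditional" {
  count         = var.triggered_by_job != null ? 1 : 0
  name          = "${var.job_name}-trigger" # assumed naming scheme
  type          = "CONDITIONAL"
  workflow_name = var.workflow_name
  enabled       = var.trigger_enabled # e.g. only fire automatically in production

  predicate {
    conditions {
      job_name = var.triggered_by_job # upstream job named in the module call
      state    = "SUCCEEDED"
    }
  }

  actions {
    job_name = aws_glue_job.this.name # the Glue job created by the module (assumed resource name)
  }
}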
