From 9b3de30a9c86931c3ce341e06880cd927fa78e5b Mon Sep 17 00:00:00 2001 From: SJiB Date: Mon, 4 Nov 2024 17:36:19 +0100 Subject: [PATCH 001/127] correct check_fk_in_subsetid sia405 --- qgepqwat2ili/qgepsia405/export.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 251e3128..72f66a2d 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -147,11 +147,14 @@ def check_fk_in_subsetid(subset, relation): else: if filtered: logger.warning( - f"check_fk_in_subsetid - '{fremdschluesselstr}' is not in subset - replaced with None instead!" + f"check_fk_in_subsetid - '{fremdschluesselstr}' is not in filtered subset - replaced with None instead!" ) return None else: - return tid_maker.tid_for_row(relation) + logger.warning( + f"check_fk_in_subsetid - '{fremdschluesselstr}' is not in datamodel - replaced with None instead!" + ) + return None def create_metaattributes(row): metaattribute = ABWASSER.metaattribute( From 99970bd758a399146416fb3b722a41194c2adb60 Mon Sep 17 00:00:00 2001 From: SJiB Date: Mon, 4 Nov 2024 17:37:54 +0100 Subject: [PATCH 002/127] same for dss export --- qgepqwat2ili/qgepdss/export.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index c91eb843..20b72188 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -154,11 +154,14 @@ def check_fk_in_subsetid(subset, relation): else: if filtered: logger.warning( - f"check_fk_in_subsetid - '{fremdschluesselstr}' is not in subset - replaced with None instead!" + f"check_fk_in_subsetid - '{fremdschluesselstr}' is not in filtered subset - replaced with None instead!" ) return None else: - return tid_maker.tid_for_row(relation) + logger.warning( + f"check_fk_in_subsetid - '{fremdschluesselstr}' is not in datamodel - replaced with None instead!" + ) + return None def create_metaattributes(row): metaattribute = ABWASSER.metaattribute( From 4da9af5a7401e3e1591c2a4d2be8ee0a7b3ab1af Mon Sep 17 00:00:00 2001 From: SJiB Date: Mon, 4 Nov 2024 17:38:51 +0100 Subject: [PATCH 003/127] same in kek export --- qgepqwat2ili/qgep/export.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/qgep/export.py b/qgepqwat2ili/qgep/export.py index c7909e7b..fe98d2ff 100644 --- a/qgepqwat2ili/qgep/export.py +++ b/qgepqwat2ili/qgep/export.py @@ -146,11 +146,14 @@ def check_fk_in_subsetid(subset, relation): else: if filtered: logger.warning( - f"check_fk_in_subsetid - '{fremdschluesselstr}' is not in subset - replaced with None instead!" + f"check_fk_in_subsetid - '{fremdschluesselstr}' is not in filtered subset - replaced with None instead!" ) return None else: - return tid_maker.tid_for_row(relation) + logger.warning( + f"check_fk_in_subsetid - '{fremdschluesselstr}' is not in datamodel - replaced with None instead!" 
+ ) + return None def create_metaattributes(row): metaattribute = ABWASSER.metaattribute( From 14a441c4325cc6de2c494efc82417a03613a39fc Mon Sep 17 00:00:00 2001 From: SJiB Date: Wed, 6 Nov 2024 17:37:30 +0100 Subject: [PATCH 004/127] new function get_cl_re_ids, get_ws_wn_ids, remove_from_selection and add_to_selection --- qgepqwat2ili/qgepsia405/export.py | 40 ++++++++++-- qgepqwat2ili/utils/ili2db.py | 102 +++++++++++++++++++++++++++++- 2 files changed, 134 insertions(+), 8 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 72f66a2d..c6b31572 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -8,7 +8,12 @@ from .. import utils # 4.10.2024 -from ..utils.ili2db import skip_wwtp_structure_ids +#from ..utils.ili2db import skip_wwtp_structure_ids +# 6.11.2024 replaced with +from ..utils.ili2db import get_cl_re_ids +from ..utils.ili2db import get_ws_wn_ids +from ..utils.ili2db import add_to_selection +from ..utils.ili2db import remove_from_selection from ..utils.various import logger from .model_abwasser import get_abwasser_model from .model_qgep import get_qgep_model @@ -35,19 +40,40 @@ def qgep_export(selection=None, labels_file=None, orientation=None): # backport from tww https://github.com/teksi/wastewater/blob/3acfba249866d299f8a22e249d9f1e475fe7b88d/plugin/teksi_wastewater/interlis/interlis_model_mapping/interlis_exporter_to_intermediate_schema.py#L83 abwasser_session.execute(text("SET CONSTRAINTS ALL DEFERRED;")) - # Filtering + + # 1. Filtering - check if selection filtered = selection is not None subset_ids = selection if selection is not None else [] - # get list of id's of class wwtp_structure (ARABauwerk) to be able to check if fk_wastewater_structure references to wwtp_structure + # 2. check if wwtp_structures exist - wastewater_structure_id_sia405abwasser_list = None - wastewater_structure_id_sia405abwasser_list = skip_wwtp_structure_ids() + wwt_structures_id_sia405abwasser_list = None + wwt_structures_id_sia405abwasser_list = get_ws_wn_ids('wwtp_structures') - logger.info( - f"wastewater_structure_id_sia405abwasser_list : {wastewater_structure_id_sia405abwasser_list}", + # 3. Show wwt_structures_id_sia405abwasser_list + logger.debug( + f"wwt_structures_id_sia405abwasser_list : {wwt_structures_id_sia405abwasser_list}", ) + # 4. 
check if filtered + if filtered then: + if wwt_structures_id_sia405abwasser_list then: + # take out wwt_structures_id_sia405abwasser_list from selection + subset_ids = remove_from_selection (subset_ids, get_ws_wn_ids('wwt_structures') + else: + # do nothing + else: + if wwt_structures_id_sia405abwasser_list then: + # add all data except wwt_structures to selection + subset_ids = add_to_selection (subset_ids, get_ws_wn_ids('wastewater_structure') + # take out wwt_structures_id_sia405abwasser_list from selection + subset_ids = remove_from_selection (subset_ids, wwt_structures_id_sia405abwasser_list) + # add reach_ids + subset_ids = add_to_selection (subset_ids, get_cl_re_ids('channel') + filtered = True + else: + # do nothing + # Orientation oriented = orientation is not None if oriented: diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index 804d4522..9ff089e6 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -448,7 +448,7 @@ def check_fk_provider_null(): return check_fk_provider_null -def skip_wwtp_structure_ids(): +def skip_wwtp_structure_ids_old(): """ Get list of id's of class wastewater_structure without wwtp_structure (ARABauwerk) """ @@ -489,6 +489,106 @@ def skip_wwtp_structure_ids(): return not_wwtp_structure_ids +def get_cl_re_ids(classname): + """ + Get list of id's of reaches of the channels provided + """ + + # define classes that this is allowed to use - adapt for TWW to include model changes + if classname IN ('channel') + logger.info(f"get list of id's of wastewater_nodes of {classname} ...") + + connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) + connection.set_session(autocommit=True) + cursor = connection.cursor() + + cl_re_ids = [] + + # select all obj_id of the wastewater_nodes of wwtp_structure + cursor.execute( + "SELECT wn.obj_id FROM qgep_od.channel LEFT JOIN qgep_od.wastewater_networkelement wn ON wn.fk_wastewater_structure = channel.obj_id;" + ) + + # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ + # cl_re_ids_count = int(cursor.fetchone()[0]) + # if cl_re_ids_count == 0: + if cursor.fetchone() is None: + cl_re_ids = None + else: + records = cursor.fetchall() + for row in records: + logger.debug(f" row[0] = {row[0]}") + # https://www.pythontutorial.net/python-string-methods/python-string-concatenation/ + strrow = str(row[0]) + cl_re_ids.append(strrow) + logger.debug(f" building up '{cl_re_ids}' ...") + + return cl_re_ids + else + logger.warning(f"Do not use this function with {classname} !") + return None + +def get_ws_wn_ids(classname): + """ + Get list of id's of wastewater_nodes of the wastewater_structure (sub)class provided, eg. 
wwtp_structure (ARABauwerk, does not work for channel + """ + + # define classes that this is allowed to use - adapt for TWW to include model changes + if classname IN ('discharge_point', 'manhole', 'infiltration_installation', 'wastewater_structure') + logger.info(f"get list of id's of wastewater_nodes of {classname} ..."): + connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) + connection.set_session(autocommit=True) + cursor = connection.cursor() + + ws_wn_ids = [] + + # select all obj_id of the wastewater_nodes of wwtp_structure + cursor.execute( + "SELECT wn.obj_id FROM qgep_od.{classname} LEFT JOIN qgep_od.wastewater_networkelement wn ON wn.fk_wastewater_structure = {classname}.obj_id;" + ) + + # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ + # ws_wn_ids_count = int(cursor.fetchone()[0]) + # if ws_wn_ids_count == 0: + if cursor.fetchone() is None: + ws_wn_ids = None + else: + records = cursor.fetchall() + for row in records: + logger.debug(f" row[0] = {row[0]}") + # https://www.pythontutorial.net/python-string-methods/python-string-concatenation/ + strrow = str(row[0]) + ws_wn_ids.append(strrow) + logger.debug(f" building up '{ws_wn_ids}' ...") + + return ws_wn_ids + else + logger.warning(f"Do not use this function with {classname} !") + return None + + +def remove_from_selection (selected_ids, remove_ids) + """ + Remove ids from selected_ids + """ + + for list_item in remove_ids: + selected_ids = selected_ids.append(list_item) + + return selected_ids + + +def add_to_selection (selected_ids, add_ids) + """ + Remove ids from selected_ids + """ + + for list_item in add_ids: + selected_ids = selected_ids.add(list_item) + + return selected_ids + + def create_ili_schema(schema, model, log_path, recreate_schema=False): """ Create schema for INTERLIS import From 4f9bf9822aaddb4f5892e4a4043b1307522544dd Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 6 Nov 2024 17:33:36 +0000 Subject: [PATCH 005/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepsia405/export.py | 10 ++++++---- qgepqwat2ili/utils/ili2db.py | 4 ++-- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index c6b31572..df6f0322 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -10,10 +10,12 @@ # 4.10.2024 #from ..utils.ili2db import skip_wwtp_structure_ids # 6.11.2024 replaced with -from ..utils.ili2db import get_cl_re_ids -from ..utils.ili2db import get_ws_wn_ids -from ..utils.ili2db import add_to_selection -from ..utils.ili2db import remove_from_selection +from ..utils.ili2db import ( + add_to_selection, + get_cl_re_ids, + get_ws_wn_ids, + remove_from_selection, +) from ..utils.various import logger from .model_abwasser import get_abwasser_model from .model_qgep import get_qgep_model diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index 9ff089e6..e225a2e7 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -497,7 +497,7 @@ def get_cl_re_ids(classname): # define classes that this is allowed to use - adapt for TWW to include model changes if classname IN ('channel') logger.info(f"get list of id's of wastewater_nodes of {classname} ...") - + connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) connection.set_session(autocommit=True) 
cursor = connection.cursor() @@ -582,7 +582,7 @@ def add_to_selection (selected_ids, add_ids) """ Remove ids from selected_ids """ - + for list_item in add_ids: selected_ids = selected_ids.add(list_item) From 42747e331f4bd848629509878556981b4cea5688 Mon Sep 17 00:00:00 2001 From: SJiB Date: Wed, 6 Nov 2024 18:39:30 +0100 Subject: [PATCH 006/127] add : with if --- qgepqwat2ili/utils/ili2db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index e225a2e7..29fd0b51 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -495,7 +495,7 @@ def get_cl_re_ids(classname): """ # define classes that this is allowed to use - adapt for TWW to include model changes - if classname IN ('channel') + if classname IN ('channel'): logger.info(f"get list of id's of wastewater_nodes of {classname} ...") connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) From 9e0601cdd6ef614159bca36e919a7116350fece5 Mon Sep 17 00:00:00 2001 From: SJiB Date: Wed, 6 Nov 2024 18:42:20 +0100 Subject: [PATCH 007/127] formatting and debugging --- qgepqwat2ili/qgepsia405/export.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index df6f0322..8c25c70c 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -48,7 +48,6 @@ def qgep_export(selection=None, labels_file=None, orientation=None): subset_ids = selection if selection is not None else [] # 2. check if wwtp_structures exist - wwt_structures_id_sia405abwasser_list = None wwt_structures_id_sia405abwasser_list = get_ws_wn_ids('wwtp_structures') @@ -58,15 +57,15 @@ def qgep_export(selection=None, labels_file=None, orientation=None): ) # 4. check if filtered - if filtered then: + if filtered: if wwt_structures_id_sia405abwasser_list then: # take out wwt_structures_id_sia405abwasser_list from selection - subset_ids = remove_from_selection (subset_ids, get_ws_wn_ids('wwt_structures') + subset_ids = remove_from_selection (subset_ids, wwt_structures_id_sia405abwasser_list) else: # do nothing else: - if wwt_structures_id_sia405abwasser_list then: - # add all data except wwt_structures to selection + if wwt_structures_id_sia405abwasser_list then: + # add all data of wastewater_structures to selection subset_ids = add_to_selection (subset_ids, get_ws_wn_ids('wastewater_structure') # take out wwt_structures_id_sia405abwasser_list from selection subset_ids = remove_from_selection (subset_ids, wwt_structures_id_sia405abwasser_list) @@ -83,6 +82,7 @@ def qgep_export(selection=None, labels_file=None, orientation=None): else: labelorientation = 0 + def get_tid(relation): """ Makes a tid for a relation From 0c4a08ebe372327437f2a18d930ca329a766895e Mon Sep 17 00:00:00 2001 From: SJiB Date: Wed, 6 Nov 2024 18:44:44 +0100 Subject: [PATCH 008/127] take out then --- qgepqwat2ili/qgepsia405/export.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 8c25c70c..4fa2f212 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -58,19 +58,20 @@ def qgep_export(selection=None, labels_file=None, orientation=None): # 4. 
check if filtered if filtered: - if wwt_structures_id_sia405abwasser_list then: + if wwt_structures_id_sia405abwasser_list: # take out wwt_structures_id_sia405abwasser_list from selection subset_ids = remove_from_selection (subset_ids, wwt_structures_id_sia405abwasser_list) else: # do nothing else: - if wwt_structures_id_sia405abwasser_list then: + if wwt_structures_id_sia405abwasser_list: # add all data of wastewater_structures to selection subset_ids = add_to_selection (subset_ids, get_ws_wn_ids('wastewater_structure') # take out wwt_structures_id_sia405abwasser_list from selection subset_ids = remove_from_selection (subset_ids, wwt_structures_id_sia405abwasser_list) # add reach_ids subset_ids = add_to_selection (subset_ids, get_cl_re_ids('channel') + # treat export as with a selection filtered = True else: # do nothing From 3775cbcda808ad0762376a9868caa35fb9ad31e4 Mon Sep 17 00:00:00 2001 From: SJiB Date: Wed, 6 Nov 2024 18:47:14 +0100 Subject: [PATCH 009/127] corrections --- qgepqwat2ili/qgepsia405/export.py | 7 +++---- qgepqwat2ili/utils/ili2db.py | 2 +- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 4fa2f212..ef337be3 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -61,8 +61,7 @@ def qgep_export(selection=None, labels_file=None, orientation=None): if wwt_structures_id_sia405abwasser_list: # take out wwt_structures_id_sia405abwasser_list from selection subset_ids = remove_from_selection (subset_ids, wwt_structures_id_sia405abwasser_list) - else: - # do nothing + # else do nothing else: if wwt_structures_id_sia405abwasser_list: # add all data of wastewater_structures to selection @@ -73,8 +72,8 @@ def qgep_export(selection=None, labels_file=None, orientation=None): subset_ids = add_to_selection (subset_ids, get_cl_re_ids('channel') # treat export as with a selection filtered = True - else: - # do nothing + + # else do nothing # Orientation oriented = orientation is not None diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index 29fd0b51..b9e5977f 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -495,7 +495,7 @@ def get_cl_re_ids(classname): """ # define classes that this is allowed to use - adapt for TWW to include model changes - if classname IN ('channel'): + if classname == 'channel': logger.info(f"get list of id's of wastewater_nodes of {classname} ...") connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) From 58dc371dc4a5094af11f0f49446570ae07261f3c Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 6 Nov 2024 17:47:29 +0000 Subject: [PATCH 010/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepsia405/export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index ef337be3..d996bbed 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -72,7 +72,7 @@ def qgep_export(selection=None, labels_file=None, orientation=None): subset_ids = add_to_selection (subset_ids, get_cl_re_ids('channel') # treat export as with a selection filtered = True - + # else do nothing # Orientation From 6b79e036d5b1597bbfc10168733881a9b851e821 Mon Sep 17 00:00:00 2001 From: SJiB Date: Wed, 6 Nov 2024 18:49:55 +0100 Subject: [PATCH 011/127] add missing ) 
--- qgepqwat2ili/qgepsia405/export.py | 4 ++-- qgepqwat2ili/utils/ili2db.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index ef337be3..6ae1ce8b 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -65,11 +65,11 @@ def qgep_export(selection=None, labels_file=None, orientation=None): else: if wwt_structures_id_sia405abwasser_list: # add all data of wastewater_structures to selection - subset_ids = add_to_selection (subset_ids, get_ws_wn_ids('wastewater_structure') + subset_ids = add_to_selection (subset_ids, get_ws_wn_ids('wastewater_structure')) # take out wwt_structures_id_sia405abwasser_list from selection subset_ids = remove_from_selection (subset_ids, wwt_structures_id_sia405abwasser_list) # add reach_ids - subset_ids = add_to_selection (subset_ids, get_cl_re_ids('channel') + subset_ids = add_to_selection (subset_ids, get_cl_re_ids('channel')) # treat export as with a selection filtered = True diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index b9e5977f..390f72fa 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -524,7 +524,7 @@ def get_cl_re_ids(classname): logger.debug(f" building up '{cl_re_ids}' ...") return cl_re_ids - else + else: logger.warning(f"Do not use this function with {classname} !") return None From 739450c8fe22879546d9fb215e8ac36ac3685165 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 6 Nov 2024 17:50:35 +0000 Subject: [PATCH 012/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepsia405/export.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 2746fd5e..d11b5c66 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -8,7 +8,7 @@ from .. import utils # 4.10.2024 -#from ..utils.ili2db import skip_wwtp_structure_ids +# from ..utils.ili2db import skip_wwtp_structure_ids # 6.11.2024 replaced with from ..utils.ili2db import ( add_to_selection, @@ -42,14 +42,13 @@ def qgep_export(selection=None, labels_file=None, orientation=None): # backport from tww https://github.com/teksi/wastewater/blob/3acfba249866d299f8a22e249d9f1e475fe7b88d/plugin/teksi_wastewater/interlis/interlis_model_mapping/interlis_exporter_to_intermediate_schema.py#L83 abwasser_session.execute(text("SET CONSTRAINTS ALL DEFERRED;")) - # 1. Filtering - check if selection filtered = selection is not None subset_ids = selection if selection is not None else [] # 2. check if wwtp_structures exist wwt_structures_id_sia405abwasser_list = None - wwt_structures_id_sia405abwasser_list = get_ws_wn_ids('wwtp_structures') + wwt_structures_id_sia405abwasser_list = get_ws_wn_ids("wwtp_structures") # 3. 
Show wwt_structures_id_sia405abwasser_list logger.debug( @@ -60,16 +59,16 @@ def qgep_export(selection=None, labels_file=None, orientation=None): if filtered: if wwt_structures_id_sia405abwasser_list: # take out wwt_structures_id_sia405abwasser_list from selection - subset_ids = remove_from_selection (subset_ids, wwt_structures_id_sia405abwasser_list) + subset_ids = remove_from_selection(subset_ids, wwt_structures_id_sia405abwasser_list) # else do nothing else: if wwt_structures_id_sia405abwasser_list: # add all data of wastewater_structures to selection - subset_ids = add_to_selection (subset_ids, get_ws_wn_ids('wastewater_structure')) + subset_ids = add_to_selection(subset_ids, get_ws_wn_ids("wastewater_structure")) # take out wwt_structures_id_sia405abwasser_list from selection - subset_ids = remove_from_selection (subset_ids, wwt_structures_id_sia405abwasser_list) + subset_ids = remove_from_selection(subset_ids, wwt_structures_id_sia405abwasser_list) # add reach_ids - subset_ids = add_to_selection (subset_ids, get_cl_re_ids('channel')) + subset_ids = add_to_selection(subset_ids, get_cl_re_ids("channel")) # treat export as with a selection filtered = True @@ -82,7 +81,6 @@ def qgep_export(selection=None, labels_file=None, orientation=None): else: labelorientation = 0 - def get_tid(relation): """ Makes a tid for a relation From db8df128029f86c654b2095a93d3fc0249fcf04c Mon Sep 17 00:00:00 2001 From: SJiB Date: Wed, 6 Nov 2024 18:54:54 +0100 Subject: [PATCH 013/127] define array --- qgepqwat2ili/utils/ili2db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index 390f72fa..464e235b 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -534,7 +534,7 @@ def get_ws_wn_ids(classname): """ # define classes that this is allowed to use - adapt for TWW to include model changes - if classname IN ('discharge_point', 'manhole', 'infiltration_installation', 'wastewater_structure') + if classname IN ['discharge_point', 'manhole', 'infiltration_installation', 'wastewater_structure'] logger.info(f"get list of id's of wastewater_nodes of {classname} ..."): connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) connection.set_session(autocommit=True) From 2a2d9911fd3701d7c6370680b85c379a47b8488e Mon Sep 17 00:00:00 2001 From: SJiB Date: Wed, 6 Nov 2024 19:12:08 +0100 Subject: [PATCH 014/127] adapt sia405 export --- qgepqwat2ili/qgepsia405/export.py | 1 + 1 file changed, 1 insertion(+) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index d11b5c66..add3e195 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -252,6 +252,7 @@ def wastewater_networkelement_common(row): return { # "abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), + # 6.11.2024 Besides wn_id and re_id we also need ws_obj_ids in a seperate subset - call it ws_subset_id "abwasserbauwerkref": check_fk_in_subsetid( wastewater_structure_id_sia405abwasser_list, row.fk_wastewater_structure__REL ), From b270d096c6011243d866e892f2700597e6ac3e58 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 12 Nov 2024 13:54:19 +0100 Subject: [PATCH 015/127] correct functions --- qgepqwat2ili/utils/ili2db.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index 464e235b..20d5d718 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -573,14 +573,14 @@ def 
remove_from_selection (selected_ids, remove_ids) """ for list_item in remove_ids: - selected_ids = selected_ids.append(list_item) + selected_ids = selected_ids.remove(list_item) return selected_ids def add_to_selection (selected_ids, add_ids) """ - Remove ids from selected_ids + Add ids to selected_ids """ for list_item in add_ids: From 06a9955a9395ae1ba354e3c37ff1b559eaf1d043 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 12 Nov 2024 13:55:06 +0100 Subject: [PATCH 016/127] rename variable wwt_structure_id_sia405abwasser_list to ws_off_sia405abwasser_list --- qgepqwat2ili/qgepsia405/export.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index add3e195..68d16cf7 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -46,27 +46,27 @@ def qgep_export(selection=None, labels_file=None, orientation=None): filtered = selection is not None subset_ids = selection if selection is not None else [] - # 2. check if wwtp_structures exist - wwt_structures_id_sia405abwasser_list = None - wwt_structures_id_sia405abwasser_list = get_ws_wn_ids("wwtp_structures") + # 2. check if wastewater_structures exist that are not part of SIA 405 Abwasser (in Release 2015 this is the class wwtp_structures, in Release 2020 it will be more - to be extended in tww) + ws_off_sia405abwasser_list = None + ws_off_sia405abwasser_list = get_ws_wn_ids("wwtp_structures") - # 3. Show wwt_structures_id_sia405abwasser_list + # 3. Show ws_off_sia405abwasser_list logger.debug( - f"wwt_structures_id_sia405abwasser_list : {wwt_structures_id_sia405abwasser_list}", + f"ws_off_sia405abwasser_list : {ws_off_sia405abwasser_list}", ) # 4. check if filtered if filtered: - if wwt_structures_id_sia405abwasser_list: - # take out wwt_structures_id_sia405abwasser_list from selection - subset_ids = remove_from_selection(subset_ids, wwt_structures_id_sia405abwasser_list) + if ws_off_sia405abwasser_list: + # take out ws_off_sia405abwasser_list from selection + subset_ids = remove_from_selection(subset_ids, ws_off_sia405abwasser_list) # else do nothing else: - if wwt_structures_id_sia405abwasser_list: + if ws_off_sia405abwasser_list: # add all data of wastewater_structures to selection subset_ids = add_to_selection(subset_ids, get_ws_wn_ids("wastewater_structure")) - # take out wwt_structures_id_sia405abwasser_list from selection - subset_ids = remove_from_selection(subset_ids, wwt_structures_id_sia405abwasser_list) + # take out ws_off_sia405abwasser_list from selection + subset_ids = remove_from_selection(subset_ids, ws_off_sia405abwasser_list) # add reach_ids subset_ids = add_to_selection(subset_ids, get_cl_re_ids("channel")) # treat export as with a selection From fc58e20028cc17eed0cb581a36ea6c4555a7daeb Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 12 Nov 2024 14:18:17 +0100 Subject: [PATCH 017/127] export.py wastewater_networkelement_common - change to subset_ids Check later if ws_subset_ids is needed or if obj_ids of ws are already added there anyway --- qgepqwat2ili/qgepsia405/export.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 68d16cf7..80ea6aa2 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -252,9 +252,9 @@ def wastewater_networkelement_common(row): return { # "abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), - # 6.11.2024 Besides 
wn_id and re_id we also need ws_obj_ids in a seperate subset - call it ws_subset_id + # 6.11.2024 Besides wn_id and re_id we also need ws_obj_ids in a separate subset - call it ws_subset_id "abwasserbauwerkref": check_fk_in_subsetid( - wastewater_structure_id_sia405abwasser_list, row.fk_wastewater_structure__REL + subset_ids, row.fk_wastewater_structure__REL ), "bemerkung": truncate(emptystr_to_null(row.remark), 80), "bezeichnung": null_to_emptystr(row.identifier), @@ -590,7 +590,7 @@ def textpos_common(row, t_type, geojson_crs_def): # --- sia405_baseclass --- **base_common(row, "haltungspunkt"), # --- haltungspunkt --- - # changed call from get_tid to check_fk_in_subsetid so it does not wirte foreignkeys on elements that do not exist + # changed call from get_tid to check_fk_in_subsetid so it does not write foreignkeys on elements that do not exist # abwassernetzelementref=get_tid(row.fk_wastewater_networkelement__REL), abwassernetzelementref=check_fk_in_subsetid( subset_ids, row.fk_wastewater_networkelement__REL From 886b9e3eaf40d29f17c5308aafca96ade703497c Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 12 Nov 2024 14:36:03 +0100 Subject: [PATCH 018/127] missing : --- qgepqwat2ili/utils/ili2db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index 20d5d718..b23e43d0 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -534,7 +534,7 @@ def get_ws_wn_ids(classname): """ # define classes that this is allowed to use - adapt for TWW to include model changes - if classname IN ['discharge_point', 'manhole', 'infiltration_installation', 'wastewater_structure'] + if classname IN ['discharge_point', 'manhole', 'infiltration_installation', 'wastewater_structure']: logger.info(f"get list of id's of wastewater_nodes of {classname} ..."): connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) connection.set_session(autocommit=True) From ebbd630dd3d591d211c010e63f6c0c70d2265fc0 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 12 Nov 2024 14:50:31 +0100 Subject: [PATCH 019/127] adapt IN to in --- qgepqwat2ili/utils/ili2db.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index b23e43d0..d70fb230 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -534,7 +534,9 @@ def get_ws_wn_ids(classname): """ # define classes that this is allowed to use - adapt for TWW to include model changes - if classname IN ['discharge_point', 'manhole', 'infiltration_installation', 'wastewater_structure']: + + allowed_classnames = ['discharge_point', 'manhole', 'infiltration_installation', 'wastewater_structure'] + if classname in allowed_classnames: logger.info(f"get list of id's of wastewater_nodes of {classname} ..."): connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) connection.set_session(autocommit=True) From 13961e1560971fe55bcb424307ff8138534c8c58 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 13:52:29 +0000 Subject: [PATCH 020/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/utils/ili2db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index d70fb230..91682386 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -534,7 +534,7 @@ def 
get_ws_wn_ids(classname): """ # define classes that this is allowed to use - adapt for TWW to include model changes - + allowed_classnames = ['discharge_point', 'manhole', 'infiltration_installation', 'wastewater_structure'] if classname in allowed_classnames: logger.info(f"get list of id's of wastewater_nodes of {classname} ..."): From 020478abbf4d5facea0c0aa34aae0860480adea9 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 12 Nov 2024 14:53:28 +0100 Subject: [PATCH 021/127] wrong : --- qgepqwat2ili/utils/ili2db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index d70fb230..ef79ceec 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -537,7 +537,7 @@ def get_ws_wn_ids(classname): allowed_classnames = ['discharge_point', 'manhole', 'infiltration_installation', 'wastewater_structure'] if classname in allowed_classnames: - logger.info(f"get list of id's of wastewater_nodes of {classname} ..."): + logger.info(f"get list of id's of wastewater_nodes of {classname} ...") connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) connection.set_session(autocommit=True) cursor = connection.cursor() From c39fa8aa6b47ce2c32ec5949533684e30ee6496a Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 12 Nov 2024 15:00:29 +0100 Subject: [PATCH 022/127] else: --- qgepqwat2ili/utils/ili2db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index a551b612..c4fd7eb3 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -564,7 +564,7 @@ def get_ws_wn_ids(classname): logger.debug(f" building up '{ws_wn_ids}' ...") return ws_wn_ids - else + else: logger.warning(f"Do not use this function with {classname} !") return None From 60ca3ec2394ab68ac4410a576c508f4aa5ecd9f4 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 12 Nov 2024 15:03:59 +0100 Subject: [PATCH 023/127] remove spaces --- qgepqwat2ili/utils/ili2db.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index c4fd7eb3..73752a69 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -569,7 +569,7 @@ def get_ws_wn_ids(classname): return None -def remove_from_selection (selected_ids, remove_ids) +def remove_from_selection(selected_ids, remove_ids) """ Remove ids from selected_ids """ @@ -580,7 +580,7 @@ def remove_from_selection (selected_ids, remove_ids) return selected_ids -def add_to_selection (selected_ids, add_ids) +def add_to_selection(selected_ids, add_ids) """ Add ids to selected_ids """ From 04b03b552ad80329fc0978a46d6f3fe868c4aa6d Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 12 Nov 2024 15:05:48 +0100 Subject: [PATCH 024/127] adding: --- qgepqwat2ili/utils/ili2db.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index 73752a69..cf669fef 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -569,7 +569,7 @@ def get_ws_wn_ids(classname): return None -def remove_from_selection(selected_ids, remove_ids) +def remove_from_selection(selected_ids, remove_ids): """ Remove ids from selected_ids """ @@ -580,7 +580,7 @@ def remove_from_selection(selected_ids, remove_ids) return selected_ids -def add_to_selection(selected_ids, add_ids) +def add_to_selection(selected_ids, add_ids): """ Add ids to selected_ids """ From 
b089be5e1be46532d7df3c870391bec5b04b1b51 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 14:06:13 +0000 Subject: [PATCH 025/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/utils/ili2db.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index cf669fef..6d3fa2ff 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -495,7 +495,7 @@ def get_cl_re_ids(classname): """ # define classes that this is allowed to use - adapt for TWW to include model changes - if classname == 'channel': + if classname == "channel": logger.info(f"get list of id's of wastewater_nodes of {classname} ...") connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) @@ -528,6 +528,7 @@ def get_cl_re_ids(classname): logger.warning(f"Do not use this function with {classname} !") return None + def get_ws_wn_ids(classname): """ Get list of id's of wastewater_nodes of the wastewater_structure (sub)class provided, eg. wwtp_structure (ARABauwerk, does not work for channel @@ -535,7 +536,12 @@ def get_ws_wn_ids(classname): # define classes that this is allowed to use - adapt for TWW to include model changes - allowed_classnames = ['discharge_point', 'manhole', 'infiltration_installation', 'wastewater_structure'] + allowed_classnames = [ + "discharge_point", + "manhole", + "infiltration_installation", + "wastewater_structure", + ] if classname in allowed_classnames: logger.info(f"get list of id's of wastewater_nodes of {classname} ...") connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) From 91156f0f5f21607d01535f3e5187e319da91a6f2 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 12 Nov 2024 15:19:54 +0100 Subject: [PATCH 026/127] adapt qgep export and qgepdss export --- qgepqwat2ili/qgep/export.py | 10 ++++++++-- qgepqwat2ili/qgepdss/export.py | 14 +++++++++----- 2 files changed, 17 insertions(+), 7 deletions(-) diff --git a/qgepqwat2ili/qgep/export.py b/qgepqwat2ili/qgep/export.py index fe98d2ff..f8b97fc2 100644 --- a/qgepqwat2ili/qgep/export.py +++ b/qgepqwat2ili/qgep/export.py @@ -8,8 +8,14 @@ from .. import utils # 4.10.2024 -from ..utils.ili2db import skip_wwtp_structure_ids -from ..utils.various import logger +# from ..utils.ili2db import skip_wwtp_structure_ids +# 6.11.2024 replaced with +from ..utils.ili2db import ( + add_to_selection, + get_cl_re_ids, + get_ws_wn_ids, + remove_from_selection, +)from ..utils.various import logger from .model_abwasser import get_abwasser_model from .model_qgep import get_qgep_model diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 20b72188..433241f5 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -8,7 +8,14 @@ from .. 
import utils # 4.10.2024 -from ..utils.ili2db import skip_wwtp_structure_ids +# from ..utils.ili2db import skip_wwtp_structure_ids +# 6.11.2024 replaced with +from ..utils.ili2db import ( + add_to_selection, + get_cl_re_ids, + get_ws_wn_ids, + remove_from_selection, +) from ..utils.various import logger from .model_abwasser import get_abwasser_model from .model_qgep import get_qgep_model @@ -46,10 +53,7 @@ def qgep_export(selection=None, labels_file=None, orientation=None): # Logging for debugging logger.debug(f"print subset_ids '{subset_ids}'") - # get list of id's of class wwtp_structure (ARABauwerk) to be able to check if fk_wastewater_structure references to wwtp_structure - - wastewater_structure_id_sia405abwasser_list = None - wastewater_structure_id_sia405abwasser_list = skip_wwtp_structure_ids() + # ws_off_sia405abwasser_list and selection process not needed for VSA-DSS Export logger.info( f"wastewater_structure_id_sia405abwasser_list : {wastewater_structure_id_sia405abwasser_list}", From 629bf1b3c3dab148688eac6dd645c129d9d30889 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 14:20:17 +0000 Subject: [PATCH 027/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgep/export.py | 21 +++++++++++++++------ qgepqwat2ili/qgepdss/export.py | 6 ------ 2 files changed, 15 insertions(+), 12 deletions(-) diff --git a/qgepqwat2ili/qgep/export.py b/qgepqwat2ili/qgep/export.py index f8b97fc2..0fbb4609 100644 --- a/qgepqwat2ili/qgep/export.py +++ b/qgepqwat2ili/qgep/export.py @@ -11,16 +11,25 @@ # from ..utils.ili2db import skip_wwtp_structure_ids # 6.11.2024 replaced with from ..utils.ili2db import ( + ..utils.various, + .model_abwasser, + .model_qgep, + :, add_to_selection, + def, + from, + get_abwasser_model, get_cl_re_ids, + get_qgep_model, get_ws_wn_ids, + import, + labels_file=None, + logger, + orientation=None, + qgep_export, remove_from_selection, -)from ..utils.various import logger -from .model_abwasser import get_abwasser_model -from .model_qgep import get_qgep_model - - -def qgep_export(selection=None, labels_file=None, orientation=None): + selection=None, +) """ Export data from the QGEP model into the ili2pg model. 
diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 433241f5..0f79f335 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -10,12 +10,6 @@ # 4.10.2024 # from ..utils.ili2db import skip_wwtp_structure_ids # 6.11.2024 replaced with -from ..utils.ili2db import ( - add_to_selection, - get_cl_re_ids, - get_ws_wn_ids, - remove_from_selection, -) from ..utils.various import logger from .model_abwasser import get_abwasser_model from .model_qgep import get_qgep_model From 7d3037e83d42068518ee10f5b93650830ee4e98a Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 12 Nov 2024 15:21:42 +0100 Subject: [PATCH 028/127] missing linebreak --- qgepqwat2ili/qgep/export.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/qgepqwat2ili/qgep/export.py b/qgepqwat2ili/qgep/export.py index f8b97fc2..7a777ab8 100644 --- a/qgepqwat2ili/qgep/export.py +++ b/qgepqwat2ili/qgep/export.py @@ -15,7 +15,8 @@ get_cl_re_ids, get_ws_wn_ids, remove_from_selection, -)from ..utils.various import logger +) +from ..utils.various import logger from .model_abwasser import get_abwasser_model from .model_qgep import get_qgep_model From 4b3fa8babb9637cffd202bdb6e917048c9d85871 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 14:26:40 +0000 Subject: [PATCH 029/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgep/export.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/qgepqwat2ili/qgep/export.py b/qgepqwat2ili/qgep/export.py index b99502d9..9c6e77f7 100644 --- a/qgepqwat2ili/qgep/export.py +++ b/qgepqwat2ili/qgep/export.py @@ -5,8 +5,6 @@ from sqlalchemy.orm import Session from sqlalchemy.sql import text -from .. import utils - # 4.10.2024 # from ..utils.ili2db import skip_wwtp_structure_ids # 6.11.2024 replaced with @@ -15,12 +13,6 @@ # 4.10.2024 # from ..utils.ili2db import skip_wwtp_structure_ids # 6.11.2024 replaced with -from ..utils.ili2db import ( - add_to_selection, - get_cl_re_ids, - get_ws_wn_ids, - remove_from_selection, -) from ..utils.various import logger from .model_abwasser import get_abwasser_model from .model_qgep import get_qgep_model From 6782371696ba969a973fb0157a8f645a2936d9db Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 12 Nov 2024 17:01:08 +0100 Subject: [PATCH 030/127] ili2db - get_ws_wn_ids works also with channel - no restriction Then we will get the reaches --- qgepqwat2ili/qgepdss/export.py | 3 -- qgepqwat2ili/qgepsia405/export.py | 2 +- qgepqwat2ili/utils/ili2db.py | 58 ++++++++++++------------------- 3 files changed, 24 insertions(+), 39 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 0f79f335..e2228bd8 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -49,9 +49,6 @@ def qgep_export(selection=None, labels_file=None, orientation=None): # ws_off_sia405abwasser_list and selection process not needed for VSA-DSS Export - logger.info( - f"wastewater_structure_id_sia405abwasser_list : {wastewater_structure_id_sia405abwasser_list}", - ) # Orientation oriented = orientation is not None diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 80ea6aa2..ffcf0ce6 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -48,7 +48,7 @@ def qgep_export(selection=None, labels_file=None, orientation=None): # 2. 
check if wastewater_structures exist that are not part of SIA 405 Abwasser (in Release 2015 this is the class wwtp_structures, in Release 2020 it will be more - to be extended in tww) ws_off_sia405abwasser_list = None - ws_off_sia405abwasser_list = get_ws_wn_ids("wwtp_structures") + ws_off_sia405abwasser_list = get_ws_wn_ids("wwtp_structure") # 3. Show ws_off_sia405abwasser_list logger.debug( diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index 6d3fa2ff..afccf7c8 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -534,45 +534,33 @@ def get_ws_wn_ids(classname): Get list of id's of wastewater_nodes of the wastewater_structure (sub)class provided, eg. wwtp_structure (ARABauwerk, does not work for channel """ - # define classes that this is allowed to use - adapt for TWW to include model changes - - allowed_classnames = [ - "discharge_point", - "manhole", - "infiltration_installation", - "wastewater_structure", - ] - if classname in allowed_classnames: - logger.info(f"get list of id's of wastewater_nodes of {classname} ...") - connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) - connection.set_session(autocommit=True) - cursor = connection.cursor() - - ws_wn_ids = [] + logger.info(f"get list of id's of wastewater_nodes of {classname} ...") + connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) + connection.set_session(autocommit=True) + cursor = connection.cursor() - # select all obj_id of the wastewater_nodes of wwtp_structure - cursor.execute( - "SELECT wn.obj_id FROM qgep_od.{classname} LEFT JOIN qgep_od.wastewater_networkelement wn ON wn.fk_wastewater_structure = {classname}.obj_id;" - ) + ws_wn_ids = [] - # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ - # ws_wn_ids_count = int(cursor.fetchone()[0]) - # if ws_wn_ids_count == 0: - if cursor.fetchone() is None: - ws_wn_ids = None - else: - records = cursor.fetchall() - for row in records: - logger.debug(f" row[0] = {row[0]}") - # https://www.pythontutorial.net/python-string-methods/python-string-concatenation/ - strrow = str(row[0]) - ws_wn_ids.append(strrow) - logger.debug(f" building up '{ws_wn_ids}' ...") + # select all obj_id of the wastewater_nodes of wwtp_structure + cursor.execute( + "SELECT wn.obj_id FROM qgep_od.{classname} LEFT JOIN qgep_od.wastewater_networkelement wn ON wn.fk_wastewater_structure = {classname}.obj_id;" + ) - return ws_wn_ids + # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ + # ws_wn_ids_count = int(cursor.fetchone()[0]) + # if ws_wn_ids_count == 0: + if cursor.fetchone() is None: + ws_wn_ids = None else: - logger.warning(f"Do not use this function with {classname} !") - return None + records = cursor.fetchall() + for row in records: + logger.debug(f" row[0] = {row[0]}") + # https://www.pythontutorial.net/python-string-methods/python-string-concatenation/ + strrow = str(row[0]) + ws_wn_ids.append(strrow) + logger.debug(f" building up '{ws_wn_ids}' ...") + + return ws_wn_ids def remove_from_selection(selected_ids, remove_ids): From edf61162adec9fb0868f7043ad2d2c86a63689e8 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 16:01:34 +0000 Subject: [PATCH 031/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepdss/export.py | 1 - 1 file changed, 1 deletion(-) diff 
--git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index e2228bd8..aef85dd0 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -49,7 +49,6 @@ def qgep_export(selection=None, labels_file=None, orientation=None): # ws_off_sia405abwasser_list and selection process not needed for VSA-DSS Export - # Orientation oriented = orientation is not None if oriented: From f0cd371fde49eb632079a0136b0ddfb2895c7a1d Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 12 Nov 2024 17:11:12 +0100 Subject: [PATCH 032/127] adapt select statement --- qgepqwat2ili/utils/ili2db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index afccf7c8..d6ed09d8 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -543,7 +543,7 @@ def get_ws_wn_ids(classname): # select all obj_id of the wastewater_nodes of wwtp_structure cursor.execute( - "SELECT wn.obj_id FROM qgep_od.{classname} LEFT JOIN qgep_od.wastewater_networkelement wn ON wn.fk_wastewater_structure = {classname}.obj_id;" + f"SELECT wn.obj_id FROM qgep_od.{classname} LEFT JOIN qgep_od.wastewater_networkelement wn ON wn.fk_wastewater_structure = {classname}.obj_id;" ) # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ From 669c8d57d18118e377decf57c2a031007c9e8925 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 12 Nov 2024 17:22:45 +0100 Subject: [PATCH 033/127] Update ili2db.py --- qgepqwat2ili/utils/ili2db.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index d6ed09d8..9e70bf33 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -488,7 +488,7 @@ def skip_wwtp_structure_ids_old(): return not_wwtp_structure_ids - +# 12.11.2024 to clean up - get_ws_wn_ids kann das auch def get_cl_re_ids(classname): """ Get list of id's of reaches of the channels provided @@ -531,7 +531,7 @@ def get_cl_re_ids(classname): def get_ws_wn_ids(classname): """ - Get list of id's of wastewater_nodes of the wastewater_structure (sub)class provided, eg. wwtp_structure (ARABauwerk, does not work for channel + Get list of id's of wastewater_nodes of the wastewater_structure (sub)class provided, eg. 
wwtp_structure (ARABauwerk, does also work for channel (give reaches then) """ logger.info(f"get list of id's of wastewater_nodes of {classname} ...") @@ -576,11 +576,14 @@ def remove_from_selection(selected_ids, remove_ids): def add_to_selection(selected_ids, add_ids): """ - Add ids to selected_ids + Append ids to selected_ids """ + if selected_id = None: + selected_id = [] + for list_item in add_ids: - selected_ids = selected_ids.add(list_item) + selected_ids = selected_ids.append(list_item) return selected_ids From de8dcb13749cf05ec6bf21ba23109de89d163de9 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 12 Nov 2024 17:27:59 +0100 Subject: [PATCH 034/127] is None --- qgepqwat2ili/utils/ili2db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index 9e70bf33..b4cf3d60 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -579,7 +579,7 @@ def add_to_selection(selected_ids, add_ids): Append ids to selected_ids """ - if selected_id = None: + if selected_id is None: selected_id = [] for list_item in add_ids: From 9846b4783c73d3f522611bf05ede451e5a7c55d2 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 12 Nov 2024 20:39:42 +0100 Subject: [PATCH 035/127] adaptions and tests --- qgepqwat2ili/qgepsia405/export.py | 20 +++++++++++++++----- qgepqwat2ili/utils/ili2db.py | 15 +++++++++------ 2 files changed, 24 insertions(+), 11 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index ffcf0ce6..3d9839e2 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -65,10 +65,16 @@ def qgep_export(selection=None, labels_file=None, orientation=None): if ws_off_sia405abwasser_list: # add all data of wastewater_structures to selection subset_ids = add_to_selection(subset_ids, get_ws_wn_ids("wastewater_structure")) + logger.debug( + f"subset_ids of all wws : {subset_ids}", + ) # take out ws_off_sia405abwasser_list from selection subset_ids = remove_from_selection(subset_ids, ws_off_sia405abwasser_list) + logger.debug( + f"subset_ids of all wws minus ws_off_sia405abwasser_list: {subset_ids}", + ) # add reach_ids - subset_ids = add_to_selection(subset_ids, get_cl_re_ids("channel")) + # subset_ids = add_to_selection(subset_ids, get_cl_re_ids("channel")) # treat export as with a selection filtered = True @@ -251,11 +257,11 @@ def wastewater_networkelement_common(row): """ return { - # "abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), + "abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), # 6.11.2024 Besides wn_id and re_id we also need ws_obj_ids in a separate subset - call it ws_subset_id - "abwasserbauwerkref": check_fk_in_subsetid( - subset_ids, row.fk_wastewater_structure__REL - ), + #"abwasserbauwerkref": check_fk_in_subsetid( + # subset_ids, row.fk_wastewater_structure__REL + #), "bemerkung": truncate(emptystr_to_null(row.remark), 80), "bezeichnung": null_to_emptystr(row.identifier), } @@ -709,8 +715,12 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info( "Exporting QGEP.dryweather_downspout -> ABWASSER.trockenwetterfallrohr, ABWASSER.metaattribute" ) + query = qgep_session.query(QGEP.dryweather_downspout) if filtered: + logger.info( + f"filtered: subset_ids = {subset_ids}" + ) query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( QGEP.wastewater_networkelement.obj_id.in_(subset_ids) ) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index 
b4cf3d60..753af4a7 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -557,8 +557,9 @@ def get_ws_wn_ids(classname): logger.debug(f" row[0] = {row[0]}") # https://www.pythontutorial.net/python-string-methods/python-string-concatenation/ strrow = str(row[0]) - ws_wn_ids.append(strrow) - logger.debug(f" building up '{ws_wn_ids}' ...") + if strrow not is None: + ws_wn_ids.append(strrow) + # logger.debug(f" building up '{ws_wn_ids}' ...") return ws_wn_ids @@ -569,7 +570,8 @@ def remove_from_selection(selected_ids, remove_ids): """ for list_item in remove_ids: - selected_ids = selected_ids.remove(list_item) + #selected_ids = selected_ids.remove(list_item) + selected_ids.remove(list_item) return selected_ids @@ -579,11 +581,12 @@ def add_to_selection(selected_ids, add_ids): Append ids to selected_ids """ - if selected_id is None: - selected_id = [] + if selected_ids is None: + selected_ids = [] for list_item in add_ids: - selected_ids = selected_ids.append(list_item) + #selected_ids = selected_ids.append(list_item) + selected_ids.append(list_item) return selected_ids From 687b7cc7792ff2176741d5dffcbe1d515129fb66 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 19:39:58 +0000 Subject: [PATCH 036/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepsia405/export.py | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 3d9839e2..83005358 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -10,12 +10,7 @@ # 4.10.2024 # from ..utils.ili2db import skip_wwtp_structure_ids # 6.11.2024 replaced with -from ..utils.ili2db import ( - add_to_selection, - get_cl_re_ids, - get_ws_wn_ids, - remove_from_selection, -) +from ..utils.ili2db import add_to_selection, get_ws_wn_ids, remove_from_selection from ..utils.various import logger from .model_abwasser import get_abwasser_model from .model_qgep import get_qgep_model @@ -259,9 +254,9 @@ def wastewater_networkelement_common(row): return { "abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), # 6.11.2024 Besides wn_id and re_id we also need ws_obj_ids in a separate subset - call it ws_subset_id - #"abwasserbauwerkref": check_fk_in_subsetid( + # "abwasserbauwerkref": check_fk_in_subsetid( # subset_ids, row.fk_wastewater_structure__REL - #), + # ), "bemerkung": truncate(emptystr_to_null(row.remark), 80), "bezeichnung": null_to_emptystr(row.identifier), } @@ -718,9 +713,7 @@ def textpos_common(row, t_type, geojson_crs_def): query = qgep_session.query(QGEP.dryweather_downspout) if filtered: - logger.info( - f"filtered: subset_ids = {subset_ids}" - ) + logger.info(f"filtered: subset_ids = {subset_ids}") query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( QGEP.wastewater_networkelement.obj_id.in_(subset_ids) ) From 9acdfa87583dda07d81455e3c15077efd22327e3 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 12 Nov 2024 20:41:53 +0100 Subject: [PATCH 037/127] if not --- qgepqwat2ili/utils/ili2db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index 753af4a7..714f3859 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -557,7 +557,7 @@ def get_ws_wn_ids(classname): logger.debug(f" row[0] = {row[0]}") # 
https://www.pythontutorial.net/python-string-methods/python-string-concatenation/ strrow = str(row[0]) - if strrow not is None: + if not strrow is None: ws_wn_ids.append(strrow) # logger.debug(f" building up '{ws_wn_ids}' ...") From 9c0b633f8e8b6d574000189a39683923c70abb99 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 19:42:56 +0000 Subject: [PATCH 038/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/utils/ili2db.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index 714f3859..de4e9a43 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -488,6 +488,7 @@ def skip_wwtp_structure_ids_old(): return not_wwtp_structure_ids + # 12.11.2024 to clean up - get_ws_wn_ids kann das auch def get_cl_re_ids(classname): """ @@ -570,7 +571,7 @@ def remove_from_selection(selected_ids, remove_ids): """ for list_item in remove_ids: - #selected_ids = selected_ids.remove(list_item) + # selected_ids = selected_ids.remove(list_item) selected_ids.remove(list_item) return selected_ids @@ -585,7 +586,7 @@ def add_to_selection(selected_ids, add_ids): selected_ids = [] for list_item in add_ids: - #selected_ids = selected_ids.append(list_item) + # selected_ids = selected_ids.append(list_item) selected_ids.append(list_item) return selected_ids From c6d63ed9c63a4baddbc675ad0acf85f1211018ed Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 12 Nov 2024 21:15:51 +0100 Subject: [PATCH 039/127] add logger for query --- qgepqwat2ili/qgepsia405/export.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 83005358..476510bc 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -717,6 +717,9 @@ def textpos_common(row, t_type, geojson_crs_def): query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( QGEP.wastewater_networkelement.obj_id.in_(subset_ids) ) + # add sql statement to logger + statement = query.statement + logger.info(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.dryweather_downspout From fdb5e7f7e2da7bccdbeb71b0c3f58a77d748ec18 Mon Sep 17 00:00:00 2001 From: SJiB Date: Fri, 15 Nov 2024 11:14:05 +0100 Subject: [PATCH 040/127] adapt kek export as sia405_abwasser --- qgepqwat2ili/qgep/export.py | 37 ++++++++++++++++++++++++++++++++----- 1 file changed, 32 insertions(+), 5 deletions(-) diff --git a/qgepqwat2ili/qgep/export.py b/qgepqwat2ili/qgep/export.py index 9c6e77f7..b292da01 100644 --- a/qgepqwat2ili/qgep/export.py +++ b/qgepqwat2ili/qgep/export.py @@ -39,19 +39,46 @@ def qgep_export(selection=None, labels_file=None, orientation=None): # backport from tww https://github.com/teksi/wastewater/blob/3acfba249866d299f8a22e249d9f1e475fe7b88d/plugin/teksi_wastewater/interlis/interlis_model_mapping/interlis_exporter_to_intermediate_schema.py#L83 abwasser_session.execute(text("SET CONSTRAINTS ALL DEFERRED;")) - # Filtering + # 1. 
Filtering - check if selection filtered = selection is not None subset_ids = selection if selection is not None else [] # get list of id's of class wwtp_structure (ARABauwerk) to be able to check if fk_wastewater_structure references to wwtp_structure - wastewater_structure_id_sia405abwasser_list = None - wastewater_structure_id_sia405abwasser_list = skip_wwtp_structure_ids() + # 2. check if wastewater_structures exist that are not part of SIA 405 Abwasser (in Release 2015 this is the class wwtp_structures, in Release 2020 it will be more - to be extended in tww) + ws_off_sia405abwasser_list = None + ws_off_sia405abwasser_list = get_ws_wn_ids("wwtp_structure") - logger.info( - f"wastewater_structure_id_sia405abwasser_list : {wastewater_structure_id_sia405abwasser_list}", + # 3. Show ws_off_sia405abwasser_list + logger.debug( + f"ws_off_sia405abwasser_list : {ws_off_sia405abwasser_list}", ) + # 4. check if filtered + if filtered: + if ws_off_sia405abwasser_list: + # take out ws_off_sia405abwasser_list from selection + subset_ids = remove_from_selection(subset_ids, ws_off_sia405abwasser_list) + # else do nothing + else: + if ws_off_sia405abwasser_list: + # add all data of wastewater_structures to selection + subset_ids = add_to_selection(subset_ids, get_ws_wn_ids("wastewater_structure")) + logger.debug( + f"subset_ids of all wws : {subset_ids}", + ) + # take out ws_off_sia405abwasser_list from selection + subset_ids = remove_from_selection(subset_ids, ws_off_sia405abwasser_list) + logger.debug( + f"subset_ids of all wws minus ws_off_sia405abwasser_list: {subset_ids}", + ) + # add reach_ids + # subset_ids = add_to_selection(subset_ids, get_cl_re_ids("channel")) + # treat export as with a selection + filtered = True + + # else do nothing + # Orientation oriented = orientation is not None if oriented: From cb5fec1bd15385c6fe68ee36fc4b63a1222eb0b6 Mon Sep 17 00:00:00 2001 From: SJiB Date: Fri, 15 Nov 2024 11:16:25 +0100 Subject: [PATCH 041/127] adapt abwasserbauwerkref --- qgepqwat2ili/qgep/export.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/qgepqwat2ili/qgep/export.py b/qgepqwat2ili/qgep/export.py index b292da01..cb54bbe5 100644 --- a/qgepqwat2ili/qgep/export.py +++ b/qgepqwat2ili/qgep/export.py @@ -258,10 +258,11 @@ def wastewater_networkelement_common(row): """ return { - # "abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), - "abwasserbauwerkref": check_fk_in_subsetid( - wastewater_structure_id_sia405abwasser_list, row.fk_wastewater_structure__REL - ), + "abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), + # 6.11.2024 Besides wn_id and re_id we also need ws_obj_ids in a separate subset - call it ws_subset_id + # "abwasserbauwerkref": check_fk_in_subsetid( + # subset_ids, row.fk_wastewater_structure__REL + # ), "bemerkung": truncate(emptystr_to_null(row.remark), 80), "bezeichnung": null_to_emptystr(row.identifier), } From 4fd6a7c410a3dfb6c6da94561a7f96030ce1a881 Mon Sep 17 00:00:00 2001 From: SJiB Date: Fri, 15 Nov 2024 11:20:19 +0100 Subject: [PATCH 042/127] update imports --- qgepqwat2ili/qgep/export.py | 1 + qgepqwat2ili/qgepdss/export.py | 3 ++- qgepqwat2ili/utils/ili2db.py | 2 +- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/qgep/export.py b/qgepqwat2ili/qgep/export.py index cb54bbe5..8111eb49 100644 --- a/qgepqwat2ili/qgep/export.py +++ b/qgepqwat2ili/qgep/export.py @@ -13,6 +13,7 @@ # 4.10.2024 # from ..utils.ili2db import skip_wwtp_structure_ids # 6.11.2024 replaced with +from ..utils.ili2db 
import add_to_selection, get_ws_wn_ids, remove_from_selection from ..utils.various import logger from .model_abwasser import get_abwasser_model from .model_qgep import get_qgep_model diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index aef85dd0..d204eb96 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -9,7 +9,8 @@ # 4.10.2024 # from ..utils.ili2db import skip_wwtp_structure_ids -# 6.11.2024 replaced with +# 6.11.2024 replaced with - to check if really necessary here (as no sia405 abwasser exceptions needed) +from ..utils.ili2db import add_to_selection, get_ws_wn_ids, remove_from_selection from ..utils.various import logger from .model_abwasser import get_abwasser_model from .model_qgep import get_qgep_model diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index de4e9a43..7390bf49 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -558,7 +558,7 @@ def get_ws_wn_ids(classname): logger.debug(f" row[0] = {row[0]}") # https://www.pythontutorial.net/python-string-methods/python-string-concatenation/ strrow = str(row[0]) - if not strrow is None: + if strrow is not None: ws_wn_ids.append(strrow) # logger.debug(f" building up '{ws_wn_ids}' ...") From 11587cade40e0d2ae304f0619aa2aa67d41238ee Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 10:20:35 +0000 Subject: [PATCH 043/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepdss/export.py | 1 - 1 file changed, 1 deletion(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index d204eb96..c7f27c30 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -10,7 +10,6 @@ # 4.10.2024 # from ..utils.ili2db import skip_wwtp_structure_ids # 6.11.2024 replaced with - to check if really necessary here (as no sia405 abwasser exceptions needed) -from ..utils.ili2db import add_to_selection, get_ws_wn_ids, remove_from_selection from ..utils.various import logger from .model_abwasser import get_abwasser_model from .model_qgep import get_qgep_model From c5f40d20811df309a2f1da19c26620db354030fc Mon Sep 17 00:00:00 2001 From: SJiB Date: Fri, 15 Nov 2024 11:48:42 +0100 Subject: [PATCH 044/127] try with adapted query --- qgepqwat2ili/qgepsia405/export.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 476510bc..7b379275 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -714,7 +714,10 @@ def textpos_common(row, t_type, geojson_crs_def): query = qgep_session.query(QGEP.dryweather_downspout) if filtered: logger.info(f"filtered: subset_ids = {subset_ids}") - query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + #query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + #) + query = query.join(QGEP.wastewater_structure,structure_part.fk_wastewater_structure == wastewater_structure.obj_id).join(QGEP.wastewater_networkelement).filter( QGEP.wastewater_networkelement.obj_id.in_(subset_ids) ) # add sql statement to logger From 0af45b581ed405bf6c61d0e258cf08d2cea0a81e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 
10:49:28 +0000 Subject: [PATCH 045/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepsia405/export.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 7b379275..be6c04c1 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -714,11 +714,16 @@ def textpos_common(row, t_type, geojson_crs_def): query = qgep_session.query(QGEP.dryweather_downspout) if filtered: logger.info(f"filtered: subset_ids = {subset_ids}") - #query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) - #) - query = query.join(QGEP.wastewater_structure,structure_part.fk_wastewater_structure == wastewater_structure.obj_id).join(QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + structure_part.fk_wastewater_structure == wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) # add sql statement to logger statement = query.statement From 8300248c4d023b92b878666ffee24d99b1f952d4 Mon Sep 17 00:00:00 2001 From: SJiB Date: Fri, 15 Nov 2024 13:32:30 +0100 Subject: [PATCH 046/127] adapt query --- qgepqwat2ili/qgepsia405/export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 7b379275..bab315d6 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -717,7 +717,7 @@ def textpos_common(row, t_type, geojson_crs_def): #query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) #) - query = query.join(QGEP.wastewater_structure,structure_part.fk_wastewater_structure == wastewater_structure.obj_id).join(QGEP.wastewater_networkelement).filter( + query = query.join(QGEP.wastewater_structure,QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id).join(QGEP.wastewater_networkelement).filter( QGEP.wastewater_networkelement.obj_id.in_(subset_ids) ) # add sql statement to logger From 3f10b2c9f1b69f20cb9ac10ba3aaea8e99175b7c Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 12:33:56 +0000 Subject: [PATCH 047/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepsia405/export.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index b3fa62f6..c227bd44 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -716,9 +716,14 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info(f"filtered: subset_ids = {subset_ids}") # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) - #) - query = query.join(QGEP.wastewater_structure,QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id).join(QGEP.wastewater_networkelement).filter( - 
QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) # add sql statement to logger statement = query.statement From 4b5d0ea8f4226c0910c992d7b42b76dffa6daf58 Mon Sep 17 00:00:00 2001 From: SJiB Date: Fri, 15 Nov 2024 13:37:16 +0100 Subject: [PATCH 048/127] other subclasses of structure_part --- qgepqwat2ili/qgepsia405/export.py | 32 +++++++++++++++++++++++++++---- 1 file changed, 28 insertions(+), 4 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index b3fa62f6..639c55e3 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -758,9 +758,15 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.access_aid -> ABWASSER.einstiegshilfe, ABWASSER.metaattribute") query = qgep_session.query(QGEP.access_aid) if filtered: - query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = query.join(QGEP.wastewater_structure,QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id).join(QGEP.wastewater_networkelement).filter( QGEP.wastewater_networkelement.obj_id.in_(subset_ids) ) + # add sql statement to logger + statement = query.statement + logger.info(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.access_aid @@ -798,9 +804,15 @@ def textpos_common(row, t_type, geojson_crs_def): ) query = qgep_session.query(QGEP.dryweather_flume) if filtered: - query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = query.join(QGEP.wastewater_structure,QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id).join(QGEP.wastewater_networkelement).filter( QGEP.wastewater_networkelement.obj_id.in_(subset_ids) ) + # add sql statement to logger + statement = query.statement + logger.info(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.dryweather_flume @@ -836,9 +848,15 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.cover -> ABWASSER.deckel, ABWASSER.metaattribute") query = qgep_session.query(QGEP.cover) if filtered: - query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = query.join(QGEP.wastewater_structure,QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id).join(QGEP.wastewater_networkelement).filter( QGEP.wastewater_networkelement.obj_id.in_(subset_ids) ) + # add sql statement to logger + statement = query.statement + logger.info(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.cover @@ -883,9 +901,15 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.benching -> ABWASSER.bankett, ABWASSER.metaattribute") query = qgep_session.query(QGEP.benching) if filtered: - 
query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = query.join(QGEP.wastewater_structure,QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id).join(QGEP.wastewater_networkelement).filter( QGEP.wastewater_networkelement.obj_id.in_(subset_ids) ) + # add sql statement to logger + statement = query.statement + logger.info(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.benching From 610424e86206aede7fd4d35cac2d33f1184b44eb Mon Sep 17 00:00:00 2001 From: SJiB Date: Fri, 15 Nov 2024 14:21:10 +0100 Subject: [PATCH 049/127] adapted to include wws in subset_ids --- qgepqwat2ili/qgepsia405/export.py | 14 +++++++--- qgepqwat2ili/utils/ili2db.py | 43 ++++++++++++++++++++++++++----- 2 files changed, 48 insertions(+), 9 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 662614f1..6dc2e6c5 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -8,9 +8,9 @@ from .. import utils # 4.10.2024 -# from ..utils.ili2db import skip_wwtp_structure_ids -# 6.11.2024 replaced with -from ..utils.ili2db import add_to_selection, get_ws_wn_ids, remove_from_selection +from ..utils.ili2db import skip_wwtp_structure_ids +# 6.11.2024 replaced with / 15.11.2024 get_ws_selected_ww_networkelements added +from ..utils.ili2db import add_to_selection, get_ws_wn_ids, remove_from_selection, get_ws_selected_ww_networkelements from ..utils.various import logger from .model_abwasser import get_abwasser_model from .model_qgep import get_qgep_model @@ -75,6 +75,14 @@ def qgep_export(selection=None, labels_file=None, orientation=None): # else do nothing + # 5. 
get and add all id's of connected wastewater_structures (not only of wastewater_network_element (reach, wwn) + subset_wws_ids = get_ws_selected_ww_networkelements(subset_ids) + subset_ids = add_to_selection(subset_ids, subset_wws_ids) + + logger.debug( + f"subset_ids with wws : {subset_ids}", + ) + # Orientation oriented = orientation is not None if oriented: diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index 7390bf49..08bc84c9 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -464,10 +464,6 @@ def skip_wwtp_structure_ids_old(): cursor.execute( "SELECT * FROM qgep_od.wastewater_structure WHERE obj_id NOT IN (SELECT obj_id FROM qgep_od.wwtp_structure);" ) - # remove - only for testing - # cursor.execute( - # f"SELECT * FROM qgep_od.organisation WHERE obj_id NOT IN (SELECT obj_id FROM qgep_od.private);" - # ) # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ # wwtp_structure_count = int(cursor.fetchone()[0]) @@ -507,7 +503,7 @@ def get_cl_re_ids(classname): # select all obj_id of the wastewater_nodes of wwtp_structure cursor.execute( - "SELECT wn.obj_id FROM qgep_od.channel LEFT JOIN qgep_od.wastewater_networkelement wn ON wn.fk_wastewater_structure = channel.obj_id;" + "SELECT wn.obj_id FROM qgep_od.channel LEFT JOIN qgep_od.wastewater_networkelement wn ON wn.fk_wastewater_structure = channel.obj_id WHERE wn.obj_id is not NULL;" ) # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ @@ -544,7 +540,7 @@ def get_ws_wn_ids(classname): # select all obj_id of the wastewater_nodes of wwtp_structure cursor.execute( - f"SELECT wn.obj_id FROM qgep_od.{classname} LEFT JOIN qgep_od.wastewater_networkelement wn ON wn.fk_wastewater_structure = {classname}.obj_id;" + f"SELECT wn.obj_id FROM qgep_od.{classname} LEFT JOIN qgep_od.wastewater_networkelement wn ON wn.fk_wastewater_structure = {classname}.obj_id WHERE wn.obj_id is not NULL;" ) # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ @@ -565,6 +561,41 @@ def get_ws_wn_ids(classname): return ws_wn_ids +def get_ws_selected_ww_networkelements(selection): + """ + Get list of id's of wastewater_structure from selected wastewater_network_elements + """ + + logger.info(f"get list of id's of wastewater_structure of selected wastewater_network_elements {selection} ...") + connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) + connection.set_session(autocommit=True) + cursor = connection.cursor() + + ws_ids = [] + + # select all obj_id of the wastewater_nodes of wwtp_structure + cursor.execute( + f"SELECT ws.obj_id FROM qgep_od.wastewater_structure ws LEFT JOIN qgep_od.wastewater_networkelement wn ON wn.fk_wastewater_structure = ws.obj_id WHERE wn.obj_id IN {selection}" + ) + + # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ + # ws_wn_ids_count = int(cursor.fetchone()[0]) + # if ws_wn_ids_count == 0: + if cursor.fetchone() is None: + ws_ids = None + else: + records = cursor.fetchall() + for row in records: + logger.debug(f" row[0] = {row[0]}") + # https://www.pythontutorial.net/python-string-methods/python-string-concatenation/ + strrow = str(row[0]) + if strrow is not None: + ws_ids.append(strrow) + # logger.debug(f" building up '{ws_wn_ids}' ...") + + return ws_ids + + def remove_from_selection(selected_ids, remove_ids): """ Remove ids from 
selected_ids From 9c5666f9856bd677d961a50415eebad2ccea1766 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 13:22:02 +0000 Subject: [PATCH 050/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepsia405/export.py | 54 ++++++++++++++++++++++--------- qgepqwat2ili/utils/ili2db.py | 4 ++- 2 files changed, 42 insertions(+), 16 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 6dc2e6c5..48aa4df7 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -8,9 +8,13 @@ from .. import utils # 4.10.2024 -from ..utils.ili2db import skip_wwtp_structure_ids # 6.11.2024 replaced with / 15.11.2024 get_ws_selected_ww_networkelements added -from ..utils.ili2db import add_to_selection, get_ws_wn_ids, remove_from_selection, get_ws_selected_ww_networkelements +from ..utils.ili2db import ( + add_to_selection, + get_ws_selected_ww_networkelements, + get_ws_wn_ids, + remove_from_selection, +) from ..utils.various import logger from .model_abwasser import get_abwasser_model from .model_qgep import get_qgep_model @@ -81,7 +85,7 @@ def qgep_export(selection=None, labels_file=None, orientation=None): logger.debug( f"subset_ids with wws : {subset_ids}", - ) + ) # Orientation oriented = orientation is not None @@ -772,10 +776,15 @@ def textpos_common(row, t_type, geojson_crs_def): query = qgep_session.query(QGEP.access_aid) if filtered: # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) # ) - query = query.join(QGEP.wastewater_structure,QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id).join(QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) # add sql statement to logger statement = query.statement @@ -818,10 +827,15 @@ def textpos_common(row, t_type, geojson_crs_def): query = qgep_session.query(QGEP.dryweather_flume) if filtered: # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) # ) - query = query.join(QGEP.wastewater_structure,QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id).join(QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) # add sql statement to logger statement = query.statement @@ -862,10 +876,15 @@ def textpos_common(row, t_type, geojson_crs_def): query = qgep_session.query(QGEP.cover) if filtered: # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) # ) - query = 
query.join(QGEP.wastewater_structure,QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id).join(QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) # add sql statement to logger statement = query.statement @@ -915,10 +934,15 @@ def textpos_common(row, t_type, geojson_crs_def): query = qgep_session.query(QGEP.benching) if filtered: # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) # ) - query = query.join(QGEP.wastewater_structure,QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id).join(QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) # add sql statement to logger statement = query.statement diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index 08bc84c9..8bf1e8f7 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -566,7 +566,9 @@ def get_ws_selected_ww_networkelements(selection): Get list of id's of wastewater_structure from selected wastewater_network_elements """ - logger.info(f"get list of id's of wastewater_structure of selected wastewater_network_elements {selection} ...") + logger.info( + f"get list of id's of wastewater_structure of selected wastewater_network_elements {selection} ..." 
+ ) connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) connection.set_session(autocommit=True) cursor = connection.cursor() From 314155ba63daf22bc22df42d90dfceec2e19eba2 Mon Sep 17 00:00:00 2001 From: SJiB Date: Fri, 15 Nov 2024 14:25:44 +0100 Subject: [PATCH 051/127] adapt export abwasserbauwerkref / abwassernetzelementref --- qgepqwat2ili/qgepsia405/export.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 6dc2e6c5..61c7e7d8 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -260,11 +260,11 @@ def wastewater_networkelement_common(row): """ return { - "abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), - # 6.11.2024 Besides wn_id and re_id we also need ws_obj_ids in a separate subset - call it ws_subset_id - # "abwasserbauwerkref": check_fk_in_subsetid( - # subset_ids, row.fk_wastewater_structure__REL - # ), + # "abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), + # 6.11.2024 Besides wn_id and re_id we also need ws_obj_ids in a separate subset - call it ws_subset_id / 15.11.2024 integrated wws in subset_ids + "abwasserbauwerkref": check_fk_in_subsetid( + subset_ids, row.fk_wastewater_structure__REL + ), "bemerkung": truncate(emptystr_to_null(row.remark), 80), "bezeichnung": null_to_emptystr(row.identifier), } @@ -274,7 +274,10 @@ def structure_part_common(row): Returns common attributes for structure_part """ return { - "abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), + #"abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), + "abwasserbauwerkref": check_fk_in_subsetid( + subset_ids, row.fk_wastewater_structure__REL + ), "bemerkung": truncate(emptystr_to_null(row.remark), 80), "bezeichnung": null_to_emptystr(row.identifier), "instandstellung": get_vl(row.renovation_demand__REL), From 7147f3e79decebc251378b61ff1e8e1e16544356 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 13:27:54 +0000 Subject: [PATCH 052/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepsia405/export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 5395fab1..d56a4598 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -278,7 +278,7 @@ def structure_part_common(row): Returns common attributes for structure_part """ return { - #"abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), + # "abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), "abwasserbauwerkref": check_fk_in_subsetid( subset_ids, row.fk_wastewater_structure__REL ), From c0344be6ca6717bb7cd95264d6df3d9d089efe49 Mon Sep 17 00:00:00 2001 From: SJiB Date: Fri, 15 Nov 2024 17:41:35 +0100 Subject: [PATCH 053/127] export sia405 --- qgepqwat2ili/qgepsia405/export.py | 13 +- qgepqwat2ili/qgepsia405/export.py.old | 1299 +++++++++++++++++++++++++ 2 files changed, 1307 insertions(+), 5 deletions(-) create mode 100644 qgepqwat2ili/qgepsia405/export.py.old diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 5395fab1..10171ef0 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -81,8 +81,10 @@ def qgep_export(selection=None, labels_file=None, orientation=None): # 5. 
get and add all id's of connected wastewater_structures (not only of wastewater_network_element (reach, wwn) subset_wws_ids = get_ws_selected_ww_networkelements(subset_ids) + logger.debug( + f"subset_wws_ids: {subset_wws_ids}", + ) subset_ids = add_to_selection(subset_ids, subset_wws_ids) - logger.debug( f"subset_ids with wws : {subset_ids}", ) @@ -278,10 +280,11 @@ def structure_part_common(row): Returns common attributes for structure_part """ return { - #"abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), - "abwasserbauwerkref": check_fk_in_subsetid( - subset_ids, row.fk_wastewater_structure__REL - ), + # abwasserbauwerkref is MANDATORY, so it cannot be set to NULL + "abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), + #"abwasserbauwerkref": check_fk_in_subsetid( + # subset_ids, row.fk_wastewater_structure__REL + #), "bemerkung": truncate(emptystr_to_null(row.remark), 80), "bezeichnung": null_to_emptystr(row.identifier), "instandstellung": get_vl(row.renovation_demand__REL), diff --git a/qgepqwat2ili/qgepsia405/export.py.old b/qgepqwat2ili/qgepsia405/export.py.old new file mode 100644 index 00000000..c3046d8c --- /dev/null +++ b/qgepqwat2ili/qgepsia405/export.py.old @@ -0,0 +1,1299 @@ +import json + +from geoalchemy2.functions import ST_Force2D, ST_GeomFromGeoJSON +from sqlalchemy import or_ +from sqlalchemy.orm import Session +from sqlalchemy.sql import text + +from .. import utils + +# 4.10.2024 +# 6.11.2024 replaced with / 15.11.2024 get_ws_selected_ww_networkelements added +from ..utils.ili2db import ( + add_to_selection, + get_ws_selected_ww_networkelements, + get_ws_wn_ids, + remove_from_selection, +) +from ..utils.various import logger +from .model_abwasser import get_abwasser_model +from .model_qgep import get_qgep_model + + +def qgep_export(selection=None, labels_file=None, orientation=None): + """ + Export data from the QGEP model into the ili2pg model. + + Args: + selection: if provided, limits the export to networkelements that are provided in the selection + """ + + QGEP = get_qgep_model() + ABWASSER = get_abwasser_model() + + # Logging disabled (very slow) + # qgep_session = Session(utils.sqlalchemy.create_engine(logger_name="qgep"), autocommit=False, autoflush=False) + # abwasser_session = Session(utils.sqlalchemy.create_engine(logger_name="abwasser"), autocommit=False, autoflush=False) + qgep_session = Session(utils.sqlalchemy.create_engine(), autocommit=False, autoflush=False) + abwasser_session = Session(utils.sqlalchemy.create_engine(), autocommit=False, autoflush=False) + tid_maker = utils.ili2db.TidMaker(id_attribute="obj_id") + + # backport from tww https://github.com/teksi/wastewater/blob/3acfba249866d299f8a22e249d9f1e475fe7b88d/plugin/teksi_wastewater/interlis/interlis_model_mapping/interlis_exporter_to_intermediate_schema.py#L83 + abwasser_session.execute(text("SET CONSTRAINTS ALL DEFERRED;")) + + # 1. Filtering - check if selection + filtered = selection is not None + subset_ids = selection if selection is not None else [] + + # 2. check if wastewater_structures exist that are not part of SIA 405 Abwasser (in Release 2015 this is the class wwtp_structures, in Release 2020 it will be more - to be extended in tww) + ws_off_sia405abwasser_list = None + ws_off_sia405abwasser_list = get_ws_wn_ids("wwtp_structure") + + # 3. Show ws_off_sia405abwasser_list + logger.debug( + f"ws_off_sia405abwasser_list : {ws_off_sia405abwasser_list}", + ) + + # 4. 
check if filtered + if filtered: + if ws_off_sia405abwasser_list: + # take out ws_off_sia405abwasser_list from selection + subset_ids = remove_from_selection(subset_ids, ws_off_sia405abwasser_list) + # else do nothing + else: + if ws_off_sia405abwasser_list: + # add all data of wastewater_structures to selection + subset_ids = add_to_selection(subset_ids, get_ws_wn_ids("wastewater_structure")) + logger.debug( + f"subset_ids of all wws : {subset_ids}", + ) + # take out ws_off_sia405abwasser_list from selection + subset_ids = remove_from_selection(subset_ids, ws_off_sia405abwasser_list) + logger.debug( + f"subset_ids of all wws minus ws_off_sia405abwasser_list: {subset_ids}", + ) + # add reach_ids + # subset_ids = add_to_selection(subset_ids, get_cl_re_ids("channel")) + # treat export as with a selection + filtered = True + + # else do nothing + + # 5. get and add all id's of connected wastewater_structures (not only of wastewater_network_element (reach, wwn) + subset_wws_ids = get_ws_selected_ww_networkelements(subset_ids) + subset_ids = add_to_selection(subset_ids, subset_wws_ids) + + logger.debug( + f"subset_ids with wws : {subset_ids}", + ) + + # Orientation + oriented = orientation is not None + if oriented: + labelorientation = orientation + else: + labelorientation = 0 + + def get_tid(relation): + """ + Makes a tid for a relation + """ + if relation is None: + return None + + return tid_maker.tid_for_row(relation) + + def get_vl(relation): + """ + Gets a literal value from a value list relation + """ + if relation is None: + return None + return relation.value_de + + def null_to_emptystr(val): + """ + Converts nulls to blank strings and raises a warning + """ + if val is None: + logger.warning( + "A mandatory value was null. It will be cast to a blank string, and probably cause validation errors", + ) + val = "" + return val + + def emptystr_to_null(val): + """ + Converts blank strings to nulls and raises a warning + + This is needed as is seems ili2pg 4.4.6 crashes with emptystrings under certain circumstances (see https://github.com/QGEP/qgepqwat2ili/issues/33) + """ + if val == "": + logger.warning( + "An empty string was converted to NULL, to workaround ili2pg issue. 
This should have no impact on output.", + ) + val = None + return val + + def truncate(val, max_length): + """ + Raises a warning if values gets truncated + """ + if val is None: + return None + if len(val) > max_length: + # _log() got an unexpected keyword argument 'stacklevel' + # logger.warning(f"Value '{val}' exceeds expected length ({max_length})", stacklevel=2) + logger.warning(f"Value '{val}' exceeds expected length ({max_length})") + return val[0:max_length] + + def modulo_angle(val): + """ + Returns an angle between 0 and 359.9 (for Orientierung in Base_d-20181005.ili) + """ + if val is None: + return None + + # add orientation + val = val + float(labelorientation) + + val = val % 360.0 + if val > 359.9: + val = 0 + + logger.info(f"modulo_angle - added orientation: {labelorientation}") + print("modulo_angle - added orientation: ", str(labelorientation)) + + return val + + def check_fk_in_subsetid(subset, relation): + """ + checks, whether foreignkey is in the subset_ids - if yes it return the tid of the foreignkey, if no it will return None + """ + # first check for None, as is get_tid + if relation is None: + return None + + logger.debug(f"check_fk_in_subsetid - Subset ID's '{subset}'") + # get the value of the fk_ attribute as str out of the relation to be able to check whether it is in the subset + fremdschluesselstr = getattr(relation, "obj_id") + logger.debug(f"check_fk_in_subsetid - fremdschluesselstr '{fremdschluesselstr}'") + + if fremdschluesselstr in subset: + logger.debug(f"check_fk_in_subsetid - '{fremdschluesselstr}' is in subset ") + logger.debug(f"check_fk_in_subsetid - tid = '{tid_maker.tid_for_row(relation)}' ") + return tid_maker.tid_for_row(relation) + else: + if filtered: + logger.warning( + f"check_fk_in_subsetid - '{fremdschluesselstr}' is not in filtered subset - replaced with None instead!" + ) + return None + else: + logger.warning( + f"check_fk_in_subsetid - '{fremdschluesselstr}' is not in datamodel - replaced with None instead!" + ) + return None + + def create_metaattributes(row): + metaattribute = ABWASSER.metaattribute( + # FIELDS TO MAP TO ABWASSER.metaattribute + # --- metaattribute --- + # datenherr=getattr(row.fk_dataowner__REL, "name", "unknown"), # TODO : is unknown ok ? + # datenlieferant=getattr(row.fk_provider__REL, "name", "unknown"), # TODO : is unknown ok ? + # obj_id instead of name + datenherr=getattr( + row.fk_dataowner__REL, "obj_id", "unknown" + ), # TODO : is unknown ok ? + datenlieferant=getattr( + row.fk_provider__REL, "obj_id", "unknown" + ), # TODO : is unknown ok ? + letzte_aenderung=row.last_modification, + sia405_baseclass_metaattribute=get_tid(row), + # OD : is this OK ? Don't we need a different t_id from what inserted above in organisation ? if so, consider adding a "for_class" arg to tid_for_row + t_id=get_tid(row), + t_seq=0, + ) + abwasser_session.add(metaattribute) + + def base_common(row, type_name): + """ + Returns common attributes for base + """ + return { + "t_ili_tid": row.obj_id, + "t_type": type_name, + "obj_id": row.obj_id, + "t_id": get_tid(row), + } + + def wastewater_structure_common(row): + """ + Returns common attributes for wastewater_structure + ATTENTION : Mapping of 3D wastewater_structure->abwasserbauerk + is not fully implemented. 
+ """ + return { + # --- abwasserbauwerk --- + "akten": row.records, + "astatus": get_vl(row.status__REL), + "baujahr": row.year_of_construction, + "baulicherzustand": get_vl(row.structure_condition__REL), + "baulos": row.contract_section, + "bemerkung": truncate(emptystr_to_null(row.remark), 80), + "betreiberref": get_tid(row.fk_operator__REL), + "bezeichnung": null_to_emptystr(row.identifier), + "bruttokosten": row.gross_costs, + "detailgeometrie": ST_Force2D(row.detail_geometry_geometry), + "eigentuemerref": get_tid(row.fk_owner__REL), + "ersatzjahr": row.year_of_replacement, + "finanzierung": get_vl(row.financing__REL), + "inspektionsintervall": row.inspection_interval, + "sanierungsbedarf": get_vl(row.renovation_necessity__REL), + "standortname": row.location_name, + "subventionen": row.subsidies, + "wbw_basisjahr": row.rv_base_year, + "wbw_bauart": get_vl(row.rv_construction_type__REL), + "wiederbeschaffungswert": row.replacement_value, + "zugaenglichkeit": get_vl(row.accessibility__REL), + } + + def wastewater_networkelement_common(row): + """ + Returns common attributes for network_element + """ + + return { + # "abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), + # 6.11.2024 Besides wn_id and re_id we also need ws_obj_ids in a separate subset - call it ws_subset_id / 15.11.2024 integrated wws in subset_ids + "abwasserbauwerkref": check_fk_in_subsetid( + subset_ids, row.fk_wastewater_structure__REL + ), + "bemerkung": truncate(emptystr_to_null(row.remark), 80), + "bezeichnung": null_to_emptystr(row.identifier), + } + + def structure_part_common(row): + """ + Returns common attributes for structure_part + """ + return { + #"abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), + "abwasserbauwerkref": check_fk_in_subsetid( + subset_ids, row.fk_wastewater_structure__REL + ), + "bemerkung": truncate(emptystr_to_null(row.remark), 80), + "bezeichnung": null_to_emptystr(row.identifier), + "instandstellung": get_vl(row.renovation_demand__REL), + } + + def textpos_common(row, t_type, geojson_crs_def): + """ + Returns common attributes for textpos + """ + t_id = tid_maker.next_tid() + return { + "t_id": t_id, + "t_type": t_type, + "t_ili_tid": t_id, + # --- TextPos --- + "textpos": ST_GeomFromGeoJSON( + json.dumps( + { + "type": "Point", + "coordinates": row["geometry"]["coordinates"], + "crs": geojson_crs_def, + } + ) + ), + "textori": modulo_angle(row["properties"]["LabelRotation"]), + "texthali": "Left", # can be Left/Center/Right + "textvali": "Bottom", # can be Top,Cap,Half,Base,Bottom + # --- SIA405_TextPos --- + "plantyp": row["properties"]["scale"], + "textinhalt": row["properties"]["LabelText"], + "bemerkung": None, + } + + # ADAPTED FROM 052a_sia405_abwasser_2015_2_d_interlisexport2.sql + logger.info("Exporting QGEP.organisation -> ABWASSER.organisation, ABWASSER.metaattribute") + query = qgep_session.query(QGEP.organisation) + for row in query: + + # AVAILABLE FIELDS IN QGEP.organisation + + # --- organisation --- + # fk_dataowner, fk_provider, identifier, last_modification, obj_id, remark, uid + + # --- _bwrel_ --- + # accident__BWREL_fk_dataowner, accident__BWREL_fk_provider, administrative_office__BWREL_obj_id, aquifier__BWREL_fk_dataowner, aquifier__BWREL_fk_provider, bathing_area__BWREL_fk_dataowner, bathing_area__BWREL_fk_provider, canton__BWREL_obj_id, catchment_area__BWREL_fk_dataowner, catchment_area__BWREL_fk_provider, connection_object__BWREL_fk_dataowner, connection_object__BWREL_fk_operator, connection_object__BWREL_fk_owner, 
connection_object__BWREL_fk_provider, control_center__BWREL_fk_dataowner, control_center__BWREL_fk_provider, cooperative__BWREL_obj_id, damage__BWREL_fk_dataowner, damage__BWREL_fk_provider, data_media__BWREL_fk_dataowner, data_media__BWREL_fk_provider, file__BWREL_fk_dataowner, file__BWREL_fk_provider, fish_pass__BWREL_fk_dataowner, fish_pass__BWREL_fk_provider, hazard_source__BWREL_fk_dataowner, hazard_source__BWREL_fk_owner, hazard_source__BWREL_fk_provider, hq_relation__BWREL_fk_dataowner, hq_relation__BWREL_fk_provider, hydr_geom_relation__BWREL_fk_dataowner, hydr_geom_relation__BWREL_fk_provider, hydr_geometry__BWREL_fk_dataowner, hydr_geometry__BWREL_fk_provider, hydraulic_char_data__BWREL_fk_dataowner, hydraulic_char_data__BWREL_fk_provider, maintenance_event__BWREL_fk_dataowner, maintenance_event__BWREL_fk_operating_company, maintenance_event__BWREL_fk_provider, measurement_result__BWREL_fk_dataowner, measurement_result__BWREL_fk_provider, measurement_series__BWREL_fk_dataowner, measurement_series__BWREL_fk_provider, measuring_device__BWREL_fk_dataowner, measuring_device__BWREL_fk_provider, measuring_point__BWREL_fk_dataowner, measuring_point__BWREL_fk_operator, measuring_point__BWREL_fk_provider, mechanical_pretreatment__BWREL_fk_dataowner, mechanical_pretreatment__BWREL_fk_provider, municipality__BWREL_obj_id, mutation__BWREL_fk_dataowner, mutation__BWREL_fk_provider, organisation__BWREL_fk_dataowner, organisation__BWREL_fk_provider, overflow__BWREL_fk_dataowner, overflow__BWREL_fk_provider, overflow_char__BWREL_fk_dataowner, overflow_char__BWREL_fk_provider, pipe_profile__BWREL_fk_dataowner, pipe_profile__BWREL_fk_provider, private__BWREL_obj_id, profile_geometry__BWREL_fk_dataowner, profile_geometry__BWREL_fk_provider, reach_point__BWREL_fk_dataowner, reach_point__BWREL_fk_provider, retention_body__BWREL_fk_dataowner, retention_body__BWREL_fk_provider, river_bank__BWREL_fk_dataowner, river_bank__BWREL_fk_provider, river_bed__BWREL_fk_dataowner, river_bed__BWREL_fk_provider, sector_water_body__BWREL_fk_dataowner, sector_water_body__BWREL_fk_provider, sludge_treatment__BWREL_fk_dataowner, sludge_treatment__BWREL_fk_provider, structure_part__BWREL_fk_dataowner, structure_part__BWREL_fk_provider, substance__BWREL_fk_dataowner, substance__BWREL_fk_provider, surface_runoff_parameters__BWREL_fk_dataowner, surface_runoff_parameters__BWREL_fk_provider, surface_water_bodies__BWREL_fk_dataowner, surface_water_bodies__BWREL_fk_provider, throttle_shut_off_unit__BWREL_fk_dataowner, throttle_shut_off_unit__BWREL_fk_provider, txt_symbol__BWREL_fk_dataowner, txt_symbol__BWREL_fk_provider, waste_water_association__BWREL_obj_id, waste_water_treatment__BWREL_fk_dataowner, waste_water_treatment__BWREL_fk_provider, waste_water_treatment_plant__BWREL_obj_id, wastewater_networkelement__BWREL_fk_dataowner, wastewater_networkelement__BWREL_fk_provider, wastewater_structure__BWREL_fk_dataowner, wastewater_structure__BWREL_fk_operator, wastewater_structure__BWREL_fk_owner, wastewater_structure__BWREL_fk_provider, wastewater_structure_symbol__BWREL_fk_dataowner, wastewater_structure_symbol__BWREL_fk_provider, water_catchment__BWREL_fk_dataowner, water_catchment__BWREL_fk_provider, water_control_structure__BWREL_fk_dataowner, water_control_structure__BWREL_fk_provider, water_course_segment__BWREL_fk_dataowner, water_course_segment__BWREL_fk_provider, wwtp_energy_use__BWREL_fk_dataowner, wwtp_energy_use__BWREL_fk_provider, zone__BWREL_fk_dataowner, zone__BWREL_fk_provider + + # --- _rel_ --- + # 
fk_dataowner__REL, fk_provider__REL + + organisation = ABWASSER.organisation( + # FIELDS TO MAP TO ABWASSER.organisation + # --- baseclass --- + # --- sia405_baseclass --- + **base_common(row, "organisation"), + # --- organisation --- + auid=row.uid, + bemerkung=truncate(emptystr_to_null(row.remark), 80), + bezeichnung=null_to_emptystr(row.identifier), + ) + abwasser_session.add(organisation) + create_metaattributes(row) + print(".", end="") + logger.info("done") + abwasser_session.flush() + + logger.info("Exporting QGEP.channel -> ABWASSER.kanal, ABWASSER.metaattribute") + query = qgep_session.query(QGEP.channel) + if filtered: + query = query.join(QGEP.wastewater_networkelement).filter( + QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + ) + for row in query: + + # AVAILABLE FIELDS IN QGEP.channel + + # --- wastewater_structure --- + # _bottom_label, _cover_label, _depth, _function_hierarchic, _input_label, _label, _output_label, _usage_current, accessibility, contract_section, detail_geometry_geometry, financing, fk_dataowner, fk_main_cover, fk_main_wastewater_node, fk_operator, fk_owner, fk_provider, gross_costs, identifier, inspection_interval, last_modification, location_name, records, remark, renovation_necessity, replacement_value, rv_base_year, rv_construction_type, status, structure_condition, subsidies, year_of_construction, year_of_replacement + + # --- _bwrel_ --- + # measuring_point__BWREL_fk_wastewater_structure, mechanical_pretreatment__BWREL_fk_wastewater_structure, re_maintenance_event_wastewater_structure__BWREL_fk_wastewater_structure, structure_part__BWREL_fk_wastewater_structure, txt_symbol__BWREL_fk_wastewater_structure, txt_text__BWREL_fk_wastewater_structure, wastewater_networkelement__BWREL_fk_wastewater_structure, wastewater_structure_symbol__BWREL_fk_wastewater_structure, wastewater_structure_text__BWREL_fk_wastewater_structure, wwtp_structure_kind__BWREL_obj_id + + # --- _rel_ --- + # accessibility__REL, bedding_encasement__REL, connection_type__REL, financing__REL, fk_dataowner__REL, fk_main_cover__REL, fk_main_wastewater_node__REL, fk_operator__REL, fk_owner__REL, fk_provider__REL, function_hierarchic__REL, function_hydraulic__REL, renovation_necessity__REL, rv_construction_type__REL, status__REL, structure_condition__REL, usage_current__REL, usage_planned__REL + + kanal = ABWASSER.kanal( + # FIELDS TO MAP TO ABWASSER.kanal + # --- baseclass --- + # --- sia405_baseclass --- + **base_common(row, "kanal"), + # --- abwasserbauwerk --- + **wastewater_structure_common(row), + # --- kanal --- + bettung_umhuellung=get_vl(row.bedding_encasement__REL), + funktionhierarchisch=get_vl(row.function_hierarchic__REL), + funktionhydraulisch=get_vl(row.function_hydraulic__REL), + nutzungsart_geplant=get_vl(row.usage_planned__REL), + nutzungsart_ist=get_vl(row.usage_current__REL), + rohrlaenge=row.pipe_length, + spuelintervall=row.jetting_interval, + verbindungsart=get_vl(row.connection_type__REL), + ) + abwasser_session.add(kanal) + create_metaattributes(row) + print(".", end="") + logger.info("done") + abwasser_session.flush() + + logger.info("Exporting QGEP.manhole -> ABWASSER.normschacht, ABWASSER.metaattribute") + query = qgep_session.query(QGEP.manhole) + if filtered: + query = query.join(QGEP.wastewater_networkelement).filter( + QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + ) + for row in query: + normschacht = ABWASSER.normschacht( + # --- baseclass --- + # --- sia405_baseclass --- + **base_common(row, "normschacht"), + # --- abwasserbauwerk --- + 
**wastewater_structure_common(row), + # --- normschacht --- + dimension1=row.dimension1, + dimension2=row.dimension2, + funktion=get_vl(row.function__REL), + material=get_vl(row.material__REL), + oberflaechenzulauf=get_vl(row.surface_inflow__REL), + ) + abwasser_session.add(normschacht) + create_metaattributes(row) + print(".", end="") + logger.info("done") + abwasser_session.flush() + + logger.info("Exporting QGEP.discharge_point -> ABWASSER.einleitstelle, ABWASSER.metaattribute") + query = qgep_session.query(QGEP.discharge_point) + if filtered: + query = query.join(QGEP.wastewater_networkelement).filter( + QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + ) + for row in query: + einleitstelle = ABWASSER.einleitstelle( + # --- baseclass --- + # --- sia405_baseclass --- + **base_common(row, "einleitstelle"), + # --- abwasserbauwerk --- + **wastewater_structure_common(row), + # --- einleitstelle --- + hochwasserkote=row.highwater_level, + relevanz=get_vl(row.relevance__REL), + terrainkote=row.terrain_level, + wasserspiegel_hydraulik=row.waterlevel_hydraulic, + ) + abwasser_session.add(einleitstelle) + create_metaattributes(row) + print(".", end="") + logger.info("done") + abwasser_session.flush() + + logger.info( + "Exporting QGEP.special_structure -> ABWASSER.spezialbauwerk, ABWASSER.metaattribute" + ) + query = qgep_session.query(QGEP.special_structure) + if filtered: + query = query.join(QGEP.wastewater_networkelement).filter( + QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + ) + for row in query: + + # AVAILABLE FIELDS IN QGEP.special_structure + + # --- wastewater_structure --- + # _bottom_label, _cover_label, _depth, _function_hierarchic, _input_label, _label, _output_label, _usage_current, accessibility, contract_section, detail_geometry_geometry, financing, fk_dataowner, fk_main_cover, fk_main_wastewater_node, fk_operator, fk_owner, fk_provider, gross_costs, identifier, inspection_interval, last_modification, location_name, records, remark, renovation_necessity, replacement_value, rv_base_year, rv_construction_type, status, structure_condition, subsidies, year_of_construction, year_of_replacement + + # --- special_structure --- + # bypass, emergency_spillway, function, obj_id, stormwater_tank_arrangement, upper_elevation + + # --- _bwrel_ --- + # measuring_point__BWREL_fk_wastewater_structure, mechanical_pretreatment__BWREL_fk_wastewater_structure, re_maintenance_event_wastewater_structure__BWREL_fk_wastewater_structure, structure_part__BWREL_fk_wastewater_structure, txt_symbol__BWREL_fk_wastewater_structure, txt_text__BWREL_fk_wastewater_structure, wastewater_networkelement__BWREL_fk_wastewater_structure, wastewater_structure_symbol__BWREL_fk_wastewater_structure, wastewater_structure_text__BWREL_fk_wastewater_structure, wwtp_structure_kind__BWREL_obj_id + + # --- _rel_ --- + # accessibility__REL, bypass__REL, emergency_spillway__REL, financing__REL, fk_dataowner__REL, fk_main_cover__REL, fk_main_wastewater_node__REL, fk_operator__REL, fk_owner__REL, fk_provider__REL, function__REL, renovation_necessity__REL, rv_construction_type__REL, status__REL, stormwater_tank_arrangement__REL, structure_condition__REL + + # QGEP field special_structure.upper_elevation is a 3D attribute and has no equivalent in the INTERLIS 2D model release used. It will be ignored for now and not supported with QGEP. 
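A note on the filtered queries introduced in the preceding patches: they replace query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement) with a join that names an explicit ON clause (QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id). A likely reason is that more than one foreign-key relationship can link the tables involved (for example cover and wastewater_structure are related both through structure_part.fk_wastewater_structure and through wastewater_structure.fk_main_cover), so SQLAlchemy cannot infer which join condition is meant. The following self-contained sketch is not taken from qgepqwat2ili; the two model classes are simplified, invented stand-ins used only to reproduce that ambiguity and to show how an explicit ON clause resolves it.

from sqlalchemy import Column, ForeignKey, String, join, select
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class WastewaterStructure(Base):
    __tablename__ = "wastewater_structure"
    obj_id = Column(String, primary_key=True)
    # foreign-key path 1: a structure points to its main cover
    fk_main_cover = Column(String, ForeignKey("cover.obj_id"))


class Cover(Base):
    __tablename__ = "cover"
    obj_id = Column(String, primary_key=True)
    # foreign-key path 2: a cover (structure part) points back to its structure
    fk_wastewater_structure = Column(String, ForeignKey("wastewater_structure.obj_id"))


try:
    # Without an ON clause the join is ambiguous: two foreign-key constraints
    # relate the two tables, so SQLAlchemy refuses to guess.
    join(Cover.__table__, WastewaterStructure.__table__)
except Exception as exc:  # expected: sqlalchemy.exc.AmbiguousForeignKeysError
    print(f"implicit join fails: {exc}")

# An explicit ON clause, as used in the patched queries, removes the ambiguity.
stmt = select(Cover).join(
    WastewaterStructure,
    Cover.fk_wastewater_structure == WastewaterStructure.obj_id,
)
print(stmt)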
+ + spezialbauwerk = ABWASSER.spezialbauwerk( + # FIELDS TO MAP TO ABWASSER.spezialbauwerk + # --- baseclass --- + # --- sia405_baseclass --- + **base_common(row, "spezialbauwerk"), + # --- abwasserbauwerk --- + **wastewater_structure_common(row), + # --- spezialbauwerk --- + # TODO : WARNING : upper_elevation is not mapped + bypass=get_vl(row.bypass__REL), + funktion=get_vl(row.function__REL), + notueberlauf=get_vl(row.emergency_spillway__REL), + regenbecken_anordnung=get_vl(row.stormwater_tank_arrangement__REL), + ) + abwasser_session.add(spezialbauwerk) + create_metaattributes(row) + print(".", end="") + logger.info("done") + abwasser_session.flush() + + logger.info( + "Exporting QGEP.infiltration_installation -> ABWASSER.versickerungsanlage, ABWASSER.metaattribute" + ) + query = qgep_session.query(QGEP.infiltration_installation) + if filtered: + query = query.join(QGEP.wastewater_networkelement).filter( + QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + ) + for row in query: + + # AVAILABLE FIELDS IN QGEP.infiltration_installation + + # --- wastewater_structure --- + # _bottom_label, _cover_label, _depth, _function_hierarchic, _input_label, _label, _output_label, _usage_current, accessibility, contract_section, detail_geometry_geometry, financing, fk_dataowner, fk_main_cover, fk_main_wastewater_node, fk_operator, fk_owner, fk_provider, gross_costs, identifier, inspection_interval, last_modification, location_name, records, remark, renovation_necessity, replacement_value, rv_base_year, rv_construction_type, status, structure_condition, subsidies, year_of_construction, year_of_replacement + + # --- infiltration_installation --- + # absorption_capacity, defects, dimension1, dimension2, distance_to_aquifer, effective_area, emergency_spillway, fk_aquifier, kind, labeling, obj_id, seepage_utilization, upper_elevation, vehicle_access, watertightness + + # --- _bwrel_ --- + # measuring_point__BWREL_fk_wastewater_structure, mechanical_pretreatment__BWREL_fk_infiltration_installation, mechanical_pretreatment__BWREL_fk_wastewater_structure, re_maintenance_event_wastewater_structure__BWREL_fk_wastewater_structure, retention_body__BWREL_fk_infiltration_installation, structure_part__BWREL_fk_wastewater_structure, txt_symbol__BWREL_fk_wastewater_structure, txt_text__BWREL_fk_wastewater_structure, wastewater_networkelement__BWREL_fk_wastewater_structure, wastewater_structure_symbol__BWREL_fk_wastewater_structure, wastewater_structure_text__BWREL_fk_wastewater_structure, wwtp_structure_kind__BWREL_obj_id + + # --- _rel_ --- + # accessibility__REL, defects__REL, emergency_spillway__REL, financing__REL, fk_aquifier__REL, fk_dataowner__REL, fk_main_cover__REL, fk_main_wastewater_node__REL, fk_operator__REL, fk_owner__REL, fk_provider__REL, kind__REL, labeling__REL, renovation_necessity__REL, rv_construction_type__REL, seepage_utilization__REL, status__REL, structure_condition__REL, vehicle_access__REL, watertightness__REL + + logger.warning( + "QGEP field infiltration_installation.upper_elevation has no equivalent in the interlis model. It will be ignored." 
+ ) + versickerungsanlage = ABWASSER.versickerungsanlage( + # FIELDS TO MAP TO ABWASSER.versickerungsanlage + # --- baseclass --- + # --- sia405_baseclass --- + **base_common(row, "versickerungsanlage"), + # --- abwasserbauwerk --- + **wastewater_structure_common(row), + # --- versickerungsanlage --- + # TODO : NOT MAPPED : upper_elevation + art=get_vl(row.kind__REL), + beschriftung=get_vl(row.labeling__REL), + dimension1=row.dimension1, + dimension2=row.dimension2, + gwdistanz=row.distance_to_aquifer, + maengel=get_vl(row.defects__REL), + notueberlauf=get_vl(row.emergency_spillway__REL), + saugwagen=get_vl(row.vehicle_access__REL), + schluckvermoegen=row.absorption_capacity, + versickerungswasser=get_vl(row.seepage_utilization__REL), + wasserdichtheit=get_vl(row.watertightness__REL), + wirksameflaeche=row.effective_area, + ) + abwasser_session.add(versickerungsanlage) + create_metaattributes(row) + print(".", end="") + logger.info("done") + abwasser_session.flush() + + logger.info("Exporting QGEP.pipe_profile -> ABWASSER.rohrprofil, ABWASSER.metaattribute") + query = qgep_session.query(QGEP.pipe_profile) + if filtered: + query = query.join(QGEP.reach).filter( + QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + ) + for row in query: + + # AVAILABLE FIELDS IN QGEP.pipe_profile + + # --- pipe_profile --- + # fk_dataowner, fk_provider, height_width_ratio, identifier, last_modification, obj_id, profile_type, remark + + # --- _bwrel_ --- + # profile_geometry__BWREL_fk_pipe_profile, reach__BWREL_fk_pipe_profile + + # --- _rel_ --- + # fk_dataowner__REL, fk_provider__REL, profile_type__REL + + rohrprofil = ABWASSER.rohrprofil( + # FIELDS TO MAP TO ABWASSER.rohrprofil + # --- baseclass --- + # --- sia405_baseclass --- + **base_common(row, "rohrprofil"), + # --- rohrprofil --- + bemerkung=truncate(emptystr_to_null(row.remark), 80), + bezeichnung=null_to_emptystr(row.identifier), + hoehenbreitenverhaeltnis=row.height_width_ratio, + profiltyp=get_vl(row.profile_type__REL), + ) + abwasser_session.add(rohrprofil) + create_metaattributes(row) + print(".", end="") + logger.info("done") + abwasser_session.flush() + + logger.info("Exporting QGEP.reach_point -> ABWASSER.haltungspunkt, ABWASSER.metaattribute") + query = qgep_session.query(QGEP.reach_point) + if filtered: + query = query.join( + QGEP.reach, + or_( + QGEP.reach_point.obj_id == QGEP.reach.fk_reach_point_from, + QGEP.reach_point.obj_id == QGEP.reach.fk_reach_point_to, + ), + ).filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) + for row in query: + + # AVAILABLE FIELDS IN QGEP.reach_point + + # --- reach_point --- + # elevation_accuracy, fk_dataowner, fk_provider, fk_wastewater_networkelement, identifier, last_modification, level, obj_id, outlet_shape, position_of_connection, remark, situation_geometry + + # --- _bwrel_ --- + # examination__BWREL_fk_reach_point, reach__BWREL_fk_reach_point_from, reach__BWREL_fk_reach_point_to + + # --- _rel_ --- + # elevation_accuracy__REL, fk_dataowner__REL, fk_provider__REL, fk_wastewater_networkelement__REL, outlet_shape__REL + + haltungspunkt = ABWASSER.haltungspunkt( + # FIELDS TO MAP TO ABWASSER.haltungspunkt + # --- baseclass --- + # --- sia405_baseclass --- + **base_common(row, "haltungspunkt"), + # --- haltungspunkt --- + # changed call from get_tid to check_fk_in_subsetid so it does not write foreignkeys on elements that do not exist + # abwassernetzelementref=get_tid(row.fk_wastewater_networkelement__REL), + abwassernetzelementref=check_fk_in_subsetid( + subset_ids, 
row.fk_wastewater_networkelement__REL + ), + auslaufform=get_vl(row.outlet_shape__REL), + bemerkung=truncate(emptystr_to_null(row.remark), 80), + bezeichnung=null_to_emptystr(row.identifier), + hoehengenauigkeit=get_vl(row.elevation_accuracy__REL), + kote=row.level, + lage=ST_Force2D(row.situation_geometry), + lage_anschluss=row.position_of_connection, + ) + abwasser_session.add(haltungspunkt) + create_metaattributes(row) + print(".", end="") + logger.info("done") + abwasser_session.flush() + + logger.info( + "Exporting QGEP.wastewater_node -> ABWASSER.abwasserknoten, ABWASSER.metaattribute" + ) + query = qgep_session.query(QGEP.wastewater_node) + if filtered: + query = query.filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) + for row in query: + + # AVAILABLE FIELDS IN QGEP.wastewater_node + + # --- wastewater_networkelement --- + # fk_dataowner, fk_provider, fk_wastewater_structure, identifier, last_modification, remark + + # --- wastewater_node --- + + # --- _bwrel_ --- + # catchment_area__BWREL_fk_wastewater_networkelement_rw_current, catchment_area__BWREL_fk_wastewater_networkelement_rw_planned, catchment_area__BWREL_fk_wastewater_networkelement_ww_current, catchment_area__BWREL_fk_wastewater_networkelement_ww_planned, connection_object__BWREL_fk_wastewater_networkelement, hydraulic_char_data__BWREL_fk_wastewater_node, overflow__BWREL_fk_overflow_to, overflow__BWREL_fk_wastewater_node, reach_point__BWREL_fk_wastewater_networkelement, throttle_shut_off_unit__BWREL_fk_wastewater_node, wastewater_structure__BWREL_fk_main_wastewater_node + + # --- _rel_ --- + # fk_dataowner__REL, fk_hydr_geometry__REL, fk_provider__REL, fk_wastewater_structure__REL + + # QGEP field wastewater_node.fk_hydr_geometry has no equivalent in the interlis model. It will be ignored. 
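+        # Editor's sketch (not part of the original patch): lage below is exported
+        # through ST_Force2D because the INTERLIS 2015 models used here only carry
+        # 2D coordinates, so any Z value of situation_geometry is dropped.
+        # Assuming the usual geoalchemy2 import at the top of this module, the
+        # call reduces the geometry roughly like this:
+        #
+        #     from geoalchemy2.functions import ST_Force2D  # assumed import
+        #     lage = ST_Force2D(row.situation_geometry)     # POINT Z -> POINT
+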
+ + abwasserknoten = ABWASSER.abwasserknoten( + # FIELDS TO MAP TO ABWASSER.abwasserknoten + # --- baseclass --- + # --- sia405_baseclass --- + **base_common(row, "abwasserknoten"), + # --- abwassernetzelement --- + **wastewater_networkelement_common(row), + # --- abwasserknoten --- + # TODO : WARNING : fk_hydr_geometry is not mapped + lage=ST_Force2D(row.situation_geometry), + rueckstaukote=row.backflow_level, + sohlenkote=row.bottom_level, + ) + abwasser_session.add(abwasserknoten) + create_metaattributes(row) + print(".", end="") + logger.info("done") + abwasser_session.flush() + + logger.info("Exporting QGEP.reach -> ABWASSER.haltung, ABWASSER.metaattribute") + query = qgep_session.query(QGEP.reach) + if filtered: + query = query.filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) + for row in query: + + # AVAILABLE FIELDS IN QGEP.reach + + # --- wastewater_networkelement --- + # fk_dataowner, fk_provider, fk_wastewater_structure, identifier, last_modification, remark + + # --- reach --- + # clear_height, coefficient_of_friction, elevation_determination, fk_pipe_profile, fk_reach_point_from, fk_reach_point_to, horizontal_positioning, inside_coating, length_effective, material, obj_id, progression_geometry, reliner_material, reliner_nominal_size, relining_construction, relining_kind, ring_stiffness, slope_building_plan, wall_roughness + + # --- _bwrel_ --- + # catchment_area__BWREL_fk_wastewater_networkelement_rw_current, catchment_area__BWREL_fk_wastewater_networkelement_rw_planned, catchment_area__BWREL_fk_wastewater_networkelement_ww_current, catchment_area__BWREL_fk_wastewater_networkelement_ww_planned, connection_object__BWREL_fk_wastewater_networkelement, reach_point__BWREL_fk_wastewater_networkelement, reach_text__BWREL_fk_reach, txt_text__BWREL_fk_reach + + # --- _rel_ --- + # elevation_determination__REL, fk_dataowner__REL, fk_pipe_profile__REL, fk_provider__REL, fk_reach_point_from__REL, fk_reach_point_to__REL, fk_wastewater_structure__REL, horizontal_positioning__REL, inside_coating__REL, material__REL, reliner_material__REL, relining_construction__REL, relining_kind__REL + + # QGEP field reach.elevation_determination has no equivalent in the interlis model. It will be ignored. + + haltung = ABWASSER.haltung( + # FIELDS TO MAP TO ABWASSER.haltung + # --- baseclass --- + # --- sia405_baseclass --- + **base_common(row, "haltung"), + # --- abwassernetzelement --- + **wastewater_networkelement_common(row), + # --- haltung --- + # NOT MAPPED : elevation_determination + innenschutz=get_vl(row.inside_coating__REL), + laengeeffektiv=row.length_effective, + lagebestimmung=get_vl(row.horizontal_positioning__REL), + lichte_hoehe=row.clear_height, + material=get_vl(row.material__REL), + nachhaltungspunktref=get_tid(row.fk_reach_point_to__REL), + plangefaelle=row.slope_building_plan, # TODO : check, does this need conversion ? 
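+            # Editor's note on the TODO above (not part of the original patch):
+            # assuming QGEP's slope_building_plan and the INTERLIS attribute
+            # Plangefaelle use the same unit, the raw value can be passed through
+            # unchanged as done here; if the units were found to differ, a
+            # conversion would belong at this point, e.g. (hypothetical):
+            #
+            #     plangefaelle=row.slope_building_plan * CONVERSION_FACTOR,
+            #
+            # where CONVERSION_FACTOR is purely illustrative and not defined
+            # anywhere in this module.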
+ reibungsbeiwert=row.coefficient_of_friction, + reliner_art=get_vl(row.relining_kind__REL), + reliner_bautechnik=get_vl(row.relining_construction__REL), + reliner_material=get_vl(row.reliner_material__REL), + reliner_nennweite=row.reliner_nominal_size, + ringsteifigkeit=row.ring_stiffness, + rohrprofilref=get_tid(row.fk_pipe_profile__REL), + verlauf=ST_Force2D(row.progression_geometry), + vonhaltungspunktref=get_tid(row.fk_reach_point_from__REL), + wandrauhigkeit=row.wall_roughness, + ) + abwasser_session.add(haltung) + create_metaattributes(row) + print(".", end="") + logger.info("done") + abwasser_session.flush() + + logger.info( + "Exporting QGEP.dryweather_downspout -> ABWASSER.trockenwetterfallrohr, ABWASSER.metaattribute" + ) + + query = qgep_session.query(QGEP.dryweather_downspout) + if filtered: + logger.info(f"filtered: subset_ids = {subset_ids}") + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) + ) + # add sql statement to logger + statement = query.statement + logger.info(f" selection query = {statement}") + for row in query: + + # AVAILABLE FIELDS IN QGEP.dryweather_downspout + + # --- structure_part --- + # fk_dataowner, fk_provider, fk_wastewater_structure, identifier, last_modification, remark, renovation_demand + + # --- dryweather_downspout --- + # diameter, obj_id + + # --- _bwrel_ --- + # access_aid_kind__BWREL_obj_id, backflow_prevention__BWREL_obj_id, benching_kind__BWREL_obj_id, dryweather_flume_material__BWREL_obj_id, electric_equipment__BWREL_obj_id, electromechanical_equipment__BWREL_obj_id, solids_retention__BWREL_obj_id, tank_cleaning__BWREL_obj_id, tank_emptying__BWREL_obj_id + + # --- _rel_ --- + # fk_dataowner__REL, fk_provider__REL, fk_wastewater_structure__REL, renovation_demand__REL + + trockenwetterfallrohr = ABWASSER.trockenwetterfallrohr( + # FIELDS TO MAP TO ABWASSER.trockenwetterfallrohr + # --- baseclass --- + # --- sia405_baseclass --- + **base_common(row, "trockenwetterfallrohr"), + # --- bauwerksteil --- + **structure_part_common(row), + # --- trockenwetterfallrohr --- + durchmesser=row.diameter, + ) + abwasser_session.add(trockenwetterfallrohr) + create_metaattributes(row) + print(".", end="") + logger.info("done") + abwasser_session.flush() + + logger.info("Exporting QGEP.access_aid -> ABWASSER.einstiegshilfe, ABWASSER.metaattribute") + query = qgep_session.query(QGEP.access_aid) + if filtered: + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) + ) + # add sql statement to logger + statement = query.statement + logger.info(f" selection query = {statement}") + for row in query: + + # AVAILABLE FIELDS IN QGEP.access_aid + + # --- structure_part --- + # fk_dataowner, fk_provider, fk_wastewater_structure, identifier, last_modification, remark, renovation_demand + + # --- access_aid --- + # kind, obj_id + + # --- _bwrel_ --- + # access_aid_kind__BWREL_obj_id, 
backflow_prevention__BWREL_obj_id, benching_kind__BWREL_obj_id, dryweather_flume_material__BWREL_obj_id, electric_equipment__BWREL_obj_id, electromechanical_equipment__BWREL_obj_id, solids_retention__BWREL_obj_id, tank_cleaning__BWREL_obj_id, tank_emptying__BWREL_obj_id + + # --- _rel_ --- + # fk_dataowner__REL, fk_provider__REL, fk_wastewater_structure__REL, kind__REL, renovation_demand__REL + + einstiegshilfe = ABWASSER.einstiegshilfe( + # FIELDS TO MAP TO ABWASSER.einstiegshilfe + # --- baseclass --- + # --- sia405_baseclass --- + **base_common(row, "einstiegshilfe"), + # --- bauwerksteil --- + **structure_part_common(row), + # --- einstiegshilfe --- + art=get_vl(row.kind__REL), + ) + abwasser_session.add(einstiegshilfe) + create_metaattributes(row) + print(".", end="") + logger.info("done") + abwasser_session.flush() + + logger.info( + "Exporting QGEP.dryweather_flume -> ABWASSER.trockenwetterrinne, ABWASSER.metaattribute" + ) + query = qgep_session.query(QGEP.dryweather_flume) + if filtered: + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) + ) + # add sql statement to logger + statement = query.statement + logger.info(f" selection query = {statement}") + for row in query: + + # AVAILABLE FIELDS IN QGEP.dryweather_flume + + # --- structure_part --- + # fk_dataowner, fk_provider, fk_wastewater_structure, identifier, last_modification, remark, renovation_demand + + # --- dryweather_flume --- + # material, obj_id + + # --- _bwrel_ --- + # access_aid_kind__BWREL_obj_id, backflow_prevention__BWREL_obj_id, benching_kind__BWREL_obj_id, dryweather_flume_material__BWREL_obj_id, electric_equipment__BWREL_obj_id, electromechanical_equipment__BWREL_obj_id, solids_retention__BWREL_obj_id, tank_cleaning__BWREL_obj_id, tank_emptying__BWREL_obj_id + + # --- _rel_ --- + # fk_dataowner__REL, fk_provider__REL, fk_wastewater_structure__REL, material__REL, renovation_demand__REL + + trockenwetterrinne = ABWASSER.trockenwetterrinne( + # FIELDS TO MAP TO ABWASSER.trockenwetterrinne + # --- baseclass --- + # --- sia405_baseclass --- + **base_common(row, "trockenwetterrinne"), + # --- bauwerksteil --- + **structure_part_common(row), + # --- trockenwetterrinne --- + material=get_vl(row.material__REL), + ) + abwasser_session.add(trockenwetterrinne) + create_metaattributes(row) + print(".", end="") + logger.info("done") + abwasser_session.flush() + + logger.info("Exporting QGEP.cover -> ABWASSER.deckel, ABWASSER.metaattribute") + query = qgep_session.query(QGEP.cover) + if filtered: + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) + ) + # add sql statement to logger + statement = query.statement + logger.info(f" selection query = {statement}") + for row in query: + + # AVAILABLE FIELDS IN QGEP.cover + + # --- structure_part --- + # fk_dataowner, fk_provider, fk_wastewater_structure, identifier, last_modification, remark, 
renovation_demand + + # --- cover --- + # brand, cover_shape, diameter, fastening, level, material, obj_id, positional_accuracy, situation_geometry, sludge_bucket, venting + + # --- _bwrel_ --- + # access_aid_kind__BWREL_obj_id, backflow_prevention__BWREL_obj_id, benching_kind__BWREL_obj_id, dryweather_flume_material__BWREL_obj_id, electric_equipment__BWREL_obj_id, electromechanical_equipment__BWREL_obj_id, solids_retention__BWREL_obj_id, tank_cleaning__BWREL_obj_id, tank_emptying__BWREL_obj_id, wastewater_structure__BWREL_fk_main_cover + + # --- _rel_ --- + # cover_shape__REL, fastening__REL, fk_dataowner__REL, fk_provider__REL, fk_wastewater_structure__REL, material__REL, positional_accuracy__REL, renovation_demand__REL, sludge_bucket__REL, venting__REL + + deckel = ABWASSER.deckel( + # FIELDS TO MAP TO ABWASSER.deckel + # --- baseclass --- + # --- sia405_baseclass --- + **base_common(row, "deckel"), + # --- bauwerksteil --- + **structure_part_common(row), + # --- deckel --- + deckelform=get_vl(row.cover_shape__REL), + durchmesser=row.diameter, + entlueftung=get_vl(row.venting__REL), + fabrikat=row.brand, + kote=row.level, + lage=ST_Force2D(row.situation_geometry), + lagegenauigkeit=get_vl(row.positional_accuracy__REL), + material=get_vl(row.material__REL), + schlammeimer=get_vl(row.sludge_bucket__REL), + verschluss=get_vl(row.fastening__REL), + ) + abwasser_session.add(deckel) + create_metaattributes(row) + print(".", end="") + logger.info("done") + abwasser_session.flush() + + logger.info("Exporting QGEP.benching -> ABWASSER.bankett, ABWASSER.metaattribute") + query = qgep_session.query(QGEP.benching) + if filtered: + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) + ) + # add sql statement to logger + statement = query.statement + logger.info(f" selection query = {statement}") + for row in query: + + # AVAILABLE FIELDS IN QGEP.benching + + # --- structure_part --- + # fk_dataowner, fk_provider, fk_wastewater_structure, identifier, last_modification, remark, renovation_demand + + # --- benching --- + # kind, obj_id + + # --- _bwrel_ --- + # access_aid_kind__BWREL_obj_id, backflow_prevention__BWREL_obj_id, benching_kind__BWREL_obj_id, dryweather_flume_material__BWREL_obj_id, electric_equipment__BWREL_obj_id, electromechanical_equipment__BWREL_obj_id, solids_retention__BWREL_obj_id, tank_cleaning__BWREL_obj_id, tank_emptying__BWREL_obj_id + + # --- _rel_ --- + # fk_dataowner__REL, fk_provider__REL, fk_wastewater_structure__REL, kind__REL, renovation_demand__REL + + bankett = ABWASSER.bankett( + # FIELDS TO MAP TO ABWASSER.bankett + # --- baseclass --- + # --- sia405_baseclass --- + **base_common(row, "bankett"), + # --- bauwerksteil --- + **structure_part_common(row), + # --- bankett --- + art=get_vl(row.kind__REL), + ) + abwasser_session.add(bankett) + create_metaattributes(row) + print(".", end="") + logger.info("done") + abwasser_session.flush() + + # VSA -KEK commented out + + # logger.info("Exporting QGEP.examination -> ABWASSER.untersuchung, ABWASSER.metaattribute") + # query = qgep_session.query(QGEP.examination) + # if filtered: + # query = ( + # query.join(QGEP.re_maintenance_event_wastewater_structure) + # 
.join(QGEP.wastewater_structure) + # .join(QGEP.wastewater_networkelement) + # .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) + # ) + + # for row in query: + + # # AVAILABLE FIELDS IN QGEP.examination + + # # --- maintenance_event --- + # # --- examination --- + # # equipment, fk_reach_point, from_point_identifier, inspected_length, obj_id, recording_type, to_point_identifier, vehicle, videonumber, weather + + # # --- _bwrel_ --- + # # damage__BWREL_fk_examination, re_maintenance_event_wastewater_structure__BWREL_fk_maintenance_event + + # # --- _rel_ --- + # # fk_dataowner__REL, fk_operating_company__REL, fk_provider__REL, fk_reach_point__REL, kind__REL, recording_type__REL, status__REL, weather__REL + # logger.warning( + # f"QGEP field maintenance_event.active_zone has no equivalent in the interlis model. It will be ignored." + # ) + + # untersuchung = ABWASSER.untersuchung( + # # FIELDS TO MAP TO ABWASSER.untersuchung + # # --- baseclass --- + # # --- sia405_baseclass --- + # **base_common(row, "untersuchung"), + # # --- erhaltungsereignis --- + # # abwasserbauwerkref=row.REPLACE_ME, # TODO : convert this to M2N relation through re_maintenance_event_wastewater_structure + # art=get_vl(row.kind__REL), + # astatus=get_vl(row.status__REL), + # ausfuehrende_firmaref=get_tid(row.fk_operating_company__REL), + # ausfuehrender=row.operator, + # bemerkung=truncate(emptystr_to_null(row.remark), 80), + # bezeichnung=null_to_emptystr(row.identifier), + # datengrundlage=row.base_data, + # dauer=row.duration, + # detaildaten=row.data_details, + # ergebnis=row.result, + # grund=row.reason, + # kosten=row.cost, + # zeitpunkt=row.time_point, + # # --- untersuchung --- + # bispunktbezeichnung=row.to_point_identifier, + # erfassungsart=get_vl(row.recording_type__REL), + # fahrzeug=row.vehicle, + # geraet=row.equipment, + # haltungspunktref=get_tid(row.fk_reach_point__REL), + # inspizierte_laenge=row.inspected_length, + # videonummer=row.videonumber, + # vonpunktbezeichnung=row.from_point_identifier, + # witterung=get_vl(row.weather__REL), + # ) + # abwasser_session.add(untersuchung) + # create_metaattributes(row) + # print(".", end="") + # logger.info("done") + # abwasser_session.flush() + + # logger.info("Exporting QGEP.damage_manhole -> ABWASSER.normschachtschaden, ABWASSER.metaattribute") + # query = qgep_session.query(QGEP.damage_manhole) + # if filtered: + # query = ( + # query.join(QGEP.examination) + # .join(QGEP.re_maintenance_event_wastewater_structure) + # .join(QGEP.wastewater_structure) + # .join(QGEP.wastewater_networkelement) + # .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) + # ) + # for row in query: + + # # AVAILABLE FIELDS IN QGEP.damage_manhole + + # # --- damage --- + + # # --- damage_manhole --- + # # manhole_damage_code, manhole_shaft_area, obj_id + + # # --- _bwrel_ --- + # # damage_channel_channel_damage_code__BWREL_obj_id + + # # --- _rel_ --- + # # connection__REL, fk_dataowner__REL, fk_examination__REL, fk_provider__REL, manhole_damage_code__REL, manhole_shaft_area__REL, single_damage_class__REL + + # normschachtschaden = ABWASSER.normschachtschaden( + # # FIELDS TO MAP TO ABWASSER.normschachtschaden + # # --- baseclass --- + # # --- sia405_baseclass --- + # **base_common(row, "normschachtschaden"), + # # --- schaden --- + # anmerkung=row.comments, + # ansichtsparameter=row.view_parameters, + # einzelschadenklasse=get_vl(row.single_damage_class__REL), + # streckenschaden=row.damage_reach, + # untersuchungref=get_tid(row.fk_examination__REL), + # 
verbindung=get_vl(row.connection__REL), + # videozaehlerstand=row.video_counter, + # # --- normschachtschaden --- + # distanz=row.distance, + # quantifizierung1=row.quantification1, + # quantifizierung2=row.quantification2, + # schachtbereich=get_vl(row.manhole_shaft_area__REL), + # schachtschadencode=get_vl(row.manhole_damage_code__REL), + # schadenlageanfang=row.damage_begin, + # schadenlageende=row.damage_end, + # ) + # abwasser_session.add(normschachtschaden) + # create_metaattributes(row) + # print(".", end="") + # logger.info("done") + # abwasser_session.flush() + + # logger.info("Exporting QGEP.damage_channel -> ABWASSER.kanalschaden, ABWASSER.metaattribute") + # query = qgep_session.query(QGEP.damage_channel) + # if filtered: + # query = ( + # query.join(QGEP.examination) + # .join(QGEP.re_maintenance_event_wastewater_structure) + # .join(QGEP.wastewater_structure) + # .join(QGEP.wastewater_networkelement) + # .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) + # ) + # for row in query: + + # # AVAILABLE FIELDS IN QGEP.damage_channel + + # # --- damage --- + # # comments, connection, damage_begin, damage_end, damage_reach, distance, fk_dataowner, fk_examination, fk_provider, last_modification, quantification1, quantification2, single_damage_class, video_counter, view_parameters + + # # --- damage_channel --- + # # , obj_id + + # # --- _bwrel_ --- + # # damage_channel_channel_damage_code__BWREL_obj_id + + # # --- _rel_ --- + # # channel_damage_code__REL, connection__REL, fk_dataowner__REL, fk_examination__REL, fk_provider__REL, single_damage_class__REL + + # kanalschaden = ABWASSER.kanalschaden( + # # FIELDS TO MAP TO ABWASSER.kanalschaden + # # --- baseclass --- + # # --- sia405_baseclass --- + # **base_common(row, "kanalschaden"), + # # --- schaden --- + # anmerkung=row.comments, + # ansichtsparameter=row.view_parameters, + # einzelschadenklasse=get_vl(row.single_damage_class__REL), + # streckenschaden=row.damage_reach, + # untersuchungref=get_tid(row.fk_examination__REL), + # verbindung=get_vl(row.connection__REL), + # videozaehlerstand=row.video_counter, + # # --- kanalschaden --- + # distanz=row.distance, + # kanalschadencode=get_vl(row.channel_damage_code__REL), + # quantifizierung1=row.quantification1, + # quantifizierung2=row.quantification2, + # schadenlageanfang=row.damage_begin, + # schadenlageende=row.damage_end, + # ) + # abwasser_session.add(kanalschaden) + # create_metaattributes(row) + # print(".", end="") + # logger.info("done") + # abwasser_session.flush() + + # logger.info("Exporting QGEP.data_media -> ABWASSER.datentraeger, ABWASSER.metaattribute") + # query = qgep_session.query(QGEP.data_media) + # for row in query: + + # # AVAILABLE FIELDS IN QGEP.data_media + + # # --- data_media --- + # # fk_dataowner, fk_provider, identifier, kind, last_modification, location, obj_id, path, remark + + # # --- _rel_ --- + # # fk_dataowner__REL, fk_provider__REL, kind__REL + + # datentraeger = ABWASSER.datentraeger( + # # FIELDS TO MAP TO ABWASSER.datentraeger + # # --- baseclass --- + # # --- sia405_baseclass --- + # **base_common(row, "datentraeger"), + # # --- datentraeger --- + # art=get_vl(row.kind__REL), + # bemerkung=truncate(emptystr_to_null(row.remark), 80), + # bezeichnung=null_to_emptystr(row.identifier), + # pfad=row.path, + # standort=row.location, + # ) + # abwasser_session.add(datentraeger) + # create_metaattributes(row) + # print(".", end="") + # logger.info("done") + # abwasser_session.flush() + + # logger.info("Exporting QGEP.file -> 
ABWASSER.datei, ABWASSER.metaattribute") + # query = qgep_session.query(QGEP.file) + # if filtered: + # query = ( + # query.outerjoin(QGEP.damage, QGEP.file.object == QGEP.damage.obj_id) + # .join( + # QGEP.examination, + # or_(QGEP.file.object == QGEP.damage.obj_id, QGEP.file.object == QGEP.examination.obj_id), + # ) + # .join(QGEP.re_maintenance_event_wastewater_structure) + # .join(QGEP.wastewater_structure) + # .join(QGEP.wastewater_networkelement) + # .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) + # ) + # for row in query: + + # # AVAILABLE FIELDS IN QGEP.file + + # # --- file --- + # # class, fk_data_media, fk_dataowner, fk_provider, identifier, kind, last_modification, obj_id, object, path_relative, remark + + # # --- _rel_ --- + # # class__REL, fk_dataowner__REL, fk_provider__REL, kind__REL + + # datei = ABWASSER.datei( + # # FIELDS TO MAP TO ABWASSER.datei + # # --- baseclass --- + # # --- sia405_baseclass --- + # **base_common(row, "datei"), + # # --- datei --- + # art=get_vl(row.kind__REL) or "andere", + # bemerkung=truncate(emptystr_to_null(row.remark), 80), + # bezeichnung=null_to_emptystr(row.identifier), + # datentraegerref=get_tid(row.fk_data_media__REL), + # klasse=get_vl(row.class__REL), + # objekt=null_to_emptystr(row.object), + # relativpfad=row.path_relative, + # ) + # abwasser_session.add(datei) + # create_metaattributes(row) + # print(".", end="") + # logger.info("done") + # abwasser_session.flush() + + # Labels + # Note: these are extracted from the optional labels file (not exported from the QGEP database) + if labels_file: + logger.info(f"Exporting label positions from {labels_file}") + + # Get t_id by obj_name to create the reference on the labels below + tid_for_obj_id = { + "haltung": {}, + "abwasserbauwerk": {}, + } + for row in abwasser_session.query(ABWASSER.haltung): + tid_for_obj_id["haltung"][row.obj_id] = row.t_id + for row in abwasser_session.query(ABWASSER.abwasserbauwerk): + tid_for_obj_id["abwasserbauwerk"][row.obj_id] = row.t_id + + with open(labels_file) as labels_file_handle: + labels = json.load(labels_file_handle) + + geojson_crs_def = labels["crs"] + + for label in labels["features"]: + layer_name = label["properties"]["Layer"] + obj_id = label["properties"]["qgep_obj_id"] + + print(f"label[properties]: {label['properties']}") + + if not label["properties"]["LabelText"]: + logger.warning( + f"Label of object '{obj_id}' from layer '{layer_name}' is empty and will not be exported" + ) + continue + + if layer_name == "vw_qgep_reach": + if obj_id not in tid_for_obj_id["haltung"]: + logger.warning( + f"Label for haltung `{obj_id}` exists, but that object is not part of the export" + ) + continue + ili_label = ABWASSER.haltung_text( + **textpos_common(label, "haltung_text", geojson_crs_def), + haltungref=tid_for_obj_id["haltung"][obj_id], + ) + + elif layer_name == "vw_qgep_wastewater_structure": + if obj_id not in tid_for_obj_id["abwasserbauwerk"]: + logger.warning( + f"Label for abwasserbauwerk `{obj_id}` exists, but that object is not part of the export" + ) + continue + ili_label = ABWASSER.abwasserbauwerk_text( + **textpos_common(label, "abwasserbauwerk_text", geojson_crs_def), + abwasserbauwerkref=tid_for_obj_id["abwasserbauwerk"][obj_id], + ) + + else: + logger.warning( + f"Unknown layer for label `{layer_name}`. 
Label will be ignored", + ) + continue + + abwasser_session.add(ili_label) + print(".", end="") + logger.info("done") + abwasser_session.flush() + + abwasser_session.commit() + + qgep_session.close() + abwasser_session.close() From e308a1de10061f99524616e822b739840d249539 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 16:43:08 +0000 Subject: [PATCH 054/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepsia405/export.py | 4 ++-- qgepqwat2ili/qgepsia405/export.py.old | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 10171ef0..092b96cc 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -282,9 +282,9 @@ def structure_part_common(row): return { # abwasserbauwerkref is MANDATORY, so it cannot be set to NULL "abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), - #"abwasserbauwerkref": check_fk_in_subsetid( + # "abwasserbauwerkref": check_fk_in_subsetid( # subset_ids, row.fk_wastewater_structure__REL - #), + # ), "bemerkung": truncate(emptystr_to_null(row.remark), 80), "bezeichnung": null_to_emptystr(row.identifier), "instandstellung": get_vl(row.renovation_demand__REL), diff --git a/qgepqwat2ili/qgepsia405/export.py.old b/qgepqwat2ili/qgepsia405/export.py.old index c3046d8c..5395fab1 100644 --- a/qgepqwat2ili/qgepsia405/export.py.old +++ b/qgepqwat2ili/qgepsia405/export.py.old @@ -82,7 +82,7 @@ def qgep_export(selection=None, labels_file=None, orientation=None): # 5. get and add all id's of connected wastewater_structures (not only of wastewater_network_element (reach, wwn) subset_wws_ids = get_ws_selected_ww_networkelements(subset_ids) subset_ids = add_to_selection(subset_ids, subset_wws_ids) - + logger.debug( f"subset_ids with wws : {subset_ids}", ) From b3bdbd9b7a84db1d343962745bae9379026f1e6e Mon Sep 17 00:00:00 2001 From: SJiB Date: Fri, 15 Nov 2024 17:44:01 +0100 Subject: [PATCH 055/127] adapt ili2db.py - def get_ws_selected_ww_networkelements(selected_wwn): --- qgepqwat2ili/utils/ili2db.py | 30 +- qgepqwat2ili/utils/ili2db.py.old | 987 +++++++++++++++++++++++++++++++ 2 files changed, 1013 insertions(+), 4 deletions(-) create mode 100644 qgepqwat2ili/utils/ili2db.py.old diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index 8bf1e8f7..ef563659 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -549,7 +549,14 @@ def get_ws_wn_ids(classname): if cursor.fetchone() is None: ws_wn_ids = None else: + # added cursor.execute again to see if with this all records will be available + #15.11.2024 added - see https://stackoverflow.com/questions/58101874/cursor-fetchall-or-other-method-fetchone-is-not-working + cursor.execute( + f"SELECT wn.obj_id FROM qgep_od.{classname} LEFT JOIN qgep_od.wastewater_networkelement wn ON wn.fk_wastewater_structure = {classname}.obj_id WHERE wn.obj_id is not NULL;" + ) records = cursor.fetchall() + + # 15.11.2024 - does not get all records, but only n-1 for row in records: logger.debug(f" row[0] = {row[0]}") # https://www.pythontutorial.net/python-string-methods/python-string-concatenation/ @@ -561,25 +568,40 @@ def get_ws_wn_ids(classname): return ws_wn_ids -def get_ws_selected_ww_networkelements(selection): +def get_ws_selected_ww_networkelements(selected_wwn): """ Get list of id's of wastewater_structure from selected 
wastewater_network_elements """ - logger.info( - f"get list of id's of wastewater_structure of selected wastewater_network_elements {selection} ..." + logger.debug( + f"get list of id's of wastewater_structure of selected wastewater_network_elements {selected_wwn} ..." ) connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) connection.set_session(autocommit=True) cursor = connection.cursor() + selection_text = '' + + for list_item in selected_wwn: + selection_text += "'" + selection_text += list_item + selection_text += "'," + + # remove last komma to make it a correct IN statement + selection_text = selection_text[:-1] + + logger.debug( + f"selection_text = {selection_text} ..." + ) + ws_ids = [] # select all obj_id of the wastewater_nodes of wwtp_structure cursor.execute( - f"SELECT ws.obj_id FROM qgep_od.wastewater_structure ws LEFT JOIN qgep_od.wastewater_networkelement wn ON wn.fk_wastewater_structure = ws.obj_id WHERE wn.obj_id IN {selection}" + f"SELECT ws.obj_id FROM qgep_od.wastewater_structure ws LEFT JOIN qgep_od.wastewater_networkelement wn ON wn.fk_wastewater_structure = ws.obj_id WHERE wn.obj_id IN ({selection_text});" ) + # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ # ws_wn_ids_count = int(cursor.fetchone()[0]) # if ws_wn_ids_count == 0: diff --git a/qgepqwat2ili/utils/ili2db.py.old b/qgepqwat2ili/utils/ili2db.py.old new file mode 100644 index 00000000..8bf1e8f7 --- /dev/null +++ b/qgepqwat2ili/utils/ili2db.py.old @@ -0,0 +1,987 @@ +import collections + +# 11.4.2023 +import xml.etree.ElementTree as ET + +import psycopg2 +from sqlalchemy.ext.automap import AutomapBase + +from .. import config +from .various import exec_, get_pgconf_as_ili_args, get_pgconf_as_psycopg2_dsn, logger + + +def check_organisation_subclass_data(): + """ + Check if subclass entries of organisation are set and match number of organisation entries + """ + logger.info("INTEGRITY CHECK organisations subclass data...") + + connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) + connection.set_session(autocommit=True) + cursor = connection.cursor() + + cursor.execute("SELECT obj_id FROM qgep_od.organisation;") + if cursor.rowcount > 0: + organisation_count = cursor.rowcount + logger.info(f"Number of organisation datasets: {organisation_count}") + for subclass in [ + ("administrative_office"), + ("waste_water_association"), + ("municipality"), + ("canton"), + ("cooperative"), + ("private"), + ("waste_water_treatment_plant"), + ]: + cursor.execute(f"SELECT obj_id FROM qgep_od.{subclass};") + logger.info(f"Number of {subclass} datasets: {cursor.rowcount}") + organisation_count = organisation_count - cursor.rowcount + + if organisation_count == 0: + organisation_subclass_check = True + logger.info( + "OK: number of subclass elements of class organisation OK in schema qgep_od!" 
+ ) + else: + organisation_subclass_check = False + logger.info( + f"ERROR: number of subclass elements of organisation NOT CORRECT in schema qgep_od: checksum = {organisation_count} (positiv number means missing entries, negativ means too many subclass entries)" + ) + + return organisation_subclass_check + + +def check_wastewater_structure_subclass_data(): + """ + Check if subclass entries of wastewater_structure are set and match number of wastewater_structure entries + """ + logger.info("INTEGRITY CHECK wastewater_structures subclass data...") + + connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) + connection.set_session(autocommit=True) + cursor = connection.cursor() + + cursor.execute("SELECT obj_id FROM qgep_od.wastewater_structure;") + if cursor.rowcount > 0: + wastewater_structure_count = cursor.rowcount + logger.info(f"Number of wastewater_structure datasets: {wastewater_structure_count}") + for subclass in [ + ("manhole"), + ("channel"), + ("special_structure"), + ("infiltration_installation"), + ("discharge_point"), + ("wwtp_structure"), + ]: + cursor.execute(f"SELECT obj_id FROM qgep_od.{subclass};") + logger.info(f"Number of {subclass} datasets: {cursor.rowcount}") + wastewater_structure_count = wastewater_structure_count - cursor.rowcount + + if wastewater_structure_count == 0: + wastewater_structure_subclass_check = True + logger.info( + "OK: number of subclass elements of class wastewater_structure OK in schema qgep_od!" + ) + else: + wastewater_structure_subclass_check = False + logger.info( + f"ERROR: number of subclass elements of wastewater_structure NOT CORRECT in schema qgep_od: checksum = {wastewater_structure_count} (positiv number means missing entries, negativ means too many subclass entries)" + ) + + return wastewater_structure_subclass_check + + +def check_identifier_null(): + """ + Check if attribute identifier is Null + """ + logger.info("INTEGRITY CHECK missing identifiers...") + + connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) + connection.set_session(autocommit=True) + cursor = connection.cursor() + + missing_identifier_count = 0 + # add classes to be checked + for notsubclass in [ + # VSA-KEK + ("file"), + ("data_media"), + ("maintenance_event"), + # SIA405 Abwasser + ("organisation"), + ("wastewater_structure"), + ("wastewater_networkelement"), + ("structure_part"), + ("reach_point"), + ("pipe_profile"), + # VSA-DSS + ("catchment_area"), + ("connection_object"), + ("control_center"), + ("hazard_source"), + ("hydr_geometry"), + ("hydraulic_char_data"), + ("measurement_result"), + ("measurement_series"), + ("measuring_device"), + ("measuring_point"), + ("mechanical_pretreatment"), + ("overflow"), + ("overflow_char"), + ("retention_body"), + ("river_bank"), + ("river_bed"), + ("sector_water_body"), + ("substance"), + ("surface_runoff_parameters"), + ("surface_water_bodies"), + ("throttle_shut_off_unit"), + ("waste_water_treatment"), + ("water_catchment"), + ("water_control_structure"), + ("water_course_segment"), + ("wwtp_energy_use"), + ("zone"), + ]: + cursor.execute( + f"SELECT COUNT(obj_id) FROM qgep_od.{notsubclass} WHERE identifier is null;" + ) + # use cursor.fetchone()[0] instead of cursor.rowcount + # add variable and store result of cursor.fetchone()[0] as the next call will give None value instead of count https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ + + try: + class_identifier_count = int(cursor.fetchone()[0]) + except Exception: + class_identifier_count = 0 + logger.debug( + 
f"Number of datasets in class '{notsubclass}' without identifier could not be identified (TypeError: 'NoneType' object is not subscriptable). Automatically set class_identifier_count = 0" + ) + else: + logger.info( + f"Number of datasets in class '{notsubclass}' without identifier : {class_identifier_count}" + ) + + # if cursor.fetchone() is None: + if class_identifier_count == 0: + missing_identifier_count = missing_identifier_count + else: + # missing_identifier_count = missing_identifier_count + int(cursor.fetchone()[0]) + missing_identifier_count = missing_identifier_count + class_identifier_count + + # add for testing + logger.info(f"missing_identifier_count : {missing_identifier_count}") + + if missing_identifier_count == 0: + identifier_null_check = True + logger.info("OK: all identifiers set in qgep_od!") + else: + identifier_null_check = False + logger.info(f"ERROR: Missing identifiers in qgep_od: {missing_identifier_count}") + return identifier_null_check + + +def check_fk_owner_null(): + """ + Check if MAMDATORY fk_owner is Null + """ + logger.info("INTEGRITY CHECK missing MAMDATORY owner references fk_owner...") + + connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) + connection.set_session(autocommit=True) + cursor = connection.cursor() + + missing_fk_owner_count = 0 + # add MANDATORY classes to be checked + for notsubclass in [ + # SIA405 Abwasser + ("wastewater_structure"), + ]: + cursor.execute(f"SELECT COUNT(obj_id) FROM qgep_od.{notsubclass} WHERE fk_owner is null;") + # use cursor.fetchone()[0] instead of cursor.rowcount + # add variable and store result of cursor.fetchone()[0] as the next call will give None value instead of count https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ + class_fk_owner_count = int(cursor.fetchone()[0]) + # logger.info( + # f"Number of datasets in class '{notsubclass}' without fk_owner : {cursor.fetchone()[0]}" + # ) + logger.info( + f"Number of datasets in class '{notsubclass}' without fk_owner : {class_fk_owner_count}" + ) + + # if cursor.fetchone() is None: + if class_fk_owner_count == 0: + missing_fk_owner_count = missing_fk_owner_count + else: + # missing_fk_owner_count = missing_fk_owner_count + int(cursor.fetchone()[0]) + missing_fk_owner_count = missing_fk_owner_count + class_fk_owner_count + + # add for testing + logger.info(f"missing_fk_owner_count : {missing_fk_owner_count}") + + if missing_fk_owner_count == 0: + check_fk_owner_null = True + logger.info("OK: all mandatory fk_owner set in qgep_od!") + else: + check_fk_owner_null = False + logger.info(f"ERROR: Missing mandatory fk_owner in qgep_od: {missing_fk_owner_count}") + return check_fk_owner_null + + +def check_fk_operator_null(): + """ + Check if MAMDATORY fk_operator is Null + """ + logger.info("INTEGRITY CHECK missing MAMDATORY operator references fk_operator...") + + connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) + connection.set_session(autocommit=True) + cursor = connection.cursor() + + missing_fk_operator_count = 0 + + # add MANDATORY classes to be checked + for notsubclass in [ + # SIA405 Abwasser + ("wastewater_structure"), + ]: + cursor.execute( + f"SELECT COUNT(obj_id) FROM qgep_od.{notsubclass} WHERE fk_operator is null;" + ) + # use cursor.fetchone()[0] instead of cursor.rowcount + logger.info( + f"Number of datasets in class '{notsubclass}' without fk_operator : {cursor.fetchone()[0]}" + ) + + if cursor.fetchone() is None: + missing_fk_operator_count = missing_fk_operator_count + else: + 
missing_fk_operator_count = missing_fk_operator_count + int(cursor.fetchone()[0]) + # add for testing + logger.info(f"missing_fk_operator_count : {missing_fk_operator_count}") + + if missing_fk_operator_count == 0: + check_fk_operator_null = True + logger.info("OK: all mandatory fk_operator set in qgep_od!") + else: + check_fk_operator_null = False + logger.info( + f"ERROR: Missing mandatory fk_operator in qgep_od: {missing_fk_operator_count}" + ) + + return check_fk_operator_null + + +def check_fk_dataowner_null(): + """ + Check if MAMDATORY fk_dataowner is Null + """ + logger.info("INTEGRITY CHECK missing dataowner references fk_dataowner...") + + connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) + connection.set_session(autocommit=True) + cursor = connection.cursor() + + missing_fk_dataowner_count = 0 + # add MANDATORY classes to be checked + for notsubclass in [ + # VSA-KEK + ("file"), + ("data_media"), + ("maintenance_event"), + # SIA405 Abwasser + ("organisation"), + ("wastewater_structure"), + ("wastewater_networkelement"), + ("structure_part"), + ("reach_point"), + ("pipe_profile"), + # VSA-DSS + ("catchment_area"), + ("connection_object"), + ("control_center"), + ("hazard_source"), + ("hydr_geometry"), + ("hydraulic_char_data"), + ("measurement_result"), + ("measurement_series"), + ("measuring_device"), + ("measuring_point"), + ("mechanical_pretreatment"), + ("overflow"), + ("overflow_char"), + ("retention_body"), + ("river_bank"), + ("river_bed"), + ("sector_water_body"), + ("substance"), + ("surface_runoff_parameters"), + ("surface_water_bodies"), + ("throttle_shut_off_unit"), + ("waste_water_treatment"), + ("water_catchment"), + ("water_control_structure"), + ("water_course_segment"), + ("wwtp_energy_use"), + ("zone"), + ]: + cursor.execute( + f"SELECT COUNT(obj_id) FROM qgep_od.{notsubclass} WHERE fk_dataowner is null;" + ) + # use cursor.fetchone()[0] instead of cursor.rowcount + # add variable and store result of cursor.fetchone()[0] as the next call will give None value instead of count https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ + class_fk_dataowner_count = int(cursor.fetchone()[0]) + + # logger.info( + # f"Number of datasets in class '{notsubclass}' without fk_dataowner : {cursor.fetchone()[0]}" + # ) + logger.info( + f"Number of datasets in class '{notsubclass}' without fk_dataowner : {class_fk_dataowner_count}" + ) + + # if cursor.fetchone() is None: + if class_fk_dataowner_count == 0: + missing_fk_dataowner_count = missing_fk_dataowner_count + else: + # missing_fk_dataowner_count = missing_fk_dataowner_count + int(cursor.fetchone()[0]) + missing_fk_dataowner_count = missing_fk_dataowner_count + class_fk_dataowner_count + + # add for testing + logger.info(f"missing_fk_dataowner_count : {missing_fk_dataowner_count}") + + if missing_fk_dataowner_count == 0: + check_fk_dataowner_null = True + logger.info("OK: all mandatory fk_dataowner set in qgep_od!") + else: + check_fk_dataowner_null = False + logger.info( + f"ERROR: Missing mandatory fk_dataowner in qgep_od: {missing_fk_dataowner_count}" + ) + + return check_fk_dataowner_null + + +def check_fk_provider_null(): + """ + Check if MAMDATORY fk_provider is Null + """ + logger.info("INTEGRITY CHECK missing provider references fk_provider...") + + connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) + connection.set_session(autocommit=True) + cursor = connection.cursor() + + missing_fk_provider_count = 0 + # add MANDATORY classes to be checked + for notsubclass in [ 
+ # VSA-KEK + ("file"), + ("data_media"), + ("maintenance_event"), + # SIA405 Abwasser + ("organisation"), + ("wastewater_structure"), + ("wastewater_networkelement"), + ("structure_part"), + ("reach_point"), + ("pipe_profile"), + # VSA-DSS + ("catchment_area"), + ("connection_object"), + ("control_center"), + ("hazard_source"), + ("hydr_geometry"), + ("hydraulic_char_data"), + ("measurement_result"), + ("measurement_series"), + ("measuring_device"), + ("measuring_point"), + ("mechanical_pretreatment"), + ("overflow"), + ("overflow_char"), + ("retention_body"), + ("river_bank"), + ("river_bed"), + ("sector_water_body"), + ("substance"), + ("surface_runoff_parameters"), + ("surface_water_bodies"), + ("throttle_shut_off_unit"), + ("waste_water_treatment"), + ("water_catchment"), + ("water_control_structure"), + ("water_course_segment"), + ("wwtp_energy_use"), + ("zone"), + ]: + cursor.execute( + f"SELECT COUNT(obj_id) FROM qgep_od.{notsubclass} WHERE fk_provider is null;" + ) + # use cursor.fetchone()[0] instead of cursor.rowcount + # add variable and store result of cursor.fetchone()[0] as the next call will give None value instead of count https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ + class_fk_provider_count = int(cursor.fetchone()[0]) + # logger.info( + # f"Number of datasets in class '{notsubclass}' without fk_provider : {cursor.fetchone()[0]}" + # ) + logger.info( + f"Number of datasets in class '{notsubclass}' without fk_dataowner : {class_fk_provider_count}" + ) + + # if cursor.fetchone() is None: + if class_fk_provider_count == 0: + missing_fk_provider_count = missing_fk_provider_count + else: + # missing_fk_provider_count = missing_fk_provider_count + int(cursor.fetchone()[0]) + missing_fk_provider_count = missing_fk_provider_count + class_fk_provider_count + + # add for testing + logger.info(f"missing_fk_provider_count : {missing_fk_provider_count}") + + if missing_fk_provider_count == 0: + check_fk_provider_null = True + logger.info("OK: all mandatory fk_provider set in qgep_od!") + else: + check_fk_provider_null = False + logger.info( + f"ERROR: Missing mandatory fk_provider in qgep_od: {missing_fk_provider_count}" + ) + + return check_fk_provider_null + + +def skip_wwtp_structure_ids_old(): + """ + Get list of id's of class wastewater_structure without wwtp_structure (ARABauwerk) + """ + logger.info("get list of id's of class wwtp_structure (ARABauwerk)...") + + connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) + connection.set_session(autocommit=True) + cursor = connection.cursor() + + not_wwtp_structure_ids = [] + + # select all obj_id from wastewater_structure that are not in wwtp_structure + cursor.execute( + "SELECT * FROM qgep_od.wastewater_structure WHERE obj_id NOT IN (SELECT obj_id FROM qgep_od.wwtp_structure);" + ) + + # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ + # wwtp_structure_count = int(cursor.fetchone()[0]) + # if wwtp_structure_count == 0: + if cursor.fetchone() is None: + not_wwtp_structure_ids = None + else: + records = cursor.fetchall() + for row in records: + logger.debug(f" row[0] = {row[0]}") + # https://www.pythontutorial.net/python-string-methods/python-string-concatenation/ + # not_wwtp_structure_ids = not_wwtp_structure_ids + str(row[0]) + "," + strrow = str(row[0]) + # not_wwtp_structure_ids = ','.join([not_wwtp_structure_ids, strrow]) + # not_wwtp_structure_ids = not_wwtp_structure_ids + row[0] + 
not_wwtp_structure_ids.append(strrow) + logger.debug(f" building up '{not_wwtp_structure_ids}' ...") + + return not_wwtp_structure_ids + + +# 12.11.2024 to clean up - get_ws_wn_ids kann das auch +def get_cl_re_ids(classname): + """ + Get list of id's of reaches of the channels provided + """ + + # define classes that this is allowed to use - adapt for TWW to include model changes + if classname == "channel": + logger.info(f"get list of id's of wastewater_nodes of {classname} ...") + + connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) + connection.set_session(autocommit=True) + cursor = connection.cursor() + + cl_re_ids = [] + + # select all obj_id of the wastewater_nodes of wwtp_structure + cursor.execute( + "SELECT wn.obj_id FROM qgep_od.channel LEFT JOIN qgep_od.wastewater_networkelement wn ON wn.fk_wastewater_structure = channel.obj_id WHERE wn.obj_id is not NULL;" + ) + + # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ + # cl_re_ids_count = int(cursor.fetchone()[0]) + # if cl_re_ids_count == 0: + if cursor.fetchone() is None: + cl_re_ids = None + else: + records = cursor.fetchall() + for row in records: + logger.debug(f" row[0] = {row[0]}") + # https://www.pythontutorial.net/python-string-methods/python-string-concatenation/ + strrow = str(row[0]) + cl_re_ids.append(strrow) + logger.debug(f" building up '{cl_re_ids}' ...") + + return cl_re_ids + else: + logger.warning(f"Do not use this function with {classname} !") + return None + + +def get_ws_wn_ids(classname): + """ + Get list of id's of wastewater_nodes of the wastewater_structure (sub)class provided, eg. wwtp_structure (ARABauwerk, does also work for channel (give reaches then) + """ + + logger.info(f"get list of id's of wastewater_nodes of {classname} ...") + connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) + connection.set_session(autocommit=True) + cursor = connection.cursor() + + ws_wn_ids = [] + + # select all obj_id of the wastewater_nodes of wwtp_structure + cursor.execute( + f"SELECT wn.obj_id FROM qgep_od.{classname} LEFT JOIN qgep_od.wastewater_networkelement wn ON wn.fk_wastewater_structure = {classname}.obj_id WHERE wn.obj_id is not NULL;" + ) + + # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ + # ws_wn_ids_count = int(cursor.fetchone()[0]) + # if ws_wn_ids_count == 0: + if cursor.fetchone() is None: + ws_wn_ids = None + else: + records = cursor.fetchall() + for row in records: + logger.debug(f" row[0] = {row[0]}") + # https://www.pythontutorial.net/python-string-methods/python-string-concatenation/ + strrow = str(row[0]) + if strrow is not None: + ws_wn_ids.append(strrow) + # logger.debug(f" building up '{ws_wn_ids}' ...") + + return ws_wn_ids + + +def get_ws_selected_ww_networkelements(selection): + """ + Get list of id's of wastewater_structure from selected wastewater_network_elements + """ + + logger.info( + f"get list of id's of wastewater_structure of selected wastewater_network_elements {selection} ..." 
+ ) + connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) + connection.set_session(autocommit=True) + cursor = connection.cursor() + + ws_ids = [] + + # select all obj_id of the wastewater_nodes of wwtp_structure + cursor.execute( + f"SELECT ws.obj_id FROM qgep_od.wastewater_structure ws LEFT JOIN qgep_od.wastewater_networkelement wn ON wn.fk_wastewater_structure = ws.obj_id WHERE wn.obj_id IN {selection}" + ) + + # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ + # ws_wn_ids_count = int(cursor.fetchone()[0]) + # if ws_wn_ids_count == 0: + if cursor.fetchone() is None: + ws_ids = None + else: + records = cursor.fetchall() + for row in records: + logger.debug(f" row[0] = {row[0]}") + # https://www.pythontutorial.net/python-string-methods/python-string-concatenation/ + strrow = str(row[0]) + if strrow is not None: + ws_ids.append(strrow) + # logger.debug(f" building up '{ws_wn_ids}' ...") + + return ws_ids + + +def remove_from_selection(selected_ids, remove_ids): + """ + Remove ids from selected_ids + """ + + for list_item in remove_ids: + # selected_ids = selected_ids.remove(list_item) + selected_ids.remove(list_item) + + return selected_ids + + +def add_to_selection(selected_ids, add_ids): + """ + Append ids to selected_ids + """ + + if selected_ids is None: + selected_ids = [] + + for list_item in add_ids: + # selected_ids = selected_ids.append(list_item) + selected_ids.append(list_item) + + return selected_ids + + +def create_ili_schema(schema, model, log_path, recreate_schema=False): + """ + Create schema for INTERLIS import + """ + logger.info("CONNECTING TO DATABASE...") + + connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) + connection.set_session(autocommit=True) + cursor = connection.cursor() + + if not recreate_schema: + # If the schema already exists, we just truncate all tables + cursor.execute( + f"SELECT schema_name FROM information_schema.schemata WHERE schema_name = '{schema}';" + ) + if cursor.rowcount > 0: + logger.info(f"Schema {schema} already exists, we truncate instead") + cursor.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = '{schema}';" + ) + for row in cursor.fetchall(): + cursor.execute(f"TRUNCATE TABLE {schema}.{row[0]} CASCADE;") + return + + logger.info(f"DROPPING THE SCHEMA {schema}...") + cursor.execute(f'DROP SCHEMA IF EXISTS "{schema}" CASCADE ;') + logger.info(f"CREATING THE SCHEMA {schema}...") + cursor.execute(f'CREATE SCHEMA "{schema}";') + connection.commit() + connection.close() + + logger.info(f"ILIDB SCHEMAIMPORT INTO {schema}...") + exec_( + " ".join( + [ + f'"{config.JAVA}"', + "-jar", + f'"{config.ILI2PG}"', + "--schemaimport", + *get_pgconf_as_ili_args(), + "--dbschema", + f"{schema}", + "--setupPgExt", + "--createGeomIdx", + "--createFk", + "--createFkIdx", + "--createTidCol", + "--importTid", + "--noSmartMapping", + "--defaultSrsCode", + "2056", + "--log", + f'"{log_path}"', + "--nameLang", + "de", + f"{model}", + ] + ) + ) + + +def validate_xtf_data(xtf_file, log_path): + """ + Run XTF validation using ilivalidator + """ + logger.info("VALIDATING XTF DATA...") + exec_( + f'"{config.JAVA}" -jar "{config.ILIVALIDATOR}" --modeldir "{config.ILI_FOLDER}" --log "{log_path}" "{xtf_file}"' + ) + + +# 22.7.2022 sb +def get_xtf_model(xtf_file): + """ + Get XTF model from file + """ + logger.info(f"GET XTF MODEL {xtf_file} ... 
") + # logger.info("vorher" + imodel) + # funktioniert nicht + # global imodel # define imodel as global variable for import model name + # impmodel = "" + + # open and read xtf / xml file line by line until + # + # + # read string between < and . -> eg. VSA_KEK_2019_LV95 + # impmodel + from io import open + + model_list = [] + + # checkdatasection = -1 + checkmodelssection = -1 + impmodel = "not found" + + with open(xtf_file, encoding="utf-8") as f: + while True: + # if checkdatasection == -1: + if checkmodelssection == -1: + + line = f.readline() + if not line: + break + else: + # checkdatasection = line.find('') + # logger.info(str(checkdatasection)) + checkmodelssection = line.find("") + logger.info("checkmodelssection " + str(checkmodelssection)) + logger.info(str(line)) + else: + line2 = f.readline() + if not line2: + break + else: + logger.info(str(line2)) + logger.info("line2: ", str(line2)) + # logger.info(str(checkdatasection)) + logger.info("checkmodelssection2 " + str(checkmodelssection)) + # strmodel = str(line2.strip()) + strmodel = str(line2) + strmodel = strmodel.strip() + logger.info("strmodel: " + strmodel) + logger.info("strmodel: ", strmodel) + logger.info(f"strmodel: {strmodel}") + a = strmodel.find("") + logger.info("strmodel.find a : " + str(a)) + # if strmodel.find("") > -1: + if a == -1: + b = strmodel.find("") + logger.info(r"strmodel.find b \ -1: + logger.info("MODELS definition found in xtf: " + strmodel) + # + # read string between < and . -> eg. VSA_KEK_2019_LV95 + + # result = re.search('<(.*).',strmodel) + # result = str(result.group(1)) + # result2 = result.split('.',1) + # result3 = str(result2[0]) + # result4 = result3.strip('<') + # impmodel = str(result4) + # Search MODELNAME in MODEL entry: # + char1 = "=" + char2 = "VERSION=" + result = strmodel[strmodel.find(char1) + 1 : strmodel.find(char2)] + # result = re.search('<(.*).',strmodel) + # result = str(result.group(1)) + # result2 = result.split('.',1) + # result3 = str(result2[0]) + # result4 = result3.strip('<') + # impmodel = str(result4) + # strip spaces + result = result.strip() + # strip " + result = result.strip('"') + logger.info("MODEL found: " + str(result)) + logger.info(result) + model_list.append(result) + else: + logger.info("goto next line") + else: + logger.info(" found - stop checking!") + break + logger.info("model_list:") + logger.info(str(model_list)) + + if len(model_list) > 0: + # if impmodel == "not found": + # # write that MODEL was not found + # logger.info("MODEL was " + impmodel + " was not found!") + # else: + if "VSA_KEK_2019_LV95" in model_list: + impmodel = "VSA_KEK_2019_LV95" + elif "SIA405_ABWASSER_2015_LV95" in model_list: + impmodel = "SIA405_ABWASSER_2015_LV95" + elif "DSS_2015_LV95" in model_list: + impmodel = "DSS_2015_LV95" + elif "SIA405_WASSER_LV95" in model_list: + impmodel = "SIA405_WASSER_LV95" + else: + logger.info("None of the supported models was found!") + else: + # write that MODEL was not found + logger.info("MODEL information was " + impmodel + "!") + + # close xtf file to avoid conflicts + f.close() + + logger.info("MODEL found: " + str(impmodel)) + + # neu 23.7.2022 return imodel from get_xtf_model so it can be called in _init_.py + return impmodel + + +def get_xtf_model2(xtf_file): + logger.info("GET XTF MODEL xml version... 
") + # logger.info("vorher" + imodel) + # funktioniert nicht + # global imodel # define imodel as global variable for import model name + # impmodel = "" + + # open and read xtf / xml file line by line until + # + # + # read string between < and . -> eg. VSA_KEK_2019_LV95 + # impmodel + + model_list = [] + + # checkdatasection = -1 + impmodel = "not found" + + # from xml file + tree = ET.parse(xtf_file) + rootinterlis = tree.getroot() + logger.info("rootinterlis.findall:", rootinterlis.findall(".")) + + i = 0 + model_found = False + + while i < 15: + try: + j = i + i = i + 1 + model_list.append(rootinterlis[0][0][j].get("NAME")) + model_found = True + # except utils.various.CmdException: + except Exception: + if model_found: + logger.info(f"{i - 1} times MODEL information was found!") + break + else: + logger.info("No MODEL information was found!") + break + + print(model_list) + logger.info("model_list:") + logger.info(str(model_list)) + + if len(model_list) > 0: + # if impmodel == "not found": + # # write that MODEL was not found + # logger.info("MODEL was " + impmodel + " was not found!") + # else: + if "VSA_KEK_2019_LV95" in model_list: + impmodel = "VSA_KEK_2019_LV95" + elif "SIA405_ABWASSER_2015_LV95" in model_list: + impmodel = "SIA405_ABWASSER_2015_LV95" + elif "DSS_2015_LV95" in model_list: + impmodel = "DSS_2015_LV95" + elif "SIA405_WASSER_LV95" in model_list: + impmodel = "SIA405_WASSER_LV95" + else: + logger.info("None of the supported models was found!") + else: + # write that MODEL was not found + logger.info("MODEL information was " + impmodel + "!") + + logger.info("MODEL found: " + str(impmodel)) + print("MODEL found: ", str(impmodel)) + + # neu 23.7.2022 return imodel from get_xtf_model so it can be called in _init_.py + return impmodel + + +def import_xtf_data(schema, xtf_file, log_path): + logger.info("IMPORTING XTF DATA...") + exec_( + " ".join( + [ + f'"{config.JAVA}"', + "-jar", + f'"{config.ILI2PG}"', + "--import", + "--deleteData", + *get_pgconf_as_ili_args(), + "--dbschema", + f'"{schema}"', + "--modeldir", + f'"{config.ILI_FOLDER}"', + "--disableValidation", + "--skipReferenceErrors", + "--createTidCol", + "--noSmartMapping", + "--defaultSrsCode", + "2056", + "--log", + f'"{log_path}"', + f'"{xtf_file}"', + ] + ) + ) + + +def export_xtf_data(schema, model_name, export_model_name, xtf_file, log_path): + logger.info("EXPORT ILIDB...") + + # if optional export_model_name is set, add it to the args + if export_model_name: + export_model_name_args = ["--exportModels", export_model_name] + else: + export_model_name_args = [] + + exec_( + " ".join( + [ + f'"{config.JAVA}"', + "-jar", + f'"{config.ILI2PG}"', + "--export", + "--models", + f"{model_name}", + *export_model_name_args, + *get_pgconf_as_ili_args(), + "--dbschema", + f"{schema}", + "--modeldir", + f'"{config.ILI_FOLDER}"', + "--disableValidation", + "--skipReferenceErrors", + "--createTidCol", + "--noSmartMapping", + "--defaultSrsCode", + "2056", + "--log", + f'"{log_path}"', + "--trace", + f'"{xtf_file}"', + ] + ) + ) + + +class TidMaker: + """ + Helper class that creates globally unique integer primary key forili2pg class (t_id) + from a a QGEP/QWAT id (obj_id or id). 
+ """ + + def __init__(self, id_attribute="id"): + self._id_attr = id_attribute + self._autoincrementer = collections.defaultdict(lambda: len(self._autoincrementer)) + + def tid_for_row(self, row, for_class=None): + # tid are globally unique, while ids are only guaranteed unique per table, + # so include the base table in the key + # this finds the base class (the first parent class before sqlalchemy.ext.automap.Base) + class_for_id = row.__class__.__mro__[row.__class__.__mro__.index(AutomapBase) - 2] + key = (class_for_id, getattr(row, self._id_attr), for_class) + # was_created = key not in self._autoincrementer # just for debugging + tid = self._autoincrementer[key] + # if was_created: + # # just for debugging + # logger.info(f"created tid {tid} for {key}") + return tid + + def next_tid(self): + """Get an arbitrary unused tid""" + key = len(self._autoincrementer) + return self._autoincrementer[key] From 889791c97ce3b5616d17b1d9669bf8bce63654da Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 16:44:51 +0000 Subject: [PATCH 056/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/utils/ili2db.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index ef563659..ecb7b415 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -550,12 +550,12 @@ def get_ws_wn_ids(classname): ws_wn_ids = None else: # added cursor.execute again to see if with this all records will be available - #15.11.2024 added - see https://stackoverflow.com/questions/58101874/cursor-fetchall-or-other-method-fetchone-is-not-working + # 15.11.2024 added - see https://stackoverflow.com/questions/58101874/cursor-fetchall-or-other-method-fetchone-is-not-working cursor.execute( f"SELECT wn.obj_id FROM qgep_od.{classname} LEFT JOIN qgep_od.wastewater_networkelement wn ON wn.fk_wastewater_structure = {classname}.obj_id WHERE wn.obj_id is not NULL;" ) records = cursor.fetchall() - + # 15.11.2024 - does not get all records, but only n-1 for row in records: logger.debug(f" row[0] = {row[0]}") @@ -580,8 +580,8 @@ def get_ws_selected_ww_networkelements(selected_wwn): connection.set_session(autocommit=True) cursor = connection.cursor() - selection_text = '' - + selection_text = "" + for list_item in selected_wwn: selection_text += "'" selection_text += list_item @@ -590,9 +590,7 @@ def get_ws_selected_ww_networkelements(selected_wwn): # remove last komma to make it a correct IN statement selection_text = selection_text[:-1] - logger.debug( - f"selection_text = {selection_text} ..." 
- ) + logger.debug(f"selection_text = {selection_text} ...") ws_ids = [] @@ -601,7 +599,6 @@ def get_ws_selected_ww_networkelements(selected_wwn): f"SELECT ws.obj_id FROM qgep_od.wastewater_structure ws LEFT JOIN qgep_od.wastewater_networkelement wn ON wn.fk_wastewater_structure = ws.obj_id WHERE wn.obj_id IN ({selection_text});" ) - # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ # ws_wn_ids_count = int(cursor.fetchone()[0]) # if ws_wn_ids_count == 0: From 9b9b5cc3a8d1c31cc3ec9f9563a5551c79296de2 Mon Sep 17 00:00:00 2001 From: SJiB Date: Fri, 15 Nov 2024 17:49:21 +0100 Subject: [PATCH 057/127] take out old version --- qgepqwat2ili/utils/ili2db.py.old | 987 ------------------------------- 1 file changed, 987 deletions(-) delete mode 100644 qgepqwat2ili/utils/ili2db.py.old diff --git a/qgepqwat2ili/utils/ili2db.py.old b/qgepqwat2ili/utils/ili2db.py.old deleted file mode 100644 index 8bf1e8f7..00000000 --- a/qgepqwat2ili/utils/ili2db.py.old +++ /dev/null @@ -1,987 +0,0 @@ -import collections - -# 11.4.2023 -import xml.etree.ElementTree as ET - -import psycopg2 -from sqlalchemy.ext.automap import AutomapBase - -from .. import config -from .various import exec_, get_pgconf_as_ili_args, get_pgconf_as_psycopg2_dsn, logger - - -def check_organisation_subclass_data(): - """ - Check if subclass entries of organisation are set and match number of organisation entries - """ - logger.info("INTEGRITY CHECK organisations subclass data...") - - connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) - connection.set_session(autocommit=True) - cursor = connection.cursor() - - cursor.execute("SELECT obj_id FROM qgep_od.organisation;") - if cursor.rowcount > 0: - organisation_count = cursor.rowcount - logger.info(f"Number of organisation datasets: {organisation_count}") - for subclass in [ - ("administrative_office"), - ("waste_water_association"), - ("municipality"), - ("canton"), - ("cooperative"), - ("private"), - ("waste_water_treatment_plant"), - ]: - cursor.execute(f"SELECT obj_id FROM qgep_od.{subclass};") - logger.info(f"Number of {subclass} datasets: {cursor.rowcount}") - organisation_count = organisation_count - cursor.rowcount - - if organisation_count == 0: - organisation_subclass_check = True - logger.info( - "OK: number of subclass elements of class organisation OK in schema qgep_od!" 
- ) - else: - organisation_subclass_check = False - logger.info( - f"ERROR: number of subclass elements of organisation NOT CORRECT in schema qgep_od: checksum = {organisation_count} (positiv number means missing entries, negativ means too many subclass entries)" - ) - - return organisation_subclass_check - - -def check_wastewater_structure_subclass_data(): - """ - Check if subclass entries of wastewater_structure are set and match number of wastewater_structure entries - """ - logger.info("INTEGRITY CHECK wastewater_structures subclass data...") - - connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) - connection.set_session(autocommit=True) - cursor = connection.cursor() - - cursor.execute("SELECT obj_id FROM qgep_od.wastewater_structure;") - if cursor.rowcount > 0: - wastewater_structure_count = cursor.rowcount - logger.info(f"Number of wastewater_structure datasets: {wastewater_structure_count}") - for subclass in [ - ("manhole"), - ("channel"), - ("special_structure"), - ("infiltration_installation"), - ("discharge_point"), - ("wwtp_structure"), - ]: - cursor.execute(f"SELECT obj_id FROM qgep_od.{subclass};") - logger.info(f"Number of {subclass} datasets: {cursor.rowcount}") - wastewater_structure_count = wastewater_structure_count - cursor.rowcount - - if wastewater_structure_count == 0: - wastewater_structure_subclass_check = True - logger.info( - "OK: number of subclass elements of class wastewater_structure OK in schema qgep_od!" - ) - else: - wastewater_structure_subclass_check = False - logger.info( - f"ERROR: number of subclass elements of wastewater_structure NOT CORRECT in schema qgep_od: checksum = {wastewater_structure_count} (positiv number means missing entries, negativ means too many subclass entries)" - ) - - return wastewater_structure_subclass_check - - -def check_identifier_null(): - """ - Check if attribute identifier is Null - """ - logger.info("INTEGRITY CHECK missing identifiers...") - - connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) - connection.set_session(autocommit=True) - cursor = connection.cursor() - - missing_identifier_count = 0 - # add classes to be checked - for notsubclass in [ - # VSA-KEK - ("file"), - ("data_media"), - ("maintenance_event"), - # SIA405 Abwasser - ("organisation"), - ("wastewater_structure"), - ("wastewater_networkelement"), - ("structure_part"), - ("reach_point"), - ("pipe_profile"), - # VSA-DSS - ("catchment_area"), - ("connection_object"), - ("control_center"), - ("hazard_source"), - ("hydr_geometry"), - ("hydraulic_char_data"), - ("measurement_result"), - ("measurement_series"), - ("measuring_device"), - ("measuring_point"), - ("mechanical_pretreatment"), - ("overflow"), - ("overflow_char"), - ("retention_body"), - ("river_bank"), - ("river_bed"), - ("sector_water_body"), - ("substance"), - ("surface_runoff_parameters"), - ("surface_water_bodies"), - ("throttle_shut_off_unit"), - ("waste_water_treatment"), - ("water_catchment"), - ("water_control_structure"), - ("water_course_segment"), - ("wwtp_energy_use"), - ("zone"), - ]: - cursor.execute( - f"SELECT COUNT(obj_id) FROM qgep_od.{notsubclass} WHERE identifier is null;" - ) - # use cursor.fetchone()[0] instead of cursor.rowcount - # add variable and store result of cursor.fetchone()[0] as the next call will give None value instead of count https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ - - try: - class_identifier_count = int(cursor.fetchone()[0]) - except Exception: - class_identifier_count = 0 - logger.debug( - 
f"Number of datasets in class '{notsubclass}' without identifier could not be identified (TypeError: 'NoneType' object is not subscriptable). Automatically set class_identifier_count = 0" - ) - else: - logger.info( - f"Number of datasets in class '{notsubclass}' without identifier : {class_identifier_count}" - ) - - # if cursor.fetchone() is None: - if class_identifier_count == 0: - missing_identifier_count = missing_identifier_count - else: - # missing_identifier_count = missing_identifier_count + int(cursor.fetchone()[0]) - missing_identifier_count = missing_identifier_count + class_identifier_count - - # add for testing - logger.info(f"missing_identifier_count : {missing_identifier_count}") - - if missing_identifier_count == 0: - identifier_null_check = True - logger.info("OK: all identifiers set in qgep_od!") - else: - identifier_null_check = False - logger.info(f"ERROR: Missing identifiers in qgep_od: {missing_identifier_count}") - return identifier_null_check - - -def check_fk_owner_null(): - """ - Check if MAMDATORY fk_owner is Null - """ - logger.info("INTEGRITY CHECK missing MAMDATORY owner references fk_owner...") - - connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) - connection.set_session(autocommit=True) - cursor = connection.cursor() - - missing_fk_owner_count = 0 - # add MANDATORY classes to be checked - for notsubclass in [ - # SIA405 Abwasser - ("wastewater_structure"), - ]: - cursor.execute(f"SELECT COUNT(obj_id) FROM qgep_od.{notsubclass} WHERE fk_owner is null;") - # use cursor.fetchone()[0] instead of cursor.rowcount - # add variable and store result of cursor.fetchone()[0] as the next call will give None value instead of count https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ - class_fk_owner_count = int(cursor.fetchone()[0]) - # logger.info( - # f"Number of datasets in class '{notsubclass}' without fk_owner : {cursor.fetchone()[0]}" - # ) - logger.info( - f"Number of datasets in class '{notsubclass}' without fk_owner : {class_fk_owner_count}" - ) - - # if cursor.fetchone() is None: - if class_fk_owner_count == 0: - missing_fk_owner_count = missing_fk_owner_count - else: - # missing_fk_owner_count = missing_fk_owner_count + int(cursor.fetchone()[0]) - missing_fk_owner_count = missing_fk_owner_count + class_fk_owner_count - - # add for testing - logger.info(f"missing_fk_owner_count : {missing_fk_owner_count}") - - if missing_fk_owner_count == 0: - check_fk_owner_null = True - logger.info("OK: all mandatory fk_owner set in qgep_od!") - else: - check_fk_owner_null = False - logger.info(f"ERROR: Missing mandatory fk_owner in qgep_od: {missing_fk_owner_count}") - return check_fk_owner_null - - -def check_fk_operator_null(): - """ - Check if MAMDATORY fk_operator is Null - """ - logger.info("INTEGRITY CHECK missing MAMDATORY operator references fk_operator...") - - connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) - connection.set_session(autocommit=True) - cursor = connection.cursor() - - missing_fk_operator_count = 0 - - # add MANDATORY classes to be checked - for notsubclass in [ - # SIA405 Abwasser - ("wastewater_structure"), - ]: - cursor.execute( - f"SELECT COUNT(obj_id) FROM qgep_od.{notsubclass} WHERE fk_operator is null;" - ) - # use cursor.fetchone()[0] instead of cursor.rowcount - logger.info( - f"Number of datasets in class '{notsubclass}' without fk_operator : {cursor.fetchone()[0]}" - ) - - if cursor.fetchone() is None: - missing_fk_operator_count = missing_fk_operator_count - else: - 
missing_fk_operator_count = missing_fk_operator_count + int(cursor.fetchone()[0]) - # add for testing - logger.info(f"missing_fk_operator_count : {missing_fk_operator_count}") - - if missing_fk_operator_count == 0: - check_fk_operator_null = True - logger.info("OK: all mandatory fk_operator set in qgep_od!") - else: - check_fk_operator_null = False - logger.info( - f"ERROR: Missing mandatory fk_operator in qgep_od: {missing_fk_operator_count}" - ) - - return check_fk_operator_null - - -def check_fk_dataowner_null(): - """ - Check if MAMDATORY fk_dataowner is Null - """ - logger.info("INTEGRITY CHECK missing dataowner references fk_dataowner...") - - connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) - connection.set_session(autocommit=True) - cursor = connection.cursor() - - missing_fk_dataowner_count = 0 - # add MANDATORY classes to be checked - for notsubclass in [ - # VSA-KEK - ("file"), - ("data_media"), - ("maintenance_event"), - # SIA405 Abwasser - ("organisation"), - ("wastewater_structure"), - ("wastewater_networkelement"), - ("structure_part"), - ("reach_point"), - ("pipe_profile"), - # VSA-DSS - ("catchment_area"), - ("connection_object"), - ("control_center"), - ("hazard_source"), - ("hydr_geometry"), - ("hydraulic_char_data"), - ("measurement_result"), - ("measurement_series"), - ("measuring_device"), - ("measuring_point"), - ("mechanical_pretreatment"), - ("overflow"), - ("overflow_char"), - ("retention_body"), - ("river_bank"), - ("river_bed"), - ("sector_water_body"), - ("substance"), - ("surface_runoff_parameters"), - ("surface_water_bodies"), - ("throttle_shut_off_unit"), - ("waste_water_treatment"), - ("water_catchment"), - ("water_control_structure"), - ("water_course_segment"), - ("wwtp_energy_use"), - ("zone"), - ]: - cursor.execute( - f"SELECT COUNT(obj_id) FROM qgep_od.{notsubclass} WHERE fk_dataowner is null;" - ) - # use cursor.fetchone()[0] instead of cursor.rowcount - # add variable and store result of cursor.fetchone()[0] as the next call will give None value instead of count https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ - class_fk_dataowner_count = int(cursor.fetchone()[0]) - - # logger.info( - # f"Number of datasets in class '{notsubclass}' without fk_dataowner : {cursor.fetchone()[0]}" - # ) - logger.info( - f"Number of datasets in class '{notsubclass}' without fk_dataowner : {class_fk_dataowner_count}" - ) - - # if cursor.fetchone() is None: - if class_fk_dataowner_count == 0: - missing_fk_dataowner_count = missing_fk_dataowner_count - else: - # missing_fk_dataowner_count = missing_fk_dataowner_count + int(cursor.fetchone()[0]) - missing_fk_dataowner_count = missing_fk_dataowner_count + class_fk_dataowner_count - - # add for testing - logger.info(f"missing_fk_dataowner_count : {missing_fk_dataowner_count}") - - if missing_fk_dataowner_count == 0: - check_fk_dataowner_null = True - logger.info("OK: all mandatory fk_dataowner set in qgep_od!") - else: - check_fk_dataowner_null = False - logger.info( - f"ERROR: Missing mandatory fk_dataowner in qgep_od: {missing_fk_dataowner_count}" - ) - - return check_fk_dataowner_null - - -def check_fk_provider_null(): - """ - Check if MAMDATORY fk_provider is Null - """ - logger.info("INTEGRITY CHECK missing provider references fk_provider...") - - connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) - connection.set_session(autocommit=True) - cursor = connection.cursor() - - missing_fk_provider_count = 0 - # add MANDATORY classes to be checked - for notsubclass in [ 
- # VSA-KEK - ("file"), - ("data_media"), - ("maintenance_event"), - # SIA405 Abwasser - ("organisation"), - ("wastewater_structure"), - ("wastewater_networkelement"), - ("structure_part"), - ("reach_point"), - ("pipe_profile"), - # VSA-DSS - ("catchment_area"), - ("connection_object"), - ("control_center"), - ("hazard_source"), - ("hydr_geometry"), - ("hydraulic_char_data"), - ("measurement_result"), - ("measurement_series"), - ("measuring_device"), - ("measuring_point"), - ("mechanical_pretreatment"), - ("overflow"), - ("overflow_char"), - ("retention_body"), - ("river_bank"), - ("river_bed"), - ("sector_water_body"), - ("substance"), - ("surface_runoff_parameters"), - ("surface_water_bodies"), - ("throttle_shut_off_unit"), - ("waste_water_treatment"), - ("water_catchment"), - ("water_control_structure"), - ("water_course_segment"), - ("wwtp_energy_use"), - ("zone"), - ]: - cursor.execute( - f"SELECT COUNT(obj_id) FROM qgep_od.{notsubclass} WHERE fk_provider is null;" - ) - # use cursor.fetchone()[0] instead of cursor.rowcount - # add variable and store result of cursor.fetchone()[0] as the next call will give None value instead of count https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ - class_fk_provider_count = int(cursor.fetchone()[0]) - # logger.info( - # f"Number of datasets in class '{notsubclass}' without fk_provider : {cursor.fetchone()[0]}" - # ) - logger.info( - f"Number of datasets in class '{notsubclass}' without fk_dataowner : {class_fk_provider_count}" - ) - - # if cursor.fetchone() is None: - if class_fk_provider_count == 0: - missing_fk_provider_count = missing_fk_provider_count - else: - # missing_fk_provider_count = missing_fk_provider_count + int(cursor.fetchone()[0]) - missing_fk_provider_count = missing_fk_provider_count + class_fk_provider_count - - # add for testing - logger.info(f"missing_fk_provider_count : {missing_fk_provider_count}") - - if missing_fk_provider_count == 0: - check_fk_provider_null = True - logger.info("OK: all mandatory fk_provider set in qgep_od!") - else: - check_fk_provider_null = False - logger.info( - f"ERROR: Missing mandatory fk_provider in qgep_od: {missing_fk_provider_count}" - ) - - return check_fk_provider_null - - -def skip_wwtp_structure_ids_old(): - """ - Get list of id's of class wastewater_structure without wwtp_structure (ARABauwerk) - """ - logger.info("get list of id's of class wwtp_structure (ARABauwerk)...") - - connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) - connection.set_session(autocommit=True) - cursor = connection.cursor() - - not_wwtp_structure_ids = [] - - # select all obj_id from wastewater_structure that are not in wwtp_structure - cursor.execute( - "SELECT * FROM qgep_od.wastewater_structure WHERE obj_id NOT IN (SELECT obj_id FROM qgep_od.wwtp_structure);" - ) - - # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ - # wwtp_structure_count = int(cursor.fetchone()[0]) - # if wwtp_structure_count == 0: - if cursor.fetchone() is None: - not_wwtp_structure_ids = None - else: - records = cursor.fetchall() - for row in records: - logger.debug(f" row[0] = {row[0]}") - # https://www.pythontutorial.net/python-string-methods/python-string-concatenation/ - # not_wwtp_structure_ids = not_wwtp_structure_ids + str(row[0]) + "," - strrow = str(row[0]) - # not_wwtp_structure_ids = ','.join([not_wwtp_structure_ids, strrow]) - # not_wwtp_structure_ids = not_wwtp_structure_ids + row[0] - 
not_wwtp_structure_ids.append(strrow) - logger.debug(f" building up '{not_wwtp_structure_ids}' ...") - - return not_wwtp_structure_ids - - -# 12.11.2024 to clean up - get_ws_wn_ids kann das auch -def get_cl_re_ids(classname): - """ - Get list of id's of reaches of the channels provided - """ - - # define classes that this is allowed to use - adapt for TWW to include model changes - if classname == "channel": - logger.info(f"get list of id's of wastewater_nodes of {classname} ...") - - connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) - connection.set_session(autocommit=True) - cursor = connection.cursor() - - cl_re_ids = [] - - # select all obj_id of the wastewater_nodes of wwtp_structure - cursor.execute( - "SELECT wn.obj_id FROM qgep_od.channel LEFT JOIN qgep_od.wastewater_networkelement wn ON wn.fk_wastewater_structure = channel.obj_id WHERE wn.obj_id is not NULL;" - ) - - # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ - # cl_re_ids_count = int(cursor.fetchone()[0]) - # if cl_re_ids_count == 0: - if cursor.fetchone() is None: - cl_re_ids = None - else: - records = cursor.fetchall() - for row in records: - logger.debug(f" row[0] = {row[0]}") - # https://www.pythontutorial.net/python-string-methods/python-string-concatenation/ - strrow = str(row[0]) - cl_re_ids.append(strrow) - logger.debug(f" building up '{cl_re_ids}' ...") - - return cl_re_ids - else: - logger.warning(f"Do not use this function with {classname} !") - return None - - -def get_ws_wn_ids(classname): - """ - Get list of id's of wastewater_nodes of the wastewater_structure (sub)class provided, eg. wwtp_structure (ARABauwerk, does also work for channel (give reaches then) - """ - - logger.info(f"get list of id's of wastewater_nodes of {classname} ...") - connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) - connection.set_session(autocommit=True) - cursor = connection.cursor() - - ws_wn_ids = [] - - # select all obj_id of the wastewater_nodes of wwtp_structure - cursor.execute( - f"SELECT wn.obj_id FROM qgep_od.{classname} LEFT JOIN qgep_od.wastewater_networkelement wn ON wn.fk_wastewater_structure = {classname}.obj_id WHERE wn.obj_id is not NULL;" - ) - - # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ - # ws_wn_ids_count = int(cursor.fetchone()[0]) - # if ws_wn_ids_count == 0: - if cursor.fetchone() is None: - ws_wn_ids = None - else: - records = cursor.fetchall() - for row in records: - logger.debug(f" row[0] = {row[0]}") - # https://www.pythontutorial.net/python-string-methods/python-string-concatenation/ - strrow = str(row[0]) - if strrow is not None: - ws_wn_ids.append(strrow) - # logger.debug(f" building up '{ws_wn_ids}' ...") - - return ws_wn_ids - - -def get_ws_selected_ww_networkelements(selection): - """ - Get list of id's of wastewater_structure from selected wastewater_network_elements - """ - - logger.info( - f"get list of id's of wastewater_structure of selected wastewater_network_elements {selection} ..." 
- ) - connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) - connection.set_session(autocommit=True) - cursor = connection.cursor() - - ws_ids = [] - - # select all obj_id of the wastewater_nodes of wwtp_structure - cursor.execute( - f"SELECT ws.obj_id FROM qgep_od.wastewater_structure ws LEFT JOIN qgep_od.wastewater_networkelement wn ON wn.fk_wastewater_structure = ws.obj_id WHERE wn.obj_id IN {selection}" - ) - - # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ - # ws_wn_ids_count = int(cursor.fetchone()[0]) - # if ws_wn_ids_count == 0: - if cursor.fetchone() is None: - ws_ids = None - else: - records = cursor.fetchall() - for row in records: - logger.debug(f" row[0] = {row[0]}") - # https://www.pythontutorial.net/python-string-methods/python-string-concatenation/ - strrow = str(row[0]) - if strrow is not None: - ws_ids.append(strrow) - # logger.debug(f" building up '{ws_wn_ids}' ...") - - return ws_ids - - -def remove_from_selection(selected_ids, remove_ids): - """ - Remove ids from selected_ids - """ - - for list_item in remove_ids: - # selected_ids = selected_ids.remove(list_item) - selected_ids.remove(list_item) - - return selected_ids - - -def add_to_selection(selected_ids, add_ids): - """ - Append ids to selected_ids - """ - - if selected_ids is None: - selected_ids = [] - - for list_item in add_ids: - # selected_ids = selected_ids.append(list_item) - selected_ids.append(list_item) - - return selected_ids - - -def create_ili_schema(schema, model, log_path, recreate_schema=False): - """ - Create schema for INTERLIS import - """ - logger.info("CONNECTING TO DATABASE...") - - connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) - connection.set_session(autocommit=True) - cursor = connection.cursor() - - if not recreate_schema: - # If the schema already exists, we just truncate all tables - cursor.execute( - f"SELECT schema_name FROM information_schema.schemata WHERE schema_name = '{schema}';" - ) - if cursor.rowcount > 0: - logger.info(f"Schema {schema} already exists, we truncate instead") - cursor.execute( - f"SELECT table_name FROM information_schema.tables WHERE table_schema = '{schema}';" - ) - for row in cursor.fetchall(): - cursor.execute(f"TRUNCATE TABLE {schema}.{row[0]} CASCADE;") - return - - logger.info(f"DROPPING THE SCHEMA {schema}...") - cursor.execute(f'DROP SCHEMA IF EXISTS "{schema}" CASCADE ;') - logger.info(f"CREATING THE SCHEMA {schema}...") - cursor.execute(f'CREATE SCHEMA "{schema}";') - connection.commit() - connection.close() - - logger.info(f"ILIDB SCHEMAIMPORT INTO {schema}...") - exec_( - " ".join( - [ - f'"{config.JAVA}"', - "-jar", - f'"{config.ILI2PG}"', - "--schemaimport", - *get_pgconf_as_ili_args(), - "--dbschema", - f"{schema}", - "--setupPgExt", - "--createGeomIdx", - "--createFk", - "--createFkIdx", - "--createTidCol", - "--importTid", - "--noSmartMapping", - "--defaultSrsCode", - "2056", - "--log", - f'"{log_path}"', - "--nameLang", - "de", - f"{model}", - ] - ) - ) - - -def validate_xtf_data(xtf_file, log_path): - """ - Run XTF validation using ilivalidator - """ - logger.info("VALIDATING XTF DATA...") - exec_( - f'"{config.JAVA}" -jar "{config.ILIVALIDATOR}" --modeldir "{config.ILI_FOLDER}" --log "{log_path}" "{xtf_file}"' - ) - - -# 22.7.2022 sb -def get_xtf_model(xtf_file): - """ - Get XTF model from file - """ - logger.info(f"GET XTF MODEL {xtf_file} ... 
") - # logger.info("vorher" + imodel) - # funktioniert nicht - # global imodel # define imodel as global variable for import model name - # impmodel = "" - - # open and read xtf / xml file line by line until - # - # - # read string between < and . -> eg. VSA_KEK_2019_LV95 - # impmodel - from io import open - - model_list = [] - - # checkdatasection = -1 - checkmodelssection = -1 - impmodel = "not found" - - with open(xtf_file, encoding="utf-8") as f: - while True: - # if checkdatasection == -1: - if checkmodelssection == -1: - - line = f.readline() - if not line: - break - else: - # checkdatasection = line.find('') - # logger.info(str(checkdatasection)) - checkmodelssection = line.find("") - logger.info("checkmodelssection " + str(checkmodelssection)) - logger.info(str(line)) - else: - line2 = f.readline() - if not line2: - break - else: - logger.info(str(line2)) - logger.info("line2: ", str(line2)) - # logger.info(str(checkdatasection)) - logger.info("checkmodelssection2 " + str(checkmodelssection)) - # strmodel = str(line2.strip()) - strmodel = str(line2) - strmodel = strmodel.strip() - logger.info("strmodel: " + strmodel) - logger.info("strmodel: ", strmodel) - logger.info(f"strmodel: {strmodel}") - a = strmodel.find("") - logger.info("strmodel.find a : " + str(a)) - # if strmodel.find("") > -1: - if a == -1: - b = strmodel.find("") - logger.info(r"strmodel.find b \ -1: - logger.info("MODELS definition found in xtf: " + strmodel) - # - # read string between < and . -> eg. VSA_KEK_2019_LV95 - - # result = re.search('<(.*).',strmodel) - # result = str(result.group(1)) - # result2 = result.split('.',1) - # result3 = str(result2[0]) - # result4 = result3.strip('<') - # impmodel = str(result4) - # Search MODELNAME in MODEL entry: # - char1 = "=" - char2 = "VERSION=" - result = strmodel[strmodel.find(char1) + 1 : strmodel.find(char2)] - # result = re.search('<(.*).',strmodel) - # result = str(result.group(1)) - # result2 = result.split('.',1) - # result3 = str(result2[0]) - # result4 = result3.strip('<') - # impmodel = str(result4) - # strip spaces - result = result.strip() - # strip " - result = result.strip('"') - logger.info("MODEL found: " + str(result)) - logger.info(result) - model_list.append(result) - else: - logger.info("goto next line") - else: - logger.info(" found - stop checking!") - break - logger.info("model_list:") - logger.info(str(model_list)) - - if len(model_list) > 0: - # if impmodel == "not found": - # # write that MODEL was not found - # logger.info("MODEL was " + impmodel + " was not found!") - # else: - if "VSA_KEK_2019_LV95" in model_list: - impmodel = "VSA_KEK_2019_LV95" - elif "SIA405_ABWASSER_2015_LV95" in model_list: - impmodel = "SIA405_ABWASSER_2015_LV95" - elif "DSS_2015_LV95" in model_list: - impmodel = "DSS_2015_LV95" - elif "SIA405_WASSER_LV95" in model_list: - impmodel = "SIA405_WASSER_LV95" - else: - logger.info("None of the supported models was found!") - else: - # write that MODEL was not found - logger.info("MODEL information was " + impmodel + "!") - - # close xtf file to avoid conflicts - f.close() - - logger.info("MODEL found: " + str(impmodel)) - - # neu 23.7.2022 return imodel from get_xtf_model so it can be called in _init_.py - return impmodel - - -def get_xtf_model2(xtf_file): - logger.info("GET XTF MODEL xml version... 
") - # logger.info("vorher" + imodel) - # funktioniert nicht - # global imodel # define imodel as global variable for import model name - # impmodel = "" - - # open and read xtf / xml file line by line until - # - # - # read string between < and . -> eg. VSA_KEK_2019_LV95 - # impmodel - - model_list = [] - - # checkdatasection = -1 - impmodel = "not found" - - # from xml file - tree = ET.parse(xtf_file) - rootinterlis = tree.getroot() - logger.info("rootinterlis.findall:", rootinterlis.findall(".")) - - i = 0 - model_found = False - - while i < 15: - try: - j = i - i = i + 1 - model_list.append(rootinterlis[0][0][j].get("NAME")) - model_found = True - # except utils.various.CmdException: - except Exception: - if model_found: - logger.info(f"{i - 1} times MODEL information was found!") - break - else: - logger.info("No MODEL information was found!") - break - - print(model_list) - logger.info("model_list:") - logger.info(str(model_list)) - - if len(model_list) > 0: - # if impmodel == "not found": - # # write that MODEL was not found - # logger.info("MODEL was " + impmodel + " was not found!") - # else: - if "VSA_KEK_2019_LV95" in model_list: - impmodel = "VSA_KEK_2019_LV95" - elif "SIA405_ABWASSER_2015_LV95" in model_list: - impmodel = "SIA405_ABWASSER_2015_LV95" - elif "DSS_2015_LV95" in model_list: - impmodel = "DSS_2015_LV95" - elif "SIA405_WASSER_LV95" in model_list: - impmodel = "SIA405_WASSER_LV95" - else: - logger.info("None of the supported models was found!") - else: - # write that MODEL was not found - logger.info("MODEL information was " + impmodel + "!") - - logger.info("MODEL found: " + str(impmodel)) - print("MODEL found: ", str(impmodel)) - - # neu 23.7.2022 return imodel from get_xtf_model so it can be called in _init_.py - return impmodel - - -def import_xtf_data(schema, xtf_file, log_path): - logger.info("IMPORTING XTF DATA...") - exec_( - " ".join( - [ - f'"{config.JAVA}"', - "-jar", - f'"{config.ILI2PG}"', - "--import", - "--deleteData", - *get_pgconf_as_ili_args(), - "--dbschema", - f'"{schema}"', - "--modeldir", - f'"{config.ILI_FOLDER}"', - "--disableValidation", - "--skipReferenceErrors", - "--createTidCol", - "--noSmartMapping", - "--defaultSrsCode", - "2056", - "--log", - f'"{log_path}"', - f'"{xtf_file}"', - ] - ) - ) - - -def export_xtf_data(schema, model_name, export_model_name, xtf_file, log_path): - logger.info("EXPORT ILIDB...") - - # if optional export_model_name is set, add it to the args - if export_model_name: - export_model_name_args = ["--exportModels", export_model_name] - else: - export_model_name_args = [] - - exec_( - " ".join( - [ - f'"{config.JAVA}"', - "-jar", - f'"{config.ILI2PG}"', - "--export", - "--models", - f"{model_name}", - *export_model_name_args, - *get_pgconf_as_ili_args(), - "--dbschema", - f"{schema}", - "--modeldir", - f'"{config.ILI_FOLDER}"', - "--disableValidation", - "--skipReferenceErrors", - "--createTidCol", - "--noSmartMapping", - "--defaultSrsCode", - "2056", - "--log", - f'"{log_path}"', - "--trace", - f'"{xtf_file}"', - ] - ) - ) - - -class TidMaker: - """ - Helper class that creates globally unique integer primary key forili2pg class (t_id) - from a a QGEP/QWAT id (obj_id or id). 
- """ - - def __init__(self, id_attribute="id"): - self._id_attr = id_attribute - self._autoincrementer = collections.defaultdict(lambda: len(self._autoincrementer)) - - def tid_for_row(self, row, for_class=None): - # tid are globally unique, while ids are only guaranteed unique per table, - # so include the base table in the key - # this finds the base class (the first parent class before sqlalchemy.ext.automap.Base) - class_for_id = row.__class__.__mro__[row.__class__.__mro__.index(AutomapBase) - 2] - key = (class_for_id, getattr(row, self._id_attr), for_class) - # was_created = key not in self._autoincrementer # just for debugging - tid = self._autoincrementer[key] - # if was_created: - # # just for debugging - # logger.info(f"created tid {tid} for {key}") - return tid - - def next_tid(self): - """Get an arbitrary unused tid""" - key = len(self._autoincrementer) - return self._autoincrementer[key] From df104a11d3dbe55f270bd14a1c0c00b66d1ebb20 Mon Sep 17 00:00:00 2001 From: SJiB Date: Fri, 15 Nov 2024 17:50:20 +0100 Subject: [PATCH 058/127] adapt filters dss export --- qgepqwat2ili/qgepdss/export.py | 31 ++++++++++--------------------- 1 file changed, 10 insertions(+), 21 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index c7f27c30..75a9355e 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -10,6 +10,7 @@ # 4.10.2024 # from ..utils.ili2db import skip_wwtp_structure_ids # 6.11.2024 replaced with - to check if really necessary here (as no sia405 abwasser exceptions needed) +from ..utils.ili2db import add_to_selection, get_ws_wn_ids, remove_from_selection from ..utils.various import logger from .model_abwasser import get_abwasser_model from .model_qgep import get_qgep_model @@ -1459,10 +1460,7 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.pipe_profile -> ABWASSER.rohrprofil, ABWASSER.metaattribute") query = qgep_session.query(QGEP.pipe_profile) - if filtered: - query = query.join(QGEP.reach).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) - ) + for row in query: # AVAILABLE FIELDS IN QGEP.pipe_profile @@ -1608,10 +1606,8 @@ def textpos_common(row, t_type, geojson_crs_def): "Exporting QGEP.control_center -> ABWASSER.steuerungszentrale, ABWASSER.metaattribute" ) query = qgep_session.query(QGEP.control_center) - if filtered: - query = query.join(QGEP.throttle_shut_off_unit, QGEP.wastewater_node).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) - ) + # Always export all, no filtering + for row in query: # AVAILABLE FIELDS IN QGEP.control_center @@ -1938,10 +1934,7 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.hydr_geometry -> ABWASSER.hydr_geometrie, ABWASSER.metaattribute") query = qgep_session.query(QGEP.hydr_geometry) - if filtered: - query = query.join(QGEP.wastewater_node).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) - ) + for row in query: # AVAILABLE FIELDS IN QGEP.hydr_geometry @@ -2119,10 +2112,6 @@ def textpos_common(row, t_type, geojson_crs_def): "Exporting QGEP.profile_geometry -> ABWASSER.rohrprofil_geometrie, ABWASSER.metaattribute" ) query = qgep_session.query(QGEP.profile_geometry) - if filtered: - query = query.join(QGEP.pipe_profile, QGEP.reach).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) - ) for row in query: # AVAILABLE FIELDS IN QGEP.profile_geometry @@ -2156,10 +2145,7 @@ def textpos_common(row, t_type, geojson_crs_def): "Exporting QGEP.hydr_geom_relation -> 
ABWASSER.hydr_geomrelation, ABWASSER.metaattribute" ) query = qgep_session.query(QGEP.hydr_geom_relation) - if filtered: - query = query.join(QGEP.hydr_geometry, QGEP.wastewater_node).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) - ) + for row in query: # AVAILABLE FIELDS IN QGEP.hydr_geom_relation @@ -2194,7 +2180,10 @@ def textpos_common(row, t_type, geojson_crs_def): ) query = qgep_session.query(QGEP.mechanical_pretreatment) if filtered: - query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + query = query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ).join(QGEP.wastewater_networkelement).filter( QGEP.wastewater_networkelement.obj_id.in_(subset_ids) ) for row in query: From fb01fdbfb150b25dfecdf21283a8f5cc8662b58f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 16:51:51 +0000 Subject: [PATCH 059/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepdss/export.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 75a9355e..bd54241d 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -10,7 +10,6 @@ # 4.10.2024 # from ..utils.ili2db import skip_wwtp_structure_ids # 6.11.2024 replaced with - to check if really necessary here (as no sia405 abwasser exceptions needed) -from ..utils.ili2db import add_to_selection, get_ws_wn_ids, remove_from_selection from ..utils.various import logger from .model_abwasser import get_abwasser_model from .model_qgep import get_qgep_model @@ -2180,11 +2179,13 @@ def textpos_common(row, t_type, geojson_crs_def): ) query = qgep_session.query(QGEP.mechanical_pretreatment) if filtered: - query = query.join( + query = ( + query.join( QGEP.wastewater_structure, QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, - ).join(QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) for row in query: From 531cc02ee119c6122cae65981a152231c6a6ce24 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 16:41:13 +0100 Subject: [PATCH 060/127] Correct query filtered mechanical_pretreatment --- qgepqwat2ili/qgepdss/export.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index bd54241d..589024ee 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -2182,11 +2182,14 @@ def textpos_common(row, t_type, geojson_crs_def): query = ( query.join( QGEP.wastewater_structure, - QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + QGEP.mechanical_pretreatment.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, ) .join(QGEP.wastewater_networkelement) .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) + # add sql statement to logger + statement = query.statement + logger.info(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.mechanical_pretreatment From a5c578af0005038d1792fbe69fb786240e179135 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 19 Nov 
2024 15:41:30 +0000 Subject: [PATCH 061/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepdss/export.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 589024ee..d0228bd3 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -2182,7 +2182,8 @@ def textpos_common(row, t_type, geojson_crs_def): query = ( query.join( QGEP.wastewater_structure, - QGEP.mechanical_pretreatment.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + QGEP.mechanical_pretreatment.fk_wastewater_structure + == QGEP.wastewater_structure.obj_id, ) .join(QGEP.wastewater_networkelement) .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) From 24859f00b22644c060527b07b2da036eb8f7cdf6 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 16:53:08 +0100 Subject: [PATCH 062/127] ili2db.py - improve remove_from_selection function --- qgepqwat2ili/utils/ili2db.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index ecb7b415..aa00fea8 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -619,12 +619,15 @@ def get_ws_selected_ww_networkelements(selected_wwn): def remove_from_selection(selected_ids, remove_ids): """ - Remove ids from selected_ids + Remove ids from selected_ids if they are in selected_ids """ for list_item in remove_ids: # selected_ids = selected_ids.remove(list_item) - selected_ids.remove(list_item) + try: + selected_ids.remove(list_item) + except Exception: + logger.debug logger.debug(f" remove_from_selection: '{list_item}' not in selected_ids - could not be removed!") return selected_ids From 0a6fd60ecc99f394ea91f8069b67317f9deb1526 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 16:54:51 +0100 Subject: [PATCH 063/127] correction --- qgepqwat2ili/utils/ili2db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index aa00fea8..c328f50a 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -627,7 +627,7 @@ def remove_from_selection(selected_ids, remove_ids): try: selected_ids.remove(list_item) except Exception: - logger.debug logger.debug(f" remove_from_selection: '{list_item}' not in selected_ids - could not be removed!") + logger.debug(f" remove_from_selection: '{list_item}' not in selected_ids - could not be removed!") return selected_ids From 4d2d405310e2bd1fccfad5ead819acf346e77d44 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 15:55:05 +0000 Subject: [PATCH 064/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/utils/ili2db.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index c328f50a..015c76db 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -627,7 +627,9 @@ def remove_from_selection(selected_ids, remove_ids): try: selected_ids.remove(list_item) except Exception: - logger.debug(f" remove_from_selection: '{list_item}' not in selected_ids - could not be removed!") + logger.debug( + f" remove_from_selection: '{list_item}' not in selected_ids - could not be removed!" 
+ ) return selected_ids From 083686469c4b3b84b80d05db6f53485c4459317f Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 17:18:34 +0100 Subject: [PATCH 065/127] add explicit join for filter retention_body --- qgepqwat2ili/qgepdss/export.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index d0228bd3..d21cd4a3 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -2190,7 +2190,7 @@ def textpos_common(row, t_type, geojson_crs_def): ) # add sql statement to logger statement = query.statement - logger.info(f" selection query = {statement}") + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.mechanical_pretreatment @@ -2226,9 +2226,15 @@ def textpos_common(row, t_type, geojson_crs_def): ) query = qgep_session.query(QGEP.retention_body) if filtered: - query = query.join(QGEP.infiltration_installation, QGEP.wastewater_networkelement).filter( + # explicit join on added + query = query.join( + QGEP.infiltration_installation, QGEP.retention_body.fk_infiltration_installation + == QGEP.infiltration_installation.obj_id,).join( QGEP.wastewater_networkelement).filter( QGEP.wastewater_networkelement.obj_id.in_(subset_ids) ) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.retention_body @@ -2267,7 +2273,7 @@ def textpos_common(row, t_type, geojson_crs_def): if filtered: # add sql statement to logger statement = query.statement - logger.info(f" always export all overflow_char datasets query = {statement}") + logger.debug(f" always export all overflow_char datasets query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.overflow_char @@ -2306,7 +2312,7 @@ def textpos_common(row, t_type, geojson_crs_def): ) # add sql statement to logger statement = query.statement - logger.info(f" selection query = {statement}") + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.hq_relation From 1f9ab3c1d517c45f411db578120bd9e4ba185926 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 16:18:50 +0000 Subject: [PATCH 066/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepdss/export.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index d21cd4a3..323c9f4b 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -2227,10 +2227,14 @@ def textpos_common(row, t_type, geojson_crs_def): query = qgep_session.query(QGEP.retention_body) if filtered: # explicit join on added - query = query.join( - QGEP.infiltration_installation, QGEP.retention_body.fk_infiltration_installation - == QGEP.infiltration_installation.obj_id,).join( QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + query = ( + query.join( + QGEP.infiltration_installation, + QGEP.retention_body.fk_infiltration_installation + == QGEP.infiltration_installation.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) # add sql statement to logger statement = query.statement From 76f555d65896608ba5170173ee9936e03d4275dd Mon Sep 17 00:00:00 2001 From: SJiB 
Date: Tue, 19 Nov 2024 17:44:32 +0100 Subject: [PATCH 067/127] adjust filter subclasses structure_part dss --- qgepqwat2ili/qgepdss/export.py | 107 ++++++++++++++++++++++++++++----- 1 file changed, 92 insertions(+), 15 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 323c9f4b..8be581f0 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -2350,10 +2350,21 @@ def textpos_common(row, t_type, geojson_crs_def): "Exporting QGEP.dryweather_downspout -> ABWASSER.trockenwetterfallrohr, ABWASSER.metaattribute" ) query = qgep_session.query(QGEP.dryweather_downspout) - if filtered: - query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + logger.info(f"filtered: subset_ids = {subset_ids}") + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.dryweather_downspout @@ -2388,9 +2399,20 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.access_aid -> ABWASSER.einstiegshilfe, ABWASSER.metaattribute") query = qgep_session.query(QGEP.access_aid) if filtered: - query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.access_aid @@ -2427,9 +2449,20 @@ def textpos_common(row, t_type, geojson_crs_def): ) query = qgep_session.query(QGEP.dryweather_flume) if filtered: - query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.dryweather_flume @@ -2464,9 +2497,20 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.cover -> ABWASSER.deckel, ABWASSER.metaattribute") query = qgep_session.query(QGEP.cover) if filtered: - query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - 
QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.cover @@ -2514,9 +2558,20 @@ def textpos_common(row, t_type, geojson_crs_def): ) query = qgep_session.query(QGEP.electric_equipment) if filtered: - query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.electric_equipment @@ -2555,9 +2610,20 @@ def textpos_common(row, t_type, geojson_crs_def): ) query = qgep_session.query(QGEP.electromechanical_equipment) if filtered: - query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.electromechanical_equipment @@ -2594,9 +2660,20 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.benching -> ABWASSER.bankett, ABWASSER.metaattribute") query = qgep_session.query(QGEP.benching) if filtered: - query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.benching From 50b92081afd70979cf51daebae5947dbbc013d99 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 17:44:49 +0100 Subject: [PATCH 068/127] adjust filter subclasses structure_part kek --- qgepqwat2ili/qgep/export.py | 76 ++++++++++++++++++++++++++++++++----- 1 
file changed, 66 insertions(+), 10 deletions(-) diff --git a/qgepqwat2ili/qgep/export.py b/qgepqwat2ili/qgep/export.py index 8111eb49..cf3de478 100644 --- a/qgepqwat2ili/qgep/export.py +++ b/qgepqwat2ili/qgep/export.py @@ -719,9 +719,21 @@ def textpos_common(row, t_type, geojson_crs_def): ) query = qgep_session.query(QGEP.dryweather_downspout) if filtered: - query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + logger.info(f"filtered: subset_ids = {subset_ids}") + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.dryweather_downspout @@ -757,9 +769,20 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.access_aid -> ABWASSER.einstiegshilfe, ABWASSER.metaattribute") query = qgep_session.query(QGEP.access_aid) if filtered: - query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.access_aid @@ -797,9 +820,20 @@ def textpos_common(row, t_type, geojson_crs_def): ) query = qgep_session.query(QGEP.dryweather_flume) if filtered: - query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.dryweather_flume @@ -835,9 +869,20 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.cover -> ABWASSER.deckel, ABWASSER.metaattribute") query = qgep_session.query(QGEP.cover) if filtered: - query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == 
QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.cover @@ -882,9 +927,20 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.benching -> ABWASSER.bankett, ABWASSER.metaattribute") query = qgep_session.query(QGEP.benching) if filtered: - query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.benching From 303f62317f8d8f239b366f1c7305063d42274417 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 17:45:19 +0100 Subject: [PATCH 069/127] adjust logger info to debug in sia405 export --- qgepqwat2ili/qgepsia405/export.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 092b96cc..2f0f6713 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -745,7 +745,7 @@ def textpos_common(row, t_type, geojson_crs_def): ) # add sql statement to logger statement = query.statement - logger.info(f" selection query = {statement}") + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.dryweather_downspout @@ -794,7 +794,7 @@ def textpos_common(row, t_type, geojson_crs_def): ) # add sql statement to logger statement = query.statement - logger.info(f" selection query = {statement}") + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.access_aid @@ -845,7 +845,7 @@ def textpos_common(row, t_type, geojson_crs_def): ) # add sql statement to logger statement = query.statement - logger.info(f" selection query = {statement}") + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.dryweather_flume @@ -894,7 +894,7 @@ def textpos_common(row, t_type, geojson_crs_def): ) # add sql statement to logger statement = query.statement - logger.info(f" selection query = {statement}") + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.cover @@ -952,7 +952,7 @@ def textpos_common(row, t_type, geojson_crs_def): ) # add sql statement to logger statement = query.statement - logger.info(f" selection query = {statement}") + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.benching From b996143b0e9a50ad439d25d8dc2a18032237b34b Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 16:45:33 +0000 Subject: [PATCH 070/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgep/export.py | 4 ++-- 1 file changed, 2 insertions(+), 2 
deletions(-) diff --git a/qgepqwat2ili/qgep/export.py b/qgepqwat2ili/qgep/export.py index cf3de478..8e01d749 100644 --- a/qgepqwat2ili/qgep/export.py +++ b/qgepqwat2ili/qgep/export.py @@ -721,7 +721,7 @@ def textpos_common(row, t_type, geojson_crs_def): if filtered: logger.info(f"filtered: subset_ids = {subset_ids}") # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) # ) query = ( query.join( @@ -769,7 +769,7 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.access_aid -> ABWASSER.einstiegshilfe, ABWASSER.metaattribute") query = qgep_session.query(QGEP.access_aid) if filtered: - # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) # ) query = ( From c6d6039a8c4dc107a699d3d6d8844fd1737e725d Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 17:54:49 +0100 Subject: [PATCH 071/127] readd if filtered: --- qgepqwat2ili/qgepdss/export.py | 1 + 1 file changed, 1 insertion(+) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 8be581f0..c2a716d1 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -2350,6 +2350,7 @@ def textpos_common(row, t_type, geojson_crs_def): "Exporting QGEP.dryweather_downspout -> ABWASSER.trockenwetterfallrohr, ABWASSER.metaattribute" ) query = qgep_session.query(QGEP.dryweather_downspout) + if filtered: logger.info(f"filtered: subset_ids = {subset_ids}") # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) From a82a82d705eccbab9037f1302887b4ca061a3655 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 16:55:16 +0000 Subject: [PATCH 072/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepdss/export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index c2a716d1..7f835366 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -2400,7 +2400,7 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.access_aid -> ABWASSER.einstiegshilfe, ABWASSER.metaattribute") query = qgep_session.query(QGEP.access_aid) if filtered: - # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) # ) query = ( From c17590d9a7e2bb59aae54b3c796ec76588111678 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 18:01:32 +0100 Subject: [PATCH 073/127] dss export adapt filter harzard_source --- qgepqwat2ili/qgepdss/export.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index c2a716d1..1a1d0329 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -2867,9 +2867,12 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.hazard_source -> ABWASSER.gefahrenquelle, 
ABWASSER.metaattribute") query = qgep_session.query(QGEP.hazard_source) if filtered: - query = query.join(QGEP.connection_object, QGEP.wastewater_networkelement).filter( + query = query.join(QGEP.connection_object, QGEP.hazard_source.fk_connection_object == QGEP.connection_object.obj_id,).join(QGEP.wastewater_networkelement).filter( QGEP.wastewater_networkelement.obj_id.in_(subset_ids) ) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.hazard_source From 4c7d7850139c272e59ee23bd5d4747f54de75d57 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 17:03:58 +0000 Subject: [PATCH 074/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepdss/export.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 4786cc02..7d4471d4 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -2867,8 +2867,13 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.hazard_source -> ABWASSER.gefahrenquelle, ABWASSER.metaattribute") query = qgep_session.query(QGEP.hazard_source) if filtered: - query = query.join(QGEP.connection_object, QGEP.hazard_source.fk_connection_object == QGEP.connection_object.obj_id,).join(QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + query = ( + query.join( + QGEP.connection_object, + QGEP.hazard_source.fk_connection_object == QGEP.connection_object.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) # add sql statement to logger statement = query.statement From 2a71594cd38a54cf625085f26a1fa21d54e52c0f Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 18:10:41 +0100 Subject: [PATCH 075/127] adapt filter accident --- qgepqwat2ili/qgepdss/export.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 7d4471d4..9504f6cd 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -2911,8 +2911,12 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.accident -> ABWASSER.unfall, ABWASSER.metaattribute") query = qgep_session.query(QGEP.accident) if filtered: - query = query.join( - QGEP.hazard_source, QGEP.connection_object, QGEP.wastewater_networkelement + query = ( + query.join(QGEP.hazard_source) + .join(QGEP.connection_object, + QGEP.hazard_source.fk_connection_object == QGEP.connection_object.obj_id, + ) + .join(QGEP.wastewater_networkelement) ).filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) for row in query: From 662a7a3352047bcc31684deb38fb0a5b09e422d7 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 17:10:57 +0000 Subject: [PATCH 076/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepdss/export.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 9504f6cd..b9ddb2fb 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -2913,7 +2913,8 @@ def 
textpos_common(row, t_type, geojson_crs_def): if filtered: query = ( query.join(QGEP.hazard_source) - .join(QGEP.connection_object, + .join( + QGEP.connection_object, QGEP.hazard_source.fk_connection_object == QGEP.connection_object.obj_id, ) .join(QGEP.wastewater_networkelement) From 0e57fd02c791ee59ef10d5b0ab849d344be8483a Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 18:12:06 +0100 Subject: [PATCH 077/127] adapt filter substance --- qgepqwat2ili/qgepdss/export.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index b9ddb2fb..aede6896 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -2954,8 +2954,13 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.substance -> ABWASSER.stoff, ABWASSER.metaattribute") query = qgep_session.query(QGEP.substance) if filtered: - query = query.join( - QGEP.hazard_source, QGEP.connection_object, QGEP.wastewater_networkelement + query = ( + query.join(QGEP.hazard_source) + .join( + QGEP.connection_object, + QGEP.hazard_source.fk_connection_object == QGEP.connection_object.obj_id, + ) + .join(QGEP.wastewater_networkelement) ).filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) for row in query: From 2201fa6ba66f97a31801a5056bdf8846f4ac90bd Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 18:23:48 +0100 Subject: [PATCH 078/127] dss adapt filter measuring_point --- qgepqwat2ili/qgepdss/export.py | 31 ++++++++++++++++++------------- 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index aede6896..ec1f2585 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3082,9 +3082,12 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.measuring_point -> ABWASSER.messstelle, ABWASSER.metaattribute") query = qgep_session.query(QGEP.measuring_point) if filtered: - query1 = query.join( - QGEP.wastewater_structure, - QGEP.wastewater_networkelement, + query1 = ( + query.join( + QGEP.wastewater_structure, + QGEP.measuring_point.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) ) # needs to add QGEP.wastewater_structure as waste_water_treatment_plant is a subclass of organisation that has a relation to wastewater_structure and then wastewater_networkelement # variant1 for query2 @@ -3104,18 +3107,20 @@ def textpos_common(row, t_type, geojson_crs_def): # ) # query2 via waste_water_treatment_plant TODO : Fix Mapping - query2 = query.join( - QGEP.model_classes_tww_od.waste_water_treatment_plant, - QGEP.model_classes_tww_od.wwtp_structure, - QGEP.model_classes_tww_od.wastewater_networkelement, + query2 = ( + query.join( + QGEP.model_classes_tww_od.waste_water_treatment_plant, QGEP.measuring_point.fk_waste_water_treatment_plant == QGEP.waste_water_treatment_plant.obj_id,) + .join(QGEP.model_classes_tww_od.wwtp_structure) + .join(QGEP.model_classes_tww_od.wastewater_networkelement) ) # only until VSA-DSS Release 2015 - query3 = query.join( - QGEP.water_course_segment, - QGEP.river, - QGEP.sector_water_body, - QGEP.discharge_point, - QGEP.wastewater_networkelement, + query3 = ( + query.join( + QGEP.water_course_segment, waste_water_treatment_plant, QGEP.measuring_point.fk_water_course_segment == QGEP.waste_water_treatment_plant.obj_id,) + .join(QGEP.river) + .join(QGEP.sector_water_body) + 
.join(QGEP.discharge_point) + .join(QGEP.wastewater_networkelement) ) query = query.union(query1, query2, query3) # query = query.union(query1, query3) From db7cbc7faed03c4c6b7bb3af72f5a939b250add8 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 17:24:03 +0000 Subject: [PATCH 079/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepdss/export.py | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index ec1f2585..4e3ee0d4 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3082,13 +3082,10 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.measuring_point -> ABWASSER.messstelle, ABWASSER.metaattribute") query = qgep_session.query(QGEP.measuring_point) if filtered: - query1 = ( - query.join( - QGEP.wastewater_structure, - QGEP.measuring_point.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, - ) - .join(QGEP.wastewater_networkelement) - ) + query1 = query.join( + QGEP.wastewater_structure, + QGEP.measuring_point.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ).join(QGEP.wastewater_networkelement) # needs to add QGEP.wastewater_structure as waste_water_treatment_plant is a subclass of organisation that has a relation to wastewater_structure and then wastewater_networkelement # variant1 for query2 # query2=query.join( @@ -3109,14 +3106,21 @@ def textpos_common(row, t_type, geojson_crs_def): # query2 via waste_water_treatment_plant TODO : Fix Mapping query2 = ( query.join( - QGEP.model_classes_tww_od.waste_water_treatment_plant, QGEP.measuring_point.fk_waste_water_treatment_plant == QGEP.waste_water_treatment_plant.obj_id,) + QGEP.model_classes_tww_od.waste_water_treatment_plant, + QGEP.measuring_point.fk_waste_water_treatment_plant + == QGEP.waste_water_treatment_plant.obj_id, + ) .join(QGEP.model_classes_tww_od.wwtp_structure) .join(QGEP.model_classes_tww_od.wastewater_networkelement) ) # only until VSA-DSS Release 2015 query3 = ( query.join( - QGEP.water_course_segment, waste_water_treatment_plant, QGEP.measuring_point.fk_water_course_segment == QGEP.waste_water_treatment_plant.obj_id,) + QGEP.water_course_segment, + waste_water_treatment_plant, + QGEP.measuring_point.fk_water_course_segment + == QGEP.waste_water_treatment_plant.obj_id, + ) .join(QGEP.river) .join(QGEP.sector_water_body) .join(QGEP.discharge_point) From 390641eb64c94464ae7a713a589d35f06c2940b7 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 18:26:07 +0100 Subject: [PATCH 080/127] correction --- qgepqwat2ili/qgepdss/export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index ec1f2585..ee1101b0 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3116,7 +3116,7 @@ def textpos_common(row, t_type, geojson_crs_def): # only until VSA-DSS Release 2015 query3 = ( query.join( - QGEP.water_course_segment, waste_water_treatment_plant, QGEP.measuring_point.fk_water_course_segment == QGEP.waste_water_treatment_plant.obj_id,) + QGEP.water_course_segment, QGEP.measuring_point.fk_water_course_segment == QGEP.water_course_segment.obj_id,) .join(QGEP.river) .join(QGEP.sector_water_body) .join(QGEP.discharge_point) From 305a86452766e81c92f32a4306b743d0eef5dfbe Mon Sep 17 
00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 17:27:45 +0000 Subject: [PATCH 081/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepdss/export.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 419801b3..191af961 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3116,7 +3116,9 @@ def textpos_common(row, t_type, geojson_crs_def): # only until VSA-DSS Release 2015 query3 = ( query.join( - QGEP.water_course_segment, QGEP.measuring_point.fk_water_course_segment == QGEP.water_course_segment.obj_id,) + QGEP.water_course_segment, + QGEP.measuring_point.fk_water_course_segment == QGEP.water_course_segment.obj_id, + ) .join(QGEP.river) .join(QGEP.sector_water_body) .join(QGEP.discharge_point) From bf5678fb1b3a102b1a487a30298b8a80547f46c4 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 18:33:09 +0100 Subject: [PATCH 082/127] corrections --- qgepqwat2ili/qgepdss/export.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 191af961..b5f9d816 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -2919,6 +2919,9 @@ def textpos_common(row, t_type, geojson_crs_def): ) .join(QGEP.wastewater_networkelement) ).filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.accident @@ -3106,12 +3109,12 @@ def textpos_common(row, t_type, geojson_crs_def): # query2 via waste_water_treatment_plant TODO : Fix Mapping query2 = ( query.join( - QGEP.model_classes_tww_od.waste_water_treatment_plant, + QGEP.waste_water_treatment_plant, QGEP.measuring_point.fk_waste_water_treatment_plant == QGEP.waste_water_treatment_plant.obj_id, ) - .join(QGEP.model_classes_tww_od.wwtp_structure) - .join(QGEP.model_classes_tww_od.wastewater_networkelement) + .join(QGEP.wwtp_structure) + .join(QGEP.wastewater_networkelement) ) # only until VSA-DSS Release 2015 query3 = ( @@ -3127,6 +3130,9 @@ def textpos_common(row, t_type, geojson_crs_def): query = query.union(query1, query2, query3) # query = query.union(query1, query3) query = query.filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.measuring_point From fb21f27ac64a4d164a84e503686f356dbecb8f1c Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 19:31:31 +0100 Subject: [PATCH 083/127] adapt query3 --- qgepqwat2ili/qgepdss/export.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index b5f9d816..e05368a2 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3122,9 +3122,9 @@ def textpos_common(row, t_type, geojson_crs_def): QGEP.water_course_segment, QGEP.measuring_point.fk_water_course_segment == QGEP.water_course_segment.obj_id, ) - .join(QGEP.river) - .join(QGEP.sector_water_body) - .join(QGEP.discharge_point) + .join(QGEP.river, QGEP.water_course_segment.fk_water_course == QGEP.river.obj_id,) + .join(QGEP.sector_water_body, 
QGEP.sector_water_body.fk_surface_water_bodies == QGEP.sector_water_body.obj_id,) + .join(QGEP.discharge_point, QGEP.measuring_point.fk_sector_water_body == QGEP.discharge_point.obj_id,) .join(QGEP.wastewater_networkelement) ) query = query.union(query1, query2, query3) From 40bfe84f623ec42a1b3436bf2da66144c4e7a536 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 18:33:21 +0000 Subject: [PATCH 084/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepdss/export.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index e05368a2..0473f0ac 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3122,9 +3122,18 @@ def textpos_common(row, t_type, geojson_crs_def): QGEP.water_course_segment, QGEP.measuring_point.fk_water_course_segment == QGEP.water_course_segment.obj_id, ) - .join(QGEP.river, QGEP.water_course_segment.fk_water_course == QGEP.river.obj_id,) - .join(QGEP.sector_water_body, QGEP.sector_water_body.fk_surface_water_bodies == QGEP.sector_water_body.obj_id,) - .join(QGEP.discharge_point, QGEP.measuring_point.fk_sector_water_body == QGEP.discharge_point.obj_id,) + .join( + QGEP.river, + QGEP.water_course_segment.fk_water_course == QGEP.river.obj_id, + ) + .join( + QGEP.sector_water_body, + QGEP.sector_water_body.fk_surface_water_bodies == QGEP.sector_water_body.obj_id, + ) + .join( + QGEP.discharge_point, + QGEP.measuring_point.fk_sector_water_body == QGEP.discharge_point.obj_id, + ) .join(QGEP.wastewater_networkelement) ) query = query.union(query1, query2, query3) From 18fa3cee16ac05b47b8df4d8653d97347d831c49 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 19:38:17 +0100 Subject: [PATCH 085/127] correction --- qgepqwat2ili/qgepdss/export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index e05368a2..45b08ead 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3122,7 +3122,7 @@ def textpos_common(row, t_type, geojson_crs_def): QGEP.water_course_segment, QGEP.measuring_point.fk_water_course_segment == QGEP.water_course_segment.obj_id, ) - .join(QGEP.river, QGEP.water_course_segment.fk_water_course == QGEP.river.obj_id,) + .join(QGEP.river, QGEP.water_course_segment.fk_watercourse == QGEP.river.obj_id,) .join(QGEP.sector_water_body, QGEP.sector_water_body.fk_surface_water_bodies == QGEP.sector_water_body.obj_id,) .join(QGEP.discharge_point, QGEP.measuring_point.fk_sector_water_body == QGEP.discharge_point.obj_id,) .join(QGEP.wastewater_networkelement) From 3d1554665a67e21a3ad3d6da56d44cf73d3e6c9f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 18:40:31 +0000 Subject: [PATCH 086/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepdss/export.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index ae7a89f2..33feae09 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3122,10 +3122,18 @@ def textpos_common(row, t_type, geojson_crs_def): QGEP.water_course_segment, 
QGEP.measuring_point.fk_water_course_segment == QGEP.water_course_segment.obj_id, ) - .join(QGEP.river, QGEP.water_course_segment.fk_watercourse == QGEP.river.obj_id,) - .join(QGEP.sector_water_body, QGEP.sector_water_body.fk_surface_water_bodies == QGEP.sector_water_body.obj_id,) - .join(QGEP.discharge_point, QGEP.measuring_point.fk_sector_water_body == QGEP.discharge_point.obj_id,) - + .join( + QGEP.river, + QGEP.water_course_segment.fk_watercourse == QGEP.river.obj_id, + ) + .join( + QGEP.sector_water_body, + QGEP.sector_water_body.fk_surface_water_bodies == QGEP.sector_water_body.obj_id, + ) + .join( + QGEP.discharge_point, + QGEP.measuring_point.fk_sector_water_body == QGEP.discharge_point.obj_id, + ) .join(QGEP.wastewater_networkelement) ) query = query.union(query1, query2, query3) From 9ca04ff29c21f6c292d8332a64e319df7892443f Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 19:51:22 +0100 Subject: [PATCH 087/127] adaptions --- qgepqwat2ili/qgepdss/export.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 33feae09..e8d45ce1 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3124,7 +3124,7 @@ def textpos_common(row, t_type, geojson_crs_def): ) .join( QGEP.river, - QGEP.water_course_segment.fk_watercourse == QGEP.river.obj_id, + QGEP.water_course_segment.fk_surface_water_bodies == QGEP.river.obj_id, ) .join( QGEP.sector_water_body, @@ -3132,7 +3132,7 @@ def textpos_common(row, t_type, geojson_crs_def): ) .join( QGEP.discharge_point, - QGEP.measuring_point.fk_sector_water_body == QGEP.discharge_point.obj_id, + QGEP.discharge_point.fk_sector_water_body == QGEP.sector_water_body.obj_id, ) .join(QGEP.wastewater_networkelement) ) From 79b81a3fcf1e837a5f64d67aa376f57ca783f092 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 20:02:39 +0100 Subject: [PATCH 088/127] comment and correction --- qgepqwat2ili/qgepdss/export.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index e8d45ce1..c6ef3681 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3124,7 +3124,8 @@ def textpos_common(row, t_type, geojson_crs_def): ) .join( QGEP.river, - QGEP.water_course_segment.fk_surface_water_bodies == QGEP.river.obj_id, + # Fehler im Datenmodell fk_water_course should be name fk_surface_water_bodies (resp. fk_surface_water_body - class should be renamed to single) + QGEP.water_course_segment.fk_water_course == QGEP.river.obj_id, ) .join( QGEP.sector_water_body, From 3686c3fc402706dcb7a39562495205ec5e011183 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 20:07:42 +0100 Subject: [PATCH 089/127] correction --- qgepqwat2ili/qgepdss/export.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index c6ef3681..a16e04af 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3124,8 +3124,8 @@ def textpos_common(row, t_type, geojson_crs_def): ) .join( QGEP.river, - # Fehler im Datenmodell fk_water_course should be name fk_surface_water_bodies (resp. fk_surface_water_body - class should be renamed to single) - QGEP.water_course_segment.fk_water_course == QGEP.river.obj_id, + # Fehler im Datenmodell fk_watercourse should be name fk_surface_water_bodies (resp. 
fk_surface_water_body - class should be renamed to single) + QGEP.water_course_segment.fk_watercourse == QGEP.river.obj_id, ) .join( QGEP.sector_water_body, From 4502a99bcccef23b0eb66a572b9ac3932956a161 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 20:16:39 +0100 Subject: [PATCH 090/127] adapt query2 via waste_water_treatment_plant(subclass or organisation) --- qgepqwat2ili/qgepdss/export.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index a16e04af..8202d2d0 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3106,14 +3106,15 @@ def textpos_common(row, t_type, geojson_crs_def): # QGEP.wastewater_networkelement, # ) - # query2 via waste_water_treatment_plant TODO : Fix Mapping + # query2 via waste_water_treatment_plant Release 2015 where waste_water_treatment_plant is subclass of organisation query2 = ( query.join( QGEP.waste_water_treatment_plant, QGEP.measuring_point.fk_waste_water_treatment_plant == QGEP.waste_water_treatment_plant.obj_id, ) - .join(QGEP.wwtp_structure) + .join(QGEP.wastewater_structure, QGEP.wastewater_structure.fk_owner + == QGEP.waste_water_treatment_plant.obj_id,) .join(QGEP.wastewater_networkelement) ) # only until VSA-DSS Release 2015 From 2fffc331fb79d3c9a5036b2ec5719d3622701a72 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 19:16:55 +0000 Subject: [PATCH 091/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepdss/export.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 8202d2d0..d644da0c 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3106,15 +3106,17 @@ def textpos_common(row, t_type, geojson_crs_def): # QGEP.wastewater_networkelement, # ) - # query2 via waste_water_treatment_plant Release 2015 where waste_water_treatment_plant is subclass of organisation + # query2 via waste_water_treatment_plant Release 2015 where waste_water_treatment_plant is subclass of organisation query2 = ( query.join( QGEP.waste_water_treatment_plant, QGEP.measuring_point.fk_waste_water_treatment_plant == QGEP.waste_water_treatment_plant.obj_id, ) - .join(QGEP.wastewater_structure, QGEP.wastewater_structure.fk_owner - == QGEP.waste_water_treatment_plant.obj_id,) + .join( + QGEP.wastewater_structure, + QGEP.wastewater_structure.fk_owner == QGEP.waste_water_treatment_plant.obj_id, + ) .join(QGEP.wastewater_networkelement) ) # only until VSA-DSS Release 2015 From 4d0bf86dc8db8e4a6a347e064a1ec384d05322d6 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 20:29:20 +0100 Subject: [PATCH 092/127] adapt dss measuring_device filter --- qgepqwat2ili/qgepdss/export.py | 70 +++++++++++++++++++++++++++++----- 1 file changed, 60 insertions(+), 10 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index d644da0c..413c79d3 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3186,16 +3186,66 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.measuring_device -> ABWASSER.messgeraet, ABWASSER.metaattribute") query = qgep_session.query(QGEP.measuring_device) if filtered: - query = query.join( - QGEP.measuring_point, QGEP.wastewater_structure, QGEP.wastewater_networkelement - 
).filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) - # or does not work with this - currently do not support - # QGEP.wastewater_networkelement, - # or_( - # (QGEP.measuring_point, QGEP.waste_water_treatment_plant, QGEP.wastewater_networkelement), - # (QGEP.measuring_point, QGEP.wastewater_structure, QGEP.wastewater_networkelement), - # (QGEP.measuring_point, QGEP.water_course_segment, QGEP.river, QGEP.sector_water_body, QGEP.discharge_point, QGEP.wastewater_networkelement), - # ) + # query = query.join( + # QGEP.measuring_point, QGEP.wastewater_structure, QGEP.wastewater_networkelement + # ).filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) + + query1 = ( + query.join( + QGEP.measuring_point, QGEP.measuring_device.fk_measuring_point == QGEP.measuring_point.obj_id, + ) + .join( + QGEP.wastewater_structure, + QGEP.measuring_point.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ). + join(QGEP.wastewater_networkelement) + ) + # query2 via waste_water_treatment_plant Release 2015 where waste_water_treatment_plant is subclass of organisation + query2 = ( + query.join( + QGEP.measuring_point, QGEP.measuring_device.fk_measuring_point == QGEP.measuring_point.obj_id, + ) + .join( + QGEP.waste_water_treatment_plant, + QGEP.measuring_point.fk_waste_water_treatment_plant + == QGEP.waste_water_treatment_plant.obj_id, + ) + .join( + QGEP.wastewater_structure, + QGEP.wastewater_structure.fk_owner == QGEP.waste_water_treatment_plant.obj_id, + ) + .join(QGEP.wastewater_networkelement) + ) + # only until VSA-DSS Release 2015 + query3 = ( + query.join( + QGEP.measuring_point, QGEP.measuring_device.fk_measuring_point == QGEP.measuring_point.obj_id, + ) + .join( + QGEP.water_course_segment, + QGEP.measuring_point.fk_water_course_segment == QGEP.water_course_segment.obj_id, + ) + .join( + QGEP.river, + # Fehler im Datenmodell fk_watercourse should be name fk_surface_water_bodies (resp. 
fk_surface_water_body - class should be renamed to single) + QGEP.water_course_segment.fk_watercourse == QGEP.river.obj_id, + ) + .join( + QGEP.sector_water_body, + QGEP.sector_water_body.fk_surface_water_bodies == QGEP.sector_water_body.obj_id, + ) + .join( + QGEP.discharge_point, + QGEP.discharge_point.fk_sector_water_body == QGEP.sector_water_body.obj_id, + ) + .join(QGEP.wastewater_networkelement) + ) + query = query.union(query1, query2, query3) + # query = query.union(query1, query3) + query = query.filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: From b2c3abba314dd7560ea016d12cc98282441cfdf9 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 20:31:51 +0100 Subject: [PATCH 093/127] intend --- qgepqwat2ili/qgepdss/export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 413c79d3..4eedf4c8 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3190,7 +3190,7 @@ def textpos_common(row, t_type, geojson_crs_def): # QGEP.measuring_point, QGEP.wastewater_structure, QGEP.wastewater_networkelement # ).filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) - query1 = ( + query1 = ( query.join( QGEP.measuring_point, QGEP.measuring_device.fk_measuring_point == QGEP.measuring_point.obj_id, ) From 99e986be24e9f0e40c3426146a244b53b8d5d280 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 19:32:07 +0000 Subject: [PATCH 094/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepdss/export.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 4eedf4c8..7242dc3d 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3187,23 +3187,25 @@ def textpos_common(row, t_type, geojson_crs_def): query = qgep_session.query(QGEP.measuring_device) if filtered: # query = query.join( - # QGEP.measuring_point, QGEP.wastewater_structure, QGEP.wastewater_networkelement + # QGEP.measuring_point, QGEP.wastewater_structure, QGEP.wastewater_networkelement # ).filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) query1 = ( query.join( - QGEP.measuring_point, QGEP.measuring_device.fk_measuring_point == QGEP.measuring_point.obj_id, + QGEP.measuring_point, + QGEP.measuring_device.fk_measuring_point == QGEP.measuring_point.obj_id, ) .join( QGEP.wastewater_structure, QGEP.measuring_point.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, - ). 
- join(QGEP.wastewater_networkelement) + ) + .join(QGEP.wastewater_networkelement) ) # query2 via waste_water_treatment_plant Release 2015 where waste_water_treatment_plant is subclass of organisation query2 = ( query.join( - QGEP.measuring_point, QGEP.measuring_device.fk_measuring_point == QGEP.measuring_point.obj_id, + QGEP.measuring_point, + QGEP.measuring_device.fk_measuring_point == QGEP.measuring_point.obj_id, ) .join( QGEP.waste_water_treatment_plant, @@ -3219,7 +3221,8 @@ def textpos_common(row, t_type, geojson_crs_def): # only until VSA-DSS Release 2015 query3 = ( query.join( - QGEP.measuring_point, QGEP.measuring_device.fk_measuring_point == QGEP.measuring_point.obj_id, + QGEP.measuring_point, + QGEP.measuring_device.fk_measuring_point == QGEP.measuring_point.obj_id, ) .join( QGEP.water_course_segment, From 64410504239bb806a9520af7aacf02b34e285b9f Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 20:35:38 +0100 Subject: [PATCH 095/127] adapt filter measuring_series --- qgepqwat2ili/qgepdss/export.py | 68 +++++++++++++++++++++++++++++----- 1 file changed, 59 insertions(+), 9 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 4eedf4c8..f55258f2 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3281,15 +3281,65 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.measurement_series -> ABWASSER.messreihe, ABWASSER.metaattribute") query = qgep_session.query(QGEP.measurement_series) if filtered: - query = query.join( - QGEP.measuring_point, QGEP.wastewater_structure, QGEP.wastewater_networkelement - ).filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) - # QGEP.wastewater_networkelement, - # or_( - # (QGEP.measuring_point, QGEP.waste_water_treatment_plant, QGEP.wastewater_networkelement), - # (QGEP.measuring_point, QGEP.wastewater_structure, QGEP.wastewater_networkelement), - # (QGEP.measuring_point, QGEP.water_course_segment, QGEP.river, QGEP.sector_water_body, QGEP.discharge_point, QGEP.wastewater_networkelement), - # ) + # query = query.join( + # QGEP.measuring_point, QGEP.wastewater_structure, QGEP.wastewater_networkelement + # ).filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) + query1 = ( + query.join( + QGEP.measuring_point, QGEP.measuring_device.fk_measuring_point == QGEP.measuring_point.obj_id, + ) + .join( + QGEP.wastewater_structure, + QGEP.measuring_point.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ). + join(QGEP.wastewater_networkelement) + ) + # query2 via waste_water_treatment_plant Release 2015 where waste_water_treatment_plant is subclass of organisation + query2 = ( + query.join( + QGEP.measuring_point, QGEP.measuring_device.fk_measuring_point == QGEP.measuring_point.obj_id, + ) + .join( + QGEP.waste_water_treatment_plant, + QGEP.measuring_point.fk_waste_water_treatment_plant + == QGEP.waste_water_treatment_plant.obj_id, + ) + .join( + QGEP.wastewater_structure, + QGEP.wastewater_structure.fk_owner == QGEP.waste_water_treatment_plant.obj_id, + ) + .join(QGEP.wastewater_networkelement) + ) + # only until VSA-DSS Release 2015 + query3 = ( + query.join( + QGEP.measuring_point, QGEP.measuring_device.fk_measuring_point == QGEP.measuring_point.obj_id, + ) + .join( + QGEP.water_course_segment, + QGEP.measuring_point.fk_water_course_segment == QGEP.water_course_segment.obj_id, + ) + .join( + QGEP.river, + # Fehler im Datenmodell fk_watercourse should be name fk_surface_water_bodies (resp. 
fk_surface_water_body - class should be renamed to single) + QGEP.water_course_segment.fk_watercourse == QGEP.river.obj_id, + ) + .join( + QGEP.sector_water_body, + QGEP.sector_water_body.fk_surface_water_bodies == QGEP.sector_water_body.obj_id, + ) + .join( + QGEP.discharge_point, + QGEP.discharge_point.fk_sector_water_body == QGEP.sector_water_body.obj_id, + ) + .join(QGEP.wastewater_networkelement) + ) + query = query.union(query1, query2, query3) + # query = query.union(query1, query3) + query = query.filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.measurement_series From f91e8a83f324caa563ba2dfd583ebff9b1e3308c Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 19:36:04 +0000 Subject: [PATCH 096/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepdss/export.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index c23a5f4f..b94f7ef1 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3285,22 +3285,24 @@ def textpos_common(row, t_type, geojson_crs_def): query = qgep_session.query(QGEP.measurement_series) if filtered: # query = query.join( - # QGEP.measuring_point, QGEP.wastewater_structure, QGEP.wastewater_networkelement + # QGEP.measuring_point, QGEP.wastewater_structure, QGEP.wastewater_networkelement # ).filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) query1 = ( query.join( - QGEP.measuring_point, QGEP.measuring_device.fk_measuring_point == QGEP.measuring_point.obj_id, + QGEP.measuring_point, + QGEP.measuring_device.fk_measuring_point == QGEP.measuring_point.obj_id, ) .join( QGEP.wastewater_structure, QGEP.measuring_point.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, - ). 
- join(QGEP.wastewater_networkelement) + ) + .join(QGEP.wastewater_networkelement) ) # query2 via waste_water_treatment_plant Release 2015 where waste_water_treatment_plant is subclass of organisation query2 = ( query.join( - QGEP.measuring_point, QGEP.measuring_device.fk_measuring_point == QGEP.measuring_point.obj_id, + QGEP.measuring_point, + QGEP.measuring_device.fk_measuring_point == QGEP.measuring_point.obj_id, ) .join( QGEP.waste_water_treatment_plant, @@ -3316,7 +3318,8 @@ def textpos_common(row, t_type, geojson_crs_def): # only until VSA-DSS Release 2015 query3 = ( query.join( - QGEP.measuring_point, QGEP.measuring_device.fk_measuring_point == QGEP.measuring_point.obj_id, + QGEP.measuring_point, + QGEP.measuring_device.fk_measuring_point == QGEP.measuring_point.obj_id, ) .join( QGEP.water_course_segment, From 51b4af653175b7c8e7a7c5df4064f5cdc5ea778f Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 20:41:14 +0100 Subject: [PATCH 097/127] correction --- qgepqwat2ili/qgepdss/export.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index b94f7ef1..1007408e 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3290,7 +3290,7 @@ def textpos_common(row, t_type, geojson_crs_def): query1 = ( query.join( QGEP.measuring_point, - QGEP.measuring_device.fk_measuring_point == QGEP.measuring_point.obj_id, + QGEP.measurement_series.fk_measuring_point == QGEP.measuring_point.obj_id, ) .join( QGEP.wastewater_structure, @@ -3302,7 +3302,7 @@ def textpos_common(row, t_type, geojson_crs_def): query2 = ( query.join( QGEP.measuring_point, - QGEP.measuring_device.fk_measuring_point == QGEP.measuring_point.obj_id, + QGEP.measurement_series.fk_measuring_point == QGEP.measuring_point.obj_id, ) .join( QGEP.waste_water_treatment_plant, @@ -3319,7 +3319,7 @@ def textpos_common(row, t_type, geojson_crs_def): query3 = ( query.join( QGEP.measuring_point, - QGEP.measuring_device.fk_measuring_point == QGEP.measuring_point.obj_id, + QGEP.measurement_series.fk_measuring_point == QGEP.measuring_point.obj_id, ) .join( QGEP.water_course_segment, From d139e0f39cf5e3d62cbd171c5809ee7aaa937bec Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 20:46:34 +0100 Subject: [PATCH 098/127] adapt filter measurement_result --- qgepqwat2ili/qgepdss/export.py | 93 ++++++++++++++++++++++++++++------ 1 file changed, 78 insertions(+), 15 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 1007408e..1bd1a8d2 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3383,21 +3383,84 @@ def textpos_common(row, t_type, geojson_crs_def): ) query = qgep_session.query(QGEP.measurement_result) if filtered: - query = query.join( - QGEP.measurement_series, - QGEP.measuring_point, - QGEP.wastewater_structure, - QGEP.wastewater_networkelement, - ).filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) - # or_( - # (QGEP.measurement_series, QGEP.measuring_point, QGEP.waste_water_treatment_plant, QGEP.wastewater_networkelement), - # (QGEP.measurement_series, QGEP.measuring_point, QGEP.wastewater_structure, QGEP.wastewater_networkelement), - # (QGEP.measurement_series, QGEP.measuring_point, QGEP.water_course_segment, QGEP.river, QGEP.sector_water_body, QGEP.discharge_point, QGEP.wastewater_networkelement), - - # (QGEP.measuring_device, QGEP.measuring_point, QGEP.waste_water_treatment_plant, QGEP.wastewater_networkelement), - # 
(QGEP.measuring_device, QGEP.measuring_point, QGEP.wastewater_structure, QGEP.wastewater_networkelement), - # (QGEP.measuring_device, QGEP.measuring_point, QGEP.water_course_segment, QGEP.river, QGEP.sector_water_body, QGEP.discharge_point, QGEP.wastewater_networkelement), - # ) + # query = query.join( + # QGEP.measurement_series, + # QGEP.measuring_point, + # QGEP.wastewater_structure, + # QGEP.wastewater_networkelement, + # ).filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) + query1 = ( + query.join( + QGEP.measurement_series, + QGEP.measurement_result.measurement_series == QGEP.measurement_series.obj_id, + ) + .join( + QGEP.measuring_point, + QGEP.measurement_series.fk_measuring_point == QGEP.measuring_point.obj_id, + ) + .join( + QGEP.wastewater_structure, + QGEP.measuring_point.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + ) + # query2 via waste_water_treatment_plant Release 2015 where waste_water_treatment_plant is subclass of organisation + query2 = ( + query.join( + QGEP.measurement_series, + QGEP.measurement_result.measurement_series == QGEP.measurement_series.obj_id, + ) + .join( + QGEP.measuring_point, + QGEP.measurement_series.fk_measuring_point == QGEP.measuring_point.obj_id, + ) + .join( + QGEP.waste_water_treatment_plant, + QGEP.measuring_point.fk_waste_water_treatment_plant + == QGEP.waste_water_treatment_plant.obj_id, + ) + .join( + QGEP.wastewater_structure, + QGEP.wastewater_structure.fk_owner == QGEP.waste_water_treatment_plant.obj_id, + ) + .join(QGEP.wastewater_networkelement) + ) + # only until VSA-DSS Release 2015 + query3 = ( + query.join( + QGEP.measurement_series, + QGEP.measurement_result.measurement_series == QGEP.measurement_series.obj_id, + ) + .join( + QGEP.measuring_point, + QGEP.measurement_series.fk_measuring_point == QGEP.measuring_point.obj_id, + ) + .join( + QGEP.water_course_segment, + QGEP.measuring_point.fk_water_course_segment == QGEP.water_course_segment.obj_id, + ) + .join( + QGEP.river, + # Fehler im Datenmodell fk_watercourse should be name fk_surface_water_bodies (resp. 
fk_surface_water_body - class should be renamed to single) + QGEP.water_course_segment.fk_watercourse == QGEP.river.obj_id, + ) + .join( + QGEP.sector_water_body, + QGEP.sector_water_body.fk_surface_water_bodies == QGEP.sector_water_body.obj_id, + ) + .join( + QGEP.discharge_point, + QGEP.discharge_point.fk_sector_water_body == QGEP.sector_water_body.obj_id, + ) + .join(QGEP.wastewater_networkelement) + ) + # query4 not implemented via measuring_device + query = query.union(query1, query2, query3) + # query = query.union(query1, query3) + query = query.filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: From 5d80af5a02eb461682ad68a4f17e0a3a87d55830 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 19:46:50 +0000 Subject: [PATCH 099/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepdss/export.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 1bd1a8d2..aaadbc7f 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3384,10 +3384,10 @@ def textpos_common(row, t_type, geojson_crs_def): query = qgep_session.query(QGEP.measurement_result) if filtered: # query = query.join( - # QGEP.measurement_series, - # QGEP.measuring_point, - # QGEP.wastewater_structure, - # QGEP.wastewater_networkelement, + # QGEP.measurement_series, + # QGEP.measuring_point, + # QGEP.wastewater_structure, + # QGEP.wastewater_networkelement, # ).filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) query1 = ( query.join( From 8c8c487fcc9e7b0a300e94d3bee64fd2dee7ff0a Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 20:49:52 +0100 Subject: [PATCH 100/127] missing fk prefix --- qgepqwat2ili/qgepdss/export.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index aaadbc7f..8cc9eb86 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3392,7 +3392,7 @@ def textpos_common(row, t_type, geojson_crs_def): query1 = ( query.join( QGEP.measurement_series, - QGEP.measurement_result.measurement_series == QGEP.measurement_series.obj_id, + QGEP.measurement_result.fk_measurement_series == QGEP.measurement_series.obj_id, ) .join( QGEP.measuring_point, @@ -3408,7 +3408,7 @@ def textpos_common(row, t_type, geojson_crs_def): query2 = ( query.join( QGEP.measurement_series, - QGEP.measurement_result.measurement_series == QGEP.measurement_series.obj_id, + QGEP.measurement_result.fk_measurement_series == QGEP.measurement_series.obj_id, ) .join( QGEP.measuring_point, @@ -3429,7 +3429,7 @@ def textpos_common(row, t_type, geojson_crs_def): query3 = ( query.join( QGEP.measurement_series, - QGEP.measurement_result.measurement_series == QGEP.measurement_series.obj_id, + QGEP.measurement_result.fk_measurement_series == QGEP.measurement_series.obj_id, ) .join( QGEP.measuring_point, From 4da80ea5bbaab4aded81d31208997ff592551ff8 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 20:54:47 +0100 Subject: [PATCH 101/127] filter adapted backflow_prevention, solids_retention, tank_cleaning , tank_emptying --- qgepqwat2ili/qgepdss/export.py | 60 +++++++++++++++++++++++++++++----- 1 file changed, 52 
insertions(+), 8 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 8cc9eb86..8002e768 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3755,9 +3755,20 @@ def textpos_common(row, t_type, geojson_crs_def): query = qgep_session.query(QGEP.backflow_prevention) # side fk_throttle_shut_off_unit and fk_overflow not considered in filter query - they are usually added only for log_cards and then the corresponding nodes exist anyway thru the direct relation. if filtered: - query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.backflow_prevention @@ -3798,9 +3809,20 @@ def textpos_common(row, t_type, geojson_crs_def): ) query = qgep_session.query(QGEP.solids_retention) if filtered: - query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.solids_retention @@ -3839,9 +3861,20 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.tank_cleaning -> ABWASSER.beckenreinigung, ABWASSER.metaattribute") query = qgep_session.query(QGEP.tank_cleaning) if filtered: - query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.tank_cleaning @@ -3881,9 +3914,20 @@ def textpos_common(row, t_type, geojson_crs_def): query = qgep_session.query(QGEP.tank_emptying) # side fk_throttle_shut_off_unit and fk_overflow not considered in filter query - they are usually added only for log_cards and then the corresponding nodes exist anyway thru the direct relation. 
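The hunks in this patch all make the same change: the old two-entity join is replaced by an explicit join that spells out the ON clause from structure_part to wastewater_structure, then joins wastewater_networkelement, filters on the subset ids and logs the generated SQL. A minimal sketch of that recurring shape, assuming the QGEP ORM classes used in these diffs (the helper name and its parameters are illustrative only, not part of the patch):

# Sketch only - mirrors the "if filtered:" blocks in this patch series.
# "subclass" stands for a structure_part subclass such as QGEP.tank_emptying,
# QGEP.cover or QGEP.benching.
def filter_structure_part_subclass(qgep_session, QGEP, subclass, subset_ids, logger):
    query = qgep_session.query(subclass)
    query = (
        query.join(
            QGEP.wastewater_structure,
            # explicit ON clause via the structure_part base class
            QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id,
        )
        .join(QGEP.wastewater_networkelement)
        .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids))
    )
    # log the generated SQL, as the patches do
    logger.debug(f" selection query = {query.statement}")
    return query

The same shape is applied to dryweather_downspout, access_aid, dryweather_flume, cover, benching and, in this patch, backflow_prevention, solids_retention, tank_cleaning and tank_emptying.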
if filtered: - query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # ) + query = ( + query.join( + QGEP.wastewater_structure, + QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + ) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) ) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.tank_emptying From 6eae2738dfe8a7d66ee449f51b91476a087f8b5e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 19:56:10 +0000 Subject: [PATCH 102/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepdss/export.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 8002e768..0f153496 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -3756,7 +3756,7 @@ def textpos_common(row, t_type, geojson_crs_def): # side fk_throttle_shut_off_unit and fk_overflow not considered in filter query - they are usually added only for log_cards and then the corresponding nodes exist anyway thru the direct relation. if filtered: # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) # ) query = ( query.join( @@ -3810,7 +3810,7 @@ def textpos_common(row, t_type, geojson_crs_def): query = qgep_session.query(QGEP.solids_retention) if filtered: # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) # ) query = ( query.join( @@ -3862,7 +3862,7 @@ def textpos_common(row, t_type, geojson_crs_def): query = qgep_session.query(QGEP.tank_cleaning) if filtered: # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) # ) query = ( query.join( @@ -3915,7 +3915,7 @@ def textpos_common(row, t_type, geojson_crs_def): # side fk_throttle_shut_off_unit and fk_overflow not considered in filter query - they are usually added only for log_cards and then the corresponding nodes exist anyway thru the direct relation. 
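The value of the repeated "statement = query.statement" additions only shows up when the plugin's logger actually emits DEBUG records (patch 069 above deliberately downgraded these messages from info to debug). A hedged snippet for surfacing them during a test run; the logger name is an assumption, not taken from the patches:

# Sketch only: raise the log level so the " selection query = ..." lines appear.
# "qgepqwat2ili" as logger name is an assumption; use the name configured in
# qgepqwat2ili/utils/various.py if it differs.
import logging

logging.basicConfig(level=logging.DEBUG)
logging.getLogger("qgepqwat2ili").setLevel(logging.DEBUG)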
if filtered: # query = query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement).filter( - # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) + # QGEP.wastewater_networkelement.obj_id.in_(subset_ids) # ) query = ( query.join( From e3e82c25adcd8ae382c45eeb96e1f4bd00069e6d Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 21:02:06 +0100 Subject: [PATCH 103/127] adapt filter re_maintenance_event_wws --- qgepqwat2ili/qgepdss/export.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 8002e768..c1a1c007 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -4056,11 +4056,15 @@ def textpos_common(row, t_type, geojson_crs_def): "Exporting QGEP.maintenance_event -> ABWASSER.maintenance_event, ABWASSER.metaattribute" ) query = qgep_session.query(QGEP.maintenance_event) - # to check if join is correct like this n:m re_maintenance_event_wastewater_structure + # explicit join for n:m re_maintenance_event_wastewater_structure if filtered: - query = query.join( - QGEP.re_maintenance_event_wastewater_structure, - QGEP.wastewater_structure, + query = ( + query.join( + QGEP.re_maintenance_event_wastewater_structure, re_maintenance_event_wastewater_structure.fk_maintenance_event == fk_maintenance_event.obj_id, + ) + .join( + QGEP.wastewater_structure, re_maintenance_event_wastewater_structure.fk_wastewater_structure == wastewater_structure.obj_id, + ) QGEP.wastewater_networkelement, ).filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) for row in query: From 29603843eedc25db72fbcb729a4b6298171415d6 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 21:04:38 +0100 Subject: [PATCH 104/127] corrections --- qgepqwat2ili/qgepdss/export.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 3c035629..445eeeaf 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -4065,8 +4065,12 @@ def textpos_common(row, t_type, geojson_crs_def): .join( QGEP.wastewater_structure, re_maintenance_event_wastewater_structure.fk_wastewater_structure == wastewater_structure.obj_id, ) - QGEP.wastewater_networkelement, - ).filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) + .join(QGEP.wastewater_networkelement) + .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) + ) + # add sql statement to logger + statement = query.statement + logger.debug(f" selection query = {statement}") for row in query: # AVAILABLE FIELDS IN QGEP.maintenance_event From f8342157869bc85d866f9c43154c7c9f29fbb34f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 20:05:06 +0000 Subject: [PATCH 105/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepdss/export.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 445eeeaf..145f19c2 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -4060,10 +4060,14 @@ def textpos_common(row, t_type, geojson_crs_def): if filtered: query = ( query.join( - QGEP.re_maintenance_event_wastewater_structure, re_maintenance_event_wastewater_structure.fk_maintenance_event == fk_maintenance_event.obj_id, + QGEP.re_maintenance_event_wastewater_structure, + 
re_maintenance_event_wastewater_structure.fk_maintenance_event + == fk_maintenance_event.obj_id, ) .join( - QGEP.wastewater_structure, re_maintenance_event_wastewater_structure.fk_wastewater_structure == wastewater_structure.obj_id, + QGEP.wastewater_structure, + re_maintenance_event_wastewater_structure.fk_wastewater_structure + == wastewater_structure.obj_id, ) .join(QGEP.wastewater_networkelement) .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) From e81a2fa1b6d1d07b62a4f1c9fbb098c9499d7e6f Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 21:07:20 +0100 Subject: [PATCH 106/127] missing QGEP. --- qgepqwat2ili/qgepdss/export.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 445eeeaf..79d2d46b 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -4060,10 +4060,10 @@ def textpos_common(row, t_type, geojson_crs_def): if filtered: query = ( query.join( - QGEP.re_maintenance_event_wastewater_structure, re_maintenance_event_wastewater_structure.fk_maintenance_event == fk_maintenance_event.obj_id, + QGEP.re_maintenance_event_wastewater_structure, QGEP.re_maintenance_event_wastewater_structure.fk_maintenance_event == QGEP.maintenance_event.obj_id, ) .join( - QGEP.wastewater_structure, re_maintenance_event_wastewater_structure.fk_wastewater_structure == wastewater_structure.obj_id, + QGEP.wastewater_structure, QGEP.re_maintenance_event_wastewater_structure.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, ) .join(QGEP.wastewater_networkelement) .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) From 3dbeae7993c314daa4503865e1d14c36e5290424 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 20:09:07 +0000 Subject: [PATCH 107/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepdss/export.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 79d2d46b..ec6eed5f 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -4060,10 +4060,14 @@ def textpos_common(row, t_type, geojson_crs_def): if filtered: query = ( query.join( - QGEP.re_maintenance_event_wastewater_structure, QGEP.re_maintenance_event_wastewater_structure.fk_maintenance_event == QGEP.maintenance_event.obj_id, + QGEP.re_maintenance_event_wastewater_structure, + QGEP.re_maintenance_event_wastewater_structure.fk_maintenance_event + == QGEP.maintenance_event.obj_id, ) .join( - QGEP.wastewater_structure, QGEP.re_maintenance_event_wastewater_structure.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, + QGEP.wastewater_structure, + QGEP.re_maintenance_event_wastewater_structure.fk_wastewater_structure + == QGEP.wastewater_structure.obj_id, ) .join(QGEP.wastewater_networkelement) .filter(QGEP.wastewater_networkelement.obj_id.in_(subset_ids)) From 2d552aa21932271858b4cdcf19eb070e7643550d Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 21:40:10 +0100 Subject: [PATCH 108/127] add check_fk_in_subsetid to dss and kek --- qgepqwat2ili/qgep/export.py | 5 ++++- qgepqwat2ili/qgepdss/export.py | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/qgep/export.py b/qgepqwat2ili/qgep/export.py index 8e01d749..29183311 100644 --- a/qgepqwat2ili/qgep/export.py +++ 
b/qgepqwat2ili/qgep/export.py @@ -259,11 +259,14 @@ def wastewater_networkelement_common(row): """ return { - "abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), + # "abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), # 6.11.2024 Besides wn_id and re_id we also need ws_obj_ids in a separate subset - call it ws_subset_id # "abwasserbauwerkref": check_fk_in_subsetid( # subset_ids, row.fk_wastewater_structure__REL # ), + "abwasserbauwerkref": check_fk_in_subsetid( + subset_ids, row.fk_wastewater_structure__REL + ), "bemerkung": truncate(emptystr_to_null(row.remark), 80), "bezeichnung": null_to_emptystr(row.identifier), } diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 79d2d46b..73df7fb7 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -280,7 +280,10 @@ def wastewater_networkelement_common(row): Returns common attributes for wastewater_networkelement """ return { - "abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), + # "abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), + "abwasserbauwerkref": check_fk_in_subsetid( + subset_ids, row.fk_wastewater_structure__REL + ), "bemerkung": truncate(emptystr_to_null(row.remark), 80), "bezeichnung": null_to_emptystr(row.identifier), } From adae47144df62b3271ab4c50509f6dbab1715ef5 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 19 Nov 2024 21:48:27 +0100 Subject: [PATCH 109/127] adapt abwasserbauwerkref with check_fk_in_subset --- qgepqwat2ili/qgep/export.py | 1 + qgepqwat2ili/qgepdss/export.py | 16 +++++++++++++--- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/qgepqwat2ili/qgep/export.py b/qgepqwat2ili/qgep/export.py index 29183311..473eef8d 100644 --- a/qgepqwat2ili/qgep/export.py +++ b/qgepqwat2ili/qgep/export.py @@ -276,6 +276,7 @@ def structure_part_common(row): Returns common attributes for structure_part """ return { + # abwasserbauwerkref is MANDATORY, so it cannot be set to NULL "abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), "bemerkung": truncate(emptystr_to_null(row.remark), 80), "bezeichnung": null_to_emptystr(row.identifier), diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 28992b94..839a8b63 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -293,6 +293,7 @@ def structure_part_common(row): Returns common attributes for structure_part """ return { + # abwasserbauwerkref is MANDATORY, so it cannot be set to NULL "abwasserbauwerkref": get_tid(row.fk_wastewater_structure__REL), "bemerkung": truncate(emptystr_to_null(row.remark), 80), "bezeichnung": null_to_emptystr(row.identifier), @@ -2212,7 +2213,10 @@ def textpos_common(row, t_type, geojson_crs_def): # --- sia405_baseclass --- **base_common(row, "mechanischevorreinigung"), # --- mechanischevorreinigung --- - abwasserbauwerkref=get_tid(row.fk_wastewater_structure__REL), + # abwasserbauwerkref=get_tid(row.fk_wastewater_structure__REL), + abwasserbauwerkref=check_fk_in_subsetid( + subset_ids, row.fk_wastewater_structure__REL + ), art=get_vl(row.kind__REL), bemerkung=truncate(emptystr_to_null(row.remark), 80), bezeichnung=null_to_emptystr(row.identifier), @@ -3167,7 +3171,10 @@ def textpos_common(row, t_type, geojson_crs_def): # --- sia405_baseclass --- **base_common(row, "messstelle"), # --- messstelle --- - abwasserbauwerkref=get_tid(row.fk_wastewater_structure__REL), + # abwasserbauwerkref=get_tid(row.fk_wastewater_structure__REL), + abwasserbauwerkref=check_fk_in_subsetid( + 
subset_ids, row.fk_wastewater_structure__REL + ), abwasserreinigungsanlageref=get_tid(row.fk_waste_water_treatment_plant__REL), art=row.kind, bemerkung=truncate(emptystr_to_null(row.remark), 80), @@ -4244,7 +4251,10 @@ def textpos_common(row, t_type, geojson_crs_def): # --- baseclass --- # --- sia405_baseclass --- # --- erhaltungsereignis_abwasserbauwerk --- - abwasserbauwerkref=get_tid(row.fk_wastewater_structure__REL), + # abwasserbauwerkref=get_tid(row.fk_wastewater_structure__REL), + abwasserbauwerkref=check_fk_in_subsetid( + subset_ids, row.fk_wastewater_structure__REL + ), erhaltungsereignis_abwasserbauwerkassocref=get_tid(row.fk_maintenance_event__REL), ) From 054c474ca8ea378821307b95420f0a0c038bd534 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 20:48:43 +0000 Subject: [PATCH 110/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepdss/export.py | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 839a8b63..4b927aa2 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -2214,9 +2214,7 @@ def textpos_common(row, t_type, geojson_crs_def): **base_common(row, "mechanischevorreinigung"), # --- mechanischevorreinigung --- # abwasserbauwerkref=get_tid(row.fk_wastewater_structure__REL), - abwasserbauwerkref=check_fk_in_subsetid( - subset_ids, row.fk_wastewater_structure__REL - ), + abwasserbauwerkref=check_fk_in_subsetid(subset_ids, row.fk_wastewater_structure__REL), art=get_vl(row.kind__REL), bemerkung=truncate(emptystr_to_null(row.remark), 80), bezeichnung=null_to_emptystr(row.identifier), @@ -3172,9 +3170,7 @@ def textpos_common(row, t_type, geojson_crs_def): **base_common(row, "messstelle"), # --- messstelle --- # abwasserbauwerkref=get_tid(row.fk_wastewater_structure__REL), - abwasserbauwerkref=check_fk_in_subsetid( - subset_ids, row.fk_wastewater_structure__REL - ), + abwasserbauwerkref=check_fk_in_subsetid(subset_ids, row.fk_wastewater_structure__REL), abwasserreinigungsanlageref=get_tid(row.fk_waste_water_treatment_plant__REL), art=row.kind, bemerkung=truncate(emptystr_to_null(row.remark), 80), @@ -4252,9 +4248,7 @@ def textpos_common(row, t_type, geojson_crs_def): # --- sia405_baseclass --- # --- erhaltungsereignis_abwasserbauwerk --- # abwasserbauwerkref=get_tid(row.fk_wastewater_structure__REL), - abwasserbauwerkref=check_fk_in_subsetid( - subset_ids, row.fk_wastewater_structure__REL - ), + abwasserbauwerkref=check_fk_in_subsetid(subset_ids, row.fk_wastewater_structure__REL), erhaltungsereignis_abwasserbauwerkassocref=get_tid(row.fk_maintenance_event__REL), ) From 688f1adc3a2d457105a02a904b74ec88f6413810 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 10 Dec 2024 14:37:39 +0100 Subject: [PATCH 111/127] add def get_selection_text_for_in_statement and def get_connected_wn_from_re --- qgepqwat2ili/utils/ili2db.py | 62 ++++++++++++++++++++++++++++++++++++ 1 file changed, 62 insertions(+) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index 015c76db..c57db6c2 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -484,6 +484,24 @@ def skip_wwtp_structure_ids_old(): return not_wwtp_structure_ids +#10.12.2024 +def get_selection_text_for_in_statement(selection_list) + """ + convert selection_list to selection_text to fit SQL IN statement + """ + selection_text 
= "" + + for list_item in selection_list: + selection_text += "'" + selection_text += list_item + selection_text += "'," + + # remove last komma to make it a correct IN statement + selection_text = selection_text[:-1] + + logger.debug(f"selection_text = {selection_text} ...") + return selection_text + # 12.11.2024 to clean up - get_ws_wn_ids kann das auch def get_cl_re_ids(classname): @@ -525,6 +543,50 @@ def get_cl_re_ids(classname): logger.warning(f"Do not use this function with {classname} !") return None +#10.12.2024 +def get_connected_wn_from_re (subset_reaches): + """ + Get connected wastewater_nodes from subset of reaches + """ + + logger.info(f"get list of id's of connected wastewater_nodes of {provides subset of reaches {subset_reaches} ...") + connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) + connection.set_session(autocommit=True) + cursor = connection.cursor() + + connected_wn_from_re_ids = [] + + # select all connected from wastewater_nodes from subset of reaches + cursor.execute( + f"SELECT wef.obj_id as wef_obj_id FROM qgep_od.reach re LEFT JOIN qgep_od.reach_point rpf ON rpf.obj_id = re.fk_reach_point_from LEFT JOIN qgep_od.wastewater_networkelement wef ON wef.obj_id = rpf.fk_wastewater_networkelement WHERE re.obj_id IN ({selection_text}) AND NOT wef.obj_id isNull;" + ) + + # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ + # ws_wn_ids_count = int(cursor.fetchone()[0]) + # if ws_wn_ids_count == 0: + if cursor.fetchone() is None: + connected_wn_from_re_ids = None + else: + # added cursor.execute again to see if with this all records will be available + # 15.11.2024 added - see https://stackoverflow.com/questions/58101874/cursor-fetchall-or-other-method-fetchone-is-not-working + cursor.execute( + f"SELECT wef.obj_id as wef_obj_id FROM qgep_od.reach re LEFT JOIN qgep_od.reach_point rpf ON rpf.obj_id = re.fk_reach_point_from LEFT JOIN qgep_od.wastewater_networkelement wef ON wef.obj_id = rpf.fk_wastewater_networkelement WHERE re.obj_id IN ({selection_text}) AND NOT wef.obj_id isNull;" + ) + records = cursor.fetchall() + + # 15.11.2024 - does not get all records, but only n-1 + for row in records: + logger.debug(f" row[0] = {row[0]}") + # https://www.pythontutorial.net/python-string-methods/python-string-concatenation/ + strrow = str(row[0]) + if strrow is not None: + connected_wn_from_re_ids.append(strrow) + logger.debug(f" building up '{connected_wn_from_re_ids}' ...") + + return connected_wn_from_re_ids + +#10.12.2024 +# to do def get_connected_wn_to_re (subset_reaches): def get_ws_wn_ids(classname): """ From b645ec76457ca3958228c43ef605248076c1a904 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 10 Dec 2024 14:39:35 +0100 Subject: [PATCH 112/127] add subset_reaches_text call --- qgepqwat2ili/utils/ili2db.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index c57db6c2..9043ea79 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -556,9 +556,11 @@ def get_connected_wn_from_re (subset_reaches): connected_wn_from_re_ids = [] + subset_reaches_text = get_selection_text_for_in_statement(subset_reaches) + # select all connected from wastewater_nodes from subset of reaches cursor.execute( - f"SELECT wef.obj_id as wef_obj_id FROM qgep_od.reach re LEFT JOIN qgep_od.reach_point rpf ON rpf.obj_id = re.fk_reach_point_from LEFT JOIN qgep_od.wastewater_networkelement wef ON wef.obj_id = 
rpf.fk_wastewater_networkelement WHERE re.obj_id IN ({selection_text}) AND NOT wef.obj_id isNull;" + f"SELECT wef.obj_id as wef_obj_id FROM qgep_od.reach re LEFT JOIN qgep_od.reach_point rpf ON rpf.obj_id = re.fk_reach_point_from LEFT JOIN qgep_od.wastewater_networkelement wef ON wef.obj_id = rpf.fk_wastewater_networkelement WHERE re.obj_id IN ({subset_reaches_text}) AND NOT wef.obj_id isNull;" ) # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ @@ -570,7 +572,7 @@ def get_connected_wn_from_re (subset_reaches): # added cursor.execute again to see if with this all records will be available # 15.11.2024 added - see https://stackoverflow.com/questions/58101874/cursor-fetchall-or-other-method-fetchone-is-not-working cursor.execute( - f"SELECT wef.obj_id as wef_obj_id FROM qgep_od.reach re LEFT JOIN qgep_od.reach_point rpf ON rpf.obj_id = re.fk_reach_point_from LEFT JOIN qgep_od.wastewater_networkelement wef ON wef.obj_id = rpf.fk_wastewater_networkelement WHERE re.obj_id IN ({selection_text}) AND NOT wef.obj_id isNull;" + f"SELECT wef.obj_id as wef_obj_id FROM qgep_od.reach re LEFT JOIN qgep_od.reach_point rpf ON rpf.obj_id = re.fk_reach_point_from LEFT JOIN qgep_od.wastewater_networkelement wef ON wef.obj_id = rpf.fk_wastewater_networkelement WHERE re.obj_id IN ({subset_reaches_text}) AND NOT wef.obj_id isNull;" ) records = cursor.fetchall() From 47b03c01ca14bc896ab05267849c4c231d1057b4 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 10 Dec 2024 15:39:05 +0100 Subject: [PATCH 113/127] add flag_approach_urs and alternative approach --- qgepqwat2ili/qgepsia405/export.py | 95 ++++++++++++++++++------------- qgepqwat2ili/utils/ili2db.py | 6 +- 2 files changed, 59 insertions(+), 42 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 2f0f6713..017260e8 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -45,49 +45,66 @@ def qgep_export(selection=None, labels_file=None, orientation=None): filtered = selection is not None subset_ids = selection if selection is not None else [] - # 2. check if wastewater_structures exist that are not part of SIA 405 Abwasser (in Release 2015 this is the class wwtp_structures, in Release 2020 it will be more - to be extended in tww) - ws_off_sia405abwasser_list = None - ws_off_sia405abwasser_list = get_ws_wn_ids("wwtp_structure") + flag_approach_urs = true + + if flag_approach_urs: + # 2. Get all connected from wastewater_nodes of selected reaches + connected_from_wn_ids = get_connected_we_from_re(subset_ids) + # 3. Get all connected to wastewater_nodes of selected reaches + connected_to_wn_ids = get_connected_we_to_re(subset_ids) + # 4. Get all connected wastewater_nodes from overflows.fk_overflow_to + connected_overflow_to_wn_ids = get_connected_overflow_to_wn_ids(subset_ids) + # 5. Add results from 2., 3. and 4. to subset_ids -> adapted_subset_ids + adapted_subset_ids = add_to_selection(subset_ids, connected_from_wn_ids) + adapted_subset_ids = add_to_selection(adapted_subset_ids, connected_to_wn_ids) + adapted_subset_ids = add_to_selection(adapted_subset_ids, connected_overflow_to_wn_ids) + # 6. check blind connections - are there reaches in adapted_subset_ids that have not been in subset_ids + + + else: + # 2. 
check if wastewater_structures exist that are not part of SIA 405 Abwasser (in Release 2015 this is the class wwtp_structures, in Release 2020 it will be more - to be extended in tww) + ws_off_sia405abwasser_list = None + ws_off_sia405abwasser_list = get_ws_wn_ids("wwtp_structure") - # 3. Show ws_off_sia405abwasser_list - logger.debug( - f"ws_off_sia405abwasser_list : {ws_off_sia405abwasser_list}", - ) + # 3. Show ws_off_sia405abwasser_list + logger.debug( + f"ws_off_sia405abwasser_list : {ws_off_sia405abwasser_list}", + ) - # 4. check if filtered - if filtered: - if ws_off_sia405abwasser_list: - # take out ws_off_sia405abwasser_list from selection - subset_ids = remove_from_selection(subset_ids, ws_off_sia405abwasser_list) - # else do nothing - else: - if ws_off_sia405abwasser_list: - # add all data of wastewater_structures to selection - subset_ids = add_to_selection(subset_ids, get_ws_wn_ids("wastewater_structure")) - logger.debug( - f"subset_ids of all wws : {subset_ids}", - ) - # take out ws_off_sia405abwasser_list from selection - subset_ids = remove_from_selection(subset_ids, ws_off_sia405abwasser_list) - logger.debug( - f"subset_ids of all wws minus ws_off_sia405abwasser_list: {subset_ids}", - ) - # add reach_ids - # subset_ids = add_to_selection(subset_ids, get_cl_re_ids("channel")) - # treat export as with a selection - filtered = True + # 4. check if filtered + if filtered: + if ws_off_sia405abwasser_list: + # take out ws_off_sia405abwasser_list from selection + subset_ids = remove_from_selection(subset_ids, ws_off_sia405abwasser_list) + # else do nothing + else: + if ws_off_sia405abwasser_list: + # add all data of wastewater_structures to selection + subset_ids = add_to_selection(subset_ids, get_ws_wn_ids("wastewater_structure")) + logger.debug( + f"subset_ids of all wws : {subset_ids}", + ) + # take out ws_off_sia405abwasser_list from selection + subset_ids = remove_from_selection(subset_ids, ws_off_sia405abwasser_list) + logger.debug( + f"subset_ids of all wws minus ws_off_sia405abwasser_list: {subset_ids}", + ) + # add reach_ids + # subset_ids = add_to_selection(subset_ids, get_cl_re_ids("channel")) + # treat export as with a selection + filtered = True - # else do nothing + # else do nothing - # 5. get and add all id's of connected wastewater_structures (not only of wastewater_network_element (reach, wwn) - subset_wws_ids = get_ws_selected_ww_networkelements(subset_ids) - logger.debug( - f"subset_wws_ids: {subset_wws_ids}", - ) - subset_ids = add_to_selection(subset_ids, subset_wws_ids) - logger.debug( - f"subset_ids with wws : {subset_ids}", - ) + # 5. 
get and add all id's of connected wastewater_structures (not only of wastewater_network_element (reach, wwn) + subset_wws_ids = get_ws_selected_ww_networkelements(subset_ids) + logger.debug( + f"subset_wws_ids: {subset_wws_ids}", + ) + subset_ids = add_to_selection(subset_ids, subset_wws_ids) + logger.debug( + f"subset_ids with wws : {subset_ids}", + ) # Orientation oriented = orientation is not None diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index 9043ea79..ed67a980 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -544,9 +544,9 @@ def get_cl_re_ids(classname): return None #10.12.2024 -def get_connected_wn_from_re (subset_reaches): +def get_connected_we_from_re (subset_reaches): """ - Get connected wastewater_nodes from subset of reaches + Get connected wastewater_networkelements (wastewater_nodes and reaches) from subset of reaches """ logger.info(f"get list of id's of connected wastewater_nodes of {provides subset of reaches {subset_reaches} ...") @@ -558,7 +558,7 @@ def get_connected_wn_from_re (subset_reaches): subset_reaches_text = get_selection_text_for_in_statement(subset_reaches) - # select all connected from wastewater_nodes from subset of reaches + # select all connected from wastewater_nodes from provided subset of reaches cursor.execute( f"SELECT wef.obj_id as wef_obj_id FROM qgep_od.reach re LEFT JOIN qgep_od.reach_point rpf ON rpf.obj_id = re.fk_reach_point_from LEFT JOIN qgep_od.wastewater_networkelement wef ON wef.obj_id = rpf.fk_wastewater_networkelement WHERE re.obj_id IN ({subset_reaches_text}) AND NOT wef.obj_id isNull;" ) From 9519bcb5233874df54915da903c12c6296a033b8 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 10 Dec 2024 16:21:31 +0100 Subject: [PATCH 114/127] def filter_reaches --- qgepqwat2ili/qgepsia405/export.py | 14 ++++++++ qgepqwat2ili/utils/ili2db.py | 55 +++++++++++++++++++++++++++++++ 2 files changed, 69 insertions(+) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 017260e8..bb54ac41 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -59,6 +59,20 @@ def qgep_export(selection=None, labels_file=None, orientation=None): adapted_subset_ids = add_to_selection(adapted_subset_ids, connected_to_wn_ids) adapted_subset_ids = add_to_selection(adapted_subset_ids, connected_overflow_to_wn_ids) # 6. check blind connections - are there reaches in adapted_subset_ids that have not been in subset_ids + subset_ids_reaches = filter_reaches(subset_ids) + adapted_subset_ids_reaches = filter_reaches(adapted_subset_ids) + + # https://www.w3schools.com/python/ref_set_difference.asp + # x = {"apple", "banana", "cherry"} + # y = {"google", "microsoft", "apple"} + # z = x.difference(y) + extra_reaches_ids = subset_ids_reaches.difference(adapted_subset_ids_reaches) + # 7. 
If extra_reaches then remove from adapted_subset_ids + if not extra_reaches_ids: + # list is empty - no need for adaption + else: + # if len(extra_reaches_ids) > 0: + adapted_subset_ids = remove_from_selection(adapted_subset_ids, extra_reaches_ids) else: diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index ed67a980..deeaceed 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -680,6 +680,61 @@ def get_ws_selected_ww_networkelements(selected_wwn): return ws_ids +# 10.1.2024 +def filter_reaches(selected_ids) + """ + Filter out reaches from selected_ids + """ + + logger.info(f"Filter out reaches from selected_ids {selected_ids} ...") + + if selected_ids is None: + subset_reaches_ids = None + else: + connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) + connection.set_session(autocommit=True) + cursor = connection.cursor() + + subset_reaches_ids = [] + + subset_text = get_selection_text_for_in_statement(selected_ids) + + # select all reaches + cursor.execute( + f"SELECT obj_id FROM qgep_od.reach;" + ) + + # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ + # ws_wn_ids_count = int(cursor.fetchone()[0]) + # if ws_wn_ids_count == 0: + if cursor.fetchone() is None: + all_reaches_ids = None + else: + # added cursor.execute again to see if with this all records will be available + # 15.11.2024 added - see https://stackoverflow.com/questions/58101874/cursor-fetchall-or-other-method-fetchone-is-not-working + cursor.execute( + f"SELECT obj_id FROM qgep_od.reach;" + ) + records = cursor.fetchall() + + # 15.11.2024 - does not get all records, but only n-1 + for row in records: + logger.debug(f" row[0] = {row[0]}") + # https://www.pythontutorial.net/python-string-methods/python-string-concatenation/ + strrow = str(row[0]) + if strrow is not None: + all_reaches_ids.append(strrow) + logger.debug(f" building up '{all_reaches_ids}' ...") + + + for list_item in selected_ids: + if list_item in all_reaches_ids: + subset_reaches_ids.append(list_item) + else: + logger.debug(f"'filter_reaches: {list_item}' is not a reach id") + + return subset_reaches_ids + def remove_from_selection(selected_ids, remove_ids): """ From 8d6dd37250f11c6d9d4ed64f9e0a4e1e72aeb490 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 10 Dec 2024 16:33:38 +0100 Subject: [PATCH 115/127] get all connected wws and remove wwtp_structures (if sia405 abwasser export) --- qgepqwat2ili/qgepsia405/export.py | 20 ++++++++++++++++++-- qgepqwat2ili/utils/ili2db.py | 1 + 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index bb54ac41..65031d68 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -73,8 +73,24 @@ def qgep_export(selection=None, labels_file=None, orientation=None): else: # if len(extra_reaches_ids) > 0: adapted_subset_ids = remove_from_selection(adapted_subset_ids, extra_reaches_ids) - - + # 8. get all id's of connected wastewater_structures + subset_wws_ids = get_ws_selected_ww_networkelements(adapted_subset_ids) + logger.debug( + f"subset_wws_ids: {subset_wws_ids}", + ) + # 9. if sia405 export: check if wastewater_structures exist that are not part of SIA 405 Abwasser (in Release 2015 this is the class wwtp_structures, in Release 2020 it will be more - to be extended in tww) + ws_off_sia405abwasser_list = None + ws_off_sia405abwasser_list = get_ws_wn_ids("wwtp_structure") + + # 10. 
Show ws_off_sia405abwasser_list + logger.debug( + f"ws_off_sia405abwasser_list : {ws_off_sia405abwasser_list}", + ) + # 11. take out ws_off_sia405abwasser_list from subset_wws_ids + subset_wws_ids = remove_from_selection(subset_wws_ids, ws_off_sia405abwasser_list) + logger.debug( + f"subset_ids of all wws minus ws_off_sia405abwasser_list: {subset_wws_ids}", + ) else: # 2. check if wastewater_structures exist that are not part of SIA 405 Abwasser (in Release 2015 this is the class wwtp_structures, in Release 2020 it will be more - to be extended in tww) ws_off_sia405abwasser_list = None diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index deeaceed..d21482b2 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -730,6 +730,7 @@ def filter_reaches(selected_ids) for list_item in selected_ids: if list_item in all_reaches_ids: subset_reaches_ids.append(list_item) + logger.debug(f"'filter_reaches: {list_item}' is a reach id - added to subset_reaches_ids") else: logger.debug(f"'filter_reaches: {list_item}' is not a reach id") From b50f66b7613cbdb2637c3f87e29203970f589ffc Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 10 Dec 2024 15:58:02 +0000 Subject: [PATCH 116/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgep/export.py | 8 +------- qgepqwat2ili/qgepdss/export.py | 3 --- qgepqwat2ili/qgepsia405/export.py | 8 ++------ qgepqwat2ili/utils/ili2db.py | 2 +- 4 files changed, 4 insertions(+), 17 deletions(-) diff --git a/qgepqwat2ili/qgep/export.py b/qgepqwat2ili/qgep/export.py index b0bb8567..88f21c5e 100644 --- a/qgepqwat2ili/qgep/export.py +++ b/qgepqwat2ili/qgep/export.py @@ -9,15 +9,13 @@ # from ..utils.ili2db import skip_wwtp_structure_ids # 6.11.2024 replaced with from .. import utils +from ..utils.basket_utils import BasketUtils # 4.10.2024 # from ..utils.ili2db import skip_wwtp_structure_ids # 6.11.2024 replaced with from ..utils.ili2db import add_to_selection, get_ws_wn_ids, remove_from_selection - -from ..utils.basket_utils import BasketUtils from ..utils.qgep_export_utils import QgepExportUtils - from ..utils.various import logger from .model_abwasser import get_abwasser_model from .model_qgep import get_qgep_model @@ -99,7 +97,6 @@ def qgep_export_kek(selection=None, labels_file=None, orientation=None, basket_e else: labelorientation = 0 - def get_tid(relation): """ Makes a tid for a relation @@ -336,7 +333,6 @@ def textpos_common(row, t_type, geojson_crs_def): subset_ids=subset_ids, ) - # ADAPTED FROM 052a_sia405_abwasser_2015_2_d_interlisexport2.sql logger.info("Exporting QGEP.organisation -> ABWASSER.organisation, ABWASSER.metaattribute") qgep_export_utils.export_organisation() @@ -600,7 +596,6 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("done") abwasser_session.flush() - logger.info( "Exporting QGEP.dryweather_flume -> ABWASSER.trockenwetterrinne, ABWASSER.metaattribute" ) @@ -760,7 +755,6 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("done") abwasser_session.flush() - logger.info("Exporting QGEP.examination -> ABWASSER.untersuchung, ABWASSER.metaattribute") query = qgep_session.query(qgep_model.examination) if filtered: diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 5bd619d2..27771187 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -7,15 +7,12 @@ from .. 
import utils - # 4.10.2024 # from ..utils.ili2db import skip_wwtp_structure_ids # 6.11.2024 replaced with - to check if really necessary here (as no sia405 abwasser exceptions needed) ======= from ..utils.basket_utils import BasketUtils - from ..utils.qgep_export_utils import QgepExportUtils - from ..utils.various import logger from .model_abwasser import get_abwasser_model from .model_qgep import get_qgep_model diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 164858d6..b1e3627c 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -5,7 +5,7 @@ from sqlalchemy.sql import text from .. import utils - +from ..utils.basket_utils import BasketUtils # 4.10.2024 # 6.11.2024 replaced with / 15.11.2024 get_ws_selected_ww_networkelements added @@ -15,11 +15,7 @@ get_ws_wn_ids, remove_from_selection, ) - -from ..utils.basket_utils import BasketUtils - from ..utils.qgep_export_utils import QgepExportUtils - from ..utils.various import logger from .model_abwasser import get_abwasser_model from .model_qgep import get_qgep_model @@ -79,7 +75,7 @@ def qgep_export_sia405(selection=None, labels_file=None, orientation=None, baske # https://www.w3schools.com/python/ref_set_difference.asp # x = {"apple", "banana", "cherry"} # y = {"google", "microsoft", "apple"} - # z = x.difference(y) + # z = x.difference(y) extra_reaches_ids = subset_ids_reaches.difference(adapted_subset_ids_reaches) # 7. If extra_reaches then remove from adapted_subset_ids if not extra_reaches_ids: diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index ca494118..3bb5909c 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -687,7 +687,7 @@ def filter_reaches(selected_ids) """ logger.info(f"Filter out reaches from selected_ids {selected_ids} ...") - + if selected_ids is None: subset_reaches_ids = None else: From 1d6ed1a83149fa6b3d95208af017f933f23982d0 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 10 Dec 2024 17:02:32 +0100 Subject: [PATCH 117/127] correct empty if statement - added logger.debug --- qgepqwat2ili/qgepsia405/export.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index b1e3627c..5ac1ce63 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -80,6 +80,9 @@ def qgep_export_sia405(selection=None, labels_file=None, orientation=None, baske # 7. 
If extra_reaches then remove from adapted_subset_ids if not extra_reaches_ids: # list is empty - no need for adaption + logger.debug( + f"no extra reaches - so nothing to remove from adapted_subset_ids", + ) else: # if len(extra_reaches_ids) > 0: adapted_subset_ids = remove_from_selection(adapted_subset_ids, extra_reaches_ids) From bb94ae12762c666d4310adb6f3c92eb279200433 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 10 Dec 2024 16:02:47 +0000 Subject: [PATCH 118/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepsia405/export.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 5ac1ce63..2c15ffb0 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -156,7 +156,6 @@ def qgep_export_sia405(selection=None, labels_file=None, orientation=None, baske else: labelorientation = 0 - def get_tid(relation): """ Makes a tid for a relation @@ -391,7 +390,6 @@ def textpos_common(row, t_type, geojson_crs_def): subset_ids=subset_ids, ) - # ADAPTED FROM 052a_sia405_abwasser_2015_2_d_interlisexport2.sql logger.info("Exporting QGEP.organisation -> ABWASSER.organisation, ABWASSER.metaattribute") qgep_export_utils.export_organisation() @@ -557,7 +555,6 @@ def textpos_common(row, t_type, geojson_crs_def): # qgep_export_utils.export_reach_point() - logger.info( "Exporting QGEP.wastewater_node -> ABWASSER.abwasserknoten, ABWASSER.metaattribute" ) @@ -606,7 +603,6 @@ def textpos_common(row, t_type, geojson_crs_def): "Exporting QGEP.dryweather_downspout -> ABWASSER.trockenwetterfallrohr, ABWASSER.metaattribute" ) - query = qgep_session.query(QGEP.dryweather_downspout) if filtered: logger.info(f"filtered: subset_ids = {subset_ids}") @@ -710,7 +706,6 @@ def textpos_common(row, t_type, geojson_crs_def): logger.info("Exporting QGEP.access_aid -> ABWASSER.einstiegshilfe, ABWASSER.metaattribute") # qgep_export_utils.export_access_aid() - logger.info( "Exporting QGEP.dryweather_flume -> ABWASSER.trockenwetterrinne, ABWASSER.metaattribute" ) @@ -1124,7 +1119,6 @@ def textpos_common(row, t_type, geojson_crs_def): # logger.info("Exporting QGEP.benching -> ABWASSER.bankett, ABWASSER.metaattribute") # qgep_export_utils.export_benching() - # Labels # Note: these are extracted from the optional labels file (not exported from the QGEP database) if labels_file: From c87b116af87f6b6dbb4aea25b44451be4e7a9cbf Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 10 Dec 2024 17:03:45 +0100 Subject: [PATCH 119/127] added : --- qgepqwat2ili/utils/ili2db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index 3bb5909c..988e8d66 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -485,7 +485,7 @@ def skip_wwtp_structure_ids_old(): return not_wwtp_structure_ids #10.12.2024 -def get_selection_text_for_in_statement(selection_list) +def get_selection_text_for_in_statement(selection_list): """ convert selection_list to selection_text to fit SQL IN statement """ From 73d41c264e6e446c305aca595e7765f0c168ee6a Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 10 Dec 2024 17:05:36 +0100 Subject: [PATCH 120/127] correct logger.info --- qgepqwat2ili/utils/ili2db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index 
988e8d66..da0c5bc3 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -549,7 +549,7 @@ def get_connected_we_from_re (subset_reaches): Get connected wastewater_networkelements (wastewater_nodes and reaches) from subset of reaches """ - logger.info(f"get list of id's of connected wastewater_nodes of {provides subset of reaches {subset_reaches} ...") + logger.info(f"get list of id's of connected wastewater_nodes of provides subset of reaches {subset_reaches} ...") connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) connection.set_session(autocommit=True) cursor = connection.cursor() From baaf870eb6305e20488773ba28eaafcdc128808e Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 10 Dec 2024 17:10:36 +0100 Subject: [PATCH 121/127] def get_connected_we_to_re --- qgepqwat2ili/qgepsia405/export.py | 8 ++++-- qgepqwat2ili/utils/ili2db.py | 47 +++++++++++++++++++++++++++++-- 2 files changed, 51 insertions(+), 4 deletions(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 2c15ffb0..39dc1dc6 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -11,6 +11,10 @@ # 6.11.2024 replaced with / 15.11.2024 get_ws_selected_ww_networkelements added from ..utils.ili2db import ( add_to_selection, + filter_reaches, + get_connected_overflow_to_wn_ids, + get_connected_we_from_re, + get_connected_we_to_re, get_ws_selected_ww_networkelements, get_ws_wn_ids, remove_from_selection, @@ -55,7 +59,7 @@ def qgep_export_sia405(selection=None, labels_file=None, orientation=None, baske filtered = selection is not None subset_ids = selection if selection is not None else [] - flag_approach_urs = true + flag_approach_urs == true if flag_approach_urs: # 2. Get all connected from wastewater_nodes of selected reaches @@ -81,7 +85,7 @@ def qgep_export_sia405(selection=None, labels_file=None, orientation=None, baske if not extra_reaches_ids: # list is empty - no need for adaption logger.debug( - f"no extra reaches - so nothing to remove from adapted_subset_ids", + "no extra reaches - so nothing to remove from adapted_subset_ids", ) else: # if len(extra_reaches_ids) > 0: diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index da0c5bc3..0691949a 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -587,8 +587,51 @@ def get_connected_we_from_re (subset_reaches): return connected_wn_from_re_ids + #10.12.2024 -# to do def get_connected_wn_to_re (subset_reaches): +def get_connected_we_to_re (subset_reaches): + """ + Get connected wastewater_networkelements (wastewater_nodes and reaches) to subset of reaches + """ + + logger.info(f"get list of id's of connected wastewater_nodes of provides subset of reaches {subset_reaches} ...") + connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) + connection.set_session(autocommit=True) + cursor = connection.cursor() + + connected_wn_to_re_ids = [] + + subset_reaches_text = get_selection_text_for_in_statement(subset_reaches) + + # select all connected to wastewater_nodes from provided subset of reaches + cursor.execute( + f"SELECT wef.obj_id as wef_obj_id FROM qgep_od.reach re LEFT JOIN qgep_od.reach_point rpf ON rpf.obj_id = re.fk_reach_point_to LEFT JOIN qgep_od.wastewater_networkelement wef ON wef.obj_id = rpf.fk_wastewater_networkelement WHERE re.obj_id IN ({subset_reaches_text}) AND NOT wef.obj_id isNull;" + ) + + # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ + # 
ws_wn_ids_count = int(cursor.fetchone()[0]) + # if ws_wn_ids_count == 0: + if cursor.fetchone() is None: + connected_wn_to_re_ids = None + else: + # added cursor.execute again to see if with this all records will be available + # 15.11.2024 added - see https://stackoverflow.com/questions/58101874/cursor-fetchall-or-other-method-fetchone-is-not-working + cursor.execute( + f"SELECT wef.obj_id as wef_obj_id FROM qgep_od.reach re LEFT JOIN qgep_od.reach_point rpf ON rpf.obj_id = re.fk_reach_point_to LEFT JOIN qgep_od.wastewater_networkelement wef ON wef.obj_id = rpf.fk_wastewater_networkelement WHERE re.obj_id IN ({subset_reaches_text}) AND NOT wef.obj_id isNull;" + ) + records = cursor.fetchall() + + # 15.11.2024 - does not get all records, but only n-1 + for row in records: + logger.debug(f" row[0] = {row[0]}") + # https://www.pythontutorial.net/python-string-methods/python-string-concatenation/ + strrow = str(row[0]) + if strrow is not None: + connected_wn_to_re_ids.append(strrow) + logger.debug(f" building up '{connected_wn_to_re_ids}' ...") + + return connected_wn_to_re_ids + def get_ws_wn_ids(classname): """ @@ -681,7 +724,7 @@ def get_ws_selected_ww_networkelements(selected_wwn): return ws_ids # 10.1.2024 -def filter_reaches(selected_ids) +def filter_reaches(selected_ids): """ Filter out reaches from selected_ids """ From a19155686365b4cef378f69f9e3ac37834c9dd82 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 10 Dec 2024 16:10:51 +0000 Subject: [PATCH 122/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/utils/ili2db.py | 36 ++++++++++++++++++++---------------- 1 file changed, 20 insertions(+), 16 deletions(-) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index 0691949a..9a2ca969 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -484,7 +484,8 @@ def skip_wwtp_structure_ids_old(): return not_wwtp_structure_ids -#10.12.2024 + +# 10.12.2024 def get_selection_text_for_in_statement(selection_list): """ convert selection_list to selection_text to fit SQL IN statement @@ -543,13 +544,16 @@ def get_cl_re_ids(classname): logger.warning(f"Do not use this function with {classname} !") return None -#10.12.2024 -def get_connected_we_from_re (subset_reaches): + +# 10.12.2024 +def get_connected_we_from_re(subset_reaches): """ Get connected wastewater_networkelements (wastewater_nodes and reaches) from subset of reaches """ - logger.info(f"get list of id's of connected wastewater_nodes of provides subset of reaches {subset_reaches} ...") + logger.info( + f"get list of id's of connected wastewater_nodes of provides subset of reaches {subset_reaches} ..." + ) connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) connection.set_session(autocommit=True) cursor = connection.cursor() @@ -588,13 +592,15 @@ def get_connected_we_from_re (subset_reaches): return connected_wn_from_re_ids -#10.12.2024 -def get_connected_we_to_re (subset_reaches): +# 10.12.2024 +def get_connected_we_to_re(subset_reaches): """ Get connected wastewater_networkelements (wastewater_nodes and reaches) to subset of reaches """ - logger.info(f"get list of id's of connected wastewater_nodes of provides subset of reaches {subset_reaches} ...") + logger.info( + f"get list of id's of connected wastewater_nodes of provides subset of reaches {subset_reaches} ..." 
+ ) connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn()) connection.set_session(autocommit=True) cursor = connection.cursor() @@ -723,6 +729,7 @@ def get_ws_selected_ww_networkelements(selected_wwn): return ws_ids + # 10.1.2024 def filter_reaches(selected_ids): """ @@ -740,12 +747,10 @@ def filter_reaches(selected_ids): subset_reaches_ids = [] - subset_text = get_selection_text_for_in_statement(selected_ids) + get_selection_text_for_in_statement(selected_ids) # select all reaches - cursor.execute( - f"SELECT obj_id FROM qgep_od.reach;" - ) + cursor.execute(f"SELECT obj_id FROM qgep_od.reach;") # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/ # ws_wn_ids_count = int(cursor.fetchone()[0]) @@ -755,9 +760,7 @@ def filter_reaches(selected_ids): else: # added cursor.execute again to see if with this all records will be available # 15.11.2024 added - see https://stackoverflow.com/questions/58101874/cursor-fetchall-or-other-method-fetchone-is-not-working - cursor.execute( - f"SELECT obj_id FROM qgep_od.reach;" - ) + cursor.execute(f"SELECT obj_id FROM qgep_od.reach;") records = cursor.fetchall() # 15.11.2024 - does not get all records, but only n-1 @@ -769,11 +772,12 @@ def filter_reaches(selected_ids): all_reaches_ids.append(strrow) logger.debug(f" building up '{all_reaches_ids}' ...") - for list_item in selected_ids: if list_item in all_reaches_ids: subset_reaches_ids.append(list_item) - logger.debug(f"'filter_reaches: {list_item}' is a reach id - added to subset_reaches_ids") + logger.debug( + f"'filter_reaches: {list_item}' is a reach id - added to subset_reaches_ids" + ) else: logger.debug(f"'filter_reaches: {list_item}' is not a reach id") From 32ea333224a1fee6e1fbf41bee96861afd25b5b4 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 10 Dec 2024 17:29:23 +0100 Subject: [PATCH 123/127] True instead of true --- qgepqwat2ili/qgepsia405/export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qgepqwat2ili/qgepsia405/export.py b/qgepqwat2ili/qgepsia405/export.py index 39dc1dc6..1c92fdd2 100644 --- a/qgepqwat2ili/qgepsia405/export.py +++ b/qgepqwat2ili/qgepsia405/export.py @@ -59,7 +59,7 @@ def qgep_export_sia405(selection=None, labels_file=None, orientation=None, baske filtered = selection is not None subset_ids = selection if selection is not None else [] - flag_approach_urs == true + flag_approach_urs = True if flag_approach_urs: # 2. 
Get all connected from wastewater_nodes of selected reaches From 11995c44c3492f750f608b55974dfaa8948eab21 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 10 Dec 2024 17:46:09 +0100 Subject: [PATCH 124/127] remove === --- qgepqwat2ili/qgepdss/export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 27771187..27f9fcd5 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -10,7 +10,7 @@ # 4.10.2024 # from ..utils.ili2db import skip_wwtp_structure_ids # 6.11.2024 replaced with - to check if really necessary here (as no sia405 abwasser exceptions needed) -======= + from ..utils.basket_utils import BasketUtils from ..utils.qgep_export_utils import QgepExportUtils from ..utils.various import logger From 2e984832c9a763d01774a021ca7617ad42e2ac76 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 10 Dec 2024 16:46:45 +0000 Subject: [PATCH 125/127] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- qgepqwat2ili/qgepdss/export.py | 31 +++++-------------------------- 1 file changed, 5 insertions(+), 26 deletions(-) diff --git a/qgepqwat2ili/qgepdss/export.py b/qgepqwat2ili/qgepdss/export.py index 27f9fcd5..57cc64da 100644 --- a/qgepqwat2ili/qgepdss/export.py +++ b/qgepqwat2ili/qgepdss/export.py @@ -6,17 +6,15 @@ from sqlalchemy.sql import text from .. import utils - -# 4.10.2024 -# from ..utils.ili2db import skip_wwtp_structure_ids -# 6.11.2024 replaced with - to check if really necessary here (as no sia405 abwasser exceptions needed) - from ..utils.basket_utils import BasketUtils -from ..utils.qgep_export_utils import QgepExportUtils from ..utils.various import logger from .model_abwasser import get_abwasser_model from .model_qgep import get_qgep_model +# 4.10.2024 +# from ..utils.ili2db import skip_wwtp_structure_ids +# 6.11.2024 replaced with - to check if really necessary here (as no sia405 abwasser exceptions needed) + def qgep_export_dss(selection=None, labels_file=None, orientation=None, basket_enabled=False): """ @@ -40,12 +38,11 @@ def qgep_export_dss(selection=None, labels_file=None, orientation=None, basket_e abwasser_session.execute(text("SET CONSTRAINTS ALL DEFERRED;")) basket_utils = None - current_basket = None if basket_enabled: basket_utils = BasketUtils(abwasser_model, abwasser_session) basket_utils.create_basket() - current_basket = basket_utils.basket_topic_sia405_abwasser + basket_utils.basket_topic_sia405_abwasser # Filtering filtered = selection is not None @@ -67,7 +64,6 @@ def qgep_export_dss(selection=None, labels_file=None, orientation=None, basket_e else: labelorientation = 0 - def get_tid(relation): """ Makes a tid for a relation @@ -201,7 +197,6 @@ def base_common(row, type_name): "t_id": get_tid(row), } - def organisation_common(row): """ Returns common attributes for organisation @@ -253,7 +248,6 @@ def water_control_structure_common(row): "lage": ST_Force2D(row.situation_geometry), } - def wastewater_networkelement_common(row): """ Returns common attributes for wastewater_networkelement @@ -279,7 +273,6 @@ def structure_part_common(row): "instandstellung": get_vl(row.renovation_demand__REL), } - def connection_object_common(row): """ Returns common attributes for connection_object @@ -1345,7 +1338,6 @@ def overflow_common(row): logger.info("done") abwasser_session.flush() - logger.info( "Exporting QGEP.wwtp_energy_use -> 
ABWASSER.araenergienutzung, ABWASSER.metaattribute" ) @@ -1477,7 +1469,6 @@ def overflow_common(row): query = qgep_session.query(QGEP.control_center) # Always export all, no filtering - for row in query: # AVAILABLE FIELDS IN QGEP.control_center @@ -1810,7 +1801,6 @@ def overflow_common(row): query = qgep_session.query(QGEP.hydr_geometry) - for row in query: # AVAILABLE FIELDS IN QGEP.hydr_geometry @@ -1996,14 +1986,12 @@ def overflow_common(row): # --- sia405_baseclass --- **qgep_export_utils.base_common(row, "mechanischevorreinigung"), # --- mechanischevorreinigung --- - # abwasserbauwerkref=get_tid(row.fk_wastewater_structure__REL), abwasserbauwerkref=check_fk_in_subsetid(subset_ids, row.fk_wastewater_structure__REL), art=get_vl(row.kind__REL), bemerkung=truncate(emptystr_to_null(row.remark), 80), bezeichnung=null_to_emptystr(row.identifier), versickerungsanlageref=get_tid(row.fk_infiltration_installation__REL), - ) abwasser_session.add(mechanischevorreinigung) qgep_export_utils.create_metaattributes(row) @@ -2244,7 +2232,6 @@ def overflow_common(row): logger.info("done") abwasser_session.flush() - logger.info( "Exporting QGEP.dryweather_flume -> ABWASSER.trockenwetterrinne, ABWASSER.metaattribute" ) @@ -2355,7 +2342,6 @@ def overflow_common(row): logger.info("done") abwasser_session.flush() - logger.info( "Exporting QGEP.electric_equipment -> ABWASSER.elektrischeeinrichtung, ABWASSER.metaattribute" ) @@ -2912,7 +2898,6 @@ def overflow_common(row): query = qgep_session.query(qgep_model.measuring_point) if filtered: query1 = query.join( - QGEP.wastewater_structure, QGEP.measuring_point.fk_wastewater_structure == QGEP.wastewater_structure.obj_id, ).join(QGEP.wastewater_networkelement) @@ -2994,11 +2979,9 @@ def overflow_common(row): # --- sia405_baseclass --- **qgep_export_utils.base_common(row, "messstelle"), # --- messstelle --- - # abwasserbauwerkref=get_tid(row.fk_wastewater_structure__REL), abwasserbauwerkref=check_fk_in_subsetid(subset_ids, row.fk_wastewater_structure__REL), abwasserreinigungsanlageref=get_tid(row.fk_waste_water_treatment_plant__REL), - art=row.kind, bemerkung=qgep_export_utils.truncate( qgep_export_utils.emptystr_to_null(row.remark), 80 @@ -3086,7 +3069,6 @@ def overflow_common(row): statement = query.statement logger.debug(f" selection query = {statement}") - for row in query: # AVAILABLE FIELDS IN QGEP.measuring_device @@ -3306,7 +3288,6 @@ def overflow_common(row): statement = query.statement logger.debug(f" selection query = {statement}") - for row in query: # AVAILABLE FIELDS IN QGEP.measurement_result @@ -4112,11 +4093,9 @@ def overflow_common(row): # --- baseclass --- # --- sia405_baseclass --- # --- erhaltungsereignis_abwasserbauwerk --- - # abwasserbauwerkref=get_tid(row.fk_wastewater_structure__REL), abwasserbauwerkref=check_fk_in_subsetid(subset_ids, row.fk_wastewater_structure__REL), erhaltungsereignis_abwasserbauwerkassocref=get_tid(row.fk_maintenance_event__REL), - ) abwasser_session2.add(erhaltungsereignis_abwasserbauwerk) From a3b37a426d65b7cfb876ccb63d1dda3e6fc91c75 Mon Sep 17 00:00:00 2001 From: SJiB Date: Tue, 10 Dec 2024 18:05:38 +0100 Subject: [PATCH 126/127] def get_connected_overflow_to_wn_ids --- qgepqwat2ili/utils/ili2db.py | 47 ++++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py index 9a2ca969..31f17e88 100644 --- a/qgepqwat2ili/utils/ili2db.py +++ b/qgepqwat2ili/utils/ili2db.py @@ -591,6 +591,53 @@ def get_connected_we_from_re(subset_reaches): 
    return connected_wn_from_re_ids


+# 10.12.2024
+def get_connected_overflow_to_wn_ids(selected_ids):
+
+    """
+    Get all connected wastewater_nodes from overflows.fk_overflow_to
+    """
+
+    logger.info(
+        f"Get all connected wastewater_nodes from overflows.fk_overflow_to {selected_ids} ..."
+    )
+    connection = psycopg2.connect(get_pgconf_as_psycopg2_dsn())
+    connection.set_session(autocommit=True)
+    cursor = connection.cursor()
+
+    connected_overflow_to_wn_ids = []
+
+    subset_text = get_selection_text_for_in_statement(selected_ids)
+
+    # select all wastewater_nodes reached via overflow.fk_overflow_to from the provided wastewater_nodes
+    cursor.execute(
+        f"SELECT ov.fk_overflow_to FROM qgep_od.wastewater_node wn LEFT JOIN qgep_od.overflow ov ON wn.obj_id = ov.fk_wastewater_node WHERE wn.obj_id IN ({subset_text});"
+    )
+
+    # cursor.fetchall() - see https://pynative.com/python-cursor-fetchall-fetchmany-fetchone-to-read-rows-from-table/
+    # ws_wn_ids_count = int(cursor.fetchone()[0])
+    # if ws_wn_ids_count == 0:
+    if cursor.fetchone() is None:
+        connected_overflow_to_wn_ids = None
+    else:
+        # added cursor.execute again to see if with this all records will be available
+        # 15.11.2024 added - see https://stackoverflow.com/questions/58101874/cursor-fetchall-or-other-method-fetchone-is-not-working
+        cursor.execute(
+            f"SELECT ov.fk_overflow_to FROM qgep_od.wastewater_node wn LEFT JOIN qgep_od.overflow ov ON wn.obj_id = ov.fk_wastewater_node WHERE wn.obj_id IN ({subset_text});"
+        )
+        records = cursor.fetchall()
+
+        # 15.11.2024 - does not get all records, but only n-1
+        for row in records:
+            logger.debug(f"  row[0] = {row[0]}")
+            # https://www.pythontutorial.net/python-string-methods/python-string-concatenation/
+            strrow = str(row[0])
+            if strrow is not None:
+                connected_overflow_to_wn_ids.append(strrow)
+                logger.debug(f"  building up '{connected_overflow_to_wn_ids}' ...")
+
+    return connected_overflow_to_wn_ids
+

 # 10.12.2024
 def get_connected_we_to_re(subset_reaches):

From dd7866db2f99bfd3fa90489c7b3a3d57679e392c Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Tue, 10 Dec 2024 17:06:04 +0000
Subject: [PATCH 127/127] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 qgepqwat2ili/utils/ili2db.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/qgepqwat2ili/utils/ili2db.py b/qgepqwat2ili/utils/ili2db.py
index 31f17e88..7f244dbb 100644
--- a/qgepqwat2ili/utils/ili2db.py
+++ b/qgepqwat2ili/utils/ili2db.py
@@ -591,9 +591,9 @@ def get_connected_we_from_re(subset_reaches):
     return connected_wn_from_re_ids


+# 10.12.2024
 def get_connected_overflow_to_wn_ids(selected_ids):
-
     """
     Get all connected wastewater_nodes from overflows.fk_overflow_to
     """
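
The repeated change in the export queries above replaces implicit two-table joins such as query.join(QGEP.wastewater_structure, QGEP.wastewater_networkelement) with joins that state the ON condition explicitly. The stand-alone sketch below is an illustration only: the Structure/Part classes are invented for the example and are not the QGEP ORM model. It shows the kind of ambiguity an explicit onclause resolves when more than one foreign-key path exists between two mapped tables, which is presumably why the patched queries pin the join to structure_part.fk_wastewater_structure.

from sqlalchemy import Column, ForeignKey, String, select
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Structure(Base):
    __tablename__ = "structure"
    obj_id = Column(String, primary_key=True)


class Part(Base):
    __tablename__ = "part"
    obj_id = Column(String, primary_key=True)
    # two columns referencing the same table -> no unique implicit join path
    fk_structure = Column(String, ForeignKey("structure.obj_id"))
    fk_parent_structure = Column(String, ForeignKey("structure.obj_id"))


# select(Part).join(Structure) raises AmbiguousForeignKeysError for this model;
# naming the ON condition removes the ambiguity, analogous to
# QGEP.structure_part.fk_wastewater_structure == QGEP.wastewater_structure.obj_id
stmt = select(Part).join(Structure, Part.fk_structure == Structure.obj_id)
print(stmt)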
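
get_selection_text_for_in_statement from PATCH 111 only turns a list of obj_ids into the quoted, comma-separated body of a SQL IN (...) clause. The short stand-in below is an approximation for reading the queries above, not the repository function, and shows the expected output.

def selection_text_for_in_statement(selection_list):
    # approximation of get_selection_text_for_in_statement:
    # "'id_1','id_2'" - quoted ids joined by commas, embedded in "... IN (...)"
    return ",".join("'{}'".format(obj_id) for obj_id in selection_list)


print(selection_text_for_in_statement(["ch13abc1", "ch13abc2"]))
# prints: 'ch13abc1','ch13abc2'

Because the ids come out of the QGEP database itself, this string building is workable; binding the list directly, e.g. cursor.execute("... WHERE re.obj_id = ANY(%s)", (selection_list,)), would be a possible alternative, since psycopg2 adapts Python lists to PostgreSQL arrays.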
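
The subset handling added in PATCH 113 to PATCH 117 for qgep_export_sia405 is spread over several diffs. The database-free sketch below condenses the intended flow, following the numbered comments in the patches (including the step 6 intent of dropping reaches that only entered the selection through the joins). The function and its connected_* arguments are placeholders for this illustration; the real code obtains those ids from qgep_od via the psycopg2 helpers in utils/ili2db.py.

def adapt_subset(subset_ids, connected_from_we, connected_to_we, overflow_to_wn, all_reach_ids):
    # sketch of the adapted-subset flow from PATCH 113-117 (illustration only)
    adapted = set(subset_ids)
    adapted |= set(connected_from_we)  # step 2: network elements at fk_reach_point_from
    adapted |= set(connected_to_we)  # step 3: network elements at fk_reach_point_to
    adapted |= set(overflow_to_wn)  # step 4: overflow.fk_overflow_to nodes

    # steps 6/7: a reach that entered the adapted set only through the joins
    # ("blind connection") is removed again, so only explicitly selected
    # reaches remain in the export subset
    reach_ids = set(all_reach_ids)
    extra_reaches = (adapted & reach_ids) - (set(subset_ids) & reach_ids)
    return sorted(adapted - extra_reaches)


if __name__ == "__main__":
    # toy data: two selected reaches; re_2 ends directly on reach re_3
    # (blind connection), so re_3 is pulled in and then dropped again
    print(
        adapt_subset(
            subset_ids=["re_1", "re_2"],
            connected_from_we=["wn_10", "wn_11"],
            connected_to_we=["wn_11", "re_3"],
            overflow_to_wn=["wn_99"],
            all_reach_ids=["re_1", "re_2", "re_3"],
        )
    )
    # prints: ['re_1', 're_2', 'wn_10', 'wn_11', 'wn_99']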