From 89b4cdd81524dff2d11bdf3cc357fd2a60fcbfba Mon Sep 17 00:00:00 2001
From: cching95 <73163191+cching95@users.noreply.github.com>
Date: Thu, 18 Jul 2024 14:03:25 +0100
Subject: [PATCH] Add UoM to SDK Time Series Queries and Update Unit Tests (#775)

* add uom option to resample query

Signed-off-by: Chloe Ching

* add unit test for resample uom

Signed-off-by: Chloe Ching

* Interpolate updates and test

Signed-off-by: Chloe Ching

* interpolate tests

Signed-off-by: Chloe Ching

* add uom parameter to time series query builder

Signed-off-by: Chloe Ching

* Remove code

Signed-off-by: Chloe Ching

* interpolate unit tests with uom

Signed-off-by: Chloe Ching

* uom for plot query and unit tests

Signed-off-by: Chloe Ching

* add uom to circular avg, std, latest, interpolate at time, summary and update unit tests

---------

Signed-off-by: Chloe Ching
---
 .../time_series/_time_series_query_builder.py | 56 +++++++++++++++++--
 .../queries/time_series/circular_average.py   |  8 +++
 .../circular_standard_deviation.py            |  8 +++
 .../queries/time_series/interpolate.py        |  8 +++
 .../time_series/interpolation_at_time.py      |  8 +++
 .../rtdip_sdk/queries/time_series/latest.py   |  1 +
 .../rtdip_sdk/queries/time_series/plot.py     | 12 +++-
 .../rtdip_sdk/queries/time_series/resample.py | 12 +++-
 .../rtdip_sdk/queries/time_series/summary.py  |  1 +
 .../time_series/time_series_query_builder.py  | 36 +++++++++++-
 .../time_series/time_weighted_average.py      |  8 +++
 .../queries/_test_utils/sdk_test_objects.py   | 41 ++++++++++++--
 .../time_series/test_circular_average.py      | 14 ++++-
 .../test_circular_standard_deviation.py       | 14 ++++-
 .../queries/time_series/test_interpolate.py   | 14 ++++-
 .../time_series/test_interpolation_at_time.py | 16 +++++-
 .../queries/time_series/test_latest.py        | 15 ++++-
 .../queries/time_series/test_plot.py          | 26 +++++++++
 .../queries/time_series/test_resample.py      | 14 ++++-
 .../queries/time_series/test_summary.py       | 14 ++++-
 .../time_series/test_time_weighted_average.py | 22 +++++++-
 21 files changed, 324 insertions(+), 24 deletions(-)

diff --git a/src/sdk/python/rtdip_sdk/queries/time_series/_time_series_query_builder.py b/src/sdk/python/rtdip_sdk/queries/time_series/_time_series_query_builder.py index 302aaa03d..aea7fa29d 100644 --- a/src/sdk/python/rtdip_sdk/queries/time_series/_time_series_query_builder.py +++ b/src/sdk/python/rtdip_sdk/queries/time_series/_time_series_query_builder.py @@ -155,8 +155,12 @@ def _sample_query(parameters_dict: dict) -> tuple: "{% endif %}" "))) SELECT * FROM pivot ORDER BY `{{ timestamp_column }}` " "{% else %}" + "{% if display_uom is defined and display_uom == true %}" + "SELECT p.`EventTime`, p.`TagName`, p.`Value`, m.`UoM` FROM project p LEFT OUTER JOIN `{{ business_unit|lower }}`.`sensors`.`{{ asset|lower }}_{{ data_security_level|lower }}_metadata` m ON p.`TagName` = m.`TagName` " + "{% else %}" "SELECT * FROM project " "{% endif %}" + "{% endif %}" "{% if is_resample is defined and is_resample == true and limit is defined and limit is not none %}" "LIMIT {{ limit }} " "{% endif %}" @@ -203,6 +207,7 @@ def _sample_query(parameters_dict: dict) -> tuple: "case_insensitivity_tag_search": parameters_dict.get( "case_insensitivity_tag_search", False ), + "display_uom": parameters_dict.get("display_uom", False), } sql_template = Template(sample_query) @@ -253,8 +258,12 @@ def _plot_query(parameters_dict: dict) -> tuple: "{% endif %}" "))) SELECT * FROM pivot ORDER BY `{{ timestamp_column }}` " "{% else %}" "{% if display_uom is defined and display_uom == true %}" +
"SELECT p.`EventTime`, p.`TagName`, p.`Value`, m.`UoM` FROM project p LEFT OUTER JOIN `{{ business_unit|lower }}`.`sensors`.`{{ asset|lower }}_{{ data_security_level|lower }}_metadata` m ON p.`TagName` = m.`TagName` " + "{% else %}" "SELECT * FROM project " "{% endif %}" + "{% endif %}" "{% if is_resample is defined and is_resample == true and limit is defined and limit is not none %}" "LIMIT {{ limit }} " "{% endif %}" @@ -277,7 +286,8 @@ def _plot_query(parameters_dict: dict) -> tuple: "time_interval_rate": parameters_dict["time_interval_rate"], "time_interval_unit": parameters_dict["time_interval_unit"], "time_zone": parameters_dict["time_zone"], - "pivot": False, + "pivot": parameters_dict.get("pivot", None), + "display_uom": parameters_dict.get("display_uom", False), "limit": parameters_dict.get("limit", None), "offset": parameters_dict.get("offset", None), "is_resample": True, @@ -354,8 +364,12 @@ def _interpolation_query( "{% endif %}" "))) SELECT * FROM pivot ORDER BY `{{ timestamp_column }}` " "{% else %}" + "{% if display_uom is defined and display_uom == true %}" + "SELECT p.`EventTime`, p.`TagName`, p.`Value`, m.`UoM` FROM project p LEFT OUTER JOIN `{{ business_unit|lower }}`.`sensors`.`{{ asset|lower }}_{{ data_security_level|lower }}_metadata` m ON p.`TagName` = m.`TagName` ORDER BY `{{ tagname_column }}`, `{{ timestamp_column }}` " + "{% else%}" "SELECT * FROM project ORDER BY `{{ tagname_column }}`, `{{ timestamp_column }}` " "{% endif %}" + "{% endif %}" "{% if limit is defined and limit is not none %}" "LIMIT {{ limit }} " "{% endif %}" @@ -441,8 +455,12 @@ def _interpolation_at_time(parameters_dict: dict) -> str: "{% endif %}" "))) SELECT * FROM pivot ORDER BY `{{ timestamp_column }}` " "{% else %}" + "{% if display_uom is defined and display_uom == true %}" + "SELECT p.`EventTime`, p.`TagName`, p.`Value`, m.`UoM` FROM project p LEFT OUTER JOIN `{{ business_unit|lower }}`.`sensors`.`{{ asset|lower }}_{{ data_security_level|lower }}_metadata` m ON p.`TagName` = m.`TagName` ORDER BY `{{ tagname_column }}`, `{{ timestamp_column }}` " + "{% else%}" "SELECT * FROM project ORDER BY `{{ tagname_column }}`, `{{ timestamp_column }}` " "{% endif %}" + "{% endif %}" "{% if limit is defined and limit is not none %}" "LIMIT {{ limit }} " "{% endif %}" @@ -466,6 +484,7 @@ def _interpolation_at_time(parameters_dict: dict) -> str: "max_timestamp": parameters_dict["max_timestamp"], "window_length": parameters_dict["window_length"], "pivot": parameters_dict.get("pivot", None), + "display_uom": parameters_dict.get("display_uom", False), "limit": parameters_dict.get("limit", None), "offset": parameters_dict.get("offset", None), "tagname_column": parameters_dict.get("tagname_column", "TagName"), @@ -536,7 +555,7 @@ def _metadata_query(parameters_dict: dict) -> str: def _latest_query(parameters_dict: dict) -> str: latest_query = ( - "SELECT * FROM " + "WITH latest AS (SELECT * FROM " "{% if source is defined and source is not none %}" "`{{ source|lower }}` " "{% else %}" @@ -549,7 +568,13 @@ def _latest_query(parameters_dict: dict) -> str: " WHERE `{{ tagname_column }}` IN ('{{ tag_names | join('\\', \\'') }}') " "{% endif %}" "{% endif %}" - "ORDER BY `{{ tagname_column }}` " + "ORDER BY `{{ tagname_column }}` ) " + "{% if display_uom is defined and display_uom == true %}" + "SELECT l.*, m.`UoM` FROM latest l " + "LEFT OUTER JOIN `{{ business_unit|lower }}`.`sensors`.`{{ asset|lower }}_{{ data_security_level|lower }}_metadata` m ON l.`TagName` = m.`TagName` " + "{% else %}" + "SELECT * FROM 
latest " + "{% endif %}" "{% if limit is defined and limit is not none %}" "LIMIT {{ limit }} " "{% endif %}" @@ -565,6 +590,7 @@ def _latest_query(parameters_dict: dict) -> str: "asset": parameters_dict.get("asset"), "data_security_level": parameters_dict.get("data_security_level"), "tag_names": list(dict.fromkeys(parameters_dict["tag_names"])), + "display_uom": parameters_dict.get("display_uom", False), "limit": parameters_dict.get("limit", None), "offset": parameters_dict.get("offset", None), "tagname_column": parameters_dict.get("tagname_column", "TagName"), @@ -642,8 +668,12 @@ def _time_weighted_average_query(parameters_dict: dict) -> str: "{% endif %}" "))) SELECT * FROM pivot ORDER BY `{{ timestamp_column }}` " "{% else %}" + "{% if display_uom is defined and display_uom == true %}" + "SELECT p.`EventTime`, p.`TagName`, p.`Value`, m.`UoM` FROM project p LEFT OUTER JOIN `{{ business_unit|lower }}`.`sensors`.`{{ asset|lower }}_{{ data_security_level|lower }}_metadata` m ON p.`TagName` = m.`TagName` ORDER BY `{{ tagname_column }}`, `{{ timestamp_column }}` " + "{% else%}" "SELECT * FROM project ORDER BY `{{ tagname_column }}`, `{{ timestamp_column }}` " "{% endif %}" + "{% endif %}" "{% if limit is defined and limit is not none %}" "LIMIT {{ limit }} " "{% endif %}" @@ -671,6 +701,7 @@ def _time_weighted_average_query(parameters_dict: dict) -> str: "include_bad_data": parameters_dict["include_bad_data"], "step": parameters_dict["step"], "pivot": parameters_dict.get("pivot", None), + "display_uom": parameters_dict.get("display_uom", False), "limit": parameters_dict.get("limit", None), "offset": parameters_dict.get("offset", None), "time_zone": parameters_dict["time_zone"], @@ -742,8 +773,12 @@ def _circular_stats_query(parameters_dict: dict) -> str: "{% endif %}" "))) SELECT * FROM pivot ORDER BY `{{ timestamp_column }}` " "{% else %}" + "{% if display_uom is defined and display_uom == true %}" + "SELECT p.*, m.`UoM` FROM project p LEFT OUTER JOIN `{{ business_unit|lower }}`.`sensors`.`{{ asset|lower }}_{{ data_security_level|lower }}_metadata` m ON p.`TagName` = m.`TagName` ORDER BY `{{ tagname_column }}`, `{{ timestamp_column }}` " + "{% else%}" "SELECT * FROM project ORDER BY `{{ tagname_column }}`, `{{ timestamp_column }}` " "{% endif %}" + "{% endif %}" "{% if limit is defined and limit is not none %}" "LIMIT {{ limit }} " "{% endif %}" @@ -770,8 +805,12 @@ def _circular_stats_query(parameters_dict: dict) -> str: "{% endif %}" "))) SELECT * FROM pivot ORDER BY `{{ timestamp_column }}` " "{% else %}" + "{% if display_uom is defined and display_uom == true %}" + "SELECT p.*, m.`UoM` FROM project p LEFT OUTER JOIN `{{ business_unit|lower }}`.`sensors`.`{{ asset|lower }}_{{ data_security_level|lower }}_metadata` m ON p.`TagName` = m.`TagName` ORDER BY `{{ tagname_column }}`, `{{ timestamp_column }}` " + "{% else%}" "SELECT * FROM project ORDER BY `{{ tagname_column }}`, `{{ timestamp_column }}` " "{% endif %}" + "{% endif %}" "{% if limit is defined and limit is not none %}" "LIMIT {{ limit }} " "{% endif %}" @@ -798,6 +837,7 @@ def _circular_stats_query(parameters_dict: dict) -> str: "time_zone": parameters_dict["time_zone"], "circular_function": parameters_dict["circular_function"], "pivot": parameters_dict.get("pivot", None), + "display_uom": parameters_dict.get("display_uom", False), "limit": parameters_dict.get("limit", None), "offset": parameters_dict.get("offset", None), "tagname_column": parameters_dict.get("tagname_column", "TagName"), @@ -826,7 +866,7 @@ def 
_circular_stats_query(parameters_dict: dict) -> str: def _summary_query(parameters_dict: dict) -> str: summary_query = ( - "SELECT `{{ tagname_column }}`, " + "WITH summary AS (SELECT `{{ tagname_column }}`, " "count(`{{ value_column }}`) as Count, " "CAST(Avg(`{{ value_column }}`) as decimal(10, 2)) as Avg, " "CAST(Min(`{{ value_column }}`) as decimal(10, 2)) as Min, " @@ -847,7 +887,12 @@ def _summary_query(parameters_dict: dict) -> str: "{% if include_status is defined and include_status == true and include_bad_data is defined and include_bad_data == false %}" "AND `{{ status_column }}` IN ('Good', 'Good, Annotated', 'Substituted, Good, Annotated', 'Substituted, Good', 'Good, Questionable', 'Questionable, Good')" "{% endif %}" - "GROUP BY `{{ tagname_column }}` " + "GROUP BY `{{ tagname_column }}`) " + "{% if display_uom is defined and display_uom == true %}" + "SELECT s.*, m.`UoM` FROM summary s LEFT OUTER JOIN `{{ business_unit|lower }}`.`sensors`.`{{ asset|lower }}_{{ data_security_level|lower }}_metadata` m ON s.`TagName` = m.`TagName` " + "{% else%}" + "SELECT * FROM summary " + "{% endif %}" "{% if limit is defined and limit is not none %}" "LIMIT {{ limit }} " "{% endif %}" @@ -867,6 +912,7 @@ def _summary_query(parameters_dict: dict) -> str: "end_date": parameters_dict["end_date"], "tag_names": list(dict.fromkeys(parameters_dict["tag_names"])), "include_bad_data": parameters_dict["include_bad_data"], + "display_uom": parameters_dict.get("display_uom", False), "limit": parameters_dict.get("limit", None), "offset": parameters_dict.get("offset", None), "time_zone": parameters_dict["time_zone"], diff --git a/src/sdk/python/rtdip_sdk/queries/time_series/circular_average.py b/src/sdk/python/rtdip_sdk/queries/time_series/circular_average.py index 6cde80663..0d28e856b 100644 --- a/src/sdk/python/rtdip_sdk/queries/time_series/circular_average.py +++ b/src/sdk/python/rtdip_sdk/queries/time_series/circular_average.py @@ -39,6 +39,7 @@ def get(connection: object, parameters_dict: dict) -> pd.DataFrame: upper_bound (int): Upper boundary for the sample range include_bad_data (bool): Include "Bad" data points with True or remove "Bad" data points with False pivot (bool): Pivot the data on timestamp column with True or do not pivot the data with False + display_uom (optional bool): Display the unit of measure with True or False. Does not apply to pivoted tables. Defaults to False limit (optional int): The number of rows to be returned offset (optional int): The number of rows to skip before returning rows case_insensitivity_tag_search (optional bool): Search for tags using case insensitivity with True or case sensitivity with False @@ -48,10 +49,17 @@ def get(connection: object, parameters_dict: dict) -> pd.DataFrame: !!! warning Setting `case_insensitivity_tag_search` to True will result in a longer query time. + + !!! Note + `display_uom` True will not work in conjunction with `pivot` set to True. 
""" if isinstance(parameters_dict["tag_names"], list) is False: raise ValueError("tag_names must be a list") + if "pivot" in parameters_dict and "display_uom" in parameters_dict: + if parameters_dict["pivot"] is True and parameters_dict["display_uom"] is True: + raise ValueError("pivot True and display_uom True cannot be used together") + try: query = _query_builder(parameters_dict, "circular_average") diff --git a/src/sdk/python/rtdip_sdk/queries/time_series/circular_standard_deviation.py b/src/sdk/python/rtdip_sdk/queries/time_series/circular_standard_deviation.py index 3af1ea51f..1b7408c39 100644 --- a/src/sdk/python/rtdip_sdk/queries/time_series/circular_standard_deviation.py +++ b/src/sdk/python/rtdip_sdk/queries/time_series/circular_standard_deviation.py @@ -39,6 +39,7 @@ def get(connection: object, parameters_dict: dict) -> pd.DataFrame: upper_bound (int): Upper boundary for the sample range include_bad_data (bool): Include "Bad" data points with True or remove "Bad" data points with False pivot (bool): Pivot the data on timestamp column with True or do not pivot the data with False + display_uom (optional bool): Display the unit of measure with True or False. Defaults to False limit (optional int): The number of rows to be returned offset (optional int): The number of rows to skip before returning rows case_insensitivity_tag_search (optional bool): Search for tags using case insensitivity with True or case sensitivity with False @@ -48,10 +49,17 @@ def get(connection: object, parameters_dict: dict) -> pd.DataFrame: !!! warning Setting `case_insensitivity_tag_search` to True will result in a longer query time. + + !!! Note + `display_uom` True will not work in conjunction with `pivot` set to True. """ if isinstance(parameters_dict["tag_names"], list) is False: raise ValueError("tag_names must be a list") + if "pivot" in parameters_dict and "display_uom" in parameters_dict: + if parameters_dict["pivot"] is True and parameters_dict["display_uom"] is True: + raise ValueError("pivot True and display_uom True cannot be used together") + try: query = _query_builder(parameters_dict, "circular_standard_deviation") diff --git a/src/sdk/python/rtdip_sdk/queries/time_series/interpolate.py b/src/sdk/python/rtdip_sdk/queries/time_series/interpolate.py index 5dcf03b8d..130d0a0aa 100644 --- a/src/sdk/python/rtdip_sdk/queries/time_series/interpolate.py +++ b/src/sdk/python/rtdip_sdk/queries/time_series/interpolate.py @@ -47,6 +47,7 @@ def get(connection: object, parameters_dict: dict) -> pd.DataFrame: interpolation_method (str): Interpolation method (forward_fill, backward_fill, linear) include_bad_data (bool): Include "Bad" data points with True or remove "Bad" data points with False pivot (bool): Pivot the data on timestamp column with True or do not pivot the data with False + display_uom (optional bool): Display the unit of measure with True or False. Does not apply to pivoted tables. Defaults to False limit (optional int): The number of rows to be returned offset (optional int): The number of rows to skip before returning rows case_insensitivity_tag_search (optional bool): Search for tags using case insensitivity with True or case sensitivity with False @@ -57,10 +58,17 @@ def get(connection: object, parameters_dict: dict) -> pd.DataFrame: !!! warning Setting `case_insensitivity_tag_search` to True will result in a longer query time. + !!! Note + `display_uom` True will not work in conjunction with `pivot` set to True. 
+ """ if isinstance(parameters_dict["tag_names"], list) is False: raise ValueError("tag_names must be a list") + if "pivot" in parameters_dict and "display_uom" in parameters_dict: + if parameters_dict["pivot"] is True and parameters_dict["display_uom"] is True: + raise ValueError("pivot True and display_uom True cannot be used together") + if "sample_rate" in parameters_dict: logging.warning( "Parameter sample_rate is deprecated and will be removed in v1.0.0. Please use time_interval_rate instead." diff --git a/src/sdk/python/rtdip_sdk/queries/time_series/interpolation_at_time.py b/src/sdk/python/rtdip_sdk/queries/time_series/interpolation_at_time.py index e4096a438..0386bb03f 100644 --- a/src/sdk/python/rtdip_sdk/queries/time_series/interpolation_at_time.py +++ b/src/sdk/python/rtdip_sdk/queries/time_series/interpolation_at_time.py @@ -38,6 +38,7 @@ def get(connection: object, parameters_dict: dict) -> pd.DataFrame: window_length (int): Add longer window time in days for the start or end of specified date to cater for edge cases. include_bad_data (bool): Include "Bad" data points with True or remove "Bad" data points with False pivot (bool): Pivot the data on timestamp column with True or do not pivot the data with False + display_uom (optional bool): Display the unit of measure with True or False. Does not apply to pivoted tables. Defaults to False limit (optional int): The number of rows to be returned offset (optional int): The number of rows to skip before returning rows case_insensitivity_tag_search (optional bool): Search for tags using case insensitivity with True or case sensitivity with False @@ -47,6 +48,9 @@ def get(connection: object, parameters_dict: dict) -> pd.DataFrame: !!! warning Setting `case_insensitivity_tag_search` to True will result in a longer query time. + + !!! Note + `display_uom` True will not work in conjunction with `pivot` set to True. """ if isinstance(parameters_dict["tag_names"], list) is False: raise ValueError("tag_names must be a list") @@ -54,6 +58,10 @@ def get(connection: object, parameters_dict: dict) -> pd.DataFrame: if isinstance(parameters_dict["timestamps"], list) is False: raise ValueError("timestamps must be a list") + if "pivot" in parameters_dict and "display_uom" in parameters_dict: + if parameters_dict["pivot"] is True and parameters_dict["display_uom"] is True: + raise ValueError("pivot True and display_uom True cannot be used together") + try: query = _query_builder(parameters_dict, "interpolation_at_time") diff --git a/src/sdk/python/rtdip_sdk/queries/time_series/latest.py b/src/sdk/python/rtdip_sdk/queries/time_series/latest.py index d53656eb3..0747c9797 100644 --- a/src/sdk/python/rtdip_sdk/queries/time_series/latest.py +++ b/src/sdk/python/rtdip_sdk/queries/time_series/latest.py @@ -37,6 +37,7 @@ def get(connection: object, parameters_dict: dict) -> pd.DataFrame: asset (str): Asset data_security_level (str): Level of data security tag_names (optional, list): Either pass a list of tagname/tagnames ["tag_1", "tag_2"] or leave the list blank [] or leave the parameter out completely + display_uom (optional bool): Display the unit of measure with True or False. Does not apply to pivoted tables. 
Defaults to False limit (optional int): The number of rows to be returned offset (optional int): The number of rows to skip before returning rows case_insensitivity_tag_search (optional bool): Search for tags using case insensitivity with True or case sensitivity with False diff --git a/src/sdk/python/rtdip_sdk/queries/time_series/plot.py b/src/sdk/python/rtdip_sdk/queries/time_series/plot.py index 78f10f0ac..e3c8c5b89 100644 --- a/src/sdk/python/rtdip_sdk/queries/time_series/plot.py +++ b/src/sdk/python/rtdip_sdk/queries/time_series/plot.py @@ -44,6 +44,9 @@ def get(connection: object, parameters_dict: dict) -> pd.DataFrame: sample_unit (str): (deprecated) Please use time_interval_unit instead. See below. time_interval_rate (str): The time interval rate (numeric input) time_interval_unit (str): The time interval unit (second, minute, day, hour) + include_bad_data (bool): Include "Bad" data points with True or remove "Bad" data points with False + pivot (optional bool): Pivot the data on timestamp column with True or do not pivot the data with False + display_uom (optional bool): Display the unit of measure with True or False. Does not apply to pivoted tables. Defaults to False limit (optional int): The number of rows to be returned offset (optional int): The number of rows to skip before returning rows case_insensitivity_tag_search (optional bool): Search for tags using case insensitivity with True or case sensitivity with False @@ -53,10 +56,17 @@ def get(connection: object, parameters_dict: dict) -> pd.DataFrame: !!! warning Setting `case_insensitivity_tag_search` to True will result in a longer query time. + + !!! Note + `display_uom` True will not work in conjunction with `pivot` set to True. """ if isinstance(parameters_dict["tag_names"], list) is False: raise ValueError("tag_names must be a list") + if "pivot" in parameters_dict and "display_uom" in parameters_dict: + if parameters_dict["pivot"] is True and parameters_dict["display_uom"] is True: + raise ValueError("pivot True and display_uom True cannot be used together") + if "sample_rate" in parameters_dict: logging.warning( "Parameter sample_rate is deprecated and will be removed in v1.0.0. Please use time_interval_rate instead." @@ -84,5 +94,5 @@ def get(connection: object, parameters_dict: dict) -> pd.DataFrame: raise e except Exception as e: - logging.exception("error with resampling function") + logging.exception("error with plot resampling function") raise e diff --git a/src/sdk/python/rtdip_sdk/queries/time_series/resample.py b/src/sdk/python/rtdip_sdk/queries/time_series/resample.py index 47ff2c399..1f8f40f13 100644 --- a/src/sdk/python/rtdip_sdk/queries/time_series/resample.py +++ b/src/sdk/python/rtdip_sdk/queries/time_series/resample.py @@ -46,20 +46,30 @@ def get(connection: object, parameters_dict: dict) -> pd.DataFrame: time_interval_unit (str): The time interval unit (second, minute, day, hour) agg_method (str): Aggregation Method (first, last, avg, min, max) include_bad_data (bool): Include "Bad" data points with True or remove "Bad" data points with False - pivot (bool): Pivot the data on timestamp column with True or do not pivot the data with False + pivot (optional bool): Pivot the data on timestamp column with True or do not pivot the data with False + display_uom (optional bool): Display the unit of measure with True or False. Does not apply to pivoted tables. 
Defaults to False limit (optional int): The number of rows to be returned offset (optional int): The number of rows to skip before returning rows case_insensitivity_tag_search (optional bool): Search for tags using case insensitivity with True or case sensitivity with False + Returns: DataFrame: A resampled dataframe. !!! warning Setting `case_insensitivity_tag_search` to True will result in a longer query time. + + !!! Note + `display_uom` True will not work in conjunction with `pivot` set to True. """ + if isinstance(parameters_dict["tag_names"], list) is False: raise ValueError("tag_names must be a list") + if "pivot" in parameters_dict and "display_uom" in parameters_dict: + if parameters_dict["pivot"] is True and parameters_dict["display_uom"] is True: + raise ValueError("pivot True and display_uom True cannot be used together") + if "sample_rate" in parameters_dict: logging.warning( "Parameter sample_rate is deprecated and will be removed in v1.0.0. Please use time_interval_rate instead." diff --git a/src/sdk/python/rtdip_sdk/queries/time_series/summary.py b/src/sdk/python/rtdip_sdk/queries/time_series/summary.py index 957fd722d..341767ff9 100644 --- a/src/sdk/python/rtdip_sdk/queries/time_series/summary.py +++ b/src/sdk/python/rtdip_sdk/queries/time_series/summary.py @@ -41,6 +41,7 @@ def get(connection: object, parameters_dict: dict) -> pd.DataFrame: start_date (str): Start date (Either a date in the format YY-MM-DD or a datetime in the format YYY-MM-DDTHH:MM:SS or specify the timezone offset in the format YYYY-MM-DDTHH:MM:SS+zz:zz) end_date (str): End date (Either a date in the format YY-MM-DD or a datetime in the format YYY-MM-DDTHH:MM:SS or specify the timezone offset in the format YYYY-MM-DDTHH:MM:SS+zz:zz) include_bad_data (bool): Include "Bad" data points with True or remove "Bad" data points with False + display_uom (optional bool): Display the unit of measure with True or False. Does not apply to pivoted tables. Defaults to False limit (optional int): The number of rows to be returned offset (optional int): The number of rows to skip before returning rows case_insensitivity_tag_search (optional bool): Search for tags using case insensitivity with True or case sensitivity with False diff --git a/src/sdk/python/rtdip_sdk/queries/time_series/time_series_query_builder.py b/src/sdk/python/rtdip_sdk/queries/time_series/time_series_query_builder.py index 2c67333e3..cef0534c9 100644 --- a/src/sdk/python/rtdip_sdk/queries/time_series/time_series_query_builder.py +++ b/src/sdk/python/rtdip_sdk/queries/time_series/time_series_query_builder.py @@ -122,6 +122,7 @@ def raw( start_date: str, end_date: str, include_bad_data: bool = False, + display_uom: bool = False, limit: int = None, offset: int = None, ) -> DataFrame: @@ -158,6 +159,7 @@ def raw( start_date (str): Start date (Either a date in the format YY-MM-DD or a datetime in the format YYY-MM-DDTHH:MM:SS or specify the timezone offset in the format YYYY-MM-DDTHH:MM:SS+zz:zz) end_date (str): End date (Either a date in the format YY-MM-DD or a datetime in the format YYY-MM-DDTHH:MM:SS or specify the timezone offset in the format YYYY-MM-DDTHH:MM:SS+zz:zz) include_bad_data (optional bool): Include "Bad" data points with True or remove "Bad" data points with False + display_uom (optional bool): Display the unit of measure with True or False. 
Defaults to False limit (optional int): The number of rows to be returned offset (optional int): The number of rows to skip before returning rows @@ -170,6 +172,7 @@ def raw( "start_date": start_date, "end_date": end_date, "include_bad_data": include_bad_data, + "display_uom": display_uom, "limit": limit, "offset": offset, "tagname_column": self.tagname_column, @@ -190,6 +193,7 @@ def resample( agg_method: str, include_bad_data: bool = False, pivot: bool = False, + display_uom: bool = False, limit: int = None, offset: int = None, ) -> DataFrame: @@ -233,6 +237,7 @@ def resample( agg_method (str): Aggregation Method (first, last, avg, min, max) include_bad_data (optional bool): Include "Bad" data points with True or remove "Bad" data points with False pivot (optional bool): Pivot the data on the timestamp column with True or do not pivot the data with False + display_uom (optional bool): Display the unit of measure with True or False. Defaults to False limit (optional int): The number of rows to be returned offset (optional int): The number of rows to skip before returning rows @@ -250,6 +255,7 @@ def resample( "time_interval_unit": time_interval_unit, "agg_method": agg_method, "pivot": pivot, + "display_uom": display_uom, "limit": limit, "offset": offset, "tagname_column": self.tagname_column, @@ -269,6 +275,8 @@ def plot( time_interval_rate: str, time_interval_unit: str, include_bad_data: bool = False, + pivot: bool = False, + display_uom: bool = False, limit: int = None, offset: int = None, ) -> DataFrame: @@ -308,6 +316,9 @@ def plot( end_date (str): End date (Either a date in the format YY-MM-DD or a datetime in the format YYY-MM-DDTHH:MM:SS or specify the timezone offset in the format YYYY-MM-DDTHH:MM:SS+zz:zz) time_interval_rate (str): The time interval rate (numeric input) time_interval_unit (str): The time interval unit (second, minute, day, hour) + include_bad_data (optional bool): Include "Bad" data points with True or remove "Bad" data points with False + pivot (optional bool): Pivot the data on the timestamp column with True or do not pivot the data with False + display_uom (optional bool): Display the unit of measure with True or False. Defaults to False limit (optional int): The number of rows to be returned offset (optional int): The number of rows to skip before returning rows @@ -320,9 +331,11 @@ def plot( "tag_names": tagname_filter, "start_date": start_date, "end_date": end_date, - "include_bad_data": include_bad_data, "time_interval_rate": time_interval_rate, "time_interval_unit": time_interval_unit, + "include_bad_data": include_bad_data, + "pivot": pivot, + "display_uom": display_uom, "limit": limit, "offset": offset, "tagname_column": self.tagname_column, @@ -345,6 +358,7 @@ def interpolate( interpolation_method: str, include_bad_data: bool = False, pivot: bool = False, + display_uom: bool = False, limit: int = None, offset: int = None, ) -> DataFrame: @@ -390,6 +404,7 @@ def interpolate( interpolation_method (str): Interpolation method (forward_fill, backward_fill, linear) include_bad_data (optional bool): Include "Bad" data points with True or remove "Bad" data points with False pivot (optional bool): Pivot the data on the timestamp column with True or do not pivot the data with False + display_uom (optional bool): Display the unit of measure with True or False. 
Defaults to False limit (optional int): The number of rows to be returned offset (optional int): The number of rows to skip before returning rows @@ -407,6 +422,7 @@ def interpolate( "agg_method": agg_method, "interpolation_method": interpolation_method, "pivot": pivot, + "display_uom": display_uom, "limit": limit, "offset": offset, "tagname_column": self.tagname_column, @@ -425,6 +441,7 @@ def interpolation_at_time( include_bad_data: bool = False, window_length: int = 1, pivot: bool = False, + display_uom: bool = False, limit: int = None, offset: int = None, ) -> DataFrame: @@ -461,6 +478,7 @@ def interpolation_at_time( include_bad_data (optional bool): Include "Bad" data points with True or remove "Bad" data points with False window_length (optional int): Add longer window time in days for the start or end of specified date to cater for edge cases pivot (optional bool): Pivot the data on the timestamp column with True or do not pivot the data with False + display_uom (optional bool): Display the unit of measure with True or False. Defaults to False limit (optional int): The number of rows to be returned offset (optional int): The number of rows to skip before returning rows @@ -474,6 +492,7 @@ def interpolation_at_time( "include_bad_data": include_bad_data, "window_length": window_length, "pivot": pivot, + "display_uom": display_uom, "limit": limit, "offset": offset, "tagname_column": self.tagname_column, @@ -499,6 +518,7 @@ def time_weighted_average( include_bad_data: bool = False, window_length: int = 1, pivot: bool = False, + display_uom: bool = False, limit: int = None, offset: int = None, ) -> DataFrame: @@ -544,6 +564,7 @@ def time_weighted_average( include_bad_data (optional bool): Include "Bad" data points with True or remove "Bad" data points with False window_length (optional int): Add longer window time in days for the start or end of specified date to cater for edge cases pivot (optional bool): Pivot the data on the timestamp column with True or do not pivot the data with False + display_uom (optional bool): Display the unit of measure with True or False. Defaults to False limit (optional int): The number of rows to be returned offset (optional int): The number of rows to skip before returning rows @@ -566,6 +587,7 @@ def time_weighted_average( ), "window_length": window_length, "pivot": pivot, + "display_uom": display_uom, "limit": limit, "offset": offset, "tagname_column": self.tagname_column, @@ -633,6 +655,7 @@ def metadata( def latest( self, tagname_filter: [str] = None, + display_uom: bool = False, limit: int = None, offset: int = None, ) -> DataFrame: @@ -664,6 +687,7 @@ def latest( Args: tagname_filter (list str): List of tagnames to filter on the source + display_uom (optional bool): Display the unit of measure with True or False. 
Defaults to False limit (optional int): The number of rows to be returned offset (optional int): The number of rows to skip before returning rows @@ -674,6 +698,7 @@ def latest( "source": self.data_source, "tag_names": [] if tagname_filter is None else tagname_filter, "tagname_column": self.tagname_column, + "display_uom": display_uom, "limit": limit, "offset": offset, "supress_warning": True, @@ -692,6 +717,7 @@ def circular_average( upper_bound: int, include_bad_data: bool = False, pivot: bool = False, + display_uom: bool = False, limit: int = None, offset: int = None, ) -> DataFrame: @@ -737,6 +763,7 @@ def circular_average( upper_bound (int): Upper boundary for the sample range include_bad_data (optional bool): Include "Bad" data points with True or remove "Bad" data points with False pivot (optional bool): Pivot the data on the timestamp column with True or do not pivot the data with False + display_uom (optional bool): Display the unit of measure with True or False. Defaults to False limit (optional int): The number of rows to be returned offset (optional int): The number of rows to skip before returning rows @@ -754,6 +781,7 @@ def circular_average( "lower_bound": lower_bound, "upper_bound": upper_bound, "pivot": pivot, + "display_uom": display_uom, "limit": limit, "offset": offset, "tagname_column": self.tagname_column, @@ -776,6 +804,7 @@ def circular_standard_deviation( upper_bound: int, include_bad_data: bool = False, pivot: bool = False, + display_uom: bool = False, limit: int = None, offset: int = None, ) -> DataFrame: @@ -821,6 +850,7 @@ def circular_standard_deviation( upper_bound (int): Upper boundary for the sample range include_bad_data (optional bool): Include "Bad" data points with True or remove "Bad" data points with False pivot (optional bool): Pivot the data on the timestamp column with True or do not pivot the data with False + display_uom (optional bool): Display the unit of measure with True or False. Defaults to False limit (optional int): The number of rows to be returned offset (optional int): The number of rows to skip before returning rows @@ -838,6 +868,7 @@ def circular_standard_deviation( "lower_bound": lower_bound, "upper_bound": upper_bound, "pivot": pivot, + "display_uom": display_uom, "limit": limit, "offset": offset, "tagname_column": self.tagname_column, @@ -857,6 +888,7 @@ def summary( start_date: str, end_date: str, include_bad_data: bool = False, + display_uom: bool = False, limit: int = None, offset: int = None, ) -> DataFrame: @@ -893,6 +925,7 @@ def summary( start_date (str): Start date (Either a date in the format YY-MM-DD or a datetime in the format YYY-MM-DDTHH:MM:SS or specify the timezone offset in the format YYYY-MM-DDTHH:MM:SS+zz:zz) end_date (str): End date (Either a date in the format YY-MM-DD or a datetime in the format YYY-MM-DDTHH:MM:SS or specify the timezone offset in the format YYYY-MM-DDTHH:MM:SS+zz:zz) include_bad_data (optional bool): Include "Bad" data points with True or remove "Bad" data points with False + display_uom (optional bool): Display the unit of measure with True or False. Does not apply to pivoted tables. 
Defaults to False limit (optional int): The number of rows to be returned offset (optional int): The number of rows to skip before returning rows @@ -905,6 +938,7 @@ def summary( "start_date": start_date, "end_date": end_date, "include_bad_data": include_bad_data, + "display_uom": display_uom, "limit": limit, "offset": offset, "tagname_column": self.tagname_column, diff --git a/src/sdk/python/rtdip_sdk/queries/time_series/time_weighted_average.py b/src/sdk/python/rtdip_sdk/queries/time_series/time_weighted_average.py index a9e42a106..fb9d644cd 100644 --- a/src/sdk/python/rtdip_sdk/queries/time_series/time_weighted_average.py +++ b/src/sdk/python/rtdip_sdk/queries/time_series/time_weighted_average.py @@ -43,6 +43,7 @@ def get(connection: object, parameters_dict: dict) -> pd.DataFrame: window_length (int): Add longer window time in days for the start or end of specified date to cater for edge cases. include_bad_data (bool): Include "Bad" data points with True or remove "Bad" data points with False step (str): data points with step "enabled" or "disabled". The options for step are "true", "false" or "metadata". "metadata" will retrieve the step value from the metadata table. + display_uom (optional bool): Display the unit of measure with True or False. Does not apply to pivoted tables. Defaults to False pivot (bool): Pivot the data on timestamp column with True or do not pivot the data with False limit (optional int): The number of rows to be returned offset (optional int): The number of rows to skip before returning rows @@ -53,10 +54,17 @@ def get(connection: object, parameters_dict: dict) -> pd.DataFrame: !!! warning Setting `case_insensitivity_tag_search` to True will result in a longer query time. + + !!! Note + `display_uom` True will not work in conjunction with `pivot` set to True. """ if isinstance(parameters_dict["tag_names"], list) is False: raise ValueError("tag_names must be a list") + if "pivot" in parameters_dict and "display_uom" in parameters_dict: + if parameters_dict["pivot"] is True and parameters_dict["display_uom"] is True: + raise ValueError("pivot True and display_uom True cannot be used together") + if "window_size_mins" in parameters_dict: logging.warning( "Parameter window_size_mins is deprecated and will be removed in v1.0.0. Please use time_interval_rate and time_interval_unit instead." 
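A minimal usage sketch of the new option, assuming the SDK's existing DatabricksSQLConnection connector and the builder's connect/source chain; the hostname, token, source path and tag name below are placeholders rather than values taken from this patch:

    from rtdip_sdk.connectors import DatabricksSQLConnection
    from rtdip_sdk.queries.time_series.time_series_query_builder import (
        TimeSeriesQueryBuilder,
    )

    # Placeholder connection values - swap in real workspace details.
    connection = DatabricksSQLConnection(
        "<server_hostname>", "<http_path>", "<access_token>"
    )

    df = (
        TimeSeriesQueryBuilder()
        .connect(connection)
        .source("business_unit.sensors.asset_restricted_events_float")  # placeholder source
        .resample(
            tagname_filter=["tag_1"],
            start_date="2011-01-01",
            end_date="2011-01-02",
            time_interval_rate="15",
            time_interval_unit="minute",
            agg_method="avg",
            display_uom=True,  # adds m.`UoM` via a LEFT OUTER JOIN to the *_metadata table
        )
    )

    # pivot and display_uom are mutually exclusive; the pivot-capable query
    # functions raise:
    # ValueError("pivot True and display_uom True cannot be used together")

The same display_uom keyword is exposed on plot, interpolate, interpolation_at_time, time_weighted_average, latest, circular_average, circular_standard_deviation and summary, as the test changes below exercise.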
diff --git a/tests/sdk/python/rtdip_sdk/queries/_test_utils/sdk_test_objects.py b/tests/sdk/python/rtdip_sdk/queries/_test_utils/sdk_test_objects.py index 3f2240926..d2048f7ef 100644 --- a/tests/sdk/python/rtdip_sdk/queries/_test_utils/sdk_test_objects.py +++ b/tests/sdk/python/rtdip_sdk/queries/_test_utils/sdk_test_objects.py @@ -31,36 +31,67 @@ } MOCKED_QUERY_OFFSET_LIMIT = "LIMIT 10 OFFSET 10 " + +# Raw RAW_MOCKED_QUERY = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN to_timestamp("2011-01-01T00:00:00+00:00") AND to_timestamp("2011-01-02T23:59:59+00:00") AND `TagName` IN (\'mocked-TAGNAME\') ORDER BY `TagName`, `EventTime` ) SELECT * FROM raw_events ' RAW_MOCKED_QUERY_CHECK_TAGS = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN to_timestamp("2011-01-01T00:00:00+00:00") AND to_timestamp("2011-01-02T23:59:59+00:00") AND UPPER(`TagName`) IN (\'MOCKED-TAGNAME\') ORDER BY `TagName`, `EventTime` ) SELECT * FROM raw_events ' RAW_MOCKED_QUERY_DISPLAY_UOM = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN to_timestamp("2011-01-01T00:00:00+00:00") AND to_timestamp("2011-01-02T23:59:59+00:00") AND `TagName` IN (\'mocked-TAGNAME\') ORDER BY `TagName`, `EventTime` ) SELECT e.`EventTime`, e.`TagName`, e.`Status`, e.`Value`, m.`UOM` FROM raw_events e LEFT OUTER JOIN `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_metadata` m ON e.`TagName` = m.`TagName` ' +# Resample RESAMPLE_MOCKED_QUERY = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN to_timestamp("2011-01-01T00:00:00+00:00") AND to_timestamp("2011-01-02T23:59:59+00:00") AND `TagName` IN (\'mocked-TAGNAME\') ) ,date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000"), from_utc_timestamp(to_timestamp("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS timestamp_array) ,window_buckets AS (SELECT timestamp_array AS window_start, timestampadd(minute, 15, timestamp_array) AS window_end FROM date_array) ,resample AS (SELECT /*+ RANGE_JOIN(d, 900 ) */ d.window_start, d.window_end, e.`TagName`, avg(e.`Value`) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `Value` FROM window_buckets d INNER JOIN raw_events e ON d.window_start <= e.`EventTime` AND d.window_end > e.`EventTime`) ,project AS (SELECT window_start AS `EventTime`, `TagName`, `Value` FROM resample GROUP BY window_start, `TagName`, `Value` ORDER BY `TagName`, `EventTime` ) SELECT * FROM project ' 
RESAMPLE_MOCKED_QUERY_CHECK_TAGS = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN to_timestamp("2011-01-01T00:00:00+00:00") AND to_timestamp("2011-01-02T23:59:59+00:00") AND UPPER(`TagName`) IN (\'MOCKED-TAGNAME\') ) ,date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000"), from_utc_timestamp(to_timestamp("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS timestamp_array) ,window_buckets AS (SELECT timestamp_array AS window_start, timestampadd(minute, 15, timestamp_array) AS window_end FROM date_array) ,resample AS (SELECT /*+ RANGE_JOIN(d, 900 ) */ d.window_start, d.window_end, e.`TagName`, avg(e.`Value`) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `Value` FROM window_buckets d INNER JOIN raw_events e ON d.window_start <= e.`EventTime` AND d.window_end > e.`EventTime`) ,project AS (SELECT window_start AS `EventTime`, `TagName`, `Value` FROM resample GROUP BY window_start, `TagName`, `Value` ORDER BY `TagName`, `EventTime` ) SELECT * FROM project ' RESAMPLE_MOCKED_QUERY_PIVOT = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN to_timestamp("2011-01-01T00:00:00+00:00") AND to_timestamp("2011-01-02T23:59:59+00:00") AND `TagName` IN (\'mocked-TAGNAME\') ) ,date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000"), from_utc_timestamp(to_timestamp("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS timestamp_array) ,window_buckets AS (SELECT timestamp_array AS window_start, timestampadd(minute, 15, timestamp_array) AS window_end FROM date_array) ,resample AS (SELECT /*+ RANGE_JOIN(d, 900 ) */ d.window_start, d.window_end, e.`TagName`, avg(e.`Value`) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `Value` FROM window_buckets d INNER JOIN raw_events e ON d.window_start <= e.`EventTime` AND d.window_end > e.`EventTime`) ,project AS (SELECT window_start AS `EventTime`, `TagName`, `Value` FROM resample GROUP BY window_start, `TagName`, `Value` ORDER BY `TagName`, `EventTime` ) ,pivot AS (SELECT * FROM (SELECT `EventTime`, `Value`, `TagName` AS `TagName` FROM project) PIVOT (FIRST(`Value`) FOR `TagName` IN (\'mocked-TAGNAME\' AS `mocked-TAGNAME`))) SELECT * FROM pivot ORDER BY `EventTime` ' +RESAMPLE_MOCKED_QUERY_UOM = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN to_timestamp("2011-01-01T00:00:00+00:00") AND to_timestamp("2011-01-02T23:59:59+00:00") AND `TagName` IN (\'mocked-TAGNAME\') ) ,date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000"), 
from_utc_timestamp(to_timestamp("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS timestamp_array) ,window_buckets AS (SELECT timestamp_array AS window_start, timestampadd(minute, 15, timestamp_array) AS window_end FROM date_array) ,resample AS (SELECT /*+ RANGE_JOIN(d, 900 ) */ d.window_start, d.window_end, e.`TagName`, avg(e.`Value`) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `Value` FROM window_buckets d INNER JOIN raw_events e ON d.window_start <= e.`EventTime` AND d.window_end > e.`EventTime`) ,project AS (SELECT window_start AS `EventTime`, `TagName`, `Value` FROM resample GROUP BY window_start, `TagName`, `Value` ORDER BY `TagName`, `EventTime` ) SELECT p.`EventTime`, p.`TagName`, p.`Value`, m.`UoM` FROM project p LEFT OUTER JOIN `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_metadata` m ON p.`TagName` = m.`TagName` ' + +# Plot PLOT_MOCKED_QUERY = "WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, 'yyyy-MM-dd HH:mm:ss.SSS')), \"+0000\") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN to_timestamp(\"2011-01-01T00:00:00+00:00\") AND to_timestamp(\"2011-01-02T23:59:59+00:00\") AND `TagName` IN ('mocked-TAGNAME') ) ,date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp(\"2011-01-01T00:00:00+00:00\"), \"+0000\"), from_utc_timestamp(to_timestamp(\"2011-01-02T23:59:59+00:00\"), \"+0000\"), INTERVAL '15 minute')) AS timestamp_array) ,window_buckets AS (SELECT timestamp_array AS window_start, timestampadd(minute, 15, timestamp_array) AS window_end FROM date_array) ,plot AS (SELECT /*+ RANGE_JOIN(d, 900 ) */ d.window_start, d.window_end, e.`TagName`, min(CASE WHEN `Status` = 'Bad' THEN null ELSE struct(e.`Value`, e.`EventTime`) END) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `min_Value`, max(CASE WHEN `Status` = 'Bad' THEN null ELSE struct(e.`Value`, e.`EventTime`) END) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `max_Value`, first(CASE WHEN `Status` = 'Bad' THEN null ELSE struct(e.`Value`, e.`EventTime`) END, True) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `first_Value`, last(CASE WHEN `Status` = 'Bad' THEN null ELSE struct(e.`Value`, e.`EventTime`) END, True) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `last_Value`, first(CASE WHEN `Status` = 'Bad' THEN struct(e.`Value`, e.`EventTime`) ELSE null END, True) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `excp_Value` FROM window_buckets d INNER JOIN raw_events e ON d.window_start <= e.`EventTime` AND d.window_end > e.`EventTime`) ,deduplicate AS (SELECT window_start AS `EventTime`, `TagName`, `min_Value` as `Min`, `max_Value` as `Max`, `first_Value` as `First`, `last_Value` as `Last`, `excp_Value` as `Exception` FROM plot GROUP BY window_start, `TagName`, `min_Value`, `max_Value`, `first_Value`, `last_Value`, `excp_Value`) ,project AS (SELECT distinct Values.EventTime, `TagName`, Values.Value FROM 
(SELECT * FROM deduplicate UNPIVOT (`Values` for `Aggregation` IN (`Min`, `Max`, `First`, `Last`, `Exception`))) ORDER BY `TagName`, `EventTime` ) SELECT * FROM project " PLOT_MOCKED_QUERY_CHECK_TAGS = "WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, 'yyyy-MM-dd HH:mm:ss.SSS')), \"+0000\") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN to_timestamp(\"2011-01-01T00:00:00+00:00\") AND to_timestamp(\"2011-01-02T23:59:59+00:00\") AND UPPER(`TagName`) IN ('MOCKED-TAGNAME') ) ,date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp(\"2011-01-01T00:00:00+00:00\"), \"+0000\"), from_utc_timestamp(to_timestamp(\"2011-01-02T23:59:59+00:00\"), \"+0000\"), INTERVAL '15 minute')) AS timestamp_array) ,window_buckets AS (SELECT timestamp_array AS window_start, timestampadd(minute, 15, timestamp_array) AS window_end FROM date_array) ,plot AS (SELECT /*+ RANGE_JOIN(d, 900 ) */ d.window_start, d.window_end, e.`TagName`, min(CASE WHEN `Status` = 'Bad' THEN null ELSE struct(e.`Value`, e.`EventTime`) END) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `min_Value`, max(CASE WHEN `Status` = 'Bad' THEN null ELSE struct(e.`Value`, e.`EventTime`) END) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `max_Value`, first(CASE WHEN `Status` = 'Bad' THEN null ELSE struct(e.`Value`, e.`EventTime`) END, True) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `first_Value`, last(CASE WHEN `Status` = 'Bad' THEN null ELSE struct(e.`Value`, e.`EventTime`) END, True) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `last_Value`, first(CASE WHEN `Status` = 'Bad' THEN struct(e.`Value`, e.`EventTime`) ELSE null END, True) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `excp_Value` FROM window_buckets d INNER JOIN raw_events e ON d.window_start <= e.`EventTime` AND d.window_end > e.`EventTime`) ,deduplicate AS (SELECT window_start AS `EventTime`, `TagName`, `min_Value` as `Min`, `max_Value` as `Max`, `first_Value` as `First`, `last_Value` as `Last`, `excp_Value` as `Exception` FROM plot GROUP BY window_start, `TagName`, `min_Value`, `max_Value`, `first_Value`, `last_Value`, `excp_Value`) ,project AS (SELECT distinct Values.EventTime, `TagName`, Values.Value FROM (SELECT * FROM deduplicate UNPIVOT (`Values` for `Aggregation` IN (`Min`, `Max`, `First`, `Last`, `Exception`))) ORDER BY `TagName`, `EventTime` ) SELECT * FROM project " +PLOT_MOCKED_QUERY_PIVOT = "WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, 'yyyy-MM-dd HH:mm:ss.SSS')), \"+0000\") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN to_timestamp(\"2011-01-01T00:00:00+00:00\") AND to_timestamp(\"2011-01-02T23:59:59+00:00\") AND `TagName` IN ('mocked-TAGNAME') ) ,date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp(\"2011-01-01T00:00:00+00:00\"), \"+0000\"), 
from_utc_timestamp(to_timestamp(\"2011-01-02T23:59:59+00:00\"), \"+0000\"), INTERVAL '15 minute')) AS timestamp_array) ,window_buckets AS (SELECT timestamp_array AS window_start, timestampadd(minute, 15, timestamp_array) AS window_end FROM date_array) ,plot AS (SELECT /*+ RANGE_JOIN(d, 900 ) */ d.window_start, d.window_end, e.`TagName`, min(CASE WHEN `Status` = 'Bad' THEN null ELSE struct(e.`Value`, e.`EventTime`) END) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `min_Value`, max(CASE WHEN `Status` = 'Bad' THEN null ELSE struct(e.`Value`, e.`EventTime`) END) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `max_Value`, first(CASE WHEN `Status` = 'Bad' THEN null ELSE struct(e.`Value`, e.`EventTime`) END, True) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `first_Value`, last(CASE WHEN `Status` = 'Bad' THEN null ELSE struct(e.`Value`, e.`EventTime`) END, True) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `last_Value`, first(CASE WHEN `Status` = 'Bad' THEN struct(e.`Value`, e.`EventTime`) ELSE null END, True) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `excp_Value` FROM window_buckets d INNER JOIN raw_events e ON d.window_start <= e.`EventTime` AND d.window_end > e.`EventTime`) ,deduplicate AS (SELECT window_start AS `EventTime`, `TagName`, `min_Value` as `Min`, `max_Value` as `Max`, `first_Value` as `First`, `last_Value` as `Last`, `excp_Value` as `Exception` FROM plot GROUP BY window_start, `TagName`, `min_Value`, `max_Value`, `first_Value`, `last_Value`, `excp_Value`) ,project AS (SELECT distinct Values.EventTime, `TagName`, Values.Value FROM (SELECT * FROM deduplicate UNPIVOT (`Values` for `Aggregation` IN (`Min`, `Max`, `First`, `Last`, `Exception`))) ORDER BY `TagName`, `EventTime` ) ,pivot AS (SELECT * FROM (SELECT `EventTime`, `Value`, `TagName` AS `TagName` FROM project) PIVOT (FIRST(`Value`) FOR `TagName` IN ('mocked-TAGNAME' AS `mocked-TAGNAME`))) SELECT * FROM pivot ORDER BY `EventTime` " +PLOT_MOCKED_QUERY_UOM = "WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, 'yyyy-MM-dd HH:mm:ss.SSS')), \"+0000\") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN to_timestamp(\"2011-01-01T00:00:00+00:00\") AND to_timestamp(\"2011-01-02T23:59:59+00:00\") AND `TagName` IN ('mocked-TAGNAME') ) ,date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp(\"2011-01-01T00:00:00+00:00\"), \"+0000\"), from_utc_timestamp(to_timestamp(\"2011-01-02T23:59:59+00:00\"), \"+0000\"), INTERVAL '15 minute')) AS timestamp_array) ,window_buckets AS (SELECT timestamp_array AS window_start, timestampadd(minute, 15, timestamp_array) AS window_end FROM date_array) ,plot AS (SELECT /*+ RANGE_JOIN(d, 900 ) */ d.window_start, d.window_end, e.`TagName`, min(CASE WHEN `Status` = 'Bad' THEN null ELSE struct(e.`Value`, e.`EventTime`) END) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `min_Value`, max(CASE WHEN `Status` = 'Bad' THEN null ELSE 
+PLOT_MOCKED_QUERY_UOM = "WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, 'yyyy-MM-dd HH:mm:ss.SSS')), \"+0000\") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN to_timestamp(\"2011-01-01T00:00:00+00:00\") AND to_timestamp(\"2011-01-02T23:59:59+00:00\") AND `TagName` IN ('mocked-TAGNAME') ) ,date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp(\"2011-01-01T00:00:00+00:00\"), \"+0000\"), from_utc_timestamp(to_timestamp(\"2011-01-02T23:59:59+00:00\"), \"+0000\"), INTERVAL '15 minute')) AS timestamp_array) ,window_buckets AS (SELECT timestamp_array AS window_start, timestampadd(minute, 15, timestamp_array) AS window_end FROM date_array) ,plot AS (SELECT /*+ RANGE_JOIN(d, 900 ) */ d.window_start, d.window_end, e.`TagName`, min(CASE WHEN `Status` = 'Bad' THEN null ELSE struct(e.`Value`, e.`EventTime`) END) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `min_Value`, max(CASE WHEN `Status` = 'Bad' THEN null ELSE struct(e.`Value`, e.`EventTime`) END) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `max_Value`, first(CASE WHEN `Status` = 'Bad' THEN null ELSE struct(e.`Value`, e.`EventTime`) END, True) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `first_Value`, last(CASE WHEN `Status` = 'Bad' THEN null ELSE struct(e.`Value`, e.`EventTime`) END, True) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `last_Value`, first(CASE WHEN `Status` = 'Bad' THEN struct(e.`Value`, e.`EventTime`) ELSE null END, True) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `excp_Value` FROM window_buckets d INNER JOIN raw_events e ON d.window_start <= e.`EventTime` AND d.window_end > e.`EventTime`) ,deduplicate AS (SELECT window_start AS `EventTime`, `TagName`, `min_Value` as `Min`, `max_Value` as `Max`, `first_Value` as `First`, `last_Value` as `Last`, `excp_Value` as `Exception` FROM plot GROUP BY window_start, `TagName`, `min_Value`, `max_Value`, `first_Value`, `last_Value`, `excp_Value`) ,project AS (SELECT distinct Values.EventTime, `TagName`, Values.Value FROM (SELECT * FROM deduplicate UNPIVOT (`Values` for `Aggregation` IN (`Min`, `Max`, `First`, `Last`, `Exception`))) ORDER BY `TagName`, `EventTime` ) SELECT p.`EventTime`, p.`TagName`, p.`Value`, m.`UoM` FROM project p LEFT OUTER JOIN `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_metadata` m ON p.`TagName` = m.`TagName` "
+
+# Interpolate
INTERPOLATE_MOCKED_QUERY = 'WITH resample AS (WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN to_timestamp("2011-01-01T00:00:00+00:00") AND to_timestamp("2011-01-02T23:59:59+00:00") AND `TagName` IN (\'mocked-TAGNAME\') ) ,date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000"), from_utc_timestamp(to_timestamp("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS timestamp_array) ,window_buckets AS (SELECT timestamp_array AS window_start, timestampadd(minute, 15, timestamp_array) AS window_end FROM date_array) ,resample AS (SELECT /*+ RANGE_JOIN(d, 900 ) */ d.window_start, d.window_end, e.`TagName`, avg(e.`Value`) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `Value` FROM window_buckets d INNER JOIN raw_events e ON d.window_start <= e.`EventTime` AND d.window_end > e.`EventTime`) ,project AS (SELECT window_start AS `EventTime`, `TagName`, `Value` FROM resample GROUP BY window_start, `TagName`, `Value` ) SELECT * FROM project ),date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000"), from_utc_timestamp(to_timestamp("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS `EventTime`, explode(array(\'mocked-TAGNAME\')) AS `TagName`) ,project AS (SELECT a.`EventTime`, a.`TagName`, last_value(b.`Value`, true) OVER (PARTITION BY a.`TagName` ORDER BY a.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS `Value` FROM date_array a LEFT OUTER JOIN resample b ON a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) SELECT * FROM project ORDER BY `TagName`, `EventTime` '
INTERPOLATE_MOCKED_QUERY_BACKWARD_FILL = 'WITH resample AS (WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN to_timestamp("2011-01-01T00:00:00+00:00") AND to_timestamp("2011-01-02T23:59:59+00:00") AND `TagName` IN (\'mocked-TAGNAME\') ) ,date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000"), from_utc_timestamp(to_timestamp("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS timestamp_array) ,window_buckets AS (SELECT timestamp_array AS window_start, timestampadd(minute, 15, timestamp_array) AS window_end FROM date_array) ,resample AS (SELECT /*+ RANGE_JOIN(d, 900 ) */ d.window_start, d.window_end, e.`TagName`, avg(e.`Value`) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `Value` FROM window_buckets d INNER JOIN raw_events e ON d.window_start <= e.`EventTime` AND d.window_end > e.`EventTime`) ,project AS (SELECT window_start AS `EventTime`, `TagName`, `Value` FROM resample GROUP BY window_start, `TagName`, `Value` ) SELECT * FROM project ),date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000"), from_utc_timestamp(to_timestamp("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS `EventTime`, explode(array(\'mocked-TAGNAME\')) AS `TagName`) ,project AS (SELECT a.`EventTime`, a.`TagName`, first_value(b.`Value`, true) OVER (PARTITION BY a.`TagName` ORDER BY a.`EventTime` ROWS BETWEEN CURRENT ROW AND UNBOUNDED FOLLOWING) AS `Value` FROM date_array a LEFT OUTER JOIN resample b ON a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) SELECT * FROM project ORDER BY `TagName`, `EventTime` '
INTERPOLATE_MOCKED_QUERY_CHECK_TAGS = 'WITH resample AS (WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN to_timestamp("2011-01-01T00:00:00+00:00") AND to_timestamp("2011-01-02T23:59:59+00:00") AND UPPER(`TagName`) IN (\'MOCKED-TAGNAME\') ) ,date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000"), from_utc_timestamp(to_timestamp("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS timestamp_array) ,window_buckets AS (SELECT timestamp_array AS window_start, timestampadd(minute, 15, timestamp_array) AS window_end FROM date_array) ,resample AS (SELECT /*+ RANGE_JOIN(d, 900 ) */ d.window_start, d.window_end, e.`TagName`, avg(e.`Value`) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `Value` FROM window_buckets d INNER JOIN raw_events e ON d.window_start <= e.`EventTime` AND d.window_end > e.`EventTime`) ,project AS (SELECT window_start AS `EventTime`, `TagName`, `Value` FROM resample GROUP BY window_start, `TagName`, `Value` ) SELECT * FROM project ),date_array AS (SELECT DISTINCT explode(sequence(from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000"), from_utc_timestamp(to_timestamp("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS `EventTime`, explode(array(`TagName`)) AS `TagName` FROM resample) ,project AS (SELECT a.`EventTime`, a.`TagName`, last_value(b.`Value`, true) OVER (PARTITION BY a.`TagName` ORDER BY a.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS `Value` FROM date_array a LEFT OUTER JOIN resample b ON a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) SELECT * FROM project ORDER BY `TagName`, `EventTime` '
INTERPOLATE_MOCKED_QUERY_PIVOT = 'WITH resample AS (WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN to_timestamp("2011-01-01T00:00:00+00:00") AND to_timestamp("2011-01-02T23:59:59+00:00") AND `TagName` IN (\'mocked-TAGNAME\') ) ,date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000"), from_utc_timestamp(to_timestamp("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS timestamp_array) ,window_buckets AS (SELECT timestamp_array AS window_start, timestampadd(minute, 15, timestamp_array) AS window_end FROM date_array) ,resample AS (SELECT /*+ RANGE_JOIN(d, 900 ) */ d.window_start, d.window_end, e.`TagName`, avg(e.`Value`) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `Value` FROM window_buckets d INNER JOIN raw_events e ON d.window_start <= e.`EventTime` AND d.window_end > e.`EventTime`) ,project AS (SELECT window_start AS `EventTime`, `TagName`, `Value` FROM resample GROUP BY window_start, `TagName`, `Value` ) SELECT * FROM project ),date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000"), from_utc_timestamp(to_timestamp("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS `EventTime`, explode(array(\'mocked-TAGNAME\')) AS `TagName`) ,project AS (SELECT a.`EventTime`, a.`TagName`, last_value(b.`Value`, true) OVER (PARTITION BY a.`TagName` ORDER BY a.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS `Value` FROM date_array a LEFT OUTER JOIN resample b ON a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) ,pivot AS (SELECT * FROM (SELECT `EventTime`, `Value`, `TagName` AS `TagName` FROM project) PIVOT (FIRST(`Value`) FOR `TagName` IN (\'mocked-TAGNAME\' AS `mocked-TAGNAME`))) SELECT * FROM pivot ORDER BY `EventTime` '
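+# Expected interpolate query when display_uom is True; note this mock pairs display_uom
+# with backward fill, and the UoM join appears both inside the resample CTE and on the
+# final projection.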
+INTERPOLATE_MOCKED_QUERY_UOM = 'WITH resample AS (WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN to_timestamp("2011-01-01T00:00:00+00:00") AND to_timestamp("2011-01-02T23:59:59+00:00") AND `TagName` IN (\'mocked-TAGNAME\') ) ,date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000"), from_utc_timestamp(to_timestamp("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS timestamp_array) ,window_buckets AS (SELECT timestamp_array AS window_start, timestampadd(minute, 15, timestamp_array) AS window_end FROM date_array) ,resample AS (SELECT /*+ RANGE_JOIN(d, 900 ) */ d.window_start, d.window_end, e.`TagName`, avg(e.`Value`) OVER (PARTITION BY e.`TagName`, d.window_start ORDER BY e.`EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `Value` FROM window_buckets d INNER JOIN raw_events e ON d.window_start <= e.`EventTime` AND d.window_end > e.`EventTime`) ,project AS (SELECT window_start AS `EventTime`, `TagName`, `Value` FROM resample GROUP BY window_start, `TagName`, `Value` ) SELECT p.`EventTime`, p.`TagName`, p.`Value`, m.`UoM` FROM project p LEFT OUTER JOIN `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_metadata` m ON p.`TagName` = m.`TagName` ),date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000"), from_utc_timestamp(to_timestamp("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS `EventTime`, explode(array(\'mocked-TAGNAME\')) AS `TagName`) ,project AS (SELECT a.`EventTime`, a.`TagName`, first_value(b.`Value`, true) OVER (PARTITION BY a.`TagName` ORDER BY a.`EventTime` ROWS BETWEEN CURRENT ROW AND UNBOUNDED FOLLOWING) AS `Value` FROM date_array a LEFT OUTER JOIN resample b ON a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) SELECT p.`EventTime`, p.`TagName`, p.`Value`, m.`UoM` FROM project p LEFT OUTER JOIN `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_metadata` m ON p.`TagName` = m.`TagName` ORDER BY `TagName`, `EventTime` '
+
+# Time Weighted Average
TWA_MOCKED_QUERY = 'WITH raw_events AS (SELECT DISTINCT `TagName`, from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE to_date(`EventTime`) BETWEEN date_sub(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND date_add(to_date(to_timestamp("2011-01-02T23:59:59+00:00")), 1) AND `TagName` IN (\'mocked-TAGNAME\') ) ,date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000"), from_utc_timestamp(to_timestamp("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS `EventTime`, explode(array(\'mocked-TAGNAME\')) AS `TagName`) ,boundary_events AS (SELECT coalesce(a.`TagName`, b.`TagName`) AS `TagName`, coalesce(a.`EventTime`, b.`EventTime`) AS `EventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) ,window_buckets AS (SELECT `EventTime` AS window_start, LEAD(`EventTime`) OVER (ORDER BY `EventTime`) AS window_end FROM (SELECT distinct `EventTime` FROM date_array) ) ,window_events AS (SELECT /*+ RANGE_JOIN(b, 900 ) */ b.`TagName`, b.`EventTime`, a.window_start AS `WindowEventTime`, b.`Status`, b.`Value` FROM boundary_events b LEFT OUTER JOIN window_buckets a ON a.window_start <= b.`EventTime` AND a.window_end > b.`EventTime`) ,fill_status AS (SELECT *, last_value(`Status`, true) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS `Fill_Status`, CASE WHEN `Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") THEN `Value` ELSE null END AS `Good_Value` FROM window_events) ,fill_value AS (SELECT *, last_value(`Good_Value`, true) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS `Fill_Value` FROM fill_status) ,fill_step AS (SELECT *, false AS Step FROM fill_value) ,interpolate AS (SELECT *, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN lag(`EventTime`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) ELSE NULL END AS `Previous_EventTime`, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN lag(`Fill_Value`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) ELSE NULL END AS `Previous_Fill_Value`, lead(`EventTime`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) AS `Next_EventTime`, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN lead(`Fill_Value`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) ELSE NULL END AS `Next_Fill_Value`, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN `Previous_Fill_Value` + ( (`Next_Fill_Value` - `Previous_Fill_Value`) * ( ( unix_timestamp(`EventTime`) - unix_timestamp(`Previous_EventTime`) ) / ( unix_timestamp(`Next_EventTime`) - unix_timestamp(`Previous_EventTime`) ) ) ) ELSE NULL END AS `Interpolated_Value`, coalesce(`Interpolated_Value`, `Fill_Value`) as `Event_Value` FROM fill_step ),twa_calculations AS (SELECT `TagName`, `EventTime`, `WindowEventTime`, `Step`, `Status`, `Value`, `Previous_EventTime`, `Previous_Fill_Value`, `Next_EventTime`, `Next_Fill_Value`, `Interpolated_Value`, `Fill_Status`, `Fill_Value`, `Event_Value`, lead(`Fill_Status`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_Status` , CASE WHEN `Next_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") OR (`Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") AND `Next_Status` NOT IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good")) THEN lead(`Event_Value`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) ELSE `Value` END AS `Next_Value_For_Status` , CASE WHEN `Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") THEN `Next_Value_For_Status` ELSE 0 END AS `Next_Value` , CASE WHEN `Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") AND `Next_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") THEN ((cast(`Next_EventTime` AS double) - cast(`EventTime` AS double)) / 60) WHEN `Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") AND `Next_Status` NOT IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") THEN ((cast(`Next_EventTime` AS integer) - cast(`EventTime` AS double)) / 60) ELSE 0 END AS good_minutes , CASE WHEN Step == false THEN ((`Event_Value` + `Next_Value`) * 0.5) * good_minutes ELSE (`Event_Value` * good_minutes) END AS twa_value FROM interpolate) ,twa AS (SELECT `TagName`, `WindowEventTime` AS `EventTime`, sum(twa_value) / sum(good_minutes) AS `Value` from twa_calculations GROUP BY `TagName`, `WindowEventTime`) ,project AS (SELECT * FROM twa WHERE `EventTime` BETWEEN to_timestamp("2011-01-01T00:00:00") AND to_timestamp("2011-01-02T23:59:59")) SELECT * FROM project ORDER BY `TagName`, `EventTime` '
TWA_MOCKED_QUERY_CHECK_TAGS = 'WITH raw_events AS (SELECT DISTINCT `TagName`, from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE to_date(`EventTime`) BETWEEN date_sub(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND date_add(to_date(to_timestamp("2011-01-02T23:59:59+00:00")), 1) AND UPPER(`TagName`) IN (\'MOCKED-TAGNAME\') ) ,date_array AS (SELECT DISTINCT explode(sequence(from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000"), from_utc_timestamp(to_timestamp("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS `EventTime`, explode(array(`TagName`)) AS `TagName` FROM raw_events) ,boundary_events AS (SELECT coalesce(a.`TagName`, b.`TagName`) AS `TagName`, coalesce(a.`EventTime`, b.`EventTime`) AS `EventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) ,window_buckets AS (SELECT `EventTime` AS window_start, LEAD(`EventTime`) OVER (ORDER BY `EventTime`) AS window_end FROM (SELECT distinct `EventTime` FROM date_array) ) ,window_events AS (SELECT /*+ RANGE_JOIN(b, 900 ) */ b.`TagName`, b.`EventTime`, a.window_start AS `WindowEventTime`, b.`Status`, b.`Value` FROM boundary_events b LEFT OUTER JOIN window_buckets a ON a.window_start <= b.`EventTime` AND a.window_end > b.`EventTime`) ,fill_status AS (SELECT *, last_value(`Status`, true) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS `Fill_Status`, CASE WHEN `Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") THEN `Value` ELSE null END AS `Good_Value` FROM window_events) ,fill_value AS (SELECT *, last_value(`Good_Value`, true) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS `Fill_Value` FROM fill_status) ,fill_step AS (SELECT *, false AS Step FROM fill_value) ,interpolate AS (SELECT *, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN lag(`EventTime`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) ELSE NULL END AS `Previous_EventTime`, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN lag(`Fill_Value`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) ELSE NULL END AS `Previous_Fill_Value`, lead(`EventTime`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) AS `Next_EventTime`, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN lead(`Fill_Value`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) ELSE NULL END AS `Next_Fill_Value`, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN `Previous_Fill_Value` + ( (`Next_Fill_Value` - `Previous_Fill_Value`) * ( ( unix_timestamp(`EventTime`) - unix_timestamp(`Previous_EventTime`) ) / ( unix_timestamp(`Next_EventTime`) - unix_timestamp(`Previous_EventTime`) ) ) ) ELSE NULL END AS `Interpolated_Value`, coalesce(`Interpolated_Value`, `Fill_Value`) as `Event_Value` FROM fill_step ),twa_calculations AS (SELECT `TagName`, `EventTime`, `WindowEventTime`, `Step`, `Status`, `Value`, `Previous_EventTime`, `Previous_Fill_Value`, `Next_EventTime`, `Next_Fill_Value`, `Interpolated_Value`, `Fill_Status`, `Fill_Value`, `Event_Value`, lead(`Fill_Status`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_Status` , CASE WHEN `Next_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") OR (`Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") AND `Next_Status` NOT IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good")) THEN lead(`Event_Value`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) ELSE `Value` END AS `Next_Value_For_Status` , CASE WHEN `Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") THEN `Next_Value_For_Status` ELSE 0 END AS `Next_Value` , CASE WHEN `Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") AND `Next_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") THEN ((cast(`Next_EventTime` AS double) - cast(`EventTime` AS double)) / 60) WHEN `Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") AND `Next_Status` NOT IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") THEN ((cast(`Next_EventTime` AS integer) - cast(`EventTime` AS double)) / 60) ELSE 0 END AS good_minutes , CASE WHEN Step == false THEN ((`Event_Value` + `Next_Value`) * 0.5) * good_minutes ELSE (`Event_Value` * good_minutes) END AS twa_value FROM interpolate) ,twa AS (SELECT `TagName`, `WindowEventTime` AS `EventTime`, sum(twa_value) / sum(good_minutes) AS `Value` from twa_calculations GROUP BY `TagName`, `WindowEventTime`) ,project AS (SELECT * FROM twa WHERE `EventTime` BETWEEN to_timestamp("2011-01-01T00:00:00") AND to_timestamp("2011-01-02T23:59:59")) SELECT * FROM project ORDER BY `TagName`, `EventTime` '
TWA_MOCKED_QUERY_PIVOT = 'WITH raw_events AS (SELECT DISTINCT `TagName`, from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE to_date(`EventTime`) BETWEEN date_sub(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND date_add(to_date(to_timestamp("2011-01-02T23:59:59+00:00")), 1) AND `TagName` IN (\'mocked-TAGNAME\') ) ,date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000"), from_utc_timestamp(to_timestamp("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS `EventTime`, explode(array(\'mocked-TAGNAME\')) AS `TagName`) ,boundary_events AS (SELECT coalesce(a.`TagName`, b.`TagName`) AS `TagName`, coalesce(a.`EventTime`, b.`EventTime`) AS `EventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) ,window_buckets AS (SELECT `EventTime` AS window_start, LEAD(`EventTime`) OVER (ORDER BY `EventTime`) AS window_end FROM (SELECT distinct `EventTime` FROM date_array) ) ,window_events AS (SELECT /*+ RANGE_JOIN(b, 900 ) */ b.`TagName`, b.`EventTime`, a.window_start AS `WindowEventTime`, b.`Status`, b.`Value` FROM boundary_events b LEFT OUTER JOIN window_buckets a ON a.window_start <= b.`EventTime` AND a.window_end > b.`EventTime`) ,fill_status AS (SELECT *, last_value(`Status`, true) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS `Fill_Status`, CASE WHEN `Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") THEN `Value` ELSE null END AS `Good_Value` FROM window_events) ,fill_value AS (SELECT *, last_value(`Good_Value`, true) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS `Fill_Value` FROM fill_status) ,fill_step AS (SELECT *, IFNULL(Step, false) AS Step FROM fill_value f LEFT JOIN `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_metadata` m ON f.`TagName` = m.`TagName`) ,interpolate AS (SELECT *, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN lag(`EventTime`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) ELSE NULL END AS `Previous_EventTime`, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN lag(`Fill_Value`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) ELSE NULL END AS `Previous_Fill_Value`, lead(`EventTime`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) AS `Next_EventTime`, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN lead(`Fill_Value`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) ELSE NULL END AS `Next_Fill_Value`, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN `Previous_Fill_Value` + ( (`Next_Fill_Value` - `Previous_Fill_Value`) * ( ( unix_timestamp(`EventTime`) - unix_timestamp(`Previous_EventTime`) ) / ( unix_timestamp(`Next_EventTime`) - unix_timestamp(`Previous_EventTime`) ) ) ) ELSE NULL END AS `Interpolated_Value`, coalesce(`Interpolated_Value`, `Fill_Value`) as `Event_Value` FROM fill_step ),twa_calculations AS (SELECT `TagName`, `EventTime`, `WindowEventTime`, `Step`, `Status`, `Value`, `Previous_EventTime`, `Previous_Fill_Value`, `Next_EventTime`, `Next_Fill_Value`, `Interpolated_Value`, `Fill_Status`, `Fill_Value`, `Event_Value`, lead(`Fill_Status`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_Status` , CASE WHEN `Next_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") OR (`Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") AND `Next_Status` NOT IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good")) THEN lead(`Event_Value`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) ELSE `Value` END AS `Next_Value_For_Status` , CASE WHEN `Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") THEN `Next_Value_For_Status` ELSE 0 END AS `Next_Value` , CASE WHEN `Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") AND `Next_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") THEN ((cast(`Next_EventTime` AS double) - cast(`EventTime` AS double)) / 60) WHEN `Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") AND `Next_Status` NOT IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") THEN ((cast(`Next_EventTime` AS integer) - cast(`EventTime` AS double)) / 60) ELSE 0 END AS good_minutes , CASE WHEN Step == false THEN ((`Event_Value` + `Next_Value`) * 0.5) * good_minutes ELSE (`Event_Value` * good_minutes) END AS twa_value FROM interpolate) ,twa AS (SELECT `TagName`, `WindowEventTime` AS `EventTime`, sum(twa_value) / sum(good_minutes) AS `Value` from twa_calculations GROUP BY `TagName`, `WindowEventTime`) ,project AS (SELECT * FROM twa WHERE `EventTime` BETWEEN to_timestamp("2011-01-01T00:00:00") AND to_timestamp("2011-01-02T23:59:59")) ,pivot AS (SELECT * FROM (SELECT `EventTime`, `Value`, `TagName` AS `TagName` FROM project) PIVOT (FIRST(`Value`) FOR `TagName` IN (\'mocked-TAGNAME\' AS `mocked-TAGNAME`))) SELECT * FROM pivot ORDER BY `EventTime` '
TWA_MOCKED_QUERY_METADATA = 'WITH raw_events AS (SELECT DISTINCT `TagName`, from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE to_date(`EventTime`) BETWEEN date_sub(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND date_add(to_date(to_timestamp("2011-01-02T23:59:59+00:00")), 1) AND `TagName` IN (\'mocked-TAGNAME\') ) ,date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000"), from_utc_timestamp(to_timestamp("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS `EventTime`, explode(array(\'mocked-TAGNAME\')) AS `TagName`) ,boundary_events AS (SELECT coalesce(a.`TagName`, b.`TagName`) AS `TagName`, coalesce(a.`EventTime`, b.`EventTime`) AS `EventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) ,window_buckets AS (SELECT `EventTime` AS window_start, LEAD(`EventTime`) OVER (ORDER BY `EventTime`) AS window_end FROM (SELECT distinct `EventTime` FROM date_array) ) ,window_events AS (SELECT /*+ RANGE_JOIN(b, 900 ) */ b.`TagName`, b.`EventTime`, a.window_start AS `WindowEventTime`, b.`Status`, b.`Value` FROM boundary_events b LEFT OUTER JOIN window_buckets a ON a.window_start <= b.`EventTime` AND a.window_end > b.`EventTime`) ,fill_status AS (SELECT *, last_value(`Status`, true) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS `Fill_Status`, CASE WHEN `Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") THEN `Value` ELSE null END AS `Good_Value` FROM window_events) ,fill_value AS (SELECT *, last_value(`Good_Value`, true) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS `Fill_Value` FROM fill_status) ,fill_step AS (SELECT *, IFNULL(Step, false) AS Step FROM fill_value f LEFT JOIN `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_metadata` m ON f.`TagName` = m.`TagName`) ,interpolate AS (SELECT *, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN lag(`EventTime`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) ELSE NULL END AS `Previous_EventTime`, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN lag(`Fill_Value`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) ELSE NULL END AS `Previous_Fill_Value`, lead(`EventTime`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) AS `Next_EventTime`, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN lead(`Fill_Value`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) ELSE NULL END AS `Next_Fill_Value`, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN `Previous_Fill_Value` + ( (`Next_Fill_Value` - `Previous_Fill_Value`) * ( ( unix_timestamp(`EventTime`) - unix_timestamp(`Previous_EventTime`) ) / ( unix_timestamp(`Next_EventTime`) - unix_timestamp(`Previous_EventTime`) ) ) ) ELSE NULL END AS `Interpolated_Value`, coalesce(`Interpolated_Value`, `Fill_Value`) as `Event_Value` FROM fill_step ),twa_calculations AS (SELECT `TagName`, `EventTime`, `WindowEventTime`, `Step`, `Status`, `Value`, `Previous_EventTime`, `Previous_Fill_Value`, `Next_EventTime`, `Next_Fill_Value`, `Interpolated_Value`, `Fill_Status`, `Fill_Value`, `Event_Value`, lead(`Fill_Status`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_Status` , CASE WHEN `Next_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") OR (`Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") AND `Next_Status` NOT IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good")) THEN lead(`Event_Value`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) ELSE `Value` END AS `Next_Value_For_Status` , CASE WHEN `Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") THEN `Next_Value_For_Status` ELSE 0 END AS `Next_Value` , CASE WHEN `Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") AND `Next_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") THEN ((cast(`Next_EventTime` AS double) - cast(`EventTime` AS double)) / 60) WHEN `Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") AND `Next_Status` NOT IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") THEN ((cast(`Next_EventTime` AS integer) - cast(`EventTime` AS double)) / 60) ELSE 0 END AS good_minutes , CASE WHEN Step == false THEN ((`Event_Value` + `Next_Value`) * 0.5) * good_minutes ELSE (`Event_Value` * good_minutes) END AS twa_value FROM interpolate) ,twa AS (SELECT `TagName`, `WindowEventTime` AS `EventTime`, sum(twa_value) / sum(good_minutes) AS `Value` from twa_calculations GROUP BY `TagName`, `WindowEventTime`) ,project AS (SELECT * FROM twa WHERE `EventTime` BETWEEN to_timestamp("2011-01-01T00:00:00") AND to_timestamp("2011-01-02T23:59:59")) SELECT * FROM project ORDER BY `TagName`, `EventTime` '
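+# Expected time weighted average query when display_uom is True: the final projection
+# left-joins the mocked metadata table to append a `UoM` column.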
+TWA_MOCKED_QUERY_UOM = 'WITH raw_events AS (SELECT DISTINCT `TagName`, from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE to_date(`EventTime`) BETWEEN date_sub(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND date_add(to_date(to_timestamp("2011-01-02T23:59:59+00:00")), 1) AND `TagName` IN (\'mocked-TAGNAME\') ) ,date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000"), from_utc_timestamp(to_timestamp("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS `EventTime`, explode(array(\'mocked-TAGNAME\')) AS `TagName`) ,boundary_events AS (SELECT coalesce(a.`TagName`, b.`TagName`) AS `TagName`, coalesce(a.`EventTime`, b.`EventTime`) AS `EventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) ,window_buckets AS (SELECT `EventTime` AS window_start, LEAD(`EventTime`) OVER (ORDER BY `EventTime`) AS window_end FROM (SELECT distinct `EventTime` FROM date_array) ) ,window_events AS (SELECT /*+ RANGE_JOIN(b, 900 ) */ b.`TagName`, b.`EventTime`, a.window_start AS `WindowEventTime`, b.`Status`, b.`Value` FROM boundary_events b LEFT OUTER JOIN window_buckets a ON a.window_start <= b.`EventTime` AND a.window_end > b.`EventTime`) ,fill_status AS (SELECT *, last_value(`Status`, true) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS `Fill_Status`, CASE WHEN `Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") THEN `Value` ELSE null END AS `Good_Value` FROM window_events) ,fill_value AS (SELECT *, last_value(`Good_Value`, true) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS `Fill_Value` FROM fill_status) ,fill_step AS (SELECT *, false AS Step FROM fill_value) ,interpolate AS (SELECT *, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN lag(`EventTime`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) ELSE NULL END AS `Previous_EventTime`, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN lag(`Fill_Value`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) ELSE NULL END AS `Previous_Fill_Value`, lead(`EventTime`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) AS `Next_EventTime`, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN lead(`Fill_Value`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) ELSE NULL END AS `Next_Fill_Value`, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN `Previous_Fill_Value` + ( (`Next_Fill_Value` - `Previous_Fill_Value`) * ( ( unix_timestamp(`EventTime`) - unix_timestamp(`Previous_EventTime`) ) / ( unix_timestamp(`Next_EventTime`) - unix_timestamp(`Previous_EventTime`) ) ) ) ELSE NULL END AS `Interpolated_Value`, coalesce(`Interpolated_Value`, `Fill_Value`) as `Event_Value` FROM fill_step ),twa_calculations AS (SELECT `TagName`, `EventTime`, `WindowEventTime`, `Step`, `Status`, `Value`, `Previous_EventTime`, `Previous_Fill_Value`, `Next_EventTime`, `Next_Fill_Value`, `Interpolated_Value`, `Fill_Status`, `Fill_Value`, `Event_Value`, lead(`Fill_Status`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_Status` , CASE WHEN `Next_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") OR (`Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") AND `Next_Status` NOT IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good")) THEN lead(`Event_Value`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) ELSE `Value` END AS `Next_Value_For_Status` , CASE WHEN `Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") THEN `Next_Value_For_Status` ELSE 0 END AS `Next_Value` , CASE WHEN `Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") AND `Next_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") THEN ((cast(`Next_EventTime` AS double) - cast(`EventTime` AS double)) / 60) WHEN `Fill_Status` IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") AND `Next_Status` NOT IN ("Good", "Good, Annotated", "Substituted, Good, Annotated", "Substituted, Good", "Good, Questionable", "Questionable, Good") THEN ((cast(`Next_EventTime` AS integer) - cast(`EventTime` AS double)) / 60) ELSE 0 END AS good_minutes , CASE WHEN Step == false THEN ((`Event_Value` + `Next_Value`) * 0.5) * good_minutes ELSE (`Event_Value` * good_minutes) END AS twa_value FROM interpolate) ,twa AS (SELECT `TagName`, `WindowEventTime` AS `EventTime`, sum(twa_value) / sum(good_minutes) AS `Value` from twa_calculations GROUP BY `TagName`, `WindowEventTime`) ,project AS (SELECT * FROM twa WHERE `EventTime` BETWEEN to_timestamp("2011-01-01T00:00:00") AND to_timestamp("2011-01-02T23:59:59")) SELECT p.`EventTime`, p.`TagName`, p.`Value`, m.`UoM` FROM project p LEFT OUTER JOIN `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_metadata` m ON p.`TagName` = m.`TagName` ORDER BY `TagName`, `EventTime` '
+
+# Interpolation at Time
IAT_MOCKED_QUERY = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE to_date(`EventTime`) BETWEEN date_sub(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND date_add(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND `TagName` IN (\'mocked-TAGNAME\') ) , date_array AS (SELECT explode(array( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") )) AS `EventTime`, explode(array(\'mocked-TAGNAME\')) AS `TagName`) , interpolation_events AS (SELECT coalesce(a.`TagName`, b.`TagName`) AS `TagName`, coalesce(a.`EventTime`, b.`EventTime`) AS `EventTime`, a.`EventTime` AS `Requested_EventTime`, b.`EventTime` AS `Found_EventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) , interpolation_calculations AS (SELECT *, lag(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_EventTime`, lag(`Value`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_Value`, lead(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_EventTime`, lead(`Value`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_Value`, CASE WHEN `Requested_EventTime` = `Found_EventTime` THEN `Value` WHEN `Next_EventTime` IS NULL THEN `Previous_Value` WHEN `Previous_EventTime` IS NULL AND `Next_EventTime` IS NULL THEN NULL ELSE `Previous_Value` + ((`Next_Value` - `Previous_Value`) * ((unix_timestamp(`EventTime`) - unix_timestamp(`Previous_EventTime`)) / (unix_timestamp(`Next_EventTime`) - unix_timestamp(`Previous_EventTime`)))) END AS `Interpolated_Value` FROM interpolation_events) ,project AS (SELECT `TagName`, `EventTime`, `Interpolated_Value` AS `Value` FROM interpolation_calculations WHERE `EventTime` IN ( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") ) ) SELECT * FROM project ORDER BY `TagName`, `EventTime` '
IAT_MOCKED_QUERY_CHECK_TAGS = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE to_date(`EventTime`) BETWEEN date_sub(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND date_add(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND UPPER(`TagName`) IN (\'MOCKED-TAGNAME\') ) , date_array AS (SELECT DISTINCT explode(array( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") )) AS `EventTime`, explode(array(`TagName`)) AS `TagName` FROM raw_events) , interpolation_events AS (SELECT coalesce(a.`TagName`, b.`TagName`) AS `TagName`, coalesce(a.`EventTime`, b.`EventTime`) AS `EventTime`, a.`EventTime` AS `Requested_EventTime`, b.`EventTime` AS `Found_EventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) , interpolation_calculations AS (SELECT *, lag(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_EventTime`, lag(`Value`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_Value`, lead(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_EventTime`, lead(`Value`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_Value`, CASE WHEN `Requested_EventTime` = `Found_EventTime` THEN `Value` WHEN `Next_EventTime` IS NULL THEN `Previous_Value` WHEN `Previous_EventTime` IS NULL AND `Next_EventTime` IS NULL THEN NULL ELSE `Previous_Value` + ((`Next_Value` - `Previous_Value`) * ((unix_timestamp(`EventTime`) - unix_timestamp(`Previous_EventTime`)) / (unix_timestamp(`Next_EventTime`) - unix_timestamp(`Previous_EventTime`)))) END AS `Interpolated_Value` FROM interpolation_events) ,project AS (SELECT `TagName`, `EventTime`, `Interpolated_Value` AS `Value` FROM interpolation_calculations WHERE `EventTime` IN ( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") ) ) SELECT * FROM project ORDER BY `TagName`, `EventTime` '
IAT_MOCKED_QUERY_PIVOT = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE to_date(`EventTime`) BETWEEN date_sub(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND date_add(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND `TagName` IN (\'mocked-TAGNAME\') ) , date_array AS (SELECT explode(array( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") )) AS `EventTime`, explode(array(\'mocked-TAGNAME\')) AS `TagName`) , interpolation_events AS (SELECT coalesce(a.`TagName`, b.`TagName`) AS `TagName`, coalesce(a.`EventTime`, b.`EventTime`) AS `EventTime`, a.`EventTime` AS `Requested_EventTime`, b.`EventTime` AS `Found_EventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) , interpolation_calculations AS (SELECT *, lag(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_EventTime`, lag(`Value`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_Value`, lead(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_EventTime`, lead(`Value`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_Value`, CASE WHEN `Requested_EventTime` = `Found_EventTime` THEN `Value` WHEN `Next_EventTime` IS NULL THEN `Previous_Value` WHEN `Previous_EventTime` IS NULL AND `Next_EventTime` IS NULL THEN NULL ELSE `Previous_Value` + ((`Next_Value` - `Previous_Value`) * ((unix_timestamp(`EventTime`) - unix_timestamp(`Previous_EventTime`)) / (unix_timestamp(`Next_EventTime`) - unix_timestamp(`Previous_EventTime`)))) END AS `Interpolated_Value` FROM interpolation_events) ,project AS (SELECT `TagName`, `EventTime`, `Interpolated_Value` AS `Value` FROM interpolation_calculations WHERE `EventTime` IN ( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") ) ) ,pivot AS (SELECT * FROM (SELECT `EventTime`, `Value`, `TagName` AS `TagName` FROM project) PIVOT (FIRST(`Value`) FOR `TagName` IN (\'mocked-TAGNAME\' AS `mocked-TAGNAME`))) SELECT * FROM pivot ORDER BY `EventTime` '
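+# Expected interpolation-at-time query when display_uom is True: the final projection
+# left-joins the mocked metadata table to append a `UoM` column.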
+IAT_MOCKED_QUERY_UOM = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE to_date(`EventTime`) BETWEEN date_sub(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND date_add(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND `TagName` IN (\'mocked-TAGNAME\') ) , date_array AS (SELECT explode(array( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") )) AS `EventTime`, explode(array(\'mocked-TAGNAME\')) AS `TagName`) , interpolation_events AS (SELECT coalesce(a.`TagName`, b.`TagName`) AS `TagName`, coalesce(a.`EventTime`, b.`EventTime`) AS `EventTime`, a.`EventTime` AS `Requested_EventTime`, b.`EventTime` AS `Found_EventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) , interpolation_calculations AS (SELECT *, lag(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_EventTime`, lag(`Value`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_Value`, lead(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_EventTime`, lead(`Value`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_Value`, CASE WHEN `Requested_EventTime` = `Found_EventTime` THEN `Value` WHEN `Next_EventTime` IS NULL THEN `Previous_Value` WHEN `Previous_EventTime` IS NULL AND `Next_EventTime` IS NULL THEN NULL ELSE `Previous_Value` + ((`Next_Value` - `Previous_Value`) * ((unix_timestamp(`EventTime`) - unix_timestamp(`Previous_EventTime`)) / (unix_timestamp(`Next_EventTime`) - unix_timestamp(`Previous_EventTime`)))) END AS `Interpolated_Value` FROM interpolation_events) ,project AS (SELECT `TagName`, `EventTime`, `Interpolated_Value` AS `Value` FROM interpolation_calculations WHERE `EventTime` IN ( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") ) ) SELECT p.`EventTime`, p.`TagName`, p.`Value`, m.`UoM` FROM project p LEFT OUTER JOIN `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_metadata` m ON p.`TagName` = m.`TagName` ORDER BY `TagName`, `EventTime` '
+
+# Metadata
METADATA_MOCKED_QUERY = "SELECT * FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_metadata` WHERE `TagName` IN ('mocked-TAGNAME') ORDER BY `TagName` "
METADATA_MOCKED_QUERY_CHECK_TAGS = "SELECT * FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_metadata` WHERE UPPER(`TagName`) IN ('MOCKED-TAGNAME') ORDER BY `TagName` "
METADATA_MOCKED_QUERY_NO_TAGS = "SELECT * FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_metadata` ORDER BY `TagName` "
+
+# Circular Average
CIRCULAR_AVERAGE_MOCKED_QUERY = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN TO_TIMESTAMP("2011-01-01T00:00:00+00:00") AND TO_TIMESTAMP("2011-01-02T23:59:59+00:00") AND `TagName` IN (\'mocked-TAGNAME\') ) ,date_array AS (SELECT EXPLODE(SEQUENCE(FROM_UTC_TIMESTAMP(TO_TIMESTAMP("2011-01-01T00:00:00+00:00"), "+0000"), FROM_UTC_TIMESTAMP(TO_TIMESTAMP("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS `EventTime`, EXPLODE(ARRAY(\'mocked-TAGNAME\')) AS `TagName`) ,window_events AS (SELECT COALESCE(a.`TagName`, b.`TagName`) AS `TagName`, COALESCE(a.`EventTime`, b.`EventTime`) AS `EventTime`, WINDOW(COALESCE(a.`EventTime`, b.`EventTime`), \'15 minute\').START `WindowEventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON CAST(a.`EventTime` AS LONG) = CAST(b.`EventTime` AS LONG) AND a.`TagName` = b.`TagName`) ,calculation_set_up AS (SELECT `EventTime`, `WindowEventTime`, `TagName`, `Value`, MOD(`Value` - 0, (360 - 0))*(2*pi()/(360 - 0)) AS `Value_in_Radians`, LAG(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_EventTime`, (unix_millis(`EventTime`) - unix_millis(`Previous_EventTime`)) / 86400000 AS Time_Difference, COS(`Value_in_Radians`) AS Cos_Value, SIN(`Value_in_Radians`) AS Sin_Value FROM window_events) ,circular_average_calculations AS (SELECT `WindowEventTime`, `TagName`, Time_Difference, AVG(Cos_Value) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) AS Average_Cos, AVG(Sin_Value) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) AS Average_Sin, SQRT(POW(Average_Cos, 2) + POW(Average_Sin, 2)) AS Vector_Length, Average_Cos/Vector_Length AS Rescaled_Average_Cos, Average_Sin/Vector_Length AS Rescaled_Average_Sin, Time_Difference * Rescaled_Average_Cos AS Diff_Average_Cos, Time_Difference * Rescaled_Average_Sin AS Diff_Average_Sin FROM calculation_set_up) ,circular_average_results AS (SELECT `WindowEventTime` AS `EventTime`, `TagName`, sum(Diff_Average_Cos)/sum(Time_Difference) AS Cos_Time_Averages, sum(Diff_Average_Sin)/sum(Time_Difference) AS Sin_Time_Averages, array_min(array(1, sqrt(pow(Cos_Time_Averages, 2) + pow(Sin_Time_Averages, 2)))) AS R, mod(2*pi() + atan2(Sin_Time_Averages, Cos_Time_Averages), 2*pi()) AS Circular_Average_Value_in_Radians, (Circular_Average_Value_in_Radians * (360 - 0)) / (2*pi())+ 0 AS Circular_Average_Value_in_Degrees FROM circular_average_calculations GROUP BY `TagName`, `WindowEventTime`) ,project AS (SELECT `EventTime`, `TagName`, Circular_Average_Value_in_Degrees AS `Value` FROM circular_average_results) SELECT * FROM project ORDER BY `TagName`, `EventTime` '
CIRCULAR_AVERAGE_MOCKED_QUERY_CHECK_TAGS = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN TO_TIMESTAMP("2011-01-01T00:00:00+00:00") AND TO_TIMESTAMP("2011-01-02T23:59:59+00:00") AND UPPER(`TagName`) IN (\'MOCKED-TAGNAME\') ) ,date_array AS (SELECT DISTINCT EXPLODE(SEQUENCE(FROM_UTC_TIMESTAMP(TO_TIMESTAMP("2011-01-01T00:00:00+00:00"), "+0000"), FROM_UTC_TIMESTAMP(TO_TIMESTAMP("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS `EventTime`, EXPLODE(ARRAY(`TagName`)) AS `TagName` FROM raw_events) ,window_events AS (SELECT COALESCE(a.`TagName`, b.`TagName`) AS `TagName`, COALESCE(a.`EventTime`, b.`EventTime`) AS `EventTime`, WINDOW(COALESCE(a.`EventTime`, b.`EventTime`), \'15 minute\').START `WindowEventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON CAST(a.`EventTime` AS LONG) = CAST(b.`EventTime` AS LONG) AND a.`TagName` = b.`TagName`) ,calculation_set_up AS (SELECT `EventTime`, `WindowEventTime`, `TagName`, `Value`, MOD(`Value` - 0, (360 - 0))*(2*pi()/(360 - 0)) AS `Value_in_Radians`, LAG(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_EventTime`, (unix_millis(`EventTime`) - unix_millis(`Previous_EventTime`)) / 86400000 AS Time_Difference, COS(`Value_in_Radians`) AS Cos_Value, SIN(`Value_in_Radians`) AS Sin_Value FROM window_events) ,circular_average_calculations AS (SELECT `WindowEventTime`, `TagName`, Time_Difference, AVG(Cos_Value) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) AS Average_Cos, AVG(Sin_Value) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) AS Average_Sin, SQRT(POW(Average_Cos, 2) + POW(Average_Sin, 2)) AS Vector_Length, Average_Cos/Vector_Length AS Rescaled_Average_Cos, Average_Sin/Vector_Length AS Rescaled_Average_Sin, Time_Difference * Rescaled_Average_Cos AS Diff_Average_Cos, Time_Difference * Rescaled_Average_Sin AS Diff_Average_Sin FROM calculation_set_up) ,circular_average_results AS (SELECT `WindowEventTime` AS `EventTime`, `TagName`, sum(Diff_Average_Cos)/sum(Time_Difference) AS Cos_Time_Averages, sum(Diff_Average_Sin)/sum(Time_Difference) AS Sin_Time_Averages, array_min(array(1, sqrt(pow(Cos_Time_Averages, 2) + pow(Sin_Time_Averages, 2)))) AS R, mod(2*pi() + atan2(Sin_Time_Averages, Cos_Time_Averages), 2*pi()) AS Circular_Average_Value_in_Radians, (Circular_Average_Value_in_Radians * (360 - 0)) / (2*pi())+ 0 AS Circular_Average_Value_in_Degrees FROM circular_average_calculations GROUP BY `TagName`, `WindowEventTime`) ,project AS (SELECT `EventTime`, `TagName`, Circular_Average_Value_in_Degrees AS `Value` FROM circular_average_results) SELECT * FROM project ORDER BY `TagName`, `EventTime` '
CIRCULAR_AVERAGE_MOCKED_QUERY_PIVOT = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN TO_TIMESTAMP("2011-01-01T00:00:00+00:00") AND TO_TIMESTAMP("2011-01-02T23:59:59+00:00") AND `TagName` IN (\'mocked-TAGNAME\') ) ,date_array AS (SELECT EXPLODE(SEQUENCE(FROM_UTC_TIMESTAMP(TO_TIMESTAMP("2011-01-01T00:00:00+00:00"), "+0000"), FROM_UTC_TIMESTAMP(TO_TIMESTAMP("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS `EventTime`, EXPLODE(ARRAY(\'mocked-TAGNAME\')) AS `TagName`) ,window_events AS (SELECT COALESCE(a.`TagName`, b.`TagName`) AS `TagName`, COALESCE(a.`EventTime`, b.`EventTime`) AS `EventTime`, WINDOW(COALESCE(a.`EventTime`, b.`EventTime`), \'15 minute\').START `WindowEventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON CAST(a.`EventTime` AS LONG) = CAST(b.`EventTime` AS LONG) AND a.`TagName` = b.`TagName`) ,calculation_set_up AS (SELECT `EventTime`, `WindowEventTime`, `TagName`, `Value`, MOD(`Value` - 0, (360 - 0))*(2*pi()/(360 - 0)) AS `Value_in_Radians`, LAG(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_EventTime`, (unix_millis(`EventTime`) - unix_millis(`Previous_EventTime`)) / 86400000 AS Time_Difference, COS(`Value_in_Radians`) AS Cos_Value, SIN(`Value_in_Radians`) AS Sin_Value FROM window_events) ,circular_average_calculations AS (SELECT `WindowEventTime`, `TagName`, Time_Difference, AVG(Cos_Value) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) AS Average_Cos, AVG(Sin_Value) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) AS Average_Sin, SQRT(POW(Average_Cos, 2) + POW(Average_Sin, 2)) AS Vector_Length, Average_Cos/Vector_Length AS Rescaled_Average_Cos, Average_Sin/Vector_Length AS Rescaled_Average_Sin, Time_Difference * Rescaled_Average_Cos AS Diff_Average_Cos, Time_Difference * Rescaled_Average_Sin AS Diff_Average_Sin FROM calculation_set_up) ,circular_average_results AS (SELECT `WindowEventTime` AS `EventTime`, `TagName`, sum(Diff_Average_Cos)/sum(Time_Difference) AS Cos_Time_Averages, sum(Diff_Average_Sin)/sum(Time_Difference) AS Sin_Time_Averages, array_min(array(1, sqrt(pow(Cos_Time_Averages, 2) + pow(Sin_Time_Averages, 2)))) AS R, mod(2*pi() + atan2(Sin_Time_Averages, Cos_Time_Averages), 2*pi()) AS Circular_Average_Value_in_Radians, (Circular_Average_Value_in_Radians * (360 - 0)) / (2*pi())+ 0 AS Circular_Average_Value_in_Degrees FROM circular_average_calculations GROUP BY `TagName`, `WindowEventTime`) ,project AS (SELECT `EventTime`, `TagName`, Circular_Average_Value_in_Degrees AS `Value` FROM circular_average_results) ,pivot AS (SELECT * FROM (SELECT `EventTime`, `Value`, `TagName` AS `TagName` FROM project) PIVOT (FIRST(`Value`) FOR `TagName` IN (\'mocked-TAGNAME\' AS `mocked-TAGNAME`))) SELECT * FROM pivot ORDER BY `EventTime` '
+CIRCULAR_AVERAGE_MOCKED_QUERY_UOM = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN TO_TIMESTAMP("2011-01-01T00:00:00+00:00") AND TO_TIMESTAMP("2011-01-02T23:59:59+00:00") AND `TagName` IN (\'mocked-TAGNAME\') ) ,date_array AS (SELECT EXPLODE(SEQUENCE(FROM_UTC_TIMESTAMP(TO_TIMESTAMP("2011-01-01T00:00:00+00:00"), "+0000"), FROM_UTC_TIMESTAMP(TO_TIMESTAMP("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS `EventTime`, EXPLODE(ARRAY(\'mocked-TAGNAME\')) AS `TagName`) ,window_events AS (SELECT COALESCE(a.`TagName`, b.`TagName`) AS `TagName`, COALESCE(a.`EventTime`, b.`EventTime`) AS `EventTime`, WINDOW(COALESCE(a.`EventTime`, b.`EventTime`), \'15 minute\').START `WindowEventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON CAST(a.`EventTime` AS LONG) = CAST(b.`EventTime` AS LONG) AND a.`TagName` = b.`TagName`) ,calculation_set_up AS (SELECT `EventTime`, `WindowEventTime`, `TagName`, `Value`, MOD(`Value` - 0, (360 - 0))*(2*pi()/(360 - 0)) AS `Value_in_Radians`, LAG(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_EventTime`, (unix_millis(`EventTime`) - unix_millis(`Previous_EventTime`)) / 86400000 AS Time_Difference, COS(`Value_in_Radians`) AS Cos_Value, SIN(`Value_in_Radians`) AS Sin_Value FROM window_events) ,circular_average_calculations AS (SELECT `WindowEventTime`, `TagName`, Time_Difference, AVG(Cos_Value) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) AS Average_Cos, AVG(Sin_Value) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) AS Average_Sin, SQRT(POW(Average_Cos, 2) + POW(Average_Sin, 2)) AS Vector_Length, Average_Cos/Vector_Length AS Rescaled_Average_Cos, Average_Sin/Vector_Length AS Rescaled_Average_Sin, Time_Difference * Rescaled_Average_Cos AS Diff_Average_Cos, Time_Difference * Rescaled_Average_Sin AS Diff_Average_Sin FROM calculation_set_up) ,circular_average_results AS (SELECT `WindowEventTime` AS `EventTime`, `TagName`, sum(Diff_Average_Cos)/sum(Time_Difference) AS Cos_Time_Averages, sum(Diff_Average_Sin)/sum(Time_Difference) AS Sin_Time_Averages, array_min(array(1, sqrt(pow(Cos_Time_Averages, 2) + pow(Sin_Time_Averages, 2)))) AS R, mod(2*pi() + atan2(Sin_Time_Averages, Cos_Time_Averages), 2*pi()) AS Circular_Average_Value_in_Radians, (Circular_Average_Value_in_Radians * (360 - 0)) / (2*pi())+ 0 AS Circular_Average_Value_in_Degrees FROM circular_average_calculations GROUP BY `TagName`, `WindowEventTime`) ,project AS (SELECT `EventTime`, `TagName`, Circular_Average_Value_in_Degrees AS `Value` FROM circular_average_results) SELECT p.*, m.`UoM` FROM project p LEFT OUTER JOIN `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_metadata` m ON p.`TagName` = m.`TagName` ORDER BY `TagName`, `EventTime` '
+
+# Circular Standard Deviations
CIRCULAR_SD_MOCKED_QUERY = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN TO_TIMESTAMP("2011-01-01T00:00:00+00:00") AND TO_TIMESTAMP("2011-01-02T23:59:59+00:00") AND `TagName` IN (\'mocked-TAGNAME\') ) ,date_array AS (SELECT EXPLODE(SEQUENCE(FROM_UTC_TIMESTAMP(TO_TIMESTAMP("2011-01-01T00:00:00+00:00"), "+0000"), FROM_UTC_TIMESTAMP(TO_TIMESTAMP("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS `EventTime`, EXPLODE(ARRAY(\'mocked-TAGNAME\')) AS `TagName`) ,window_events AS (SELECT COALESCE(a.`TagName`, b.`TagName`) AS `TagName`, COALESCE(a.`EventTime`, b.`EventTime`) AS `EventTime`, WINDOW(COALESCE(a.`EventTime`, b.`EventTime`), \'15 minute\').START `WindowEventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON CAST(a.`EventTime` AS LONG) = CAST(b.`EventTime` AS LONG) AND a.`TagName` = b.`TagName`) ,calculation_set_up AS (SELECT `EventTime`, `WindowEventTime`, `TagName`, `Value`, MOD(`Value` - 0, (360 - 0))*(2*pi()/(360 - 0)) AS `Value_in_Radians`, LAG(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_EventTime`, (unix_millis(`EventTime`) - unix_millis(`Previous_EventTime`)) / 86400000 AS Time_Difference, COS(`Value_in_Radians`) AS Cos_Value, SIN(`Value_in_Radians`) AS Sin_Value FROM window_events) ,circular_average_calculations AS (SELECT `WindowEventTime`, `TagName`, Time_Difference, AVG(Cos_Value) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) AS Average_Cos, AVG(Sin_Value) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) AS Average_Sin, SQRT(POW(Average_Cos, 2) + POW(Average_Sin, 2)) AS Vector_Length, Average_Cos/Vector_Length AS Rescaled_Average_Cos, Average_Sin/Vector_Length AS Rescaled_Average_Sin, Time_Difference * Rescaled_Average_Cos AS Diff_Average_Cos, Time_Difference * Rescaled_Average_Sin AS Diff_Average_Sin FROM calculation_set_up) ,circular_average_results AS (SELECT `WindowEventTime` AS `EventTime`, `TagName`, sum(Diff_Average_Cos)/sum(Time_Difference) AS Cos_Time_Averages, sum(Diff_Average_Sin)/sum(Time_Difference) AS Sin_Time_Averages, array_min(array(1, sqrt(pow(Cos_Time_Averages, 2) + pow(Sin_Time_Averages, 2)))) AS R, mod(2*pi() + atan2(Sin_Time_Averages, Cos_Time_Averages), 2*pi()) AS Circular_Average_Value_in_Radians, SQRT(-2*LN(R)) * ( 360 - 0) / (2*PI()) AS Circular_Standard_Deviation FROM circular_average_calculations GROUP BY `TagName`, `WindowEventTime`) ,project AS (SELECT `EventTime`, `TagName`, Circular_Standard_Deviation AS `Value` FROM circular_average_results) SELECT * FROM project ORDER BY `TagName`, `EventTime` '
CIRCULAR_SD_MOCKED_QUERY_CHECK_TAGS = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN TO_TIMESTAMP("2011-01-01T00:00:00+00:00") AND TO_TIMESTAMP("2011-01-02T23:59:59+00:00") AND UPPER(`TagName`) IN (\'MOCKED-TAGNAME\') ) ,date_array AS (SELECT DISTINCT EXPLODE(SEQUENCE(FROM_UTC_TIMESTAMP(TO_TIMESTAMP("2011-01-01T00:00:00+00:00"), "+0000"), FROM_UTC_TIMESTAMP(TO_TIMESTAMP("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS `EventTime`, EXPLODE(ARRAY(`TagName`)) AS `TagName` FROM raw_events) ,window_events AS (SELECT COALESCE(a.`TagName`, b.`TagName`) AS `TagName`, COALESCE(a.`EventTime`, b.`EventTime`) AS `EventTime`, WINDOW(COALESCE(a.`EventTime`, b.`EventTime`), \'15 minute\').START `WindowEventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON CAST(a.`EventTime` AS LONG) = CAST(b.`EventTime` AS LONG) AND a.`TagName` = b.`TagName`) ,calculation_set_up AS (SELECT `EventTime`, `WindowEventTime`, `TagName`, `Value`, MOD(`Value` - 0, (360 - 0))*(2*pi()/(360 - 0)) AS `Value_in_Radians`, LAG(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_EventTime`, (unix_millis(`EventTime`) - unix_millis(`Previous_EventTime`)) / 86400000 AS Time_Difference, COS(`Value_in_Radians`) AS Cos_Value, SIN(`Value_in_Radians`) AS Sin_Value FROM window_events) ,circular_average_calculations AS (SELECT `WindowEventTime`, `TagName`, Time_Difference, AVG(Cos_Value) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) AS Average_Cos, AVG(Sin_Value) OVER (PARTITION BY
`TagName` ORDER BY `EventTime` ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) AS Average_Sin, SQRT(POW(Average_Cos, 2) + POW(Average_Sin, 2)) AS Vector_Length, Average_Cos/Vector_Length AS Rescaled_Average_Cos, Average_Sin/Vector_Length AS Rescaled_Average_Sin, Time_Difference * Rescaled_Average_Cos AS Diff_Average_Cos, Time_Difference * Rescaled_Average_Sin AS Diff_Average_Sin FROM calculation_set_up) ,circular_average_results AS (SELECT `WindowEventTime` AS `EventTime`, `TagName`, sum(Diff_Average_Cos)/sum(Time_Difference) AS Cos_Time_Averages, sum(Diff_Average_Sin)/sum(Time_Difference) AS Sin_Time_Averages, array_min(array(1, sqrt(pow(Cos_Time_Averages, 2) + pow(Sin_Time_Averages, 2)))) AS R, mod(2*pi() + atan2(Sin_Time_Averages, Cos_Time_Averages), 2*pi()) AS Circular_Average_Value_in_Radians, SQRT(-2*LN(R)) * ( 360 - 0) / (2*PI()) AS Circular_Standard_Deviation FROM circular_average_calculations GROUP BY `TagName`, `WindowEventTime`) ,project AS (SELECT `EventTime`, `TagName`, Circular_Standard_Deviation AS `Value` FROM circular_average_results) SELECT * FROM project ORDER BY `TagName`, `EventTime` ' CIRCULAR_SD_MOCKED_QUERY_PIVOT = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN TO_TIMESTAMP("2011-01-01T00:00:00+00:00") AND TO_TIMESTAMP("2011-01-02T23:59:59+00:00") AND `TagName` IN (\'mocked-TAGNAME\') ) ,date_array AS (SELECT EXPLODE(SEQUENCE(FROM_UTC_TIMESTAMP(TO_TIMESTAMP("2011-01-01T00:00:00+00:00"), "+0000"), FROM_UTC_TIMESTAMP(TO_TIMESTAMP("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS `EventTime`, EXPLODE(ARRAY(\'mocked-TAGNAME\')) AS `TagName`) ,window_events AS (SELECT COALESCE(a.`TagName`, b.`TagName`) AS `TagName`, COALESCE(a.`EventTime`, b.`EventTime`) AS `EventTime`, WINDOW(COALESCE(a.`EventTime`, b.`EventTime`), \'15 minute\').START `WindowEventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON CAST(a.`EventTime` AS LONG) = CAST(b.`EventTime` AS LONG) AND a.`TagName` = b.`TagName`) ,calculation_set_up AS (SELECT `EventTime`, `WindowEventTime`, `TagName`, `Value`, MOD(`Value` - 0, (360 - 0))*(2*pi()/(360 - 0)) AS `Value_in_Radians`, LAG(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_EventTime`, (unix_millis(`EventTime`) - unix_millis(`Previous_EventTime`)) / 86400000 AS Time_Difference, COS(`Value_in_Radians`) AS Cos_Value, SIN(`Value_in_Radians`) AS Sin_Value FROM window_events) ,circular_average_calculations AS (SELECT `WindowEventTime`, `TagName`, Time_Difference, AVG(Cos_Value) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) AS Average_Cos, AVG(Sin_Value) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) AS Average_Sin, SQRT(POW(Average_Cos, 2) + POW(Average_Sin, 2)) AS Vector_Length, Average_Cos/Vector_Length AS Rescaled_Average_Cos, Average_Sin/Vector_Length AS Rescaled_Average_Sin, Time_Difference * Rescaled_Average_Cos AS Diff_Average_Cos, Time_Difference * Rescaled_Average_Sin AS Diff_Average_Sin FROM calculation_set_up) ,circular_average_results AS (SELECT `WindowEventTime` AS `EventTime`, `TagName`, sum(Diff_Average_Cos)/sum(Time_Difference) AS Cos_Time_Averages, sum(Diff_Average_Sin)/sum(Time_Difference) AS Sin_Time_Averages, array_min(array(1, 
sqrt(pow(Cos_Time_Averages, 2) + pow(Sin_Time_Averages, 2)))) AS R, mod(2*pi() + atan2(Sin_Time_Averages, Cos_Time_Averages), 2*pi()) AS Circular_Average_Value_in_Radians, SQRT(-2*LN(R)) * ( 360 - 0) / (2*PI()) AS Circular_Standard_Deviation FROM circular_average_calculations GROUP BY `TagName`, `WindowEventTime`) ,project AS (SELECT `EventTime`, `TagName`, Circular_Standard_Deviation AS `Value` FROM circular_average_results) ,pivot AS (SELECT * FROM (SELECT `EventTime`, `Value`, `TagName` AS `TagName` FROM project) PIVOT (FIRST(`Value`) FOR `TagName` IN (\'mocked-TAGNAME\' AS `mocked-TAGNAME`))) SELECT * FROM pivot ORDER BY `EventTime` ' -LATEST_MOCKED_QUERY = "SELECT * FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_latest` WHERE `TagName` IN ('mocked-TAGNAME') ORDER BY `TagName` " -LATEST_MOCKED_QUERY_CHECK_TAGS = "SELECT * FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_latest` WHERE UPPER(`TagName`) IN ('MOCKED-TAGNAME') ORDER BY `TagName` " -LATEST_MOCKED_QUERY_NO_TAGS = "SELECT * FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_latest` ORDER BY `TagName` " -SUMMARY_MOCKED_QUERY = 'SELECT `TagName`, count(`Value`) as Count, CAST(Avg(`Value`) as decimal(10, 2)) as Avg, CAST(Min(`Value`) as decimal(10, 2)) as Min, CAST(Max(`Value`) as decimal(10, 2)) as Max, CAST(stddev(`Value`) as decimal(10, 2)) as StDev, CAST(sum(`Value`) as decimal(10, 2)) as Sum, CAST(variance(`Value`) as decimal(10, 2)) as Var FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN to_timestamp("2011-01-01T00:00:00+00:00") AND to_timestamp("2011-01-02T23:59:59+00:00") AND `TagName` IN (\'mocked-TAGNAME\') GROUP BY `TagName` ' -SUMMARY_MOCKED_QUERY_CHECK_TAGS = 'SELECT `TagName`, count(`Value`) as Count, CAST(Avg(`Value`) as decimal(10, 2)) as Avg, CAST(Min(`Value`) as decimal(10, 2)) as Min, CAST(Max(`Value`) as decimal(10, 2)) as Max, CAST(stddev(`Value`) as decimal(10, 2)) as StDev, CAST(sum(`Value`) as decimal(10, 2)) as Sum, CAST(variance(`Value`) as decimal(10, 2)) as Var FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN to_timestamp("2011-01-01T00:00:00+00:00") AND to_timestamp("2011-01-02T23:59:59+00:00") AND UPPER(`TagName`) IN (\'MOCKED-TAGNAME\') GROUP BY `TagName` ' +CIRCULAR_SD_MOCKED_QUERY_UOM = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN TO_TIMESTAMP("2011-01-01T00:00:00+00:00") AND TO_TIMESTAMP("2011-01-02T23:59:59+00:00") AND `TagName` IN (\'mocked-TAGNAME\') ) ,date_array AS (SELECT EXPLODE(SEQUENCE(FROM_UTC_TIMESTAMP(TO_TIMESTAMP("2011-01-01T00:00:00+00:00"), "+0000"), FROM_UTC_TIMESTAMP(TO_TIMESTAMP("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS `EventTime`, EXPLODE(ARRAY(\'mocked-TAGNAME\')) AS `TagName`) ,window_events AS (SELECT COALESCE(a.`TagName`, b.`TagName`) AS `TagName`, COALESCE(a.`EventTime`, b.`EventTime`) AS `EventTime`, WINDOW(COALESCE(a.`EventTime`, b.`EventTime`), \'15 minute\').START `WindowEventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON CAST(a.`EventTime` AS LONG) = CAST(b.`EventTime` AS LONG) AND 
a.`TagName` = b.`TagName`) ,calculation_set_up AS (SELECT `EventTime`, `WindowEventTime`, `TagName`, `Value`, MOD(`Value` - 0, (360 - 0))*(2*pi()/(360 - 0)) AS `Value_in_Radians`, LAG(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_EventTime`, (unix_millis(`EventTime`) - unix_millis(`Previous_EventTime`)) / 86400000 AS Time_Difference, COS(`Value_in_Radians`) AS Cos_Value, SIN(`Value_in_Radians`) AS Sin_Value FROM window_events) ,circular_average_calculations AS (SELECT `WindowEventTime`, `TagName`, Time_Difference, AVG(Cos_Value) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) AS Average_Cos, AVG(Sin_Value) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) AS Average_Sin, SQRT(POW(Average_Cos, 2) + POW(Average_Sin, 2)) AS Vector_Length, Average_Cos/Vector_Length AS Rescaled_Average_Cos, Average_Sin/Vector_Length AS Rescaled_Average_Sin, Time_Difference * Rescaled_Average_Cos AS Diff_Average_Cos, Time_Difference * Rescaled_Average_Sin AS Diff_Average_Sin FROM calculation_set_up) ,circular_average_results AS (SELECT `WindowEventTime` AS `EventTime`, `TagName`, sum(Diff_Average_Cos)/sum(Time_Difference) AS Cos_Time_Averages, sum(Diff_Average_Sin)/sum(Time_Difference) AS Sin_Time_Averages, array_min(array(1, sqrt(pow(Cos_Time_Averages, 2) + pow(Sin_Time_Averages, 2)))) AS R, mod(2*pi() + atan2(Sin_Time_Averages, Cos_Time_Averages), 2*pi()) AS Circular_Average_Value_in_Radians, SQRT(-2*LN(R)) * ( 360 - 0) / (2*PI()) AS Circular_Standard_Deviation FROM circular_average_calculations GROUP BY `TagName`, `WindowEventTime`) ,project AS (SELECT `EventTime`, `TagName`, Circular_Standard_Deviation AS `Value` FROM circular_average_results) SELECT p.*, m.`UoM` FROM project p LEFT OUTER JOIN `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_metadata` m ON p.`TagName` = m.`TagName` ORDER BY `TagName`, `EventTime` ' + +# Latest +LATEST_MOCKED_QUERY = "WITH latest AS (SELECT * FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_latest` WHERE `TagName` IN ('mocked-TAGNAME') ORDER BY `TagName` ) SELECT * FROM latest " +LATEST_MOCKED_QUERY_CHECK_TAGS = "WITH latest AS (SELECT * FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_latest` WHERE UPPER(`TagName`) IN ('MOCKED-TAGNAME') ORDER BY `TagName` ) SELECT * FROM latest " +LATEST_MOCKED_QUERY_NO_TAGS = "WITH latest AS (SELECT * FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_latest` ORDER BY `TagName` ) SELECT * FROM latest " +LATEST_MOCKED_QUERY_UOM = "WITH latest AS (SELECT * FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_latest` WHERE `TagName` IN ('mocked-TAGNAME') ORDER BY `TagName` ) SELECT l.*, m.`UoM` FROM latest l LEFT OUTER JOIN `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_metadata` m ON l.`TagName` = m.`TagName` " + +# Summary +SUMMARY_MOCKED_QUERY = 'WITH summary AS (SELECT `TagName`, count(`Value`) as Count, CAST(Avg(`Value`) as decimal(10, 2)) as Avg, CAST(Min(`Value`) as decimal(10, 2)) as Min, CAST(Max(`Value`) as decimal(10, 2)) as Max, CAST(stddev(`Value`) as decimal(10, 2)) as StDev, CAST(sum(`Value`) as decimal(10, 2)) as Sum, CAST(variance(`Value`) as decimal(10, 2)) as Var FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN 
to_timestamp("2011-01-01T00:00:00+00:00") AND to_timestamp("2011-01-02T23:59:59+00:00") AND `TagName` IN (\'mocked-TAGNAME\') GROUP BY `TagName`) SELECT * FROM summary ' +SUMMARY_MOCKED_QUERY_CHECK_TAGS = 'WITH summary AS (SELECT `TagName`, count(`Value`) as Count, CAST(Avg(`Value`) as decimal(10, 2)) as Avg, CAST(Min(`Value`) as decimal(10, 2)) as Min, CAST(Max(`Value`) as decimal(10, 2)) as Max, CAST(stddev(`Value`) as decimal(10, 2)) as StDev, CAST(sum(`Value`) as decimal(10, 2)) as Sum, CAST(variance(`Value`) as decimal(10, 2)) as Var FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN to_timestamp("2011-01-01T00:00:00+00:00") AND to_timestamp("2011-01-02T23:59:59+00:00") AND UPPER(`TagName`) IN (\'MOCKED-TAGNAME\') GROUP BY `TagName`) SELECT * FROM summary ' +SUMMARY_MOCKED_QUERY_UOM = 'WITH summary AS (SELECT `TagName`, count(`Value`) as Count, CAST(Avg(`Value`) as decimal(10, 2)) as Avg, CAST(Min(`Value`) as decimal(10, 2)) as Min, CAST(Max(`Value`) as decimal(10, 2)) as Max, CAST(stddev(`Value`) as decimal(10, 2)) as StDev, CAST(sum(`Value`) as decimal(10, 2)) as Sum, CAST(variance(`Value`) as decimal(10, 2)) as Var FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE `EventTime` BETWEEN to_timestamp("2011-01-01T00:00:00+00:00") AND to_timestamp("2011-01-02T23:59:59+00:00") AND `TagName` IN (\'mocked-TAGNAME\') GROUP BY `TagName`) SELECT s.*, m.`UoM` FROM summary s LEFT OUTER JOIN `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_metadata` m ON s.`TagName` = m.`TagName` ' diff --git a/tests/sdk/python/rtdip_sdk/queries/time_series/test_circular_average.py b/tests/sdk/python/rtdip_sdk/queries/time_series/test_circular_average.py index 8ceb26183..a790c640d 100644 --- a/tests/sdk/python/rtdip_sdk/queries/time_series/test_circular_average.py +++ b/tests/sdk/python/rtdip_sdk/queries/time_series/test_circular_average.py @@ -30,6 +30,7 @@ CIRCULAR_AVERAGE_MOCKED_QUERY, CIRCULAR_AVERAGE_MOCKED_QUERY_CHECK_TAGS, CIRCULAR_AVERAGE_MOCKED_QUERY_PIVOT, + CIRCULAR_AVERAGE_MOCKED_QUERY_UOM, ) MOCKED_CIRCULAR_AVERAGE_PARAMETER_DICT = MOCKED_PARAMETER_DICT.copy() @@ -70,10 +71,21 @@ def test_circular_average_pivot(mocker: MockerFixture): ) +def test_circular_average_uom(mocker: MockerFixture): + MOCKED_CIRCULAR_AVERAGE_PARAMETER_DICT["pivot"] = False + MOCKED_CIRCULAR_AVERAGE_PARAMETER_DICT["display_uom"] = True + _test_base_succeed( + mocker, + MOCKED_CIRCULAR_AVERAGE_PARAMETER_DICT, + CIRCULAR_AVERAGE_MOCKED_QUERY_UOM, + circular_average_get, + ) + + def test_circular_average_offset_limit(mocker: MockerFixture): MOCKED_CIRCULAR_AVERAGE_PARAMETER_DICT["limit"] = 10 MOCKED_CIRCULAR_AVERAGE_PARAMETER_DICT["offset"] = 10 - MOCKED_CIRCULAR_AVERAGE_PARAMETER_DICT["pivot"] = False + MOCKED_CIRCULAR_AVERAGE_PARAMETER_DICT["display_uom"] = False _test_base_succeed( mocker, MOCKED_CIRCULAR_AVERAGE_PARAMETER_DICT, diff --git a/tests/sdk/python/rtdip_sdk/queries/time_series/test_circular_standard_deviation.py b/tests/sdk/python/rtdip_sdk/queries/time_series/test_circular_standard_deviation.py index eb9b62b52..de7de5f6a 100644 --- a/tests/sdk/python/rtdip_sdk/queries/time_series/test_circular_standard_deviation.py +++ b/tests/sdk/python/rtdip_sdk/queries/time_series/test_circular_standard_deviation.py @@ -29,6 +29,7 @@ CIRCULAR_SD_MOCKED_QUERY, CIRCULAR_SD_MOCKED_QUERY_CHECK_TAGS, CIRCULAR_SD_MOCKED_QUERY_PIVOT, + CIRCULAR_SD_MOCKED_QUERY_UOM, ) 
MOCKED_CIRCULAR_SD_PARAMETER_DICT = MOCKED_PARAMETER_DICT.copy() @@ -69,10 +70,21 @@ def test_circular_standard_deviation_pivot(mocker: MockerFixture): ) +def test_circular_standard_deviation_uom(mocker: MockerFixture): + MOCKED_CIRCULAR_SD_PARAMETER_DICT["pivot"] = False + MOCKED_CIRCULAR_SD_PARAMETER_DICT["display_uom"] = True + _test_base_succeed( + mocker, + MOCKED_CIRCULAR_SD_PARAMETER_DICT, + CIRCULAR_SD_MOCKED_QUERY_UOM, + circular_standard_deviation_get, + ) + + def test_circular_standard_deviation_offset_limit(mocker: MockerFixture): MOCKED_CIRCULAR_SD_PARAMETER_DICT["limit"] = 10 MOCKED_CIRCULAR_SD_PARAMETER_DICT["offset"] = 10 - MOCKED_CIRCULAR_SD_PARAMETER_DICT["pivot"] = False + MOCKED_CIRCULAR_SD_PARAMETER_DICT["display_uom"] = False _test_base_succeed( mocker, diff --git a/tests/sdk/python/rtdip_sdk/queries/time_series/test_interpolate.py b/tests/sdk/python/rtdip_sdk/queries/time_series/test_interpolate.py index 46aa3e055..829e8e040 100644 --- a/tests/sdk/python/rtdip_sdk/queries/time_series/test_interpolate.py +++ b/tests/sdk/python/rtdip_sdk/queries/time_series/test_interpolate.py @@ -31,6 +31,7 @@ INTERPOLATE_MOCKED_QUERY_CHECK_TAGS, MOCKED_QUERY_OFFSET_LIMIT, INTERPOLATE_MOCKED_QUERY_PIVOT, + INTERPOLATE_MOCKED_QUERY_UOM, ) MOCKED_INTERPOLATE_PARAMETER_DICT = MOCKED_PARAMETER_DICT.copy() @@ -50,8 +51,19 @@ def test_interpolate_backward_fill(mocker: MockerFixture): ) +def test_interpolate_uom(mocker: MockerFixture): + MOCKED_INTERPOLATE_PARAMETER_DICT["display_uom"] = True + _test_base_succeed( + mocker, + MOCKED_INTERPOLATE_PARAMETER_DICT, + INTERPOLATE_MOCKED_QUERY_UOM, + interpolate_get, + ) + + def test_interpolate_forward_fill(mocker: MockerFixture): MOCKED_INTERPOLATE_PARAMETER_DICT["interpolation_method"] = "forward_fill" + MOCKED_INTERPOLATE_PARAMETER_DICT["display_uom"] = False _test_base_succeed( mocker, MOCKED_INTERPOLATE_PARAMETER_DICT, @@ -93,9 +105,9 @@ def test_interpolate_pivot(mocker: MockerFixture): def test_interpolate_offset_limit(mocker: MockerFixture): + MOCKED_INTERPOLATE_PARAMETER_DICT["pivot"] = False MOCKED_INTERPOLATE_PARAMETER_DICT["offset"] = 10 MOCKED_INTERPOLATE_PARAMETER_DICT["limit"] = 10 - MOCKED_INTERPOLATE_PARAMETER_DICT["pivot"] = False _test_base_succeed( mocker, diff --git a/tests/sdk/python/rtdip_sdk/queries/time_series/test_interpolation_at_time.py b/tests/sdk/python/rtdip_sdk/queries/time_series/test_interpolation_at_time.py index 60b26ee4b..55ebbc778 100644 --- a/tests/sdk/python/rtdip_sdk/queries/time_series/test_interpolation_at_time.py +++ b/tests/sdk/python/rtdip_sdk/queries/time_series/test_interpolation_at_time.py @@ -30,6 +30,7 @@ IAT_MOCKED_QUERY, IAT_MOCKED_QUERY_CHECK_TAGS, IAT_MOCKED_QUERY_PIVOT, + IAT_MOCKED_QUERY_UOM, ) MOCKED_IAT_PARAMETER_DICT = MOCKED_PARAMETER_DICT.copy() @@ -68,10 +69,23 @@ def test_interpolation_at_time_pivot(mocker: MockerFixture): ) +def test_interpolation_at_time_uom(mocker: MockerFixture): + MOCKED_IAT_PARAMETER_DICT["pivot"] = False + MOCKED_IAT_PARAMETER_DICT["display_uom"] = True + + _test_base_succeed( + mocker, + MOCKED_IAT_PARAMETER_DICT, + IAT_MOCKED_QUERY_UOM, + interpolation_at_time_get, + ) + + def test_interpolation_at_time_offset_limit(mocker: MockerFixture): + MOCKED_IAT_PARAMETER_DICT["display_uom"] = False MOCKED_IAT_PARAMETER_DICT["offset"] = 10 MOCKED_IAT_PARAMETER_DICT["limit"] = 10 - MOCKED_IAT_PARAMETER_DICT["pivot"] = False + _test_base_succeed( mocker, MOCKED_IAT_PARAMETER_DICT, diff --git a/tests/sdk/python/rtdip_sdk/queries/time_series/test_latest.py 
b/tests/sdk/python/rtdip_sdk/queries/time_series/test_latest.py index d8717e1e1..5a5737ffa 100644 --- a/tests/sdk/python/rtdip_sdk/queries/time_series/test_latest.py +++ b/tests/sdk/python/rtdip_sdk/queries/time_series/test_latest.py @@ -27,6 +27,7 @@ LATEST_MOCKED_QUERY, LATEST_MOCKED_QUERY_CHECK_TAGS, LATEST_MOCKED_QUERY_NO_TAGS, + LATEST_MOCKED_QUERY_UOM, ) MOCKED_LATEST_PARAMETER_DICT = MOCKED_PARAMETER_DICT.copy() @@ -45,8 +46,16 @@ def test_latest_check_tags(mocker: MockerFixture): ) -def test_latest_offset_limit(mocker: MockerFixture): +def test_latest_uom(mocker: MockerFixture): MOCKED_LATEST_PARAMETER_DICT["case_insensitivity_tag_search"] = False + MOCKED_LATEST_PARAMETER_DICT["display_uom"] = True + _test_base_succeed( + mocker, MOCKED_LATEST_PARAMETER_DICT, LATEST_MOCKED_QUERY_UOM, latest_raw + ) + + +def test_latest_offset_limit(mocker: MockerFixture): + MOCKED_LATEST_PARAMETER_DICT["display_uom"] = False MOCKED_LATEST_PARAMETER_DICT["offset"] = 10 MOCKED_LATEST_PARAMETER_DICT["limit"] = 10 _test_base_succeed( @@ -59,8 +68,8 @@ def test_latest_offset_limit(mocker: MockerFixture): def test_no_tag_latest(mocker: MockerFixture): MOCKED_LATEST_PARAMETER_DICT.pop("tag_names") - MOCKED_LATEST_PARAMETER_DICT.pop("offset") - MOCKED_LATEST_PARAMETER_DICT.pop("limit") + MOCKED_LATEST_PARAMETER_DICT["offset"] = None + MOCKED_LATEST_PARAMETER_DICT["limit"] = None _test_base_succeed( mocker, MOCKED_LATEST_PARAMETER_DICT, LATEST_MOCKED_QUERY_NO_TAGS, latest_raw ) diff --git a/tests/sdk/python/rtdip_sdk/queries/time_series/test_plot.py b/tests/sdk/python/rtdip_sdk/queries/time_series/test_plot.py index 320728edc..fefab34be 100644 --- a/tests/sdk/python/rtdip_sdk/queries/time_series/test_plot.py +++ b/tests/sdk/python/rtdip_sdk/queries/time_series/test_plot.py @@ -27,6 +27,8 @@ MOCKED_PARAMETER_DICT, PLOT_MOCKED_QUERY, PLOT_MOCKED_QUERY_CHECK_TAGS, + PLOT_MOCKED_QUERY_PIVOT, + PLOT_MOCKED_QUERY_UOM, ) MOCKED_PLOT_PARAMETER_DICT = MOCKED_PARAMETER_DICT.copy() @@ -65,7 +67,31 @@ def test_plot_sample_rate_unit(mocker: MockerFixture): ) +def test_plot_pivot(mocker: MockerFixture): + MOCKED_PLOT_PARAMETER_DICT["pivot"] = True + + _test_base_succeed( + mocker, + MOCKED_PLOT_PARAMETER_DICT, + PLOT_MOCKED_QUERY_PIVOT, + plot_get, + ) + + +def test_plot_uom(mocker: MockerFixture): + MOCKED_PLOT_PARAMETER_DICT["pivot"] = False + MOCKED_PLOT_PARAMETER_DICT["display_uom"] = True + + _test_base_succeed( + mocker, + MOCKED_PLOT_PARAMETER_DICT, + PLOT_MOCKED_QUERY_UOM, + plot_get, + ) + + def test_plot_offset_limit(mocker: MockerFixture): + MOCKED_PLOT_PARAMETER_DICT["display_uom"] = False MOCKED_PLOT_PARAMETER_DICT["offset"] = 10 MOCKED_PLOT_PARAMETER_DICT["limit"] = 10 _test_base_succeed( diff --git a/tests/sdk/python/rtdip_sdk/queries/time_series/test_resample.py b/tests/sdk/python/rtdip_sdk/queries/time_series/test_resample.py index 188e859ab..35bd18d54 100644 --- a/tests/sdk/python/rtdip_sdk/queries/time_series/test_resample.py +++ b/tests/sdk/python/rtdip_sdk/queries/time_series/test_resample.py @@ -28,6 +28,7 @@ RESAMPLE_MOCKED_QUERY, RESAMPLE_MOCKED_QUERY_CHECK_TAGS, RESAMPLE_MOCKED_QUERY_PIVOT, + RESAMPLE_MOCKED_QUERY_UOM, ) MOCKED_RESAMPLED_PARAMETER_DICT = MOCKED_PARAMETER_DICT.copy() @@ -78,10 +79,21 @@ def test_resample_pivot(mocker: MockerFixture): ) +def test_resample_uom(mocker: MockerFixture): + MOCKED_RESAMPLED_PARAMETER_DICT["pivot"] = False + MOCKED_RESAMPLED_PARAMETER_DICT["display_uom"] = True + _test_base_succeed( + mocker, + MOCKED_RESAMPLED_PARAMETER_DICT, + 
RESAMPLE_MOCKED_QUERY_UOM, + resample_get, + ) + + def test_resample_offset_limit(mocker: MockerFixture): MOCKED_RESAMPLED_PARAMETER_DICT["offset"] = 10 MOCKED_RESAMPLED_PARAMETER_DICT["limit"] = 10 - MOCKED_RESAMPLED_PARAMETER_DICT["pivot"] = False + MOCKED_RESAMPLED_PARAMETER_DICT["display_uom"] = False _test_base_succeed( mocker, MOCKED_RESAMPLED_PARAMETER_DICT, diff --git a/tests/sdk/python/rtdip_sdk/queries/time_series/test_summary.py b/tests/sdk/python/rtdip_sdk/queries/time_series/test_summary.py index e32a590c5..d706bf328 100644 --- a/tests/sdk/python/rtdip_sdk/queries/time_series/test_summary.py +++ b/tests/sdk/python/rtdip_sdk/queries/time_series/test_summary.py @@ -34,6 +34,7 @@ MOCKED_PARAMETER_DICT, SUMMARY_MOCKED_QUERY, SUMMARY_MOCKED_QUERY_CHECK_TAGS, + SUMMARY_MOCKED_QUERY_UOM, ) MOCKED_SUMMARY_DICT = MOCKED_PARAMETER_DICT.copy() @@ -58,8 +59,19 @@ def test_summary_get_check_tags(mocker: MockerFixture): ) -def test_summary_offset_limit(mocker: MockerFixture): +def test_summary_uom(mocker: MockerFixture): MOCKED_SUMMARY_DICT["case_insensitivity_tag_search"] = False + MOCKED_SUMMARY_DICT["display_uom"] = True + _test_base_succeed( + mocker, + MOCKED_SUMMARY_DICT, + SUMMARY_MOCKED_QUERY_UOM, + summary_get, + ) + + +def test_summary_offset_limit(mocker: MockerFixture): + MOCKED_SUMMARY_DICT["display_uom"] = False MOCKED_SUMMARY_DICT["offset"] = 10 MOCKED_SUMMARY_DICT["limit"] = 10 _test_base_succeed( diff --git a/tests/sdk/python/rtdip_sdk/queries/time_series/test_time_weighted_average.py b/tests/sdk/python/rtdip_sdk/queries/time_series/test_time_weighted_average.py index 2920a565b..55f2142ea 100644 --- a/tests/sdk/python/rtdip_sdk/queries/time_series/test_time_weighted_average.py +++ b/tests/sdk/python/rtdip_sdk/queries/time_series/test_time_weighted_average.py @@ -31,6 +31,7 @@ TWA_MOCKED_QUERY_CHECK_TAGS, TWA_MOCKED_QUERY_PIVOT, TWA_MOCKED_QUERY_METADATA, + TWA_MOCKED_QUERY_UOM, ) MOCKED_TWA_PARAMETER_DICT = MOCKED_PARAMETER_DICT.copy() @@ -52,6 +53,7 @@ def test_time_weighted_average(mocker: MockerFixture): def test_time_weighted_average_check_tags(mocker: MockerFixture): MOCKED_TWA_PARAMETER_DICT["case_insensitivity_tag_search"] = True + _test_base_succeed( mocker, MOCKED_TWA_PARAMETER_DICT, @@ -63,6 +65,7 @@ def test_time_weighted_average_check_tags(mocker: MockerFixture): def test_time_weighted_average_with_window_size_mins(mocker: MockerFixture): MOCKED_TWA_PARAMETER_DICT["case_insensitivity_tag_search"] = False MOCKED_TWA_PARAMETER_DICT["window_size_mins"] = 15 + _test_base_succeed( mocker, MOCKED_TWA_PARAMETER_DICT, @@ -73,6 +76,7 @@ def test_time_weighted_average_with_window_size_mins(mocker: MockerFixture): def test_time_weighted_average_metadata_step(mocker: MockerFixture): MOCKED_TWA_PARAMETER_DICT["step"] = "metadata" + _test_base_succeed( mocker, MOCKED_TWA_PARAMETER_DICT, @@ -83,6 +87,7 @@ def test_time_weighted_average_metadata_step(mocker: MockerFixture): def test_time_weighted_average_pivot(mocker: MockerFixture): MOCKED_TWA_PARAMETER_DICT["pivot"] = True + _test_base_succeed( mocker, MOCKED_TWA_PARAMETER_DICT, @@ -91,11 +96,24 @@ def test_time_weighted_average_pivot(mocker: MockerFixture): ) +def test_time_weighted_average_uom(mocker: MockerFixture): + MOCKED_TWA_PARAMETER_DICT["pivot"] = False + MOCKED_TWA_PARAMETER_DICT["step"] = "false" + MOCKED_TWA_PARAMETER_DICT["display_uom"] = True + + _test_base_succeed( + mocker, + MOCKED_TWA_PARAMETER_DICT, + TWA_MOCKED_QUERY_UOM, + time_weighted_average_get, + ) + + def 
test_time_weighted_average_offset_limit(mocker: MockerFixture): MOCKED_TWA_PARAMETER_DICT["offset"] = 10 MOCKED_TWA_PARAMETER_DICT["limit"] = 10 - MOCKED_TWA_PARAMETER_DICT["pivot"] = False - MOCKED_TWA_PARAMETER_DICT["step"] = "false" + MOCKED_TWA_PARAMETER_DICT["display_uom"] = False + _test_base_succeed( mocker, MOCKED_TWA_PARAMETER_DICT,