From a248c2b0f73252c056ac3ec9e766add11908a53d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=98yvind=20Eide?= Date: Fri, 4 Oct 2024 08:42:29 +0200 Subject: [PATCH 1/2] Remove deactivated localization job --- pyproject.toml | 1 - src/semeio/workflows/localisation/__init__.py | 0 .../localisation/local_config_script.py | 481 --------- .../localisation/local_script_lib.py | 963 ------------------ .../localisation/localisation_config.py | 425 -------- .../localisation_debug_settings.py | 16 - tests/workflows/localisation/__init__.py | 0 .../testdata_scaling.txt | 17 - .../testdata_scaling_decay_method1.txt | 1 - .../testdata_scaling_decay_method2.txt | 1 - .../testdata_scaling_decay_method3.txt | 1 - .../testdata_scaling_decay_method4.txt | 1 - .../testdata_scaling_decay_method5.txt | 1 - .../testdata_scaling_smooth.txt | 50 - .../localisation/test_configs/__init__.py | 0 .../localisation/test_configs/test_config.py | 739 -------------- .../test_configs/test_field_config.py | 238 ----- .../test_configs/test_obs_config.py | 8 - .../test_configs/test_parameters.py | 24 - .../localisation/test_integration.py | 526 ---------- tests/workflows/localisation/test_methods.py | 293 ------ .../workflows/localisation/test_valid_rst.py | 13 - 22 files changed, 3799 deletions(-) delete mode 100644 src/semeio/workflows/localisation/__init__.py delete mode 100644 src/semeio/workflows/localisation/local_config_script.py delete mode 100644 src/semeio/workflows/localisation/local_script_lib.py delete mode 100644 src/semeio/workflows/localisation/localisation_config.py delete mode 100644 src/semeio/workflows/localisation/localisation_debug_settings.py delete mode 100644 tests/workflows/localisation/__init__.py delete mode 100644 tests/workflows/localisation/snapshots/test_methods/test_calculate_scaling_factors_in_regions/testdata_scaling.txt delete mode 100644 tests/workflows/localisation/snapshots/test_methods/test_decay_function_with_new_options/testdata_scaling_decay_method1.txt delete mode 100644 tests/workflows/localisation/snapshots/test_methods/test_decay_function_with_new_options/testdata_scaling_decay_method2.txt delete mode 100644 tests/workflows/localisation/snapshots/test_methods/test_decay_function_with_new_options/testdata_scaling_decay_method3.txt delete mode 100644 tests/workflows/localisation/snapshots/test_methods/test_decay_function_with_new_options/testdata_scaling_decay_method4.txt delete mode 100644 tests/workflows/localisation/snapshots/test_methods/test_decay_function_with_new_options/testdata_scaling_decay_method5.txt delete mode 100644 tests/workflows/localisation/snapshots/test_methods/test_smooth_parameter/testdata_scaling_smooth.txt delete mode 100644 tests/workflows/localisation/test_configs/__init__.py delete mode 100644 tests/workflows/localisation/test_configs/test_config.py delete mode 100644 tests/workflows/localisation/test_configs/test_field_config.py delete mode 100644 tests/workflows/localisation/test_configs/test_obs_config.py delete mode 100644 tests/workflows/localisation/test_configs/test_parameters.py delete mode 100644 tests/workflows/localisation/test_integration.py delete mode 100644 tests/workflows/localisation/test_methods.py delete mode 100644 tests/workflows/localisation/test_valid_rst.py diff --git a/pyproject.toml b/pyproject.toml index b8c4eefb3..38eb19619 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,7 +52,6 @@ repository = "https://github.com/equinor/semeio" semeio_forward_models = "semeio.hook_implementations.forward_models" CsvExport2Job = 
"semeio.workflows.csv_export2.csv_export2" AhmAnalysisJob = "semeio.workflows.ahm_analysis.ahmanalysis" -LocalisationConfigJob = "semeio.workflows.localisation.local_config_script" [project.entry-points."console_scripts"] csv_export2 = "semeio.workflows.csv_export2.csv_export2:cli" diff --git a/src/semeio/workflows/localisation/__init__.py b/src/semeio/workflows/localisation/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/semeio/workflows/localisation/local_config_script.py b/src/semeio/workflows/localisation/local_config_script.py deleted file mode 100644 index fe2ac0467..000000000 --- a/src/semeio/workflows/localisation/local_config_script.py +++ /dev/null @@ -1,481 +0,0 @@ -import ert -from ert.config import ConfigValidationError -from resdata.grid import Grid - -import semeio.workflows.localisation.local_script_lib as local -from semeio.communication import SemeioScript -from semeio.workflows.localisation.localisation_config import LocalisationConfig - - -class LocalisationConfigJob(SemeioScript): - def run(self, *args, **_): - # (SemeioScript wraps this run method) - ert = self.ert() - # Read yml file with specifications - config_dict = local.read_localisation_config(args) - - # Get all observations from ert instance - obs_keys = list(self.facade.get_observations().obs_vectors.keys()) - - ensemble_config = ert.ert_config.ensemble_config - ert_parameters = local.get_param_from_ert(ensemble_config) - - correlations = config_dict.get("correlations", []) - log_level = config_dict.get("log_level", 1) - write_scaling_factors = config_dict.get("write_scaling_factors", False) - - config = LocalisationConfig( - observations=obs_keys, - parameters=ert_parameters.to_list(), - correlations=correlations, - log_level=log_level, - write_scaling_factors=write_scaling_factors, - ) - - update_configuration = local.add_ministeps( - config, - ert_parameters.to_dict(), - ensemble_config, - Grid(self.facade.grid_file) if self.facade.grid_file is not None else None, - ) - ert.update_configuration = update_configuration - - @staticmethod - def validate(*_): - raise ConfigValidationError( - "LOCALISATION_JOB is disabled, please remove from config file" - ) - - -DESCRIPTION = """ -=================== -Localisation setup -=================== -LOCALISATION_JOB is used to define which pairs of model parameters and -observations to be active and which pairs to have reduced or 0 correlation. -If no localisation is specified, all model parameters and observations may be -correlated, although correlations can be small. With a finite ensemble of -realisations the estimate of correlations will have sampling uncertainty and -unwanted or unphysical correlations may appear. - -By using the localisation job, it is possible to restrict the allowed correlations -or reduce the correlations by a factor between 0 and 1. - -Features ----------- -The following features are implemented: - - The user defines groups of model parameters and observations, - called correlation groups or ministeps. It is possible to specify many correlation - groups. - - Wildcard notation can be used to specify a selection of model parameter groups - and observation groups. - - For scalar parameters coming from the ERT keyword GEN_KW - the correlation with observations can be specified to be active or inactive. - - For field parameters coming from the ERT keywords FIELD and SURFACE, - it is also possible to specify that the correlation between observations and - model parameters may vary from location to location. 
A field parameter - value corresponding to a grid cell (i,j,k) at location (x,y,z) has its correlation reduced by a - scaling factor varying with distance from a reference point, e.g. at a location (X,Y,Z), - usually specified to be close to an observation group. - - Pairs of observations and model parameters (obs, param) must only be specified once. - - -Using the localisation setup in ERT ------------------------------------- - -To set up localisation: - - Specify a YAML format configuration file for localisation. - - Create a workflow file containing the line: - LOCALISATION_JOB - - Load the workflow file in the ERT config file using the - LOAD_WORKFLOW keyword in ERT. - - Specify to automatically run the workflow after the initial ensemble is created, - but before the first update, by using the HOOK_WORKFLOW keyword - with the option PRE_FIRST_UPDATE. - - To QC the specification of the config file for localisation, it is also possible to - run the workflow before the initial ensemble is created. -""" - -EXAMPLES = """ -Example configurations ------------------------- - -The configuration file is a YAML format file where pairs of groups of observations -and groups of model parameters are specified. - -By default, all correlations between the observations from the observation -group and model parameters from the model parameter group are active -and unmodified. All other combinations of pairs of observations and model -parameters not specified in a correlation group are inactive and their correlations are 0. -It is possible to specify many correlation groups. If a pair of observation -and model parameter appears multiple times -(e.g. because it is a member of multiple correlation groups), -an error message is raised. - -It is also possible to scale down correlations that are specified for 3D and 2D fields. - -Example 1: ------------ -In the first example below, four correlation groups are defined. -The first correlation group is called ``CORR1`` (a user-defined name), -and defines all observations to have active correlation with all model -parameters starting with ``aps_valysar_grf`` and with ``GEO:PARAM``. -The keyword **field_scale** defines a scaling of the correlations between the -observations in the group and the selected model parameters of type -``FIELD`` in the ERT configuration file. - -The second correlation group (with name ``CORR2``) activates correlations -between observations matching the wildcard specification -["OP_2_WWCT*", "OP_5_*"] and all parameters except those starting -with ``aps_``. - -The third correlation group (with name ``CORR3``) activates correlations -between all observations and all parameters starting with ``aps_volon_grf``. -In this case, the scaling factor for correlations is defined by a 3D parameter -read from file. This example shows that it is possible to use a scaling -factor defined by the user outside of ERT. - -The fourth correlation group (with name ``CORR4``) activates correlations -between all observations and all parameters starting with ``aps_therys_grf``. -For this case, the scaling factor is specified per segment or region of -the modelling grid. For each segment specified to be active, a corresponding -scaling factor is assigned for all correlations between the observations and -the field parameter values in the segment. The option to smooth out the -discontinuities of the scaling factor field between segments is also applied.
-:: - - - log_level: 3 - write_scaling_factors: True - correlations: - - name: CORR1 - obs_group: - add: ["*"] - param_group: - add: ["aps_valysar_grf*", "GEO:PARAM*"] - field_scale: - method: gaussian_decay - main_range: 1700 - perp_range: 850 - azimuth: 310 - ref_point: [463400, 5932915] - - - name: CORR2 - obs_group: - add: ["OP_2_WWCT*", "OP_5_*"] - param_group: - add: ["*"] - remove: ["aps_*"] - surface_scale: - method: exponential_decay - main_range: 800 - perp_range: 350 - azimuth: 120 - ref_point: [463000, 5932850] - surface_file: "../../rms/output/hum/TopVolantis.irap" - - - - name: CORR3 - obs_group: - add: ["*"] - param_group: - add: ["aps_volon_grf*"] - field_scale: - method: from_file - filename: "scaling_aps_volon_grf.grdecl" - param_name: "SCALING" - - - name: CORR4 - obs_group: - add: ["*"] - param_group: - add: ["aps_therys_grf*"] - field_scale: - method: segment - segment_filename: "region.grdecl" - param_name: "REGION" - active_segments: [1, 2, 4] - scalingfactors: [1.0, 0.5, 0.3] - smooth_ranges: [2, 3] - - -Keywords ----------- -:log_level: - Optional. Defines how much information to write to the log file. - Possible values: integer values from 0 to 4. - Default is 0, corresponding to minimum info output to the log file. - -:write_scaling_factors: - Optional. - Default is not to write calculated scaling factor files. - Possible values: ``True`` or ``False``. - Defines whether an output file with calculated scaling factors is to be - created or not. The purpose is to QC the calculated scaling factors - and make it possible to visualise them. Is only relevant when using - **field_scale** with methods calculating the scaling factors. - -:correlations: - List of specifications of correlation groups. A correlation group - specifies a set of observations and a set of model parameters. - The correlation between pairs of observations and model parameters - from these groups is set active. Some of the pairs, like the correlation - between a field parameter value and an observation, may be scaled by a - factor, but the default, if no scaling is specified, is to keep the correlation - unchanged. - -:name: - Name of the correlation group. Sub keyword under a correlation group. - -:obs_group: - Sub keyword under a correlation group. - Defines a group of observations using the sub keywords **add** - and **remove**. - -:param_group: - Sub keyword under a correlation group. - Defines a group of model parameters using the sub keywords **add** - and **remove**. - -:field_scale: - Optional. - Sub keyword under a correlation group. - Defines how correlations between *field* parameters and observations - in the observation group are modified. - Default (when this keyword is not used) is to keep the correlations between - the observations and model parameters of type *field* unchanged for - the correlation group. - - For distance based localisation, this keyword is used. Typically, the correlations - are reduced with distance from the observations to the field parameter value. - A reference point is specified in a separate keyword - and should usually be located close to the observations in the observation group - when scaling correlations between field parameters and observations. - Sub keyword: **method**. Depending on which method is chosen, - additional keywords must be specified. - -:surface_scale: - Optional. - Sub keyword under a correlation group. - Defines how correlations between *surface* parameters and observations - in the observation group are modified.
- Default (when this keyword is not used) is to keep the correlations between - the observations and model parameters of type *surface* unchanged for - the correlation group. - - Like 3D fields, surface parameters are also field parameters, just in 2D. - Scaling is done in a similar way as for 3D field parameters. - Sub keywords: **method** and **surface_file**. Depending on which - method is chosen, additional keywords must be specified. - -:add: - Sub keyword under **obs_group** and **param_group**. Both the **add** - and **remove** keywords are followed by a list of observations or - parameter names. Wildcard notation can be used, and all observations - or parameters specified in the ERT config file which match the wildcard - expansion are included in the list. - - - The keyword **add** will add new observations or parameters to the list of - selected observations or parameters, while the keyword **remove** will remove - the specified observations or parameters from the selection. The **add** keyword - is required while the **remove** keyword is optional. - - The specification of parameters in the list is of the form - *node_name:parameter_name* where *node_name* is an ERT identifier - and *parameter_name* is the name of a parameter belonging to the ERT node. - - For instance, if the ``GEN_KW`` ERT keyword is used, the ERT identifier is - the node name, while the parameter names used in the distribution file contain - the names of the parameters for that node. - - For ERT nodes defined by the ERT keywords ``FIELD`` or ``SURFACE``, - only the node name is specified, like ``aps_Valysar_grf1``. - The node name represents all field values for all grid cells in the whole - 3D or 2D grid the field belongs to. - -:remove: - For details see the keyword **add**. The main purpose of **remove** is to - have a quick and easy way to specify all parameters or observations - except a few, by combining **add** and **remove**. - - -:method: - Sub keyword under **field_scale** and **surface_scale**. Is required if - **field_scale** or **surface_scale** is used. - Defines a method for calculating the scaling factor. The available methods - depend on whether **method** is a sub keyword of the **field_scale** - or **surface_scale** keyword. - - For **field_scale** the available methods are **gaussian_decay**, - **exponential_decay**, **const_gaussian_decay**, **const_exponential_decay**, - **from_file**, **segment** and **constant**. - - For **surface_scale** the available methods are **gaussian_decay**, - **exponential_decay**, **const_gaussian_decay**, - **const_exponential_decay** and **constant**. - -:exponential_decay: - Name of a method or scaling function with the default expression - *f(d) = exp(-3d)* where *d* is the normalised distance. The normalised - distance is *d = sqrt[(dx/Rx)^2 + (dy/Ry)^2]* where dx and dy are the differences - in x and y coordinates between the reference point and the grid cell midpoint for - grid cells having field parameter values. Rx and Ry are the *main_range* - and *perp_range* range parameters. A rotation of the ellipse defined by *d=1* - is also possible by specifying an azimuth direction different from 0. - There are options to modify the default definition of this function. - If a cutoff is specified using **cutoff**, the function - returns 0 if the normalised distance *d > 1*. - This method requires specification of the keywords **main_range**, **perp_range**, - **azimuth** and **ref_point**. - Optional specification of the keyword **cutoff**. A minimal sketch of this function is shown below.
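As a minimal illustration (assuming the formulas above; the helper names below are illustrative and not part of the removed module), the normalised distance and the two basic decay functions can be sketched in Python::

    import math

    def normalised_distance(x, y, ref_point, main_range, perp_range, azimuth):
        # Rotate the offset from the reference point into the azimuth direction
        # and scale by the two ranges, giving d = sqrt[(dx/Rx)^2 + (dy/Ry)^2].
        angle = math.radians(90.0 - azimuth)
        x0, y0 = x - ref_point[0], y - ref_point[1]
        dx = (x0 * math.cos(angle) + y0 * math.sin(angle)) / main_range
        dy = (-x0 * math.sin(angle) + y0 * math.cos(angle)) / perp_range
        return math.sqrt(dx * dx + dy * dy)

    def exponential_decay(d, cutoff=False):
        # f(d) = exp(-3 d), optionally 0 outside the range ellipse (d > 1)
        return 0.0 if cutoff and d > 1.0 else math.exp(-3.0 * d)

    def gaussian_decay(d, cutoff=False):
        # f(d) = exp(-3 d^2), optionally 0 outside the range ellipse (d > 1)
        return 0.0 if cutoff and d > 1.0 else math.exp(-3.0 * d * d)

The scaling factor for a grid cell at (x, y) would then be, for example, exponential_decay(normalised_distance(x, y, ref_point, main_range, perp_range, azimuth)).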
- -:gaussian_decay: - Name of a method or scaling function defined by *f(d) = exp(-3 d^2)* where *d* is - the normalised distance. - For more details see **exponential_decay** above. - -:const_exponential_decay: - Name of a method or scaling function defined by - *f(d) = 1* for *d <= 1*, - *f(d) = exp(-3(d-1)/(D-1))* for *d > 1*, where *D* is **normalised_tapering_range** - and *d* is the normalised distance. - See the description above for **exponential_decay**. - There are options to modify the default definition of this function. - If cutoff is specified using **cutoff**, then - *f(d) = 0* for *d > D*. - This method requires specification of the keywords **main_range**, **perp_range**, - **azimuth**, **ref_point** and **normalised_tapering_range**. - Optional specification of the keyword **cutoff**. - -:const_gaussian_decay: - Name of a method or scaling function with the default expression - *f(d) = 1* for *d <= 1*, - *f(d) = exp(-3 [(d-1)/(D-1)]^2)* for *d > 1*, where *D* - is **normalised_tapering_range** and *d* is the normalised distance. - See the description above for **exponential_decay**. - There are options to modify the default definition of this function. - If cutoff is specified using **cutoff**, then - *f(d) = 0* for *d > D*. - This method requires specification of the keywords **main_range**, **perp_range**, - **azimuth**, **ref_point** and **normalised_tapering_range**. - Optional specification of the keyword **cutoff**. - -:constant: - Name of a method where the scaling function is constant. - The method requires specification of the keyword **value** followed by - the scaling factor. - -:main_range: - Sub keyword under **field_scale** or **surface_scale**. Is only used for - the methods **exponential_decay**, **gaussian_decay**, **const_exponential_decay** - and **const_gaussian_decay**. - It defines the distance where the scaling values are reduced to approximately - 0.05 and is measured in the **azimuth** direction. - -:perp_range: - Sub keyword under **field_scale** or **surface_scale**. Is only used for - the methods **exponential_decay**, **gaussian_decay**, **const_exponential_decay** - and **const_gaussian_decay**. - It defines the distance where the scaling values are reduced to approximately - 0.05 and is measured orthogonal to the **azimuth** direction. - -:azimuth: - Sub keyword under **field_scale** or **surface_scale**. Is only used for - the methods **exponential_decay**, **gaussian_decay**, **const_exponential_decay** - and **const_gaussian_decay**. - It defines the azimuth of the main anisotropy direction - for the scaling factor decay function. - -:ref_point: - Sub keyword under **field_scale** or **surface_scale**. Is only used for - the methods **exponential_decay**, **gaussian_decay**, **const_exponential_decay** - and **const_gaussian_decay**. - It defines the (x,y) position used by the scaling functions when calculating - the distance to a grid cell with a field parameter value. A grid cell located at the - reference point will have distance 0, which means that the scaling factor is - 1.0 for correlations between observations and the field parameter in that - location. - -:cutoff: - Optional sub keyword under **field_scale** or **surface_scale**. - Is only used for the methods **exponential_decay**, **gaussian_decay**, - **const_exponential_decay** and **const_gaussian_decay**. - Takes True/False as values. Default is False. If this keyword is set to True, - the scaling function is set to 0 when the distance from the reference - point to a grid cell is larger than the specified range. - -:normalised_tapering_range: - Optional sub keyword under **field_scale** or **surface_scale**. - Is only used for the methods **const_exponential_decay** and - **const_gaussian_decay**.
- Legal values for the normalised tapering range *D* are *D > 1*. - The default value is *D = 1.5*. - When using this option, the scaling function is set - to 1.0 for all grid cells with normalised distance *d <= 1*. - For normalised distance *d > 1* the function decays from 1 and approaches 0 - with increasing distance. The normalised distance is defined to be - the distance measured in number of ranges. Normalised distance equal to 1 - corresponds to all points on the ellipse with half axes equal to the - specified **main_range** and **perp_range**, centered at the reference - point. By using this option, a second ellipse - is defined corresponding to normalised distance equal to the specified - value for this keyword. At this distance the scaling factor has decreased - to around 0.05. Typical values for **normalised_tapering_range** are - between 1 and 3. - If **cutoff** is True, the scaling function is - set to 0 for normalised distance *d > D*. - -:surface_file: - Sub keyword under **surface_scale**. Is required and specifies the filename of - a surface file. Is used to find the size (number of grid cells) of the - surface parameters. - -:from_file: - Scaling function defined externally and read from file. Requires the keywords - **filename** and **param_name** containing the file name and the name - of the parameter in the GRDECL file to be used. - -:segment: - Scaling function method available only for fields, and is specified - under **field_scale**. - Requires the following keywords: **segment_filename**, **param_name**, - **active_segments** and **scalingfactors**, all as sub keywords - under **field_scale**. The segment file must contain an integer - segment number for each grid cell of the field. The file format - is GRDECL text format. - The keyword **param_name** gives the name of the parameter to read from the file - supplied in **segment_filename**. - -:active_segments: - Sub keyword under **field_scale**. Is only used if the method is **segment**. - A list of integer numbers for the segments used to define active field - parameter values. - -:scalingfactors: - Sub keyword under **field_scale**. Is only used if the method is **segment**. - A list of float values between 0 and 1 is specified. The values are - scaling factors to be used in the specified active segments. - The lists in **active_segments** and **scalingfactors** must be of the same - length, and the first value in the **scalingfactors** list corresponds to - the first segment number in the **active_segments** list and so on. - -:smooth_ranges: - It is possible to remove the discontinuities in the scaling factor across - two different segments. This is done by using a smoothing operation - on the scaling factor field. A rectangular area of specified size is used in - a moving average operation to modify the scaling factor field. The size - of this moving average rectangle is by default set to 0 in - both the I-direction and J-direction of the grid specified in the ERT config file, - and no moving average is done. By specifying sizes in the I and J directions - (DI, DJ) as integer values greater than 0, the moving average window - defined by the 2D interval around a grid cell with - index (I,J,K) is [I - DI, I + DI] x [J - DJ, J + DJ] x [K, K]. The keyword - smooth_ranges is followed by a list of two non-negative integers.
- Default: [0, 0] - -""" - - -@ert.plugin(name="semeio") -def legacy_ertscript_workflow(config): - workflow = config.add_workflow(LocalisationConfigJob, "LOCALISATION_JOB") - workflow.description = DESCRIPTION - workflow.examples = EXAMPLES - workflow.category = "observations.correlation" diff --git a/src/semeio/workflows/localisation/local_script_lib.py b/src/semeio/workflows/localisation/local_script_lib.py deleted file mode 100644 index 5f0c23f66..000000000 --- a/src/semeio/workflows/localisation/local_script_lib.py +++ /dev/null @@ -1,963 +0,0 @@ -import itertools -import logging -import math -from collections import defaultdict -from dataclasses import dataclass, field -from typing import Dict, List - -import cwrap -import numpy as np -import yaml -from ert.config import Field, GenDataConfig, GenKwConfig, SurfaceConfig -from numpy import ma -from resdata.geometry import Surface -from resdata.grid.rd_grid import Grid -from resdata.rd_type import ResDataType -from resdata.resfile import Resdata3DKW - -from semeio.workflows.localisation.localisation_debug_settings import ( - LogLevel, - debug_print, -) - - -class RowScaling: - def assign_vector(self, *_): - pass - - def __setitem__(self, *_): - pass - - -@dataclass -class Parameter: - name: str - parameters: List = field(default_factory=list) - - def to_list(self): - if self.parameters: - return [f"{self.name}:{parameter}" for parameter in self.parameters] - return [f"{self.name}"] - - def to_dict(self): - return {self.name: self.parameters} - - -@dataclass -class Parameters: - parameters: List[Parameter] = field(default_factory=list) - - def append(self, new): - self.parameters.append(new) - - def to_list(self): - result = [] - for parameter in self.parameters: - result.extend(parameter.to_list()) - return result - - def to_dict(self): - result = {} - for parameter in self.parameters: - if parameter.name in result: - raise ValueError(f"Duplicate parameters found: {parameter.name}") - result.update(parameter.to_dict()) - return result - - @classmethod - def from_list(cls, input_list): - result = defaultdict(list) - for item in input_list: - words = item.split(":") - if len(words) == 1: - name = words[0] - parameters = None - elif len(words) == 2: - name, parameters = words - else: - raise ValueError(f"Too many : in {item}") - if name in result: - if not parameters: - raise ValueError( - f"Inconsistent parameters, found {name} in " - f"{dict(result)}, but did not find parameters" - ) - if not result[name]: - raise ValueError( - f"Inconsistent parameters, found {name} in {dict(result)} but " - f"did not expect parameters, found {parameters}" - ) - if parameters: - result[name].append(parameters) - else: - result[name] = [] - return cls([Parameter(key, val) for key, val in result.items()]) - - -@dataclass -class Decay: - obs_pos: list - main_range: float - perp_range: float - azimuth: float - grid: Grid - - def __post_init__(self): - angle = (90.0 - self.azimuth) * math.pi / 180.0 - self.cosangle = math.cos(angle) - self.sinangle = math.sin(angle) - - def get_dx_dy(self, data_index): - try: - # Assume the grid is 3D Grid - x, y, _ = self.grid.get_xyz(active_index=data_index) - except AttributeError: - # Assume the grid is a 2D Surface grid - x, y = self.grid.getXY(data_index) - x_unrotated = x - self.obs_pos[0] - y_unrotated = y - self.obs_pos[1] - - dx = ( - x_unrotated * self.cosangle + y_unrotated * self.sinangle - ) / self.main_range - dy = ( - -x_unrotated * self.sinangle + y_unrotated * self.cosangle - ) / self.perp_range - return dx, 
dy - - def norm_dist_square(self, data_index): - dx, dy = self.get_dx_dy(data_index) - d2 = dx**2 + dy**2 - return d2 - - -@dataclass -class GaussianDecay(Decay): - cutoff: bool - - def __call__(self, data_index): - d2 = super().norm_dist_square(data_index) - if self.cutoff and d2 > 1.0: - return 0.0 - exp_arg = -3.0 * d2 - return math.exp(exp_arg) - - -@dataclass -class ConstGaussianDecay(Decay): - normalised_tapering_range: float - cutoff: bool - - def __call__(self, data_index): - d2 = super().norm_dist_square(data_index) - d = math.sqrt(d2) - if d <= 1.0: - return 1.0 - if self.cutoff and d > self.normalised_tapering_range: - return 0.0 - - distance_from_inner_ellipse = (d - 1) / (self.normalised_tapering_range - 1) - exp_arg = -3 * distance_from_inner_ellipse**2 - return math.exp(exp_arg) - - -@dataclass -class ExponentialDecay(Decay): - cutoff: bool - - def __call__(self, data_index): - d2 = super().norm_dist_square(data_index) - d = math.sqrt(d2) - if self.cutoff and d > 1.0: - return 0.0 - exp_arg = -3.0 * d - return math.exp(exp_arg) - - -@dataclass -class ConstExponentialDecay(Decay): - normalised_tapering_range: float - cutoff: bool - - def __call__(self, data_index): - d2 = super().norm_dist_square(data_index) - d = math.sqrt(d2) - if d <= 1.0: - return 1.0 - if self.cutoff and d > self.normalised_tapering_range: - return 0.0 - - distance_from_inner_ellipse = (d - 1) / (self.normalised_tapering_range - 1) - exp_arg = -3 * distance_from_inner_ellipse - return math.exp(exp_arg) - - -@dataclass -class ConstantScalingFactor: - value: float - - def __call__(self, data_index): - return self.value - - -class ScalingValues: - scaling_param_number = 1 - corr_name = None - - @classmethod - def initialize(cls): - cls.scaling_param_number = 1 - cls.corr_name = None - - @classmethod - def write_qc_parameter( - cls, - node_name, - corr_name, - field_scale, - grid, - param_for_field, - log_level=LogLevel.OFF, - ): - if param_for_field is None or field_scale is None: - return - - scaling_values = np.reshape( - param_for_field, (grid.getNX(), grid.getNY(), grid.getNZ()), "F" - ) - - # Write scaling parameter once per corr_name - if corr_name != cls.corr_name: - cls.corr_name = corr_name - # Need a parameter name <= 8 character long - scaling_kw_name = "S_" + str(cls.scaling_param_number) - scaling_kw = grid.create_kw(scaling_values, scaling_kw_name, False) - filename = ( - cls.corr_name + "_" + node_name + "_" + scaling_kw_name + ".GRDECL" - ) - print( - "Write calculated scaling factor with name: " - f"{scaling_kw_name} to file: {filename}" - ) - debug_print( - f"Write calculated scaling factor with name: " - f"{scaling_kw_name} to file: {filename}", - LogLevel.LEVEL3, - log_level, - ) - with cwrap.open(filename, "w") as file: - grid.write_grdecl(scaling_kw, file) - # Increase parameter number to define unique parameter name - cls.scaling_param_number += 1 - - -def get_param_from_ert(ens_config): - new_params = Parameters() - for key in ens_config.parameters: - node = ens_config.getNode(key) - my_param = Parameter(key) - new_params.append(my_param) - if isinstance(node, GenKwConfig): - my_param.parameters = node.getKeyWords() - return new_params - - -def read_localisation_config(args): - if len(args) == 1: - specification_file_name = args[0] - else: - raise ValueError(f"Expecting a single argument. 
Got {args} arguments.") - - print(f"\nDefine localisation setup using config file: {specification_file_name}") - logging.info( - "\nDefine localisation setup using config file: %s", specification_file_name - ) - with open(specification_file_name, encoding="utf-8") as yml_file: - localisation_yml = yaml.safe_load(yml_file) - return localisation_yml - - -def activate_gen_kw_param( - node_name: str, - param_list: List[str], - ert_param_dict: Dict[str, List[str]], - log_level: LogLevel = LogLevel.OFF, -) -> List[int]: - """ - Activate the selected parameters for the specified node. - The param_list contains the list of parameters defined in GEN_KW - for this node to be activated. - """ - debug_print("Set active parameters", LogLevel.LEVEL2, log_level) - all_params = ert_param_dict[node_name] - index_list = [] - for param_name in param_list: - index = all_params.index(param_name) - if index is not None: - debug_print( - f"Active parameter: {param_name} index: {index}", - LogLevel.LEVEL3, - log_level, - ) - index_list.append(index) - return index_list - - -def build_decay_object( - method, - ref_pos, - main_range, - perp_range, - azimuth, - grid, - use_cutoff, - tapering_range=None, -): - if method == "gaussian_decay": - decay_obj = GaussianDecay( - ref_pos, - main_range, - perp_range, - azimuth, - grid, - use_cutoff, - ) - elif method == "exponential_decay": - decay_obj = ExponentialDecay( - ref_pos, - main_range, - perp_range, - azimuth, - grid, - use_cutoff, - ) - elif method == "const_gaussian_decay": - decay_obj = ConstGaussianDecay( - ref_pos, - main_range, - perp_range, - azimuth, - grid, - tapering_range, - use_cutoff, - ) - elif method == "const_exponential_decay": - decay_obj = ConstExponentialDecay( - ref_pos, - main_range, - perp_range, - azimuth, - grid, - tapering_range, - use_cutoff, - ) - else: - _valid_methods = [ - "gaussian_decay", - "exponential_decay", - "const_gaussian_decay", - "const_exponential_decay", - ] - raise NotImplementedError( - f"The only allowed methods for function 'apply_decay' are: {_valid_methods}" - ) - return decay_obj - - -def apply_decay( - method, - row_scaling, - data_size, - grid, - ref_pos, - main_range, - perp_range, - azimuth, - use_cutoff=False, - tapering_range=None, - calculate_qc_parameter=False, -): - """ - Calculates the scaling factor, assign it to ERT instance by row_scaling - and returns a full sized grid parameter with scaling factors for active - grid cells and 0 elsewhere to be used for QC purpose. - """ - decay_obj = build_decay_object( - method, - ref_pos, - main_range, - perp_range, - azimuth, - grid, - use_cutoff, - tapering_range, - ) - - scaling_vector = np.zeros(data_size, dtype=np.float64) - for index in range(data_size): - scaling_vector[index] = decay_obj(index) - row_scaling.assign_vector(scaling_vector) - - scaling_values = None - if calculate_qc_parameter and isinstance(grid, Grid): - nx, ny, nz = grid.get_nx(), grid.get_ny(), grid.get_nz() - scaling_values = np.zeros(nx * ny * nz, dtype=np.float32) - for index in range(data_size): - global_index = grid.global_index(active_index=index) - scaling_values[global_index] = scaling_vector[index] - - return scaling_values - - -def apply_constant( - row_scaling, - data_size, - grid, - value, - log_level, - calculate_qc_parameter=False, -): - """ - Assign constant value to the scaling factor, - assign it to ERT instance by row_scaling - and returns a full sized grid parameter with scaling factors for active - grid cells and 0 elsewhere to be used for QC purpose. 
- """ - debug_print(f"Scaling factor is constant: {value}", LogLevel.LEVEL3, log_level) - decay_obj = ConstantScalingFactor(value) - - scaling_vector = np.zeros(data_size, dtype=np.float32) - for index in range(data_size): - scaling_vector[index] = decay_obj(index) - row_scaling.assign_vector(scaling_vector) - - scaling_values = None - if calculate_qc_parameter and isinstance(grid, Grid): - nx, ny, nz = grid.get_nx(), grid.get_ny(), grid.get_nz() - scaling_values = np.zeros(nx * ny * nz, dtype=np.float32) - for index in range(data_size): - global_index = grid.global_index(active_index=index) - scaling_values[global_index] = scaling_vector[index] - - return scaling_values - - -def apply_from_file(row_scaling, data_size, grid, filename, param_name, log_level): - debug_print( - f"Read scaling factors as parameter {param_name}", LogLevel.LEVEL3, log_level - ) - debug_print(f"File name: {filename}", LogLevel.LEVEL3, log_level) - with cwrap.open(filename, "r") as file: - scaling_parameter = Resdata3DKW.read_grdecl( - grid, - file, - param_name, - strict=True, - rd_type=ResDataType.RD_FLOAT, - ) - for index in range(data_size): - global_index = grid.global_index(active_index=index) - row_scaling[index] = scaling_parameter[global_index] - - -def active_region(region_parameter, user_defined_active_region_list): - """ - Find all region parameter values matching any of the regions defined - to be used in localisation and mask the unused values - """ - active_region_values_used = ma.zeros(len(region_parameter), dtype=np.int32) - active_region_values_used[:] = -9999 - for region_number in user_defined_active_region_list: - found_values = region_parameter == region_number - active_region_values_used[found_values] = region_number - is_not_used = active_region_values_used == -9999 - active_region_values_used.mask = is_not_used - return active_region_values_used - - -def define_look_up_index(user_defined_active_region_list, max_region_number): - """ - Define an array taking region number as input and returning the index in the - user define active region list. Is used for fast lookup of scaling parameter - corresponding to the region number. 
- """ - active_segment_array = np.array(user_defined_active_region_list) - index_per_used_region = ma.zeros((max_region_number + 1), dtype=np.int32) - index_values = np.arange(len(active_segment_array)) - index_per_used_region[active_segment_array[index_values]] = index_values - return index_per_used_region - - -def calculate_scaling_factors_in_regions( - grid, region_parameter, active_segment_list, scaling_value_list, smooth_range_list -): - # ('grid' and 'smooth-range-list' are not currently used) - - min_region_number = region_parameter.min() - max_region_number = region_parameter.max() - - # Get a list of region numbers that exists in region parameter - regions_in_param = [] - for region_number in range(min_region_number, max_region_number + 1): - has_region = region_parameter == region_number - if has_region.any(): - regions_in_param.append(region_number) - - active_region_values_used = active_region(region_parameter, active_segment_list) - index_per_used_region = define_look_up_index(active_segment_list, max_region_number) - scaling_value_array = np.array(scaling_value_list) - - # Get selected (not masked) region values - selected_grid_cells = np.logical_not(active_region_values_used.mask) - selected_region_values = active_region_values_used[selected_grid_cells] - - # Look up scaling values for selected region values - scaling_values_active = scaling_value_array[ - index_per_used_region[selected_region_values] - ] - - # Create a full sized 3D parameter for scaling values - # where all but the selected region values have 0 scaling value. - scaling_values = np.zeros(len(region_parameter), dtype=np.float32) - scaling_values[selected_grid_cells] = scaling_values_active - - return scaling_values, active_region_values_used, regions_in_param - - -def smooth_parameter( - grid, smooth_range_list, scaling_values, active_region_values_used -): - """ - Function taking as input a 3D parameter scaling_values and calculates a new - 3D parameter scaling_values_smooth using local average within a rectangular window - around the cell to be assigned the smoothed value. The smoothing window is - defined by the two range parameters in smooth_range_list. - They contain integer values >=0 and smooth_range_list = [0,0] means no smoothing. - The input parameter active_region_values_used has non-negative integer values - with region number for all grid cells containing values for the input 3D parameter - scaling_values. All other grid cells are masked. - The smoothing algorithm is defined such that only values not masked are used. - If the scaling_values contain constant values for each - active region and e.g 0 for all inactive regions and for inactive grid cells, - then the smoothing will only appear on the border between active regions. - """ - nx, ny, nz = grid.get_nx(), grid.get_ny(), grid.get_nz() - di = smooth_range_list[0] - dj = smooth_range_list[1] - scaling_values_smooth = np.zeros(nx * ny * nz, dtype=np.float32) - for k, j0, i0 in itertools.product(range(nz), range(ny), range(nx)): - index0 = i0 + j0 * nx + k * nx * ny - if active_region_values_used[index0] is not ma.masked: - sumv = 0.0 - nval = 0 - ilow = max(0, i0 - di) - ihigh = min(i0 + di + 1, nx) - jlow = max(0, j0 - dj) - jhigh = min(j0 + dj + 1, ny) - for i in range(ilow, ihigh): - for j in range(jlow, jhigh): - index = i + j * nx + k * nx * ny - if active_region_values_used[index] is not ma.masked: - # Only use values from grid cells that are active - # and from regions defined as active by the user. 
- v = scaling_values[index] - sumv += v - nval += 1 - if nval > 0: - scaling_values_smooth[index0] = sumv / nval - return scaling_values_smooth - - -def apply_segment( - row_scaling, - data_size, - grid, - region_param_dict, - active_segment_list, - scaling_factor_list, - smooth_range_list, - corr_name, - log_level=LogLevel.OFF, -): - """ - Purpose: Use region numbers and list of scaling factors per region to - create scaling factors per active . - Input dictionary with keyword which is correlation group name, - where values are numpy vector with region parameters - for each grid cell in ERTBOX grid. - A scaling factor is specified for each specified active region. - Optionally also a spatial smoothing of the scaling factors - can be done by specifying smooth ranges in number of - grid cells in I and J direction. If this is not specified, - no smoothing is done. - NOTE: Smoothing is done only between active segments, - and no smoothing between active segments and inactive - segments or inactive grid cells. - """ - - debug_print(f"Active segments: {active_segment_list}", LogLevel.LEVEL3, log_level) - - max_region_number_specified = max(active_segment_list) - - region_parameter = region_param_dict[corr_name] - max_region_parameter = region_parameter.max() - if max_region_parameter < max_region_number_specified: - raise ValueError( - "Specified an active region with number " - f"{max_region_number_specified} which is larger \n" - f"than max region parameter {max_region_parameter} for " - f"correlation group {corr_name}." - ) - - ( - scaling_values, - active_localisation_region, - regions_in_param, - ) = calculate_scaling_factors_in_regions( - grid, - region_parameter, - active_segment_list, - scaling_factor_list, - smooth_range_list, - ) - if smooth_range_list is not None: - scaling_values = smooth_parameter( - grid, smooth_range_list, scaling_values, active_localisation_region - ) - - # Assign values to row_scaling object - for index in range(data_size): - global_index = grid.global_index(active_index=index) - row_scaling[index] = scaling_values[global_index] - - not_defined_in_region_param = [] - for n in active_segment_list: - if n not in regions_in_param: - not_defined_in_region_param.append(n) - if len(not_defined_in_region_param) > 0: - debug_print( - f"Warning: The following region numbers are specified in \n" - " config file for correlation group " - f"{corr_name}, \n" - " but not found in region parameter: " - f"{not_defined_in_region_param}", - LogLevel.LEVEL3, - log_level, - ) - return scaling_values - - -def read_region_files_for_all_correlation_groups(user_config, grid): - if grid is None: - # No grid is defined. Not relevant to look for region files to read. 
- return None - - region_param_dict = {} - corr_name_dict = {} - nx, ny, nz = grid.get_nx(), grid.get_ny(), grid.get_nz() - for _, corr_spec in enumerate(user_config.correlations): - region_param_dict[corr_spec.name] = None - if ( - corr_spec.field_scale is not None - and corr_spec.field_scale.method == "segment" - ): - filename = corr_spec.field_scale.segment_filename - param_name = corr_spec.field_scale.param_name - debug_print( - f"Use parameter: {param_name} from file: {filename} " - f"in {corr_spec.name}", - LogLevel.LEVEL2, - user_config.log_level, - ) - - if filename not in corr_name_dict: - # Read the file - with cwrap.open(filename, "r") as file: - region_parameter_read = Resdata3DKW.read_grdecl( - grid, - file, - param_name, - strict=True, - rd_type=ResDataType.RD_INT, - ) - region_parameter = np.zeros(nx * ny * nz, dtype=np.int32) - not_active = np.zeros(nx * ny * nz, dtype=np.int32) - for k, j, i in itertools.product(range(nz), range(ny), range(nx)): - index = i + j * nx + k * nx * ny - v = region_parameter_read[i, j, k] - region_parameter[index] = v - if grid.get_active_index(ijk=(i, j, k)) == -1: - not_active[index] = 1 - region_parameter_masked = ma.masked_array( - region_parameter, mask=not_active - ) - region_param_dict[corr_spec.name] = region_parameter_masked - corr_name_dict[filename] = corr_spec.name - else: - # The region_parameter is already read for a previous - # correlation group. Re-use it instead of re-reading the file - existing_corr_name = corr_name_dict[filename] - region_param_dict[corr_spec.name] = region_param_dict[ - existing_corr_name - ] - return region_param_dict - - -def add_ministeps( - user_config, - ert_param_dict, - ert_ensemble_config, - grid_for_field, -): - debug_print("Add all ministeps:", LogLevel.LEVEL1, user_config.log_level) - ScalingValues.initialize() - # Read all region files used in correlation groups, - # but only once per unique region file. 
- - region_param_dict = read_region_files_for_all_correlation_groups( - user_config, grid_for_field - ) - update_steps = [] - for corr_spec in user_config.correlations: - debug_print( - f"Define ministep: {corr_spec.name}", LogLevel.LEVEL1, user_config.log_level - ) - ministep_name = corr_spec.name - update_step = defaultdict(list) - update_step["name"] = corr_spec.name - obs_list = corr_spec.obs_group.result_items - param_dict = Parameters.from_list(corr_spec.param_group.result_items).to_dict() - - # Setup model parameter group - for node_name, param_list in param_dict.items(): - node = ert_ensemble_config.getNode(node_name) - impl_type = type(node).__name__ - debug_print( - f"Add node: {node_name} of type: {impl_type}", - LogLevel.LEVEL2, - user_config.log_level, - ) - if isinstance(node, GenKwConfig): - index_list = activate_gen_kw_param( - node_name, - param_list, - ert_param_dict, - user_config.log_level, - ) - update_step["parameters"].append([node_name, index_list]) - elif isinstance(node, Field): - assert grid_for_field is not None - _decay_methods_group1 = ["gaussian_decay", "exponential_decay"] - _decay_methods_group2 = [ - "const_gaussian_decay", - "const_exponential_decay", - ] - _decay_methods_all = _decay_methods_group1 + _decay_methods_group2 - if corr_spec.field_scale is not None: - debug_print( - "Scale field parameter correlations using method: " - f"{corr_spec.field_scale.method}", - LogLevel.LEVEL3, - user_config.log_level, - ) - row_scaling = RowScaling() - data_size = grid_for_field.get_num_active() - param_for_field = None - if corr_spec.field_scale.method in _decay_methods_all: - ref_pos = corr_spec.field_scale.ref_point - main_range = corr_spec.field_scale.main_range - perp_range = corr_spec.field_scale.perp_range - azimuth = corr_spec.field_scale.azimuth - use_cutoff = corr_spec.field_scale.cutoff - tapering_range = None - if corr_spec.field_scale.method in _decay_methods_group2: - tapering_range = ( - corr_spec.field_scale.normalised_tapering_range - ) - check_if_ref_point_in_grid(ref_pos, grid_for_field) - param_for_field = apply_decay( - corr_spec.field_scale.method, - row_scaling, - data_size, - grid_for_field, - ref_pos, - main_range, - perp_range, - azimuth, - use_cutoff, - tapering_range, - user_config.write_scaling_factors, - ) - elif corr_spec.field_scale.method == "constant": - param_for_field = apply_constant( - row_scaling, - data_size, - grid_for_field, - corr_spec.field_scale.value, - user_config.log_level, - user_config.write_scaling_factors, - ) - elif corr_spec.field_scale.method == "from_file": - apply_from_file( - row_scaling, - data_size, - grid_for_field, - corr_spec.field_scale.filename, - corr_spec.field_scale.param_name, - user_config.log_level, - ) - - elif corr_spec.field_scale.method == "segment": - param_for_field = apply_segment( - row_scaling, - data_size, - grid_for_field, - region_param_dict, - corr_spec.field_scale.active_segments, - corr_spec.field_scale.scalingfactors, - corr_spec.field_scale.smooth_ranges, - corr_spec.name, - user_config.log_level, - ) - else: - logging.error( - "Scaling method: %s is not implemented.", - corr_spec.field_scale.method, - ) - raise ValueError( - f"Scaling method: {corr_spec.field_scale.method} " - "is not implemented" - ) - - if user_config.write_scaling_factors: - ScalingValues.write_qc_parameter( - node_name, - corr_spec.name, - corr_spec.field_scale, - grid_for_field, - param_for_field, - user_config.log_level, - ) - update_step["row_scaling_parameters"].append( - [node_name, row_scaling] - ) - 
else: - scaling_factor_default = 1.0 - debug_print( - f"No correlation scaling specified for node {node_name} " - f"in {ministep_name}. " - f"Use default scaling factor: {scaling_factor_default}", - LogLevel.LEVEL3, - user_config.log_level, - ) - elif isinstance(node, GenDataConfig): - debug_print( - f"Parameter {node_name} of type: {impl_type} " - f"in {ministep_name}", - LogLevel.LEVEL3, - user_config.log_level, - ) - elif isinstance(node, SurfaceConfig): - _decay_methods_surf_group1 = ["gaussian_decay", "exponential_decay"] - _decay_methods_surf_group2 = [ - "const_gaussian_decay", - "const_exponential_decay", - ] - _decay_methods_surf_all = ( - _decay_methods_surf_group1 + _decay_methods_surf_group2 - ) - if corr_spec.surface_scale is not None: - surface_file = corr_spec.surface_scale.surface_file - debug_print( - f"Get surface size from: {surface_file}", - LogLevel.LEVEL3, - user_config.log_level, - ) - debug_print( - "Scale surface parameter correlations using method: " - f"{corr_spec.surface_scale.method}", - LogLevel.LEVEL3, - user_config.log_level, - ) - - surface = Surface(surface_file) - data_size = surface.getNX() * surface.getNY() - row_scaling = RowScaling() - if corr_spec.surface_scale.method in _decay_methods_surf_all: - ref_pos = corr_spec.surface_scale.ref_point - main_range = corr_spec.surface_scale.main_range - perp_range = corr_spec.surface_scale.perp_range - azimuth = corr_spec.surface_scale.azimuth - use_cutoff = corr_spec.surface_scale.cutoff - tapering_range = None - if corr_spec.surface_scale.method in _decay_methods_surf_group2: - tapering_range = ( - corr_spec.surface_scale.normalised_tapering_range - ) - apply_decay( - corr_spec.surface_scale.method, - row_scaling, - data_size, - surface, - ref_pos, - main_range, - perp_range, - azimuth, - use_cutoff, - tapering_range, - ) - elif corr_spec.surface_scale.method == "constant": - param_for_field = apply_constant( - row_scaling, - data_size, - None, - corr_spec.surface_scale.value, - user_config.log_level, - ) - else: - logging.error( - "Scaling method: %s is not implemented.", - corr_spec.surface_scale.method, - ) - raise ValueError( - f"Scaling method: {corr_spec.surface_scale.method} " - "is not implemented" - ) - - update_step["row_scaling_parameters"].append( - [node_name, row_scaling] - ) - else: - debug_print( - "Surface parameter is specified, but no surface_scale " - "keyword is specified. Require that surface_scale " - "keyword is specified.", - LogLevel.LEVEL3, - user_config.log_level, - ) - raise KeyError( - f" When using surface parameter {node_name} the keyword" - f" 'surface_scale' must be specified." - ) - # Setup observation group - update_step["observations"] = obs_list - debug_print( - f"Observations in {ministep_name}: {obs_list} ", - LogLevel.LEVEL3, - user_config.log_level, - ) - - update_steps.append(update_step) - - return update_steps - - -def check_if_ref_point_in_grid(ref_point, grid): - try: - grid.find_cell_xy(ref_point[0], ref_point[1], 0) - except ValueError as err: - raise ValueError( - f"Reference point {ref_point} corresponds to undefined grid cell " - f"or is outside the area defined by the grid {grid.get_name()}\n" - "Check specification of reference point." 
- ) from err diff --git a/src/semeio/workflows/localisation/localisation_config.py b/src/semeio/workflows/localisation/localisation_config.py deleted file mode 100644 index 424ee9826..000000000 --- a/src/semeio/workflows/localisation/localisation_config.py +++ /dev/null @@ -1,425 +0,0 @@ -import itertools -import pathlib -from typing import List, Literal, Optional, Union - -from pydantic import ( - BaseModel, - ConfigDict, - Field, - computed_field, - confloat, - conint, - conlist, - field_validator, -) -from typing_extensions import Annotated - - -def expand_wildcards(patterns, list_of_words): - all_matches = [] - errors = [] - for pattern in patterns: - matches = [ - words for words in list_of_words if pathlib.Path(words).match(pattern) - ] - if len(matches) > 0: - all_matches.extend(matches) - else: - errors.append(f"No match for: {pattern}") - all_matches = set(all_matches) - if len(errors) > 0: - raise ValueError( - " These specifications does not match anything defined in ERT model\n" - f" {errors}, available: {list_of_words}" - ) - return all_matches - - -def check_for_duplicated_correlation_specifications(correlations): - # All observations and model parameters used in correlations - all_combinations = [] - - for corr in correlations: - all_combinations.extend( - list( - itertools.product( - corr.obs_group.result_items, corr.param_group.result_items - ) - ) - ) - errors = [] - seen = set() - for combination in all_combinations: - if combination in seen: - errors.append(f"Observation: {combination[0]}, parameter: {combination[1]}") - else: - seen.add(combination) - return errors - - -class ObsConfig(BaseModel): - """ - Specification of list of observations. A wildcard notation is allowed. - Use the 'add' keyword to specify observations to include, - and the 'remove' keyword to remove some of the observations - specified by the 'add' keyword. The 'remove' keyword is useful when - observations are specified by wildcard notation. - Example: - obs_group: - add: ["WELLA_WWCT*", "WELLB_FOPR", "WELLC*_WOPR*"] - remove: ["WELLC2*"] - """ - - model_config = ConfigDict(extra="forbid") - - add: Union[str, List[str]] - remove: Optional[Union[str, List[str]]] = None - - @computed_field # type: ignore[prop-decorator] - @property - def result_items(self) -> List[str]: - res = _check_specification(self.add, self.remove, self.context) - return res - - context: List[str] - - def __init__(self, **data): - add = data.get("add") - if isinstance(add, str): - add = data["add"] = [add] - - remove = data.get("remove") - if isinstance(remove, str): - remove = data["remove"] = [remove] - - result = _check_specification(add, remove, data["context"]) - if len(result) == 0: - raise ValueError( - f"Adding: {add} and removing: {remove} resulted in no items" - ) - - super().__init__(**data) - - -class ParamConfig(ObsConfig): - """ - Specification of list of model parameters. Wildcard notation is allowed. - The syntax and meaning of the 'add' and 'remove' keywords are the - same as for specification of observations. 
- - For nodes with parameters defined by the ERT keyword GEN_KW, the - parameter name is specified on the format: : - For nodes with parameters defined by the ERT keyword SURFACE and FIELD, - the parameters belonging to a surface node is specified by: - - Example: - param_group: - add: ["GEO_PARAM:*", "FIELD_PARAM*", "GENERAL:*", "SURF_PARAM_*"] - remove: ["FIELD_PARAM_GRF1", "GEO_PARAM:A*", "GENERAL:2", "GENERAL:3"] - - (This includes all model parameters from node GEO_PARAM except parameter A, - all field parameter nodes starting with node name FIELD_PARAM except - FIELD_PARAM_GRF1, - all surface parameter nodes starting with node name SURF_PARAM_) - """ - - model_config = ConfigDict(extra="forbid") - - -class GaussianConfig(BaseModel): - """ - Method for calculating correlation scaling factor using a Gaussian function - centered at the specified reference point. The ranges and orientation define - how the scaling factor is reduced away from the reference point. - - The following equation for the scaling is defined: - First the normalised distance is calculated: - d = sqrt( (dx/main_range)^2 + (dy/perp_range)^2) ) - where dx is distance in azimuth direction and dy perpendicular to - azimuth direction. - - The default scaling function is defined for gaussian decay by: - f(d) = exp(-3 * d^2) - - This method can be used both to scale correlations for model parameter - nodes of type FIELD and SURFACE. For nodes of type surface, - a file specifying the grid layout of the surface must be specified. - """ - - model_config = ConfigDict(extra="forbid") - - method: Literal[ - "gaussian_decay", - "exponential_decay", - "const_gaussian_decay", - "const_exponential_decay", - ] - main_range: Annotated[float, Field(gt=0)] - perp_range: Annotated[float, Field(gt=0)] - azimuth: Annotated[float, Field(ge=0.0, le=360)] - ref_point: Annotated[List[float], Field(min_length=2, max_length=2)] - cutoff: Optional[bool] = Field(default=False) - surface_file: Optional[str] = None - - -class ExponentialConfig(GaussianConfig): - """ - Method for calculating correlation scaling factor using Exponential function. - See the doc string for Gaussian function for more details. - """ - - method: Literal["exponential_decay"] = "exponential_decay" - - -class ConstWithGaussianTaperingConfig(GaussianConfig): - """ - Method for calculating correlation scaling factor which is 1 inside range - and fall off using Gaussian function outside range. - - The function is defined by: - f(d) = 1 if d <= 1 - f(d) = exp(-3 * ((d-1)/(D-1))^2 ) for d > 1 and here D > 1. - Here d=1 represents the ellipse defined by the range settings, and D= 1 - represents the second ellipse at which the scaling function is reduced - to about 0.05 . - - Optionally the use of cutoff set the values for the function to 0 for d > D. - f(d) = 0 for d > D - This will create a discontinuity at d=D of size 0.05 for the scaling value. - - This method can be used both to scale correlations for model parameter - nodes of type FIELD and SURFACE. For nodes of type surface, - a file specifying the grid layout of the surface must be specified. - """ - - method: Literal["const_gaussian_decay"] = "const_gaussian_decay" - normalised_tapering_range: Optional[Annotated[float, Field(gt=1)]] = 1.5 - - -class ConstWithExponentialTaperingConfig(GaussianConfig): - """ - Method for calculating correlation scaling factor which is 1 inside range - and fall off using Exponential function outside range. See above for - ConstWithGaussianTaperingConfig. 
- """ - - method: Literal["const_exponential_decay"] = "const_exponential_decay" - normalised_tapering_range: Optional[Annotated[float, Field(gt=1)]] = 1.5 - - -class ScalingFromFileConfig(BaseModel): - model_config = ConfigDict(extra="forbid") - - method: Literal["from_file"] - filename: str - param_name: str - - -class ScalingForSegmentsConfig(BaseModel): - model_config = ConfigDict(extra="forbid") - - method: Literal["segment"] - segment_filename: str - param_name: str - active_segments: Union[ # type: ignore - conint(ge=0), conlist(item_type=conint(ge=0), min_length=1) - ] - scalingfactors: Union[ # type: ignore - confloat(ge=0), conlist(item_type=confloat(ge=0, le=1), min_length=1) - ] - smooth_ranges: Optional[ # type: ignore - conlist(item_type=conint(ge=0), min_length=2, max_length=2) - ] = [0, 0] - - @field_validator("scalingfactors") - @classmethod - def check_length_consistency(cls, scalingfactors, info): - # Ensure that active segment list and scaling factor lists are of equal length. - active_segment_list = info.data.get("active_segments", None) - if not active_segment_list: - return scalingfactors - if len(scalingfactors) != len(active_segment_list): - raise ValueError( - "The specified length of 'active_segments' list" - f"{active_segment_list }\n" - f" and 'scalingfactors' list {scalingfactors} are different." - ) - return scalingfactors - - -class ConstantScalingConfig(BaseModel): - """ - Method for calculating constant correlation scaling factor. - """ - - model_config = ConfigDict(extra="forbid") - method: Literal["constant"] - value: Optional[Annotated[float, Field(strict=True, gt=0, le=1)]] = 1.0 - surface_file: Optional[str] = None - - -class CorrelationConfig(BaseModel): - """ - The keyword 'correlations' specify a set of observations and model parameters - to have active correlations. For scalar parameters coming from ERT keyword GEN_KW, - the correlations estimated by ERT will not be reduced, - and the scaling factor is 1. - - For model parameter nodes of type FIELD or SURFACE, additional - information about how to adjust the correlations can be specified. The sub-keywords - 'field_scale' and 'surface_scale' are used for nodes of type FIELD or - SURFACE respectively. It is possible to not specify these keywords, and in that - case it means that ERT estimated correlations between any field model parameter, - e.g the field model parameter belonging to a grid cell (i,j,k) and the observations - specified, is not modified and the scaling factor is 1. If on the other hand, - a specification of a method for calculating scaling factor exits, - the covariance will be reduced by this factor: - - new_cov(field(i,j,k)) = orig_cov(field(i,j,k), obs) * scaling_factor - - For some of methods for calculating scaling factors, a reference point, - typically a well location, must be specified. For other methods any reference - point is not used. 
- - """ - - model_config = ConfigDict(extra="forbid") - name: str - obs_group: ObsConfig - param_group: ParamConfig - field_scale: Optional[ - Union[ - GaussianConfig, - ExponentialConfig, - ConstWithGaussianTaperingConfig, - ConstWithExponentialTaperingConfig, - ScalingFromFileConfig, - ScalingForSegmentsConfig, - ConstantScalingConfig, - ] - ] - - surface_scale: Optional[ - Union[ - GaussianConfig, - ExponentialConfig, - ConstWithGaussianTaperingConfig, - ConstWithExponentialTaperingConfig, - ConstantScalingConfig, - ] - ] - - obs_context: list - params_context: list - - def __init__(self, **data): - data["obs_group"]["context"] = data["obs_context"] - data["param_group"]["context"] = data["params_context"] - - if "surface_scale" in data: - surf_scale = data["surface_scale"] - if not isinstance(surf_scale, dict): - raise ValueError("surface_scale must be dict") - - # String with relative path to surface files relative to config path - if "surface_file" not in surf_scale: - raise ValueError( - "Missing keyword: 'surface_file' in keyword: 'surface_scale' " - ) - - filename = pathlib.Path(surf_scale["surface_file"]) - # Check that the file exists - if not filename.exists(): - raise ValueError(f"File for surface: {filename} does not exist.") - - method = surf_scale.get("method") - _valid_methods = { - "gaussian_decay": GaussianConfig, - "exponential_decay": ExponentialConfig, - "const_gaussian_decay": ConstWithGaussianTaperingConfig, - "const_exponential_decay": ConstWithExponentialTaperingConfig, - "constant": ConstantScalingConfig, - } - - if method not in _valid_methods: - raise ValueError( - f"Unknown method: {method}, valid methods are:" - f" {_valid_methods.keys()}" - ) - data["surface_scale"] = _valid_methods[method](**surf_scale) - else: - data["surface_scale"] = None - - if "field_scale" in data: - field_scale = data.get("field_scale") - # field_scale["surface_file"] = field_scale.get("surface_file", None) - method = field_scale.get("method") - _valid_methods = { - "gaussian_decay": GaussianConfig, - "exponential_decay": ExponentialConfig, - "const_gaussian_decay": ConstWithGaussianTaperingConfig, - "const_exponential_decay": ConstWithExponentialTaperingConfig, - "from_file": ScalingFromFileConfig, - "segment": ScalingForSegmentsConfig, - "constant": ConstantScalingConfig, - } - if method not in _valid_methods: - raise ValueError( - f"Unknown method: {method}, valid methods are:" - f" {_valid_methods.keys()}" - ) - - data["field_scale"] = _valid_methods[method](**field_scale) - else: - data["field_scale"] = None - - super().__init__(**data) - - -class LocalisationConfig(BaseModel): - """ - observations: A list of observations from ERT in format nodename - parameters: A dict of parameters from ERT in format nodename:paramname. - Key is node name. Values are lists of parameter names - for the node. - correlations: A list of CorrelationConfig objects keeping name of - one correlation set which defines the input to - create a ministep object. - log_level: Integer defining how much log output to write to screen - write_scaling_factors: Turn on writing calculated scaling parameters to file. - Possible values: True/False. 
Default: False - """ - - model_config = ConfigDict(extra="forbid") - observations: List[str] - parameters: List[str] - correlations: List[CorrelationConfig] - log_level: Optional[Annotated[int, Field(strict=True, ge=0, le=5)]] = 1 - write_scaling_factors: Optional[bool] = False - - def __init__(self, **data): - for correlation in data["correlations"]: - correlation["obs_context"] = data["observations"] - correlation["params_context"] = data["parameters"] - - super().__init__(**data) - - @field_validator("correlations", mode="after") - @classmethod - def validate_correlations(cls, correlations): - duplicates = check_for_duplicated_correlation_specifications(correlations) - if len(duplicates) > 0: - error_msgs = "\n".join(duplicates) - raise ValueError( - f"Found {len(duplicates)} duplicated correlations: \n{error_msgs}" - ) - return correlations - - -def _check_specification(items_to_add, items_to_remove, valid_items): - added_items = expand_wildcards(items_to_add, valid_items) - if items_to_remove is not None: - removed_items = expand_wildcards(items_to_remove, valid_items) - added_items = added_items.difference(removed_items) - added_items = list(added_items) - return sorted(added_items) diff --git a/src/semeio/workflows/localisation/localisation_debug_settings.py b/src/semeio/workflows/localisation/localisation_debug_settings.py deleted file mode 100644 index 6bf2f273e..000000000 --- a/src/semeio/workflows/localisation/localisation_debug_settings.py +++ /dev/null @@ -1,16 +0,0 @@ -import logging -from enum import IntEnum - - -class LogLevel(IntEnum): - OFF = 0 - LEVEL1 = 1 - LEVEL2 = 2 - LEVEL3 = 3 - LEVEL4 = 4 - - -def debug_print(text, threshold, log_level=LogLevel.OFF): - if threshold <= log_level: - # Use the logging module to log the info to file - logging.info(" " * threshold + "-- " + text) diff --git a/tests/workflows/localisation/__init__.py b/tests/workflows/localisation/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/workflows/localisation/snapshots/test_methods/test_calculate_scaling_factors_in_regions/testdata_scaling.txt b/tests/workflows/localisation/snapshots/test_methods/test_calculate_scaling_factors_in_regions/testdata_scaling.txt deleted file mode 100644 index c5fe26f21..000000000 --- a/tests/workflows/localisation/snapshots/test_methods/test_calculate_scaling_factors_in_regions/testdata_scaling.txt +++ /dev/null @@ -1,17 +0,0 @@ -[0. 0. 1. 1. 1. 0. 0. 0. 0. 0. 0. 0. 1. 1. 1. 0. 0. 0. - 0. 0. 0. 0. 1. 1. 1. 0. 0. 0. 0. 0. 0. 0. 1. 1. 1. 0. - 0. 0. 0. 0. 0. 0. 1. 1. 1. 0.2 0.2 0.2 0.2 0.2 0.5 0.5 0.5 0.5 - 0.5 0.2 0.2 0.2 0.2 0.2 0.5 0.5 0.5 0.5 0.5 0.2 0.2 0.2 0.2 0.2 0.5 0.5 - 0.5 0.5 0.5 0. 0. 0. 0. 0. 0.5 0.5 0.5 0.5 0.5 0. 0. 0. 0. 0. - 0.5 0.5 0.5 0.5 0.5 0. 0. 0. 0. 0. 0. 0. 1. 1. 1. 0. 0. 0. - 0. 0. 0. 0. 1. 1. 1. 0. 0. 0. 0. 0. 0. 0. 1. 1. 1. 0. - 0. 0. 0. 0. 0. 0. 1. 1. 1. 0. 0. 0. 0. 0. 0. 0. 1. 1. - 1. 0.2 0.2 0.2 0.2 0.2 0.5 0.5 0.5 0.5 0.5 0.2 0.2 0.2 0.2 0.2 0.5 0.5 - 0.5 0.5 0.5 0.2 0.2 0.2 0.2 0.2 0.5 0.5 0.5 0.5 0.5 0. 0. 0. 0. 0. - 0.5 0.5 0.5 0.5 0.5 0. 0. 0. 0. 0. 0.5 0.5 0.5 0.5 0.5 0. 0. 0. - 0. 0. 0. 0. 1. 1. 1. 0. 0. 0. 0. 0. 0. 0. 1. 1. 1. 0. - 0. 0. 0. 0. 0. 0. 1. 1. 1. 0. 0. 0. 0. 0. 0. 0. 1. 1. - 1. 0. 0. 0. 0. 0. 0. 0. 1. 1. 1. 0.2 0.2 0.2 0.2 0.2 0.5 0.5 - 0.5 0.5 0.5 0.2 0.2 0.2 0.2 0.2 0.5 0.5 0.5 0.5 0.5 0.2 0.2 0.2 0.2 0.2 - 0.5 0.5 0.5 0.5 0.5 0. 0. 0. 0. 0. 0.5 0.5 0.5 0.5 0.5 0. 0. 0. - 0. 0. 0.5 0.5 0.5 0.5 0.5 0. 0. 0. 0. 0. 
] \ No newline at end of file diff --git a/tests/workflows/localisation/snapshots/test_methods/test_decay_function_with_new_options/testdata_scaling_decay_method1.txt b/tests/workflows/localisation/snapshots/test_methods/test_decay_function_with_new_options/testdata_scaling_decay_method1.txt deleted file mode 100644 index c8c4a6105..000000000 --- a/tests/workflows/localisation/snapshots/test_methods/test_decay_function_with_new_options/testdata_scaling_decay_method1.txt +++ /dev/null @@ -1 +0,0 @@ -[0. 0. 0. ... 0. 0. 0.] \ No newline at end of file diff --git a/tests/workflows/localisation/snapshots/test_methods/test_decay_function_with_new_options/testdata_scaling_decay_method2.txt b/tests/workflows/localisation/snapshots/test_methods/test_decay_function_with_new_options/testdata_scaling_decay_method2.txt deleted file mode 100644 index c8c4a6105..000000000 --- a/tests/workflows/localisation/snapshots/test_methods/test_decay_function_with_new_options/testdata_scaling_decay_method2.txt +++ /dev/null @@ -1 +0,0 @@ -[0. 0. 0. ... 0. 0. 0.] \ No newline at end of file diff --git a/tests/workflows/localisation/snapshots/test_methods/test_decay_function_with_new_options/testdata_scaling_decay_method3.txt b/tests/workflows/localisation/snapshots/test_methods/test_decay_function_with_new_options/testdata_scaling_decay_method3.txt deleted file mode 100644 index c8c4a6105..000000000 --- a/tests/workflows/localisation/snapshots/test_methods/test_decay_function_with_new_options/testdata_scaling_decay_method3.txt +++ /dev/null @@ -1 +0,0 @@ -[0. 0. 0. ... 0. 0. 0.] \ No newline at end of file diff --git a/tests/workflows/localisation/snapshots/test_methods/test_decay_function_with_new_options/testdata_scaling_decay_method4.txt b/tests/workflows/localisation/snapshots/test_methods/test_decay_function_with_new_options/testdata_scaling_decay_method4.txt deleted file mode 100644 index c8c4a6105..000000000 --- a/tests/workflows/localisation/snapshots/test_methods/test_decay_function_with_new_options/testdata_scaling_decay_method4.txt +++ /dev/null @@ -1 +0,0 @@ -[0. 0. 0. ... 0. 0. 0.] \ No newline at end of file diff --git a/tests/workflows/localisation/snapshots/test_methods/test_decay_function_with_new_options/testdata_scaling_decay_method5.txt b/tests/workflows/localisation/snapshots/test_methods/test_decay_function_with_new_options/testdata_scaling_decay_method5.txt deleted file mode 100644 index 1e916fdb6..000000000 --- a/tests/workflows/localisation/snapshots/test_methods/test_decay_function_with_new_options/testdata_scaling_decay_method5.txt +++ /dev/null @@ -1 +0,0 @@ -[1. 1. 1. ... 1. 1. 1.] \ No newline at end of file diff --git a/tests/workflows/localisation/snapshots/test_methods/test_smooth_parameter/testdata_scaling_smooth.txt b/tests/workflows/localisation/snapshots/test_methods/test_smooth_parameter/testdata_scaling_smooth.txt deleted file mode 100644 index c9451213d..000000000 --- a/tests/workflows/localisation/snapshots/test_methods/test_smooth_parameter/testdata_scaling_smooth.txt +++ /dev/null @@ -1,50 +0,0 @@ -[0. 0. 1. 1. 1. 0. - 0. 0. 0. 0. 0. 0. - 1. 1. 1. 0. 0. 0. - 0. 0. 0. 0. 1. 1. - 1. 0. 0. 0. 0. 0. - 0. 0. 1. 1. 0.8857143 0. - 0. 0. 0. 0. 0. 0. - 0.78571427 0.8333333 0.675 0.47142857 0.2 0.2 - 0.2 0.2 0.5 0.5714286 0.625 0.6666667 - 0.51111114 0.35555556 0.2 0.2 0.2 0.2 - 0.5 0.5 0.5 0.5 0.425 0.32857144 - 0.2 0.2 0.2 0.2 0.5 0.5 - 0.5 0.5 0.45714286 0. 0. 0. - 0. 0. 0.5 0.5 0.5 0.5 - 0.5 0. 0. 0. 0. 0. - 0.5 0.5 0.5 0.5 0.5 0. - 0. 0. 0. 0. 0. 0. - 1. 1. 1. 0. 0. 0. - 0. 0. 
0. 0. 1. 1. - 1. 0. 0. 0. 0. 0. - 0. 0. 1. 1. 1. 0. - 0. 0. 0. 0. 0. 0. - 1. 1. 0.8857143 0. 0. 0. - 0. 0. 0. 0. 0.78571427 0.8333333 - 0.675 0.47142857 0.2 0.2 0.2 0.2 - 0.5 0.5714286 0.625 0.6666667 0.51111114 0.35555556 - 0.2 0.2 0.2 0.2 0.5 0.5 - 0.5 0.5 0.425 0.32857144 0.2 0.2 - 0.2 0.2 0.5 0.5 0.5 0.5 - 0.45714286 0. 0. 0. 0. 0. - 0.5 0.5 0.5 0.5 0.5 0. - 0. 0. 0. 0. 0.5 0.5 - 0.5 0.5 0.5 0. 0. 0. - 0. 0. 0. 0. 1. 1. - 1. 0. 0. 0. 0. 0. - 0. 0. 1. 1. 1. 0. - 0. 0. 0. 0. 0. 0. - 1. 1. 1. 0. 0. 0. - 0. 0. 0. 0. 1. 1. - 0.8857143 0. 0. 0. 0. 0. - 0. 0. 0.78571427 0.8333333 0.675 0.47142857 - 0.2 0.2 0.2 0.2 0.5 0.5714286 - 0.625 0.6666667 0.51111114 0.35555556 0.2 0.2 - 0.2 0.2 0.5 0.5 0.5 0.5 - 0.425 0.32857144 0.2 0.2 0.2 0.2 - 0.5 0.5 0.5 0.5 0.45714286 0. - 0. 0. 0. 0. 0.5 0.5 - 0.5 0.5 0.5 0. 0. 0. - 0. 0. 0.5 0.5 0.5 0.5 - 0.5 0. 0. 0. 0. 0. ] \ No newline at end of file diff --git a/tests/workflows/localisation/test_configs/__init__.py b/tests/workflows/localisation/test_configs/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/workflows/localisation/test_configs/test_config.py b/tests/workflows/localisation/test_configs/test_config.py deleted file mode 100644 index c639fa994..000000000 --- a/tests/workflows/localisation/test_configs/test_config.py +++ /dev/null @@ -1,739 +0,0 @@ -from unittest.mock import MagicMock - -import pydantic -import pytest - -from semeio.workflows.localisation.localisation_config import ( - LocalisationConfig, - check_for_duplicated_correlation_specifications, - expand_wildcards, -) - -ERT_OBS = ["OBS1", "OBS2", "OBS11", "OBS22", "OBS12", "OBS13", "OBS14", "OBS3"] -ERT_PARAM = [ - "PARAM_NODE1:PARAM1", - "PARAM_NODE1:PARAM2", - "PARAM_NODE2:PARAM1", - "PARAM_NODE2:PARAM2", - "PARAM_NODE2:PARAM3", - "PARAM_NODE3:PARAM4", - "PARAM_NODE22:P1", - "PARAM_NODE22:P2", - "PARAM_NODE22:P22", - "PARAM_NODE1X:X1", - "PARAM_NODE1X:X2", - "PARAM_NODE1X:X3", - "PARAM_NODE2Y:Y1", - "PARAM_NODE2Y:Y2", - "PARAM_NODE2Y:Y3", - "PARAM_FIELD1", - "PARAM_FIELD2", - "PARAM_FIELD3", - "PARAM_GEN:0", - "PARAM_GEN:1", - "PARAM_GEN:2", - "PARAM_GEN:3", - "PARAM_GEN:4", - "PARAM_SURFACE1", - "PARAM_SURFACE2", -] - - -@pytest.mark.parametrize( - "param_group_add, expected", - [ - ( - "PARAM_NODE1:*", - ["PARAM_NODE1:PARAM1", "PARAM_NODE1:PARAM2"], - ), - ( - "PARAM_N*1:*", - ["PARAM_NODE1:PARAM1", "PARAM_NODE1:PARAM2"], - ), - ( - ["P*2*", "PARAM_NODE3*"], - [ - "PARAM_FIELD2", - "PARAM_GEN:2", - "PARAM_NODE1:PARAM2", - "PARAM_NODE1X:X2", - "PARAM_NODE22:P1", - "PARAM_NODE22:P2", - "PARAM_NODE22:P22", - "PARAM_NODE2:PARAM1", - "PARAM_NODE2:PARAM2", - "PARAM_NODE2:PARAM3", - "PARAM_NODE2Y:Y1", - "PARAM_NODE2Y:Y2", - "PARAM_NODE2Y:Y3", - "PARAM_NODE3:PARAM4", - "PARAM_SURFACE2", - ], - ), - ( - ["P*2:*", "PARAM_NODE3*"], - [ - "PARAM_NODE2:PARAM1", - "PARAM_NODE2:PARAM2", - "PARAM_NODE2:PARAM3", - "PARAM_NODE22:P1", - "PARAM_NODE22:P2", - "PARAM_NODE22:P22", - "PARAM_NODE3:PARAM4", - ], - ), - ( - ["PARAM_NODE3:P*", "PARAM_NODE2*:*2*"], - [ - "PARAM_NODE2:PARAM2", - "PARAM_NODE22:P2", - "PARAM_NODE22:P22", - "PARAM_NODE2Y:Y2", - "PARAM_NODE3:PARAM4", - ], - ), - ( - "PARAM_FIELD*", - ["PARAM_FIELD1", "PARAM_FIELD2", "PARAM_FIELD3"], - ), - ( - "PARAM_FIELD1", - ["PARAM_FIELD1"], - ), - ( - "PARAM_GEN:*", - ["PARAM_GEN:0", "PARAM_GEN:1", "PARAM_GEN:2", "PARAM_GEN:3", "PARAM_GEN:4"], - ), - ], -) -def test_simple_config(param_group_add, expected): - correlations = [ - { - "name": "some_name", - "obs_group": {"add": ["OBS1"]}, - "param_group": { - 
"add": param_group_add, - }, - } - ] - log_level = 2 - conf = LocalisationConfig( - observations=ERT_OBS, - parameters=ERT_PARAM, - log_level=log_level, - correlations=correlations, - ) - assert sorted(conf.correlations[0].param_group.result_items) == sorted(expected) - - -@pytest.mark.parametrize( - "obs_group_add, param_group_add, param_group_remove, expected_error", - [ - ( - ["OBS*"], - "PARAM_NODE1:PARAM1", - "P*:*:*", - "No match for: P*:*:*", - ), - ( - [], - "PARAM_NODE1:PARAM1", - [], - "", - ), - ], -) -def test_simple_config_error( - obs_group_add, param_group_add, param_group_remove, expected_error -): - correlations = [ - { - "name": "some_name", - "obs_group": { - "add": obs_group_add, - }, - "param_group": { - "add": param_group_add, - "remove": param_group_remove, - }, - } - ] - log_level = 2 - with pytest.raises(pydantic.ValidationError, match=expected_error): - LocalisationConfig( - observations=ERT_OBS, - parameters=ERT_PARAM, - log_level=log_level, - correlations=correlations, - ) - - -@pytest.mark.parametrize( - "obsgroup1, paramgroup1, obsgroup2, paramgroup2, expected_error", - [ - ( - "OBS1", - "PARAM_NODE1:*", - "OBS*", - "PARAM_NODE*:*", - "Found 2 duplicated correlations", - ), - ( - ["OBS1", "OBS2"], - ["PARAM_NODE2:*"], - ["OBS1*", "OBS2*"], - ["P*:*"], - "Found 6 duplicated correlations", - ), - ( - ["OBS1*"], - ["PARAM_FIELD2"], - ["OBS14"], - ["PARAM_F*"], - "Found 1 duplicated correlations", - ), - ( - "*", - "*", - "O*", - "P*:*", - "Found 160 duplicated correlations", - ), - ], -) -def test_simple_config_duplicate_error( - obsgroup1, paramgroup1, obsgroup2, paramgroup2, expected_error -): - correlations = [ - { - "name": "some_name1", - "obs_group": { - "add": obsgroup1, - }, - "param_group": { - "add": paramgroup1, - }, - }, - { - "name": "some_name2", - "obs_group": { - "add": obsgroup2, - }, - "param_group": { - "add": paramgroup2, - }, - }, - ] - log_level = 2 - with pytest.raises(ValueError, match=expected_error): - LocalisationConfig( - observations=ERT_OBS, - parameters=ERT_PARAM, - log_level=log_level, - correlations=correlations, - ) - - -@pytest.mark.parametrize( - "ref_point, expected_error", - [ - ( - [], - "least 2 items", - ), - ( - [100], - "least 2 items ", - ), - ( - ["not_float", 200], - "[\\s\\S]*correlations.0.ref_point.0[\\s\\S]*Input " - "should be a valid number", - ), - ( - [100, 200, 300], - "at most 2 items", - ), - ], -) -def test_simple_config_ref_point_error(ref_point, expected_error): - correlations = [ - { - "name": "some_name", - "obs_group": { - "add": "OBS", - }, - "param_group": { - "add": "PARAM_NODE1", - }, - "field_scale": { - "method": "gaussian_decay", - "main_range": 1000, - "perp_range": 1000, - "azimuth": 200, - "ref_point": ref_point, - }, - } - ] - with pytest.raises(ValueError, match=expected_error): - LocalisationConfig( - observations=["OBS"], parameters=["PARAM_NODE1"], correlations=correlations - ) - - -@pytest.mark.parametrize( - "param_group_add, param_group_remove, expected", - [ - ( - "PARAM_NODE1:*", - "PARAM_NODE2:*", - ["PARAM_NODE1:PARAM1", "PARAM_NODE1:PARAM2"], - ), - ( - "PARAM_N*1*", - "PARAM_NODE1:PARAM1", - [ - "PARAM_NODE1:PARAM2", - "PARAM_NODE1X:X1", - "PARAM_NODE1X:X2", - "PARAM_NODE1X:X3", - "PARAM_NODE2:PARAM1", - "PARAM_NODE22:P1", - "PARAM_NODE2Y:Y1", - ], - ), - ( - ["P*2*:*", "PARAM_NODE3*", "P*_GEN*"], - ["PARAM_NODE2:*", "PARAM_NODE22:P2*", "P*_G*:1", "P*_G*:3", "P*_G*:4"], - [ - "PARAM_GEN:0", - "PARAM_GEN:2", - "PARAM_NODE22:P1", - "PARAM_NODE2Y:Y1", - "PARAM_NODE2Y:Y2", - 
"PARAM_NODE2Y:Y3", - "PARAM_NODE3:PARAM4", - ], - ), - (["*FIELD*"], ["*NODE*"], ["PARAM_FIELD1", "PARAM_FIELD2", "PARAM_FIELD3"]), - ( - ["*"], - ["PARAM_NODE*"], - [ - "PARAM_FIELD1", - "PARAM_FIELD2", - "PARAM_FIELD3", - "PARAM_GEN:0", - "PARAM_GEN:1", - "PARAM_GEN:2", - "PARAM_GEN:3", - "PARAM_GEN:4", - "PARAM_SURFACE1", - "PARAM_SURFACE2", - ], - ), - ], -) -def test_add_remove_param_config(param_group_add, param_group_remove, expected): - correlations = [ - { - "name": "some_name", - "obs_group": {"add": ["OBS1"]}, - "param_group": { - "add": param_group_add, - "remove": param_group_remove, - }, - } - ] - log_level = 2 - conf = LocalisationConfig( - observations=ERT_OBS, - parameters=ERT_PARAM, - log_level=log_level, - correlations=correlations, - ) - assert sorted(conf.correlations[0].param_group.result_items) == sorted(expected) - - -@pytest.mark.parametrize( - "config, expected", - [ - ( - { - "add": ["PARAM_NODE1:PARAM1"], - "remove": "PARAM_NODE*:PARAM1", - }, - r"Adding: \['PARAM_NODE1:PARAM1'\] and removing: \['PARAM_NODE\*:PARAM1'\]", - ), - ( - { - "add": ["*"], - "remove": ["*"], - }, - r"Adding: \['\*'\] and removing: \['\*'\]", - ), - ( - { - "add": ["*FIELD*"], - "remove": ["*"], - }, - r"Adding: \['\*FIELD\*'\] and removing: \['\*'\]", - ), - ], -) -def test_add_remove_param_config_no_param(config, expected): - correlations = [ - {"name": "some_name", "obs_group": {"add": ["OBS1"]}, "param_group": config} - ] - with pytest.raises(ValueError, match=expected): - LocalisationConfig( - observations=ERT_OBS, parameters=ERT_PARAM, correlations=correlations - ) - - -@pytest.mark.parametrize( - "obs_group_add, obs_group_remove, expected", - [ - ( - "OBS*", - "OBS*2", - ["OBS1", "OBS11", "OBS13", "OBS14", "OBS3"], - ), - ( - "*", - "OBS*2", - ["OBS1", "OBS11", "OBS13", "OBS14", "OBS3"], - ), - ( - "*2*", - "*1*", - ["OBS2", "OBS22"], - ), - ( - "*3", - "*1*", - ["OBS3"], - ), - ], -) -def test_add_remove_obs_config(obs_group_add, obs_group_remove, expected): - correlations = [ - { - "name": "some_name", - "obs_group": {"add": [obs_group_add], "remove": [obs_group_remove]}, - "param_group": { - "add": ["PARAM_NODE1:PARAM1"], - }, - } - ] - log_level = 2 - conf = LocalisationConfig( - observations=ERT_OBS, - parameters=ERT_PARAM, - log_level=log_level, - correlations=correlations, - ) - assert len(conf.correlations) == 1 - assert conf.correlations[0].obs_group.result_items == expected - - -@pytest.mark.parametrize( - "pattern, list_of_words, expected_result", - [(["*"], ["OBS_1", "2"], {"OBS_1", "2"}), ((["OBS*"], ["OBS_1", "2"], {"OBS_1"}))], -) -def test_wildcard_expansion(pattern, list_of_words, expected_result): - result = expand_wildcards(pattern, list_of_words) - assert result == expected_result - - -@pytest.mark.parametrize( - "pattern, list_of_words, expected_error", - [ - (["NOT:"], ["OBS_1", "2"], "No match for: NOT"), - (["OBS", "OBS_1"], ["OBS_1", "2"], "No match for: OBS"), - (["NOT", "OBS"], ["OBS_1", "2"], "No match for: NOT"), - ], -) -def test_wildcard_expansion_mismatch(pattern, list_of_words, expected_error): - with pytest.raises(ValueError, match=expected_error): - expand_wildcards(pattern, list_of_words) - - -@pytest.mark.parametrize( - "obs_1, obs_2, param_1, param_2, expected", - ( - [["a"], ["a"], ["b"], ["b"], ["Observation: a, parameter: b"]], - [["a", "c"], ["a"], ["b"], ["b"], ["Observation: a, parameter: b"]], - [ - ["a", "c"], - ["a", "c"], - ["b"], - ["b"], - ["Observation: a, parameter: b", "Observation: c, parameter: b"], - ], - [ - ["a", "c"], - 
["a", "c"], - ["b", "d"], - ["b"], - ["Observation: a, parameter: b", "Observation: c, parameter: b"], - ], - ), -) -def test_check_for_duplicates(obs_1, obs_2, param_1, param_2, expected): - correlation_1 = MagicMock() - correlation_2 = MagicMock() - correlation_1.obs_group.result_items = obs_1 - correlation_1.param_group.result_items = param_1 - correlation_2.obs_group.result_items = obs_2 - correlation_2.param_group.result_items = param_2 - correlations = [correlation_1, correlation_2] - result = check_for_duplicated_correlation_specifications(correlations) - assert result == expected - - -@pytest.mark.parametrize( - "active_segment_list, scaling_factor_list, smooth_ranges", - [ - ( - [1, 2, 3], - [1.0, 1.0e-5, 0.1], - [0, 0], - ), - ( - [4, 1, 3], - [1.0, 0.5, 0.0], - [1, 1], - ), - ], -) -def test_active_region_list(active_segment_list, scaling_factor_list, smooth_ranges): - correlations = [ - { - "name": "CORR1_SEGMENT", - "obs_group": { - "add": ["OBS1"], - }, - "param_group": { - "add": ["*"], - }, - "field_scale": { - "method": "segment", - "segment_filename": "Region.GRDECL", - "param_name": "Region", - "active_segments": active_segment_list, - "scalingfactors": scaling_factor_list, - "smooth_ranges": smooth_ranges, - }, - }, - ] - log_level = 2 - conf = LocalisationConfig( - observations=["OBS1"], - parameters=["PARAM_NODE1"], - log_level=log_level, - correlations=correlations, - ) - assert conf.correlations[0].field_scale.active_segments == active_segment_list - assert conf.correlations[0].field_scale.scalingfactors == scaling_factor_list - assert conf.correlations[0].field_scale.smooth_ranges == smooth_ranges - assert conf.correlations[0].field_scale.segment_filename == "Region.GRDECL" - assert conf.correlations[0].field_scale.param_name == "Region" - - -@pytest.mark.parametrize( - "active_segment_list, scaling_factor_list, smooth_ranges, expected_error", - [ - ( - [1, -2, 3], - [1.0, 0.5, 0.1], - [0, 0], - "2 validation errors for LocalisationConfig", - ), - ( - [1, 2, 3], - [-1.0, -0.5, 0.1], - [1, 1], - "3 validation errors for LocalisationConfig", - ), - ( - [1, 2, 3], - [1.0, 0.5, 0.1], - [1, -1], - "1 validation error for LocalisationConfig", - ), - ( - [1, 2, 3, 4], - [1.0, 0.5, 0.1], - [1, 1], - "The specified length of 'active_segments' list", - ), - ( - [1, 2, 4], - [1.0, 0.5, 0.1, 0.0], - [1, 1], - "The specified length of 'active_segments' list", - ), - ( - [1, 2, 4], - [1.0, 0.5, 0.1], - [1, 1, 2], - "1 validation error for LocalisationConfig", - ), - ], -) -def test_active_region_list_mismatch( - active_segment_list, scaling_factor_list, smooth_ranges, expected_error -): - correlations = [ - { - "name": "CORR1_SEGMENT", - "obs_group": { - "add": ["OBS1"], - }, - "param_group": { - "add": ["*"], - }, - "field_scale": { - "method": "segment", - "segment_filename": "Region.GRDECL", - "param_name": "Region", - "active_segments": active_segment_list, - "scalingfactors": scaling_factor_list, - "smooth_ranges": smooth_ranges, - }, - }, - ] - log_level = 2 - with pytest.raises(ValueError, match=expected_error): - LocalisationConfig( - observations=["OBS1"], - parameters=["PARAM_NODE1"], - log_level=log_level, - correlations=correlations, - ) - - -def test_invalid_keyword_errors_method_segment(): - expected_error = "Extra inputs are not permitted" - correlations = [ - { - "name": "CORR1_SEGMENT", - "obs_group": { - "add": ["OBS1"], - }, - "param_group": { - "add": ["*"], - }, - "field_scale": { - "method": "segment", - "segment_filename": "Region.GRDECL", - "param_name": 
"Region", - "active_segments": [1, 2, 3], - "scalingfactors": [1.0, 0.5, 0.05], - "smooth_ranges": [0, 0], - "dummy1": "unused1", - "dummy2": "unused2", - }, - }, - ] - log_level = 2 - with pytest.raises(ValueError, match=expected_error): - LocalisationConfig( - observations=["OBS1"], - parameters=["PARAM_NODE1"], - log_level=log_level, - correlations=correlations, - ) - - -def test_invalid_keyword_errors_method_from_file(): - expected_error = "Extra inputs are not permitted" - correlations = [ - { - "name": "CORR1_FROM_FILE", - "obs_group": { - "add": ["OBS1"], - }, - "param_group": { - "add": ["*"], - }, - "field_scale": { - "method": "from_file", - "segment_filename": "dummy.GRDECL", - "param_name": "Scaling", - "active_segments": [1, 2, 3], - }, - }, - ] - log_level = 2 - with pytest.raises(ValueError, match=expected_error): - LocalisationConfig( - observations=["OBS1"], - parameters=["PARAM_NODE1"], - log_level=log_level, - correlations=correlations, - ) - - -def test_invalid_keyword_errors_in_obs_group_or_param_group(): - expected_error = "Extra inputs are not permitted" - correlations = [ - { - "name": "CORR1", - "obs_group": { - "add": ["OBS1"], - "unknown_obs_keyword": "dummy", - }, - "param_group": { - "add": ["*"], - "unknown_param_keyword": "dummy", - }, - "field_scale": { - "method": "from_file", - "segment_filename": "dummy.GRDECL", - "param_name": "Scaling", - "active_segments": [1, 2, 3], - }, - }, - ] - log_level = 2 - with pytest.raises(ValueError, match=expected_error): - LocalisationConfig( - observations=["OBS1"], - parameters=["PARAM_NODE1"], - log_level=log_level, - correlations=correlations, - ) - - -def test_missing_keyword_errors_method_gaussian_decay(): - expected_error = ( - "3 validation errors for LocalisationConfig\n" - "correlations.0.perp_range[\\s\\S]*" - "correlations.0.azimuth[\\s\\S]*" - "correlations.0.ref_point" - ) - correlations = [ - { - "name": "CORR1_SEGMENT", - "obs_group": { - "add": ["OBS1"], - }, - "param_group": { - "add": ["*"], - }, - "field_scale": { - "method": "gaussian_decay", - "main_range": 1000, - }, - }, - ] - log_level = 2 - with pytest.raises(ValueError, match=expected_error): - LocalisationConfig( - observations=["OBS1"], - parameters=["PARAM_NODE1"], - log_level=log_level, - correlations=correlations, - ) diff --git a/tests/workflows/localisation/test_configs/test_field_config.py b/tests/workflows/localisation/test_configs/test_field_config.py deleted file mode 100644 index a26a09362..000000000 --- a/tests/workflows/localisation/test_configs/test_field_config.py +++ /dev/null @@ -1,238 +0,0 @@ -import pathlib -from unittest.mock import MagicMock - -import pydantic -import pytest -from hypothesis import given -from hypothesis import strategies as st - -from semeio.workflows.localisation.localisation_config import ( - CorrelationConfig, - ExponentialConfig, - GaussianConfig, -) - - -@given( - azimuth=st.one_of( - st.integers(min_value=0, max_value=360), - st.floats( - min_value=0, max_value=360, width=64, exclude_min=False, exclude_max=False - ), - ) -) -@pytest.mark.parametrize( - "config_class, method", - [(GaussianConfig, "gaussian_decay"), (ExponentialConfig, "exponential_decay")], -) -def test_gaussian_config_valid_angle(azimuth, config_class, method): - config = config_class( - method=method, - main_range=0.1, - perp_range=0.1, - ref_point=[100.0, 120.0], - azimuth=azimuth, - ) - assert config.azimuth == azimuth - - -@pytest.mark.parametrize( - "config_class, method", - [(GaussianConfig, "gaussian_decay"), 
(ExponentialConfig, "exponential_decay")], -) -@pytest.mark.parametrize( - "azimuth, expected_error", - [ - (-0.0001, "Input should be greater than or equal to 0"), - (360.0001, "Input should be less than or equal to 360"), - ], -) -def test_invalid_angle(config_class, method, azimuth, expected_error): - with pytest.raises(pydantic.ValidationError, match=expected_error): - config_class( - method=method, - main_range=0.1, - perp_range=0.1, - ref_point=[0.0, 10.0], - azimuth=azimuth, - ) - - -@pytest.mark.parametrize("decay_method", ["gaussian_decay", "exponential_decay"]) -def test_field_config_init(decay_method): - field_config = { - "method": decay_method, - "main_range": 0.1, - "perp_range": 0.1, - "azimuth": 10, - "ref_point": [0.0, 100.0], - } - obs_config = { - "add": "*", - } - param_group = {"add": "*"} - config = CorrelationConfig( - name="a_name", - obs_group=obs_config, - param_group=param_group, - field_scale=field_config, - obs_context=["a", "b", "c"], - params_context=["a", "b", "c"], - ) - assert config.field_scale.method == decay_method - - -@pytest.mark.parametrize( - "field_config, surface_config", - [ - pytest.param( - { - "method": "gaussian_decay", - "main_range": 0.1, - "perp_range": 0.1, - "azimuth": 10, - "ref_point": [100.0, 100.0], - }, - None, - id="Field scale, not surface scale", - ), - pytest.param( - None, - { - "method": "gaussian_decay", - "main_range": 0.1, - "perp_range": 0.1, - "azimuth": 10, - "ref_point": [10.0, 10.0], - "surface_file": "surface_file.txt", - }, - id="Surface scale, not field scale", - ), - pytest.param( - { - "method": "gaussian_decay", - "main_range": 0.1, - "perp_range": 0.1, - "azimuth": 10, - "ref_point": [100.0, 100.0], - }, - { - "method": "gaussian_decay", - "main_range": 0.1, - "perp_range": 0.1, - "azimuth": 10, - "ref_point": [10.0, 10.0], - "surface_file": "surface_file.txt", - }, - id="Field scale and surface scale", - ), - ], -) -def test_correlation_config_ref_point(field_config, surface_config, monkeypatch): - monkeypatch.setattr(pathlib.Path, "exists", MagicMock()) - - config_dict = { - "name": "random", - "obs_group": { - "add": "*", - }, - "param_group": {"add": "*"}, - "obs_context": ["a", "b", "c"], - "params_context": ["a", "b", "c"], - } - if field_config: - config_dict["field_scale"] = field_config - config = CorrelationConfig(**config_dict) - assert config.field_scale.ref_point == [100.0, 100.0] - if surface_config: - config_dict["surface_scale"] = surface_config - config = CorrelationConfig(**config_dict) - assert config.surface_scale.ref_point == [10.0, 10.0] - - -@pytest.mark.parametrize( - "field_config, surface_config", - [ - pytest.param( - { - "method": "gaussian_decay", - "main_range": 0.1, - "perp_range": 0.1, - "azimuth": 10, - }, - None, - id="Field scale, not surface scale", - ), - pytest.param( - None, - { - "method": "gaussian_decay", - "main_range": 0.1, - "perp_range": 0.1, - "azimuth": 10, - "surface_file": "surface_file.txt", - }, - id="Surface scale, not field scale", - ), - pytest.param( - { - "method": "gaussian_decay", - "main_range": 0.1, - "perp_range": 0.1, - "azimuth": 10, - }, - { - "method": "gaussian_decay", - "main_range": 0.1, - "perp_range": 0.1, - "azimuth": 10, - "surface_file": "surface_file.txt", - }, - id="Field scale and surface scale", - ), - ], -) -def test_correlation_config_no_ref_point(field_config, surface_config, monkeypatch): - monkeypatch.setattr(pathlib.Path, "exists", MagicMock()) - - config_dict = { - "name": "random", - "obs_group": { - "add": "*", - }, - 
"param_group": {"add": "*"}, - "obs_context": ["a", "b", "c"], - "params_context": ["a", "b", "c"], - } - if field_config: - config_dict["field_scale"] = field_config - with pytest.raises(ValueError, match="1 validation error[\\s\\S]+ref_point"): - CorrelationConfig(**config_dict) - - if surface_config: - config_dict["surface_scale"] = surface_config - with pytest.raises(ValueError, match="1 validation error[\\s\\S]+ref_point"): - CorrelationConfig(**config_dict) - - -def test_invalid_field_config_init(): - field_config = { - "method": "not_implemented_method", - "main_range": 0.1, - "perp_range": 0.1, - "azimuth": 10, - "ref_point": [0.0, 10.0], - } - obs_config = { - "add": "*", - } - param_group = {"add": "*"} - with pytest.raises(ValueError, match="Unknown method: not_implemented_method"): - CorrelationConfig( - name="a_name", - obs_group=obs_config, - param_group=param_group, - field_scale=field_config, - obs_context=["a", "b", "c"], - params_context=["a", "b", "c"], - ) diff --git a/tests/workflows/localisation/test_configs/test_obs_config.py b/tests/workflows/localisation/test_configs/test_obs_config.py deleted file mode 100644 index fb06d8113..000000000 --- a/tests/workflows/localisation/test_configs/test_obs_config.py +++ /dev/null @@ -1,8 +0,0 @@ -from semeio.workflows.localisation.localisation_config import ObsConfig - - -def test_obs_config(): - config_dict = {"add": "*", "context": ["a", "b", "c"]} - - config = ObsConfig(**config_dict) - assert config.result_items == ["a", "b", "c"] diff --git a/tests/workflows/localisation/test_configs/test_parameters.py b/tests/workflows/localisation/test_configs/test_parameters.py deleted file mode 100644 index 2bde96de5..000000000 --- a/tests/workflows/localisation/test_configs/test_parameters.py +++ /dev/null @@ -1,24 +0,0 @@ -import pytest - -from semeio.workflows.localisation.local_script_lib import Parameters - - -@pytest.mark.parametrize( - "input_list", (["A", "B:C"], ["A", "C"], ["A:1", "A:2", "A:3", "C"]) -) -def test_from_list(input_list): - parameters = Parameters.from_list(input_list) - assert parameters.to_list() == input_list - - -@pytest.mark.parametrize( - "input_list, expected_error", - ( - [["A:B:C"], "Too many : in A:B:C"], - (["A", "A:1"], "did not expect parameters, found 1"), - (["A:1", "A"], r"found A in {'A': \['1'\]}, but did not find parameters"), - ), -) -def test_from_list_error(input_list, expected_error): - with pytest.raises(ValueError, match=expected_error): - Parameters.from_list(input_list) diff --git a/tests/workflows/localisation/test_integration.py b/tests/workflows/localisation/test_integration.py deleted file mode 100644 index 0f8d05c3e..000000000 --- a/tests/workflows/localisation/test_integration.py +++ /dev/null @@ -1,526 +0,0 @@ -import itertools - -import numpy as np -import pytest -import xtgeo -import yaml -from ert import LibresFacade -from ert.storage import open_storage -from xtgeo import RegularSurface - -from semeio.workflows.localisation.local_config_script import LocalisationConfigJob - -# pylint: disable=invalid-name - - -@pytest.mark.usefixtures("setup_poly_ert") -def test_localisation_surf(): - # pylint: disable=too-many-locals - with open("poly.ert", "a", encoding="utf-8") as fout: - fout.write( - "SURFACE PARAM_SURF_A OUTPUT_FILE:surf.txt " - "INIT_FILES:surf%d.txt BASE_SURFACE:surf0.txt" - ) - nreal = 20 - ncol = 10 - nrow = 10 - rotation = 0.0 - xinc = 50.0 - yinc = 50.0 - xori = 0.0 - yori = 0.0 - values = np.zeros(nrow * ncol) - for n in range(nreal): - filename = "surf" + str(n) 
+ ".txt" - delta = 0.1 - for j in range(nrow): - for i in range(ncol): - index = i + j * ncol - values[index] = float(j) + n * delta - surface = RegularSurface( - ncol=ncol, - nrow=nrow, - xinc=xinc, - yinc=yinc, - xori=xori, - yori=yori, - rotation=rotation, - values=values, - ) - surface.to_file(filename, fformat="irap_ascii") - - ert = LibresFacade.from_config_file("poly.ert") - config = { - "log_level": 3, - "correlations": [ - { - "name": "CORR1", - "obs_group": { - "add": "*", - }, - "param_group": { - "add": "*", - }, - "surface_scale": { - "method": "gaussian_decay", - "main_range": 1700, - "perp_range": 850, - "azimuth": 200, - "ref_point": [250, 250], - "surface_file": "surf0.txt", - }, - }, - ], - } - - with open("local_config.yaml", "w", encoding="utf-8") as fout: - yaml.dump(config, fout) - with open_storage(ert.enspath, "w") as storage: - ert.run_ertscript( - LocalisationConfigJob, - storage, - storage.create_experiment().create_ensemble( - name="default", ensemble_size=ert.get_ensemble_size() - ), - "local_config.yaml", - ) - - -@pytest.mark.usefixtures("setup_poly_ert") -def test_localisation_surf_const(): - # pylint: disable=too-many-locals - with open("poly.ert", "a", encoding="utf-8") as fout: - fout.write( - "SURFACE PARAM_SURF_A OUTPUT_FILE:surf.txt " - "INIT_FILES:surf%d.txt BASE_SURFACE:surf0.txt" - ) - nreal = 20 - ncol = 10 - nrow = 10 - rotation = 0.0 - xinc = 50.0 - yinc = 50.0 - xori = 0.0 - yori = 0.0 - values = np.zeros(nrow * ncol) - for n in range(nreal): - filename = "surf" + str(n) + ".txt" - delta = 0.1 - for j in range(nrow): - for i in range(ncol): - index = i + j * ncol - values[index] = float(j) + n * delta - surface = RegularSurface( - ncol=ncol, - nrow=nrow, - xinc=xinc, - yinc=yinc, - xori=xori, - yori=yori, - rotation=rotation, - values=values, - ) - surface.to_file(filename, fformat="irap_ascii") - - ert = LibresFacade.from_config_file("poly.ert") - config = { - "log_level": 3, - "correlations": [ - { - "name": "CORR1", - "obs_group": { - "add": "*", - }, - "param_group": { - "add": "*", - }, - "surface_scale": { - "method": "constant", - "value": 1.0, - "surface_file": "surf0.txt", - }, - }, - ], - } - - with open("local_config.yaml", "w", encoding="utf-8") as fout: - yaml.dump(config, fout) - with open_storage(ert.enspath, "w") as storage: - ert.run_ertscript( - LocalisationConfigJob, - storage, - storage.create_experiment().create_ensemble( - name="default", ensemble_size=ert.get_ensemble_size() - ), - "local_config.yaml", - ) - - -# This test and the test test_localisation_field2 are similar, -# but the first test a case with multiple fields and multiple -# ministeps where write_scaling_factor is activated and one -# file is written per ministep. 
-# Test case 2 tests four different methods for defining scaling factors for fields -@pytest.mark.usefixtures("setup_poly_ert") -def test_localisation_field1(): - # pylint: disable=too-many-locals - - # Make a 3D grid with no inactive cells - grid_filename = "grid3D.EGRID" - grid = create_box_grid_with_inactive_and_active_cells( - grid_filename, has_inactive_values=False - ) - - nreal = 20 - (nx, ny, nz) = grid.dimensions - with open("poly.ert", "a", encoding="utf-8") as fout: - fout.write(f"GRID {grid_filename}\n") - - property_names = ["G1", "G2", "G3", "G4", "G5", "G6"] - for pname in property_names: - filename_output = pname + ".roff" - filename_input = pname + "_%d.roff" - values = np.zeros((nx, ny, nz), dtype=np.float32) - property_field = xtgeo.GridProperty(grid, values=0.0, name=pname) - for n in range(nreal): - values = np.zeros((nx, ny, nz), dtype=np.float32) - property_field.values = values + 0.1 * n - filename = pname + "_" + str(n) + ".roff" - print(f"Write file: {filename}") - property_field.to_file(filename, fformat="roff", name=pname) - - fout.write( - f"FIELD {pname} PARAMETER {filename_output} " - f"INIT_FILES:{filename_input} MIN:-5.5 MAX:5.5 " - "FORWARD_INIT:False\n" - ) - - ert = LibresFacade.from_config_file("poly.ert") - config = { - "log_level": 3, - "write_scaling_factors": True, - "correlations": [ - { - "name": "CORR1", - "obs_group": { - "add": "*", - }, - "param_group": { - "add": ["G1", "G2"], - }, - "field_scale": { - "method": "gaussian_decay", - "main_range": 1700, - "perp_range": 850, - "azimuth": 200, - "ref_point": [700, 370], - }, - }, - { - "name": "CORR2", - "obs_group": { - "add": "*", - }, - "param_group": { - "add": ["G3"], - }, - "field_scale": { - "method": "const_gaussian_decay", - "main_range": 1000, - "perp_range": 950, - "azimuth": 100, - "ref_point": [700, 370], - }, - }, - { - "name": "CORR3", - "obs_group": { - "add": "*", - }, - "param_group": { - "add": ["G4"], - }, - "field_scale": { - "method": "const_exponential_decay", - "main_range": 1000, - "perp_range": 950, - "azimuth": 100, - "ref_point": [700, 370], - "normalised_tapering_range": 1.2, - "cutoff": True, - }, - }, - { - "name": "CORR4", - "obs_group": { - "add": "*", - }, - "param_group": { - "add": ["G5"], - }, - "field_scale": { - "method": "constant", - "value": 1.0, - }, - }, - ], - } - - with open("local_config.yaml", "w", encoding="utf-8") as fout: - yaml.dump(config, fout) - with open_storage(ert.enspath, "w") as storage: - ert.run_ertscript( - LocalisationConfigJob, - storage, - storage.create_experiment().create_ensemble( - name="default", ensemble_size=ert.get_ensemble_size() - ), - "local_config.yaml", - ) - - -def create_box_grid_with_inactive_and_active_cells( - output_grid_file, has_inactive_values=True -): - # pylint: disable=too-many-locals - nx = 30 - ny = 25 - nz = 3 - xinc = 50.0 - yinc = 50.0 - zinc = 10.0 - xori = 0.0 - yori = 0.0 - grid = xtgeo.create_box_grid( - dimension=(nx, ny, nz), - origin=(xori, yori, 0.0), - increment=(xinc, yinc, zinc), - rotation=0.0, - flip=1, - ) - # Create a polygon file to use to set some grid cells inactive - with open("polygon.txt", "w", encoding="utf-8") as fout: - x = [] - y = [] - x.append(xori + 5 * xinc) - y.append(yori + 5 * yinc) - - x.append(xori + (nx - 6) * xinc) - y.append(yori + 5 * yinc) - - x.append(xori + (nx - 6) * xinc) - y.append(yori + (ny - 6) * yinc) - - x.append(xori + 5 * xinc) - y.append(yori + (ny - 6) * yinc) - - x.append(xori + 5 * xinc) - y.append(yori + 5 * yinc) - - for i in range(5): - 
fout.write(f" {x[i]} {y[i]} {zinc}\n") - - polygon = xtgeo.polygons_from_file("polygon.txt", fformat="xyz") - if has_inactive_values: - grid.inactivate_outside(polygon, force_close=True) - - print(f" Write file: {output_grid_file}") - grid.to_file(output_grid_file, fformat="egrid") - return grid - - -def create_region_parameter(filename, grid): - # Create a discrete parameter to represent a region parameter - region_param_name = "Region" - region_code_names = { - "RegionA": 1, - "RegionB": 2, - "RegionC": 3, - "RegionD": 4, - "RegionE": 5, - "RegionF": 6, - } - region_param = xtgeo.GridProperty( - grid, name=region_param_name, discrete=True, values=1 - ) - region_param.dtype = np.int32 - region_param.codes = region_code_names - (nx, ny, nz) = grid.dimensions - values = np.zeros((nx, ny, nz), dtype=np.int32) - values[:, :, :] = 0 - for k, j, i in itertools.product(range(nz), range(ny), range(nx)): - if 0 <= i <= nx / 2 and 0 <= j <= ny / 2: - if 0 <= k <= nz / 2: - values[i, j, k] = 2 - else: - values[i, j, k] = 5 - if nx / 2 + 1 <= i < nx and 0 <= j <= ny / 2: - if nz / 2 <= k < nz: - values[i, j, k] = 3 - else: - values[i, j, k] = 4 - if ny / 3 + 1 <= j < 2 * ny / 3 and nx / 3 <= i <= nx / 2: - if nz / 4 <= k < nz / 2: - values[i, j, k] = 6 - else: - values[i, j, k] = 4 - region_param.values = values - print(f"Write file: {filename}") - region_param.to_file(filename, fformat="grdecl", name=region_param_name) - - -def create_field_and_scaling_param_and_update_poly_ert( - poly_config_file, grid_filename, grid -): - # pylint: disable=too-many-locals,unused-argument - (nx, ny, nz) = grid.dimensions - property_names = ["FIELD1", "FIELD2", "FIELD3", "FIELD4", "FIELD5"] - scaling_names = ["SCALING1", "SCALING2", "SCALING3", "SCALING4", "SCALING5"] - nreal = 20 - nfields = len(property_names) - with open("poly.ert", "a", encoding="utf-8") as fout: - fout.write(f"GRID {grid_filename}\n") - for m in range(nfields): - property_name = property_names[m] - scaling_name = scaling_names[m] - filename_output = property_name + ".roff" - filename_input = property_name + "_%d.roff" - scaling_filename = scaling_name + ".GRDECL" - values = np.zeros((nx, ny, nz), dtype=np.float32) - property_field = xtgeo.GridProperty(grid, values=0.0, name=property_name) - scaling_field = xtgeo.GridProperty( - grid, values=0.5 + (m - 1) * 0.2, name=scaling_name - ) - for n in range(nreal): - values = np.zeros((nx, ny, nz), dtype=np.float32) - property_field.values = values + 0.1 * n - filename = property_name + "_" + str(n) + ".roff" - print(f"Write file: {filename}") - property_field.to_file(filename, fformat="roff", name=property_name) - print(f"Write file: {scaling_filename}\n") - scaling_field.to_file(scaling_filename, fformat="grdecl", name=scaling_name) - - fout.write( - f"FIELD {property_name} PARAMETER {filename_output} " - f"INIT_FILES:{filename_input} " - "MIN:-5.5 MAX:5.5 FORWARD_INIT:False\n" - ) - - -@pytest.mark.usefixtures("setup_poly_ert") -def test_localisation_field2(): - # Make a 3D grid with some inactive cells - grid_filename = "grid3D.EGRID" - grid = create_box_grid_with_inactive_and_active_cells(grid_filename) - - # Make some field parameters and some scalingfactor parameters - poly_config_file = "poly.ert" - create_field_and_scaling_param_and_update_poly_ert( - poly_config_file, grid_filename, grid - ) - - # Create a discrete parameter to represent a region parameter - segment_filename1 = "Region1.GRDECL" - create_region_parameter(segment_filename1, grid) - segment_filename2 = "Region2.GRDECL" - 
create_region_parameter(segment_filename2, grid) - - ert = LibresFacade.from_config_file("poly.ert") - config = { - "log_level": 3, - "write_scaling_factors": True, - "correlations": [ - { - "name": "CORR_GAUSSIAN", - "obs_group": { - "add": "*", - }, - "param_group": { - "add": "FIELD1", - }, - "field_scale": { - "method": "gaussian_decay", - "main_range": 700, - "perp_range": 150, - "azimuth": 30, - "ref_point": [500, 0], - }, - }, - { - "name": "CORR_FROM_FILE", - "obs_group": { - "add": "*", - }, - "param_group": { - "add": "FIELD2", - }, - "field_scale": { - "method": "from_file", - "filename": "SCALING2.GRDECL", - "param_name": "SCALING2", - }, - }, - { - "name": "CORR_SEGMENT1", - "obs_group": { - "add": "*", - }, - "param_group": { - "add": "FIELD3", - }, - "field_scale": { - "method": "segment", - "segment_filename": segment_filename1, - "param_name": "Region", - "active_segments": [1, 2, 4, 5], - "scalingfactors": [1.0, 1.5e-5, 0.3, 0.15], - "smooth_ranges": [2, 2], - }, - }, - { - "name": "CORR_SEGMENT2", - "obs_group": { - "add": "*", - }, - "param_group": { - "add": "FIELD4", - }, - "field_scale": { - "method": "segment", - "segment_filename": segment_filename1, - "param_name": "Region", - "active_segments": [1, 2, 4, 5], - "scalingfactors": [0.5, 1.0, 0.8, 0.05], - "smooth_ranges": [2, 2], - }, - }, - { - "name": "CORR_SEGMENT3", - "obs_group": { - "add": "*", - }, - "param_group": { - "add": "FIELD5", - }, - "field_scale": { - "method": "segment", - "segment_filename": segment_filename2, - "param_name": "Region", - "active_segments": [1, 3, 5], - "scalingfactors": [1.0, 0.5, 0.05], - }, - }, - ], - } - - with open("local_config.yaml", "w", encoding="utf-8") as fout: - yaml.dump(config, fout) - with open_storage(ert.enspath, "w") as storage: - ert.run_ertscript( - LocalisationConfigJob, - storage, - storage.create_experiment().create_ensemble( - name="default", ensemble_size=ert.get_ensemble_size() - ), - "local_config.yaml", - ) diff --git a/tests/workflows/localisation/test_methods.py b/tests/workflows/localisation/test_methods.py deleted file mode 100644 index 5fbb8ad1b..000000000 --- a/tests/workflows/localisation/test_methods.py +++ /dev/null @@ -1,293 +0,0 @@ -from collections import namedtuple - -import cwrap -import numpy as np -import pytest -from numpy import ma -from resdata.grid.rd_grid_generator import GridGenerator - -from semeio.workflows.localisation.local_script_lib import ( - ConstantScalingFactor, - ExponentialDecay, - GaussianDecay, - build_decay_object, - calculate_scaling_factors_in_regions, - smooth_parameter, -) - -# pylint: disable=invalid-name - -BoxDimensions = namedtuple("BoxDimensions", ["nx", "ny", "nz"]) -BoxIncrements = namedtuple("BoxIncrements", ["dx", "dy", "dz"]) - - -def create_box_grid( - dimensions=BoxDimensions(10, 10, 3), # noqa B008 - increments=BoxIncrements(50.0, 50.0, 10.0), # noqa B008 - use_actnum=False, -): - actnum = None - if use_actnum: - actnum = actnum_parameter(*dimensions) - grid = GridGenerator.create_rectangular(dimensions, increments, actnum=actnum) - - return grid, *dimensions - - -def actnum_parameter(nx, ny, nz): - actnum_param = np.ones((nz, ny, nx), dtype=np.int32) - - actnum_param[:, : ny // 2, :2] = 0 - - actnum_param[:, -3:, -2:] = 0 - - return actnum_param.ravel() - - -def create_region_parameter( - grid, dimensions, scaling_per_region=None, used_regions=None -): - # pylint: disable=too-many-locals - # Extracting dimensions from the named tuple - nx, ny, nz = dimensions - - # Create 3D meshgrids for indices 
using 'xy' indexing for Fortran order - j, i, k = np.meshgrid(np.arange(ny), np.arange(nx), np.arange(nz), indexing="xy") - - # Initialize region and scaling arrays - region_param = ma.zeros((ny, nx, nz), dtype=np.int32) - scaling_param = ma.zeros((ny, nx, nz), dtype=np.float32) - - # Generate the global index in Fortran order and get the active index - global_indices = i + j * nx + k * nx * ny - active_indices = np.vectorize(grid.get_active_index)(global_index=global_indices) - - # Set region values based on conditions - condition_1 = (i < nx / 2) & (j < ny / 2) - condition_2 = (i < nx / 2) & (j >= ny / 2) & (j < ny) - condition_3 = (i >= nx / 2) & (j >= ny / 3) & (j < 2 * ny / 3) - - region_param[condition_1] = 1 - region_param[condition_2] = 2 - region_param[condition_3] = 3 - - inactive = active_indices < 0 - region_param[inactive] = ma.masked - - # If scaling factors are provided - if scaling_per_region is not None: - scaling_param = np.take(scaling_per_region, region_param) - scaling_param[inactive] = ma.masked - - # If used regions are specified - if used_regions is not None: - mask_used = np.isin(region_param, used_regions, invert=True) - region_param[np.logical_and(mask_used, ~inactive)] = ma.masked - - return region_param.ravel(order="F"), scaling_param.ravel(order="F") - - -def create_parameter_from_decay_functions(method_name, grid): - # pylint: disable=too-many-locals - ref_pos = (250.0, 250.0) - main_range = 150.0 - perp_range = 100.0 - azimuth = 0.0 - tapering_range = 2 - use_cutoff = True - decay_obj = None - constant_value = 1.0 - - nx = grid.getNX() - ny = grid.getNY() - nz = grid.getNZ() - filename = "tmp_" + method_name + ".grdecl" - if method_name == "constant": - decay_obj = ConstantScalingFactor(constant_value) - else: - decay_obj = build_decay_object( - method_name, - ref_pos, - main_range, - perp_range, - azimuth, - grid, - tapering_range, - use_cutoff, - ) - data_size = nx * ny * nz - scaling_vector = np.zeros(data_size, dtype=np.float32) - for index in range(data_size): - scaling_vector[index] = decay_obj(index) - - scaling_values = np.zeros(nx * ny * nz, dtype=np.float32) - - for index in range(data_size): - global_index = grid.global_index(active_index=index) - scaling_values[global_index] = scaling_vector[index] - - scaling_values_3d = np.reshape(scaling_values, (nx, ny, nz), "F") - scaling_kw_name = "SCALING" - scaling_kw = grid.create_kw(scaling_values_3d, scaling_kw_name, False) - with cwrap.open(filename, "w") as file: - grid.write_grdecl(scaling_kw, file) - - return scaling_values - - -# The selected grid cell values below should have the same value for both -# the `GaussianDecay`` and the `ExponentialDecay`` method since the -# normalized distance is 1.0 for those grid cells not at the reference point grid -# cells at (4,4,0) (4,4,1) and (4,4,2). -# Values as reference point grid cells should be 1.0. -# index = 44 is (i,j,k) = (4,4,0) -# index = 144 is (i,j,k) = (4,4,1) -# index = 244 is (i,j,k) = (4,4,2) -# index = 49 is (i,j,k) = (9,4,0) -# index = 149 is (i,j,k) = (9,4,1) -# index = 174 is (i,j,k) = (4,7,1) -# index = 114 is (i,j,k) = (4,1,1) -@pytest.mark.parametrize( - "index_list, expected", - [ - pytest.param( - [44, 144, 244], - 1.0, - id=( - "cells at (4,4,0) (4,4,1) and (4,4,2). 
Values " - "as reference point grid cells should be 1.0" - ), - ), - pytest.param( - [49, 149], - 0.049787066876888275, - id=( - "Values at distance 5 grid cells aways (corresponding to 250 m) " - "which is 1 range in x direction which corresponds to the " - "perp_range since azimuth is 0." - ), - ), - pytest.param( - [114, 174], - 0.049787066876888275, - id=( - "Values at distance 3 grid cells away (corresponding to 150m) " - "which is 1 range in y direction which corresponds to the " - "main_range since azimuth is 0." - ), - ), - ], -) -@pytest.mark.parametrize( - "method", - [ - ExponentialDecay, - GaussianDecay, - ], -) -def test_exponentialtype_decay_functions(method, index_list, expected): - # pylint: disable=too-many-locals - ref_pos = [225.0, 225.0] - grid, nx, ny, nz = create_box_grid() - - main_range = 150.0 - perp_range = 250.0 - azimuth = 0.0 - use_cutoff = False - decay_obj = method( - ref_pos, - main_range, - perp_range, - azimuth, - grid, - use_cutoff, - ) - - scaling_vector = np.zeros(nx * ny * nz, dtype=np.float32) - j, i, k = np.meshgrid(np.arange(ny), np.arange(nx), np.arange(nz), indexing="xy") - global_indices = i + j * nx + k * nx * ny - scaling_vector[global_indices] = np.vectorize(decay_obj)(global_indices) - - result = scaling_vector[index_list] - assert (result == np.ones(len(index_list)) * expected).all() - - -def test_calculate_scaling_factors_in_regions(snapshot): - """ - Test calculation of scaling factor parameter for regions. - For visual QC of the calculated scaling factors, write the grid to EGRID format - and the parameter to GRDECL format for easy import into a visualization tool. - """ - grid, nx, ny, nz = create_box_grid(use_actnum=True) - region_param_masked, _ = create_region_parameter(grid, BoxDimensions(nx, ny, nz)) - active_segment_list = [1, 2, 3] - scaling_value_list = [1.0, 0.5, 0.2] - smooth_range_list = None - ( - scaling_factor_param, - active_region_values_used_masked, - _, - ) = calculate_scaling_factors_in_regions( - grid, - region_param_masked, - active_segment_list, - scaling_value_list, - smooth_range_list, - ) - active_region_param = np.zeros(nx * ny * nz, dtype=np.int32) - active = ~active_region_values_used_masked.mask - active_region_param[active] = active_region_values_used_masked[active] - - region_param = np.zeros(nx * ny * nz, dtype=np.int32) - active_specified = ~region_param_masked.mask - region_param[active_specified] = region_param_masked[active_specified] - - snapshot.assert_match(str(scaling_factor_param), "testdata_scaling.txt") - - -def test_smooth_parameter(snapshot): - grid, nx, ny, nz = create_box_grid(use_actnum=True) - - scaling_per_region = [0, 1.0, 0.5, 0.2] - region_param_used, scaling_param = create_region_parameter( - grid, - BoxDimensions(nx, ny, nz), - scaling_per_region=scaling_per_region, - used_regions=[1, 2, 3], - ) - - smooth_range_list = [1, 1] - smooth_param = smooth_parameter( - grid, smooth_range_list, scaling_param, region_param_used - ) - - snapshot.assert_match(str(smooth_param), "testdata_scaling_smooth.txt") - - -def test_decay_function_with_new_options(snapshot): - grid, _, _, _ = create_box_grid( - dimensions=BoxDimensions(25, 25, 10), - increments=BoxIncrements(20.0, 20.0, 10.0), - use_actnum=False, - ) - grid.save_EGRID("tmp_grid.EGRID") - - method_name = "const_gaussian_decay" - scaling_values = create_parameter_from_decay_functions(method_name, grid) - snapshot.assert_match(str(scaling_values), "testdata_scaling_decay_method1.txt") - - method_name = "const_exponential_decay" - 
scaling_values = create_parameter_from_decay_functions(method_name, grid) - snapshot.assert_match(str(scaling_values), "testdata_scaling_decay_method2.txt") - - method_name = "gaussian_decay" - scaling_values = create_parameter_from_decay_functions(method_name, grid) - snapshot.assert_match(str(scaling_values), "testdata_scaling_decay_method3.txt") - - method_name = "exponential_decay" - scaling_values = create_parameter_from_decay_functions(method_name, grid) - snapshot.assert_match(str(scaling_values), "testdata_scaling_decay_method4.txt") - - method_name = "constant" - scaling_values = create_parameter_from_decay_functions(method_name, grid) - snapshot.assert_match(str(scaling_values), "testdata_scaling_decay_method5.txt") diff --git a/tests/workflows/localisation/test_valid_rst.py b/tests/workflows/localisation/test_valid_rst.py deleted file mode 100644 index 1ace96256..000000000 --- a/tests/workflows/localisation/test_valid_rst.py +++ /dev/null @@ -1,13 +0,0 @@ -import pytest -import rstcheck_core.checker - -from semeio.workflows.localisation.local_config_script import DESCRIPTION, EXAMPLES - - -@pytest.mark.parametrize("rst_text", [DESCRIPTION, EXAMPLES]) -def test_valid_rst(rst_text): - """ - Check that the documentation passed through the plugin system is - valid rst - """ - assert not list(rstcheck_core.checker.check_source(rst_text)) From 832ceb6d8cf748b3a49b1db5cfa9d3e2fec35e56 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=98yvind=20Eide?= Date: Mon, 25 Nov 2024 13:19:17 +0100 Subject: [PATCH 2/2] Drop support for python 3.8 --- .github/workflows/testing.yml | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 69238db02..f0b9094e7 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -17,7 +17,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: [ "3.8", "3.9", "3.10", "3.11", "3.12" ] + python-version: ["3.10", "3.11", "3.12" ] os: [ ubuntu-latest ] include: - python-version: "3.12" @@ -28,10 +28,6 @@ jobs: with: fetch-depth: 0 - - name: Install HDF5 source files - if: runner.os == 'macOS' - run: brew install hdf5 - - uses: actions/setup-python@v5 with: python-version: "${{ matrix.python-version }}"