From 491261c098e95c96666d1c28da38ccbcc401bb75 Mon Sep 17 00:00:00 2001
From: bvandekerkhof
Date: Wed, 13 Mar 2024 11:49:38 +0100
Subject: [PATCH] Black edits

Signed-off-by: bvandekerkhof
---
 src/pyelq/component/background.py          |  7 +++---
 src/pyelq/sensor/sensor.py                 |  2 +-
 tests/conftest.py                          |  6 +++--
 tests/sensor/test_sensorgroup.py           |  4 ++--
 .../test_spatio_temporal_interpolation.py  | 24 ++++++++++++-------
 tests/test_gaussian_plume.py               | 17 +++++++------
 tests/test_meteorology.py                  |  8 ++++---
 tests/test_preprocessing.py                |  6 ++---
 8 files changed, 45 insertions(+), 29 deletions(-)

diff --git a/src/pyelq/component/background.py b/src/pyelq/component/background.py
index 6bf4c35..e7b3782 100644
--- a/src/pyelq/component/background.py
+++ b/src/pyelq/component/background.py
@@ -176,7 +176,7 @@ def initialise(self, sensor_object: SensorGroup, meteorology: MeteorologyGroup,
         """
         self.n_obs = sensor_object.nof_observations
         self.time, unique_inverse = np.unique(sensor_object.time, return_inverse=True)
-        self.time = pd.array(self.time, dtype='datetime64[ns]')
+        self.time = pd.array(self.time, dtype="datetime64[ns]")
         self.n_parameter = len(self.time)
         self.basis_matrix = sparse.csr_array((np.ones(self.n_obs), (np.array(range(self.n_obs)), unique_inverse)))
         self.precision_matrix = gmrf.precision_temporal(time=self.time)
@@ -300,12 +300,13 @@ def make_temporal_knots(self, sensor_object: SensorGroup):

         """
         if self.n_time is None:
-            self.time = pd.array(np.unique(sensor_object.time), dtype='datetime64[ns]')
+            self.time = pd.array(np.unique(sensor_object.time), dtype="datetime64[ns]")
             self.n_time = len(self.time)
         else:
             self.time = pd.array(
                 pd.date_range(start=np.min(sensor_object.time), end=np.max(sensor_object.time), periods=self.n_time),
-                dtype='datetime64[ns]')
+                dtype="datetime64[ns]",
+            )

     def make_spatial_knots(self, sensor_object: SensorGroup):
         """Create the spatial grid for the model.
diff --git a/src/pyelq/sensor/sensor.py b/src/pyelq/sensor/sensor.py
index 07ccd6a..2f352c7 100644
--- a/src/pyelq/sensor/sensor.py
+++ b/src/pyelq/sensor/sensor.py
@@ -137,7 +137,7 @@ def concentration(self) -> np.ndarray:
     @property
     def time(self) -> pd.arrays.DatetimeArray:
         """DatetimeArray: Column vector of time values across all sensors."""
-        return pd.array(np.concatenate([sensor.time for sensor in self.values()]), dtype='datetime64[ns]')
+        return pd.array(np.concatenate([sensor.time for sensor in self.values()]), dtype="datetime64[ns]")

     @property
     def location(self) -> Coordinate:
diff --git a/tests/conftest.py b/tests/conftest.py
index 9249297..5fe09ad 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -60,7 +60,8 @@ def fix_sensor_group(request, ref_longitude, ref_latitude):
         sensor[device_name] = Sensor()
         sensor[device_name].time = pd.array(
             pd.date_range(start=datetime.now(), end=datetime.now() + timedelta(hours=1.0), periods=n_time),
-            dtype='datetime64[ns]')
+            dtype="datetime64[ns]",
+        )
         sensor[device_name].concentration = np.random.random_sample(size=(n_time,))
         sensor[device_name].location = ENU(
             east=locations[k, 0],
@@ -77,7 +78,8 @@ def fix_sensor_group(request, ref_longitude, ref_latitude):
         sensor[device_name] = Beam()
         sensor[device_name].time = pd.array(
             pd.date_range(start=datetime.now(), end=datetime.now() + timedelta(hours=1.0), periods=n_time),
-            dtype='datetime64[ns]')
+            dtype="datetime64[ns]",
+        )
         sensor[device_name].concentration = np.random.random_sample(size=(n_time,))
         sensor[device_name].location = ENU(
             east=np.array([0, locations[k, 0]]),
diff --git a/tests/sensor/test_sensorgroup.py b/tests/sensor/test_sensorgroup.py
index 3810bfa..e66fe69 100644
--- a/tests/sensor/test_sensorgroup.py
+++ b/tests/sensor/test_sensorgroup.py
@@ -28,7 +28,7 @@ def test_sensorgroup():
         nof_observations = np.random.randint(1, 10)
         total_observations += nof_observations
         sensor.concentration = np.random.rand(nof_observations, 1)
-        sensor.time = pd.array(pd.date_range(start="1/1/2022", periods=nof_observations), dtype='datetime64[ns]')
+        sensor.time = pd.array(pd.date_range(start="1/1/2022", periods=nof_observations), dtype="datetime64[ns]")
         sensor.location = LLA(
             latitude=0.01 * np.random.rand(), longitude=0.01 * np.random.rand(), altitude=0.01 * np.random.rand()
         )
@@ -57,7 +57,7 @@ def test_plotting():
         nof_observations = np.random.randint(5, 10)
         total_observations += nof_observations
         sensor.concentration = np.random.rand(nof_observations, 1)
-        sensor.time = pd.array(pd.date_range(start="1/1/2022", periods=nof_observations), dtype='datetime64[ns]')
+        sensor.time = pd.array(pd.date_range(start="1/1/2022", periods=nof_observations), dtype="datetime64[ns]")
         location = LLA()
         location.latitude = np.array(idx)
         location.longitude = np.array(idx)
diff --git a/tests/support_functions/test_spatio_temporal_interpolation.py b/tests/support_functions/test_spatio_temporal_interpolation.py
index 5f3b419..1aa1de0 100644
--- a/tests/support_functions/test_spatio_temporal_interpolation.py
+++ b/tests/support_functions/test_spatio_temporal_interpolation.py
@@ -30,7 +30,9 @@ def test_default_returns():
     same."""
     loc_in = np.array([[0, 0, 0], [1, 1, 1]])
-    time_in = pd.array(pd.date_range(pd.Timestamp.now(), periods=loc_in.shape[0], freq="h"), dtype='datetime64[ns]')[:, None]
+    time_in = pd.array(pd.date_range(pd.Timestamp.now(), periods=loc_in.shape[0], freq="h"), dtype="datetime64[ns]")[
+        :, None
+    ]
     vals = np.random.random((loc_in.shape[0], 1))

     # check if same input/output locations and time give the same answer
     return_vals = sti.interpolate(
@@ -47,7 +49,7 @@ def test_single_value():
     """Tests if all interpolated values are set to the same value when 1 input value is provided."""
     loc_in = np.array([[0, 0, 0], [1, 1, 1]])
     n_obs = loc_in.shape[0]
-    time_in = pd.array(pd.date_range(pd.Timestamp.now(), periods=n_obs, freq="h"), dtype='datetime64[ns]')[:, None]
+    time_in = pd.array(pd.date_range(pd.Timestamp.now(), periods=n_obs, freq="h"), dtype="datetime64[ns]")[:, None]
     vals = np.random.random((loc_in.shape[0], 1))

     # Check if we get the same output for all values when 1 value is provided
@@ -73,7 +75,7 @@ def test_temporal_interpolation():
     interpolation in 1d) Also checks if we get the same values when an array of integers (representing seconds) is
     supplied instead of an array of datetimes."""
     periods = 10
-    time_in = pd.array(pd.date_range(pd.Timestamp.now(), periods=periods, freq="s"), dtype='datetime64[ns]')[:, None]
+    time_in = pd.array(pd.date_range(pd.Timestamp.now(), periods=periods, freq="s"), dtype="datetime64[ns]")[:, None]
     time_in_array = np.array(range(periods))[:, None]
     vals = np.random.random(time_in.size)
     random_index = np.random.randint(0, periods - 1)
@@ -133,13 +135,17 @@ def test_fill_value():
 def test_consistent_shapes():
     """Test if output shapes are consistent with provided input."""
     loc_in = np.array([[0, 0, 0], [1, 1, 1]])
-    time_in = pd.array(pd.date_range(pd.Timestamp.now(), periods=loc_in.shape[0] - 1, freq="h"), dtype='datetime64[ns]')[:, None]
+    time_in = pd.array(
+        pd.date_range(pd.Timestamp.now(), periods=loc_in.shape[0] - 1, freq="h"), dtype="datetime64[ns]"
+    )[:, None]
     vals = np.random.random((loc_in.shape[0], 1))
     with pytest.raises(ValueError):
         sti.interpolate(location_in=loc_in, time_in=time_in, values_in=vals, location_out=loc_in, time_out=time_in)

     loc_in = np.array([[0, 0, 0], [0, 1, 0], [1, 0.5, 0], [0.5, 0.5, 1]])
-    time_in = pd.array(pd.date_range(pd.Timestamp.now(), periods=loc_in.shape[0], freq="h"), dtype='datetime64[ns]')[:, None]
+    time_in = pd.array(pd.date_range(pd.Timestamp.now(), periods=loc_in.shape[0], freq="h"), dtype="datetime64[ns]")[
+        :, None
+    ]
     vals = np.random.random((loc_in.shape[0], 1))
     return_vals = sti.interpolate(
         location_in=loc_in, time_in=time_in, values_in=vals, location_out=loc_in, time_out=time_in
@@ -174,16 +180,18 @@ def test_temporal_resampling():
     time_in = [datetime(2000, 1, 1, 0, 0, 1) + timedelta(minutes=i) for i in range(n_values_in)]
     time_bin_edges = pd.array(
         pd.to_datetime([datetime(2000, 1, 1) + timedelta(minutes=i * 10) for i in range(n_time_out + 1)]),
-        dtype='datetime64[ns]')
+        dtype="datetime64[ns]",
+    )

     correct_values_out_mean = np.array([np.mean(i) for i in np.split(values_in, n_time_out)])
     correct_values_out_max = np.array([np.max(i) for i in np.split(values_in, n_time_out)])
     correct_values_out_min = np.array([np.min(i) for i in np.split(values_in, n_time_out)])

     time_bin_edges_non_monotonic = pd.array(
-        pd.Series(list(time_bin_edges)[:-1] + [datetime(1999, 1, 1)]), dtype='datetime64[ns]')
+        pd.Series(list(time_bin_edges)[:-1] + [datetime(1999, 1, 1)]), dtype="datetime64[ns]"
+    )

-    time_in = pd.array(pd.to_datetime(time_in + [datetime(2001, 1, 1)]), dtype='datetime64[ns]')
+    time_in = pd.array(pd.to_datetime(time_in + [datetime(2001, 1, 1)]), dtype="datetime64[ns]")
     values_in = np.append(values_in, 1000000)

     p = np.random.permutation(len(time_in))
diff --git a/tests/test_gaussian_plume.py b/tests/test_gaussian_plume.py
index 9889521..a919048 100644
--- a/tests/test_gaussian_plume.py
+++ b/tests/test_gaussian_plume.py
@@ -33,7 +33,8 @@ def fixture_met_object():
     location.from_array(loc_in)
     time = pd.array(
         pd.date_range(pd.Timestamp.fromisoformat("2022-01-01 00:00:00"), periods=loc_in.shape[0], freq="s"),
-        dtype='datetime64[ns]')[:, None]
+        dtype="datetime64[ns]",
+    )[:, None]
     met_object = Meteorology()
     met_object.location = location
     met_object.time = time
@@ -56,7 +57,8 @@ def fixture_met_object_single():
     location.from_array(loc_in)
     time = pd.array(
         pd.date_range(pd.Timestamp.fromisoformat("2022-01-01 00:00:00"), periods=loc_in.shape[0], freq="s"),
-        dtype='datetime64[ns]')[:, None]
+        dtype="datetime64[ns]",
+    )[:, None]
     met_object = Meteorology()
     met_object.location = location
     met_object.time = time
@@ -79,8 +81,8 @@ def fixture_sensor_object():
     location.from_array(np.array([[25, 0, 0]]))
     sensor_object.location = location
     time = pd.array(
-        pd.date_range(pd.Timestamp.fromisoformat("2022-01-01 00:00:00"), periods=5, freq="ns"),
-        dtype='datetime64[ns]')[:, None]
+        pd.date_range(pd.Timestamp.fromisoformat("2022-01-01 00:00:00"), periods=5, freq="ns"), dtype="datetime64[ns]"
+    )[:, None]
     sensor_object.time = time
     sensor_object.concentration = np.zeros(time.size)
     sensor_object.label = "Generic"
@@ -97,7 +99,8 @@ def fixture_drone_object():
     sensor_object.location = location
     time = pd.array(
         pd.date_range(pd.Timestamp.fromisoformat("2022-01-01 00:00:00"), periods=loc_in.shape[0], freq="s"),
-        dtype='datetime64[ns]')[:, None]
+        dtype="datetime64[ns]",
+    )[:, None]
     sensor_object.time = time
     sensor_object.concentration = np.zeros(time.size)
     sensor_object.label = "Generic"
@@ -112,8 +115,8 @@ def fixture_beam_object():
     beam_object = Beam()
     beam_object.location = beam_location
     time = pd.array(
-        pd.date_range(pd.Timestamp.fromisoformat("2022-01-01 00:00:00"), periods=4, freq="ns"),
-        dtype='datetime64[ns]')[:, None]
+        pd.date_range(pd.Timestamp.fromisoformat("2022-01-01 00:00:00"), periods=4, freq="ns"), dtype="datetime64[ns]"
+    )[:, None]
     beam_object.time = time
     beam_object.concentration = np.zeros(time.size)
     beam_object.label = "Beam"
diff --git a/tests/test_meteorology.py b/tests/test_meteorology.py
index be5e181..2c1ebae 100644
--- a/tests/test_meteorology.py
+++ b/tests/test_meteorology.py
@@ -209,13 +209,15 @@ def test_calculate_wind_turbulence_horizontal():
     met = Meteorology()
     met.time = pd.array(
         np.array([dt.datetime(2023, 1, 1), dt.datetime(2023, 1, 1), dt.datetime(2023, 1, 1)]).astype("datetime64[ns]"),
-        dtype='datetime64[ns]')
+        dtype="datetime64[ns]",
+    )
     met.wind_direction = np.linspace(0, 360, met.time.shape[0])

     sigma = 3
-    met.time = pd.array(pd.date_range(dt.datetime(2023, 1, 1), dt.datetime(2023, 1, 2), freq="5s"),
-                        dtype='datetime64[ns]')
+    met.time = pd.array(
+        pd.date_range(dt.datetime(2023, 1, 1), dt.datetime(2023, 1, 2), freq="5s"), dtype="datetime64[ns]"
+    )
     met.wind_direction = np.random.normal(180, sigma, met.time.shape[0])
     met.calculate_wind_turbulence_horizontal(window="300s")
diff --git a/tests/test_preprocessing.py b/tests/test_preprocessing.py
index a3daa4b..8d50bd3 100644
--- a/tests/test_preprocessing.py
+++ b/tests/test_preprocessing.py
@@ -29,7 +29,7 @@ def fix_time_bin_edges(sensor_group):
     """Fix the time bin edges to be used for aggregation."""
     min_time, max_time = get_time_lims(sensor_group=sensor_group)
     min_time, max_time = min_time - timedelta(seconds=60), max_time + timedelta(seconds=60)
-    time_bin_edges = pd.array(pd.date_range(min_time, max_time, freq="120s"), dtype='datetime64[ns]')
+    time_bin_edges = pd.array(pd.date_range(min_time, max_time, freq="120s"), dtype="datetime64[ns]")

     return time_bin_edges

@@ -38,7 +38,7 @@ def fix_block_times(sensor_group):
    """Fix the time bin edges for re-blocking the processed data."""
     min_time, max_time = get_time_lims(sensor_group=sensor_group)
     min_time, max_time = min_time - timedelta(hours=1), max_time + timedelta(hours=1)
-    block_times = pd.array(pd.date_range(min_time, max_time, freq="1200s"), dtype='datetime64[ns]')
+    block_times = pd.array(pd.date_range(min_time, max_time, freq="1200s"), dtype="datetime64[ns]")

     return block_times

@@ -77,7 +77,7 @@ def fix_meteorology(request, sensor_group):
     with_nans = request.param
     min_time, max_time = get_time_lims(sensor_group=sensor_group)
     meteorology = Meteorology()
-    meteorology.time = pd.array(pd.date_range(min_time, max_time, freq="1s"), dtype='datetime64[ns]')
+    meteorology.time = pd.array(pd.date_range(min_time, max_time, freq="1s"), dtype="datetime64[ns]")
     meteorology.wind_speed = 1.9 + 0.2 * np.random.random_sample(size=meteorology.time.shape)
     meteorology.wind_direction = np.mod(358.0 + 4.0 * np.random.random_sample(size=meteorology.time.shape), 360)
     meteorology.wind_turbulence_horizontal = 10.0 * np.ones(shape=meteorology.time.shape)
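The hunks above are purely mechanical Black formatting: single-quoted dtype strings such as dtype='datetime64[ns]' become double-quoted, and calls that run past the configured line length (roughly 120 characters, judging by where the wraps fall; the exact setting is not shown in this patch) are split onto multiple lines with trailing commas. The short sketch below, which is illustrative and not part of the patch, shows that the quote style has no runtime effect on the pandas calls touched here.

import numpy as np
import pandas as pd

# Illustrative only (not from the patch): single- vs double-quoted dtype strings
# are interchangeable; Black merely standardises on double quotes.
times = np.array(["2024-03-13T11:49:38"], dtype="datetime64[ns]")
single_quoted = pd.array(times, dtype='datetime64[ns]')
double_quoted = pd.array(times, dtype="datetime64[ns]")
assert single_quoted.dtype == double_quoted.dtype  # both are datetime64[ns]

Re-running black over src and tests with the project's configuration should reproduce changes of exactly this shape; no behaviour is altered.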