Commit

Black edits
Signed-off-by: bvandekerkhof <[email protected]>
bvandekerkhof committed Mar 13, 2024
1 parent 681fa5e commit 491261c
Showing 8 changed files with 45 additions and 29 deletions.
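The diffs below are mechanical Black reformatting of the repeated pd.array(..., dtype="datetime64[ns]") calls: single-quoted dtype strings become double-quoted, and calls that exceed the line length are wrapped and given a trailing comma. As a minimal standalone sketch (not part of the commit, with made-up example data), both quote styles construct the same pandas DatetimeArray:

import numpy as np
import pandas as pd

times = pd.date_range("2022-01-01", periods=3, freq="h")

# Before Black: single-quoted dtype string.
single_quoted = pd.array(times, dtype='datetime64[ns]')
# After Black: double-quoted dtype string; the constructed array is identical.
double_quoted = pd.array(times, dtype="datetime64[ns]")

assert isinstance(double_quoted, pd.arrays.DatetimeArray)
assert np.array_equal(np.asarray(single_quoted), np.asarray(double_quoted))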
7 changes: 4 additions & 3 deletions src/pyelq/component/background.py
@@ -176,7 +176,7 @@ def initialise(self, sensor_object: SensorGroup, meteorology: MeteorologyGroup,
"""
self.n_obs = sensor_object.nof_observations
self.time, unique_inverse = np.unique(sensor_object.time, return_inverse=True)
- self.time = pd.array(self.time, dtype='datetime64[ns]')
+ self.time = pd.array(self.time, dtype="datetime64[ns]")
self.n_parameter = len(self.time)
self.basis_matrix = sparse.csr_array((np.ones(self.n_obs), (np.array(range(self.n_obs)), unique_inverse)))
self.precision_matrix = gmrf.precision_temporal(time=self.time)
@@ -300,12 +300,13 @@ def make_temporal_knots(self, sensor_object: SensorGroup):
"""
if self.n_time is None:
- self.time = pd.array(np.unique(sensor_object.time), dtype='datetime64[ns]')
+ self.time = pd.array(np.unique(sensor_object.time), dtype="datetime64[ns]")
self.n_time = len(self.time)
else:
self.time = pd.array(
pd.date_range(start=np.min(sensor_object.time), end=np.max(sensor_object.time), periods=self.n_time),
- dtype='datetime64[ns]')
+ dtype="datetime64[ns]",
+ )

def make_spatial_knots(self, sensor_object: SensorGroup):
"""Create the spatial grid for the model.
2 changes: 1 addition & 1 deletion src/pyelq/sensor/sensor.py
@@ -137,7 +137,7 @@ def concentration(self) -> np.ndarray:
@property
def time(self) -> pd.arrays.DatetimeArray:
"""DatetimeArray: Column vector of time values across all sensors."""
- return pd.array(np.concatenate([sensor.time for sensor in self.values()]), dtype='datetime64[ns]')
+ return pd.array(np.concatenate([sensor.time for sensor in self.values()]), dtype="datetime64[ns]")

@property
def location(self) -> Coordinate:
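For context, the SensorGroup.time property shown above concatenates the per-sensor time vectors into one DatetimeArray. A standalone sketch of that concatenation pattern, using hypothetical sensor names and timestamps rather than the pyelq classes, could look like:

import numpy as np
import pandas as pd

# Hypothetical per-sensor timestamp arrays standing in for sensor.time values.
sensor_times = {
    "sensor_a": pd.date_range("2022-01-01 00:00", periods=3, freq="h"),
    "sensor_b": pd.date_range("2022-01-01 12:00", periods=2, freq="h"),
}

# Same pattern as the property: concatenate, then coerce to a DatetimeArray.
combined = pd.array(
    np.concatenate([np.asarray(times) for times in sensor_times.values()]),
    dtype="datetime64[ns]",
)
assert len(combined) == 5  # one entry per observation across all sensors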
6 changes: 4 additions & 2 deletions tests/conftest.py
@@ -60,7 +60,8 @@ def fix_sensor_group(request, ref_longitude, ref_latitude):
sensor[device_name] = Sensor()
sensor[device_name].time = pd.array(
pd.date_range(start=datetime.now(), end=datetime.now() + timedelta(hours=1.0), periods=n_time),
- dtype='datetime64[ns]')
+ dtype="datetime64[ns]",
+ )
sensor[device_name].concentration = np.random.random_sample(size=(n_time,))
sensor[device_name].location = ENU(
east=locations[k, 0],
@@ -77,7 +78,8 @@ def fix_sensor_group(request, ref_longitude, ref_latitude):
sensor[device_name] = Beam()
sensor[device_name].time = pd.array(
pd.date_range(start=datetime.now(), end=datetime.now() + timedelta(hours=1.0), periods=n_time),
- dtype='datetime64[ns]')
+ dtype="datetime64[ns]",
+ )
sensor[device_name].concentration = np.random.random_sample(size=(n_time,))
sensor[device_name].location = ENU(
east=np.array([0, locations[k, 0]]),
4 changes: 2 additions & 2 deletions tests/sensor/test_sensorgroup.py
@@ -28,7 +28,7 @@ def test_sensorgroup():
nof_observations = np.random.randint(1, 10)
total_observations += nof_observations
sensor.concentration = np.random.rand(nof_observations, 1)
- sensor.time = pd.array(pd.date_range(start="1/1/2022", periods=nof_observations), dtype='datetime64[ns]')
+ sensor.time = pd.array(pd.date_range(start="1/1/2022", periods=nof_observations), dtype="datetime64[ns]")
sensor.location = LLA(
latitude=0.01 * np.random.rand(), longitude=0.01 * np.random.rand(), altitude=0.01 * np.random.rand()
)
@@ -57,7 +57,7 @@ def test_plotting():
nof_observations = np.random.randint(5, 10)
total_observations += nof_observations
sensor.concentration = np.random.rand(nof_observations, 1)
- sensor.time = pd.array(pd.date_range(start="1/1/2022", periods=nof_observations), dtype='datetime64[ns]')
+ sensor.time = pd.array(pd.date_range(start="1/1/2022", periods=nof_observations), dtype="datetime64[ns]")
location = LLA()
location.latitude = np.array(idx)
location.longitude = np.array(idx)
24 changes: 16 additions & 8 deletions tests/support_functions/test_spatio_temporal_interpolation.py
@@ -30,7 +30,9 @@ def test_default_returns():
same."""

loc_in = np.array([[0, 0, 0], [1, 1, 1]])
- time_in = pd.array(pd.date_range(pd.Timestamp.now(), periods=loc_in.shape[0], freq="h"), dtype='datetime64[ns]')[:, None]
+ time_in = pd.array(pd.date_range(pd.Timestamp.now(), periods=loc_in.shape[0], freq="h"), dtype="datetime64[ns]")[
+ :, None
+ ]
vals = np.random.random((loc_in.shape[0], 1))
# check if same input/output locations and time give the same answer
return_vals = sti.interpolate(
@@ -47,7 +49,7 @@ def test_single_value():
"""Tests if all interpolated values are set to the same value when 1 input value is provided."""
loc_in = np.array([[0, 0, 0], [1, 1, 1]])
n_obs = loc_in.shape[0]
- time_in = pd.array(pd.date_range(pd.Timestamp.now(), periods=n_obs, freq="h"), dtype='datetime64[ns]')[:, None]
+ time_in = pd.array(pd.date_range(pd.Timestamp.now(), periods=n_obs, freq="h"), dtype="datetime64[ns]")[:, None]
vals = np.random.random((loc_in.shape[0], 1))

# Check if we get the same output for all values when 1 value is provided
@@ -73,7 +75,7 @@ def test_temporal_interpolation():
interpolation in 1d) Also checks if we get the same values when an array of integers (representing seconds) is
supplied instead of an array of datetimes."""
periods = 10
- time_in = pd.array(pd.date_range(pd.Timestamp.now(), periods=periods, freq="s"), dtype='datetime64[ns]')[:, None]
+ time_in = pd.array(pd.date_range(pd.Timestamp.now(), periods=periods, freq="s"), dtype="datetime64[ns]")[:, None]
time_in_array = np.array(range(periods))[:, None]
vals = np.random.random(time_in.size)
random_index = np.random.randint(0, periods - 1)
@@ -133,13 +135,17 @@ def test_fill_value():
def test_consistent_shapes():
"""Test if output shapes are consistent with provided input."""
loc_in = np.array([[0, 0, 0], [1, 1, 1]])
- time_in = pd.array(pd.date_range(pd.Timestamp.now(), periods=loc_in.shape[0] - 1, freq="h"), dtype='datetime64[ns]')[:, None]
+ time_in = pd.array(
+ pd.date_range(pd.Timestamp.now(), periods=loc_in.shape[0] - 1, freq="h"), dtype="datetime64[ns]"
+ )[:, None]
vals = np.random.random((loc_in.shape[0], 1))
with pytest.raises(ValueError):
sti.interpolate(location_in=loc_in, time_in=time_in, values_in=vals, location_out=loc_in, time_out=time_in)

loc_in = np.array([[0, 0, 0], [0, 1, 0], [1, 0.5, 0], [0.5, 0.5, 1]])
- time_in = pd.array(pd.date_range(pd.Timestamp.now(), periods=loc_in.shape[0], freq="h"), dtype='datetime64[ns]')[:, None]
+ time_in = pd.array(pd.date_range(pd.Timestamp.now(), periods=loc_in.shape[0], freq="h"), dtype="datetime64[ns]")[
+ :, None
+ ]
vals = np.random.random((loc_in.shape[0], 1))
return_vals = sti.interpolate(
location_in=loc_in, time_in=time_in, values_in=vals, location_out=loc_in, time_out=time_in
@@ -174,16 +180,18 @@ def test_temporal_resampling():
time_in = [datetime(2000, 1, 1, 0, 0, 1) + timedelta(minutes=i) for i in range(n_values_in)]
time_bin_edges = pd.array(
pd.to_datetime([datetime(2000, 1, 1) + timedelta(minutes=i * 10) for i in range(n_time_out + 1)]),
- dtype='datetime64[ns]')
+ dtype="datetime64[ns]",
+ )

correct_values_out_mean = np.array([np.mean(i) for i in np.split(values_in, n_time_out)])
correct_values_out_max = np.array([np.max(i) for i in np.split(values_in, n_time_out)])
correct_values_out_min = np.array([np.min(i) for i in np.split(values_in, n_time_out)])

time_bin_edges_non_monotonic = pd.array(
- pd.Series(list(time_bin_edges)[:-1] + [datetime(1999, 1, 1)]), dtype='datetime64[ns]')
+ pd.Series(list(time_bin_edges)[:-1] + [datetime(1999, 1, 1)]), dtype="datetime64[ns]"
+ )

- time_in = pd.array(pd.to_datetime(time_in + [datetime(2001, 1, 1)]), dtype='datetime64[ns]')
+ time_in = pd.array(pd.to_datetime(time_in + [datetime(2001, 1, 1)]), dtype="datetime64[ns]")
values_in = np.append(values_in, 1000000)

p = np.random.permutation(len(time_in))
17 changes: 10 additions & 7 deletions tests/test_gaussian_plume.py
@@ -33,7 +33,8 @@ def fixture_met_object():
location.from_array(loc_in)
time = pd.array(
pd.date_range(pd.Timestamp.fromisoformat("2022-01-01 00:00:00"), periods=loc_in.shape[0], freq="s"),
- dtype='datetime64[ns]')[:, None]
+ dtype="datetime64[ns]",
+ )[:, None]
met_object = Meteorology()
met_object.location = location
met_object.time = time
@@ -56,7 +57,8 @@ def fixture_met_object_single():
location.from_array(loc_in)
time = pd.array(
pd.date_range(pd.Timestamp.fromisoformat("2022-01-01 00:00:00"), periods=loc_in.shape[0], freq="s"),
- dtype='datetime64[ns]')[:, None]
+ dtype="datetime64[ns]",
+ )[:, None]
met_object = Meteorology()
met_object.location = location
met_object.time = time
@@ -79,8 +81,8 @@ def fixture_sensor_object():
location.from_array(np.array([[25, 0, 0]]))
sensor_object.location = location
time = pd.array(
- pd.date_range(pd.Timestamp.fromisoformat("2022-01-01 00:00:00"), periods=5, freq="ns"),
- dtype='datetime64[ns]')[:, None]
+ pd.date_range(pd.Timestamp.fromisoformat("2022-01-01 00:00:00"), periods=5, freq="ns"), dtype="datetime64[ns]"
+ )[:, None]
sensor_object.time = time
sensor_object.concentration = np.zeros(time.size)
sensor_object.label = "Generic"
@@ -97,7 +99,8 @@ def fixture_drone_object():
sensor_object.location = location
time = pd.array(
pd.date_range(pd.Timestamp.fromisoformat("2022-01-01 00:00:00"), periods=loc_in.shape[0], freq="s"),
- dtype='datetime64[ns]')[:, None]
+ dtype="datetime64[ns]",
+ )[:, None]
sensor_object.time = time
sensor_object.concentration = np.zeros(time.size)
sensor_object.label = "Generic"
@@ -112,8 +115,8 @@ def fixture_beam_object():
beam_object = Beam()
beam_object.location = beam_location
time = pd.array(
- pd.date_range(pd.Timestamp.fromisoformat("2022-01-01 00:00:00"), periods=4, freq="ns"),
- dtype='datetime64[ns]')[:, None]
+ pd.date_range(pd.Timestamp.fromisoformat("2022-01-01 00:00:00"), periods=4, freq="ns"), dtype="datetime64[ns]"
+ )[:, None]
beam_object.time = time
beam_object.concentration = np.zeros(time.size)
beam_object.label = "Beam"
8 changes: 5 additions & 3 deletions tests/test_meteorology.py
@@ -209,13 +209,15 @@ def test_calculate_wind_turbulence_horizontal():
met = Meteorology()
met.time = pd.array(
np.array([dt.datetime(2023, 1, 1), dt.datetime(2023, 1, 1), dt.datetime(2023, 1, 1)]).astype("datetime64[ns]"),
- dtype='datetime64[ns]')
+ dtype="datetime64[ns]",
+ )
met.wind_direction = np.linspace(0, 360, met.time.shape[0])

sigma = 3

- met.time = pd.array(pd.date_range(dt.datetime(2023, 1, 1), dt.datetime(2023, 1, 2), freq="5s"),
- dtype='datetime64[ns]')
+ met.time = pd.array(
+ pd.date_range(dt.datetime(2023, 1, 1), dt.datetime(2023, 1, 2), freq="5s"), dtype="datetime64[ns]"
+ )
met.wind_direction = np.random.normal(180, sigma, met.time.shape[0])

met.calculate_wind_turbulence_horizontal(window="300s")
6 changes: 3 additions & 3 deletions tests/test_preprocessing.py
@@ -29,7 +29,7 @@ def fix_time_bin_edges(sensor_group):
"""Fix the time bin edges to be used for aggregation."""
min_time, max_time = get_time_lims(sensor_group=sensor_group)
min_time, max_time = min_time - timedelta(seconds=60), max_time + timedelta(seconds=60)
- time_bin_edges = pd.array(pd.date_range(min_time, max_time, freq="120s"), dtype='datetime64[ns]')
+ time_bin_edges = pd.array(pd.date_range(min_time, max_time, freq="120s"), dtype="datetime64[ns]")
return time_bin_edges


@@ -38,7 +38,7 @@ def fix_block_times(sensor_group):
"""Fix the time bin edges for re-blocking the processed data."""
min_time, max_time = get_time_lims(sensor_group=sensor_group)
min_time, max_time = min_time - timedelta(hours=1), max_time + timedelta(hours=1)
- block_times = pd.array(pd.date_range(min_time, max_time, freq="1200s"), dtype='datetime64[ns]')
+ block_times = pd.array(pd.date_range(min_time, max_time, freq="1200s"), dtype="datetime64[ns]")
return block_times


@@ -77,7 +77,7 @@ def fix_meteorology(request, sensor_group):
with_nans = request.param
min_time, max_time = get_time_lims(sensor_group=sensor_group)
meteorology = Meteorology()
- meteorology.time = pd.array(pd.date_range(min_time, max_time, freq="1s"), dtype='datetime64[ns]')
+ meteorology.time = pd.array(pd.date_range(min_time, max_time, freq="1s"), dtype="datetime64[ns]")
meteorology.wind_speed = 1.9 + 0.2 * np.random.random_sample(size=meteorology.time.shape)
meteorology.wind_direction = np.mod(358.0 + 4.0 * np.random.random_sample(size=meteorology.time.shape), 360)
meteorology.wind_turbulence_horizontal = 10.0 * np.ones(shape=meteorology.time.shape)
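The preprocessing fixtures above build regularly spaced time bin edges for aggregation. A short sketch of that construction, with hypothetical limits standing in for get_time_lims, is:

from datetime import datetime, timedelta

import pandas as pd

# Hypothetical time limits standing in for get_time_lims(sensor_group=...).
min_time = datetime(2024, 1, 1, 0, 0, 0)
max_time = datetime(2024, 1, 1, 1, 0, 0)

# Pad the limits so the first and last observations fall inside the outer bins.
min_time, max_time = min_time - timedelta(seconds=60), max_time + timedelta(seconds=60)

# 120 s bin edges, matching the fix_time_bin_edges fixture above.
time_bin_edges = pd.array(pd.date_range(min_time, max_time, freq="120s"), dtype="datetime64[ns]")
assert len(time_bin_edges) == 32  # 62-minute padded span / 2-minute steps + 1 edge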
