Merge pull request #96 from ludwigVonKoopa/validation_particle
update coherence forward/backward
ludwigVonKoopa authored Jun 22, 2021
2 parents bfd6e6a + 2f20ca6 commit 6921457
Showing 8 changed files with 199 additions and 155 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.rst
@@ -12,9 +12,12 @@ Changed
^^^^^^^
Fixed
^^^^^
- GridCollection get_next_time_step & get_previous_time_step needed extra files in the dataset list to work:
  the loop explicitly accessed self.dataset[i+-1] even when i==0, so the index went out of range
Added
^^^^^


[3.4.0] - 2021-03-29
--------------------
Changed
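The Fixed entry above boils down to an off-by-one access on a plain Python list: the generators also yielded self.dataset[i - 1] / self.dataset[i + 1], which misbehaves at either end of the list. A toy illustration of that boundary behaviour (standalone list, not the library's GridCollection):

# Toy stand-in for GridCollection.datasets, not library code.
datasets = ["t0.nc", "t1.nc", "t2.nc"]

i = 0
print(datasets[i - 1])   # silently wraps around to 't2.nc' instead of a previous file

i = len(datasets) - 1
print(datasets[i + 1])   # IndexError: list index out of range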
11 changes: 2 additions & 9 deletions examples/16_network/pet_follow_particle.py
@@ -16,7 +16,6 @@
from py_eddy_tracker.dataset.grid import GridCollection
from py_eddy_tracker.observations.groups import particle_candidate
from py_eddy_tracker.observations.network import NetworkObservations
from py_eddy_tracker.poly import group_obs

start_logger().setLevel("ERROR")

@@ -128,25 +127,19 @@ def update(frame):
# ^^^^^^^^^^^^^^^^^^
step = 1 / 60.0

x, y = meshgrid(arange(24, 36, step), arange(31, 36, step))
x0, y0 = x.reshape(-1), y.reshape(-1)
# Pre-order to speed up
_, i = group_obs(x0, y0, 1, 360)
x0, y0 = x0[i], y0[i]

t_start, t_end = n.period
dt = 14

shape = (n.obs.size, 2)
# Forward run
i_target_f, pct_target_f = -ones(shape, dtype="i4"), zeros(shape, dtype="i1")
for t in range(t_start, t_end - dt):
particle_candidate(x0, y0, c, n, t, i_target_f, pct_target_f, n_days=dt)
particle_candidate(c, n, step, t, i_target_f, pct_target_f, n_days=dt)

# Backward run
i_target_b, pct_target_b = -ones(shape, dtype="i4"), zeros(shape, dtype="i1")
for t in range(t_start + dt, t_end):
particle_candidate(x0, y0, c, n, t, i_target_b, pct_target_b, n_days=-dt)
particle_candidate(c, n, step, t, i_target_b, pct_target_b, n_days=-dt)

# %%
fig = plt.figure(figsize=(10, 10))
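After the forward and backward runs above, i_target_* and pct_target_* hold, for every eddy observation, candidate target indices and the share of particles that reached them. A hedged sketch of how the filled arrays can be inspected, assuming the arrays built in the example; only column 0 is treated as the best match, and the role of the second column is an assumption, not stated in the diff:

from numpy import where

# i_target_f[i, 0]: index of the observation reached by particles seeded in
# observation i during the forward run (-1 when nothing was reached).
# pct_target_f[i, 0]: percentage of those particles that ended up there.
matched = where(i_target_f[:, 0] != -1)[0]
print(f"{matched.size}/{i_target_f.shape[0]} observations have a forward target")
for i in matched[:5]:
    print(i, "->", i_target_f[i, 0], f"({pct_target_f[i, 0]} %)")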
2 changes: 1 addition & 1 deletion requirements.txt
@@ -1,6 +1,6 @@
matplotlib
netCDF4
numba
numba>=0.53
numpy
opencv-python
pint
4 changes: 1 addition & 3 deletions src/py_eddy_tracker/__init__.py
@@ -32,9 +32,7 @@


def start_logger():
FORMAT_LOG = (
"%(levelname)-8s %(asctime)s %(module)s.%(funcName)s :\n\t\t\t\t\t%(message)s"
)
FORMAT_LOG = "%(levelname)-8s %(asctime)s %(module)s.%(funcName)s :\n\t%(message)s"
logger = logging.getLogger("pet")
if len(logger.handlers) == 0:
# set up logging to CONSOLE
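The shortened FORMAT_LOG only changes how the message part of a log record is indented; the way the logger is obtained stays the same. For reference, the call pattern used by the example scripts (start_logger is assumed to return the configured "pet" logger, as its chained use above suggests):

from py_eddy_tracker import start_logger

# start_logger() sets up console logging with FORMAT_LOG; the example scripts
# then just pick a verbosity level.
start_logger().setLevel("DEBUG")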
22 changes: 11 additions & 11 deletions src/py_eddy_tracker/dataset/grid.py
@@ -2260,11 +2260,13 @@ def from_netcdf_cube(cls, filename, x_name, y_name, t_name, heigth=None):
@classmethod
def from_netcdf_list(cls, filenames, t, x_name, y_name, indexs=None, heigth=None):
new = cls()
for i, t in enumerate(t):
d = RegularGridDataset(filenames[i], x_name, y_name, indexs=indexs)
for i, _t in enumerate(t):
filename = filenames[i]
logger.debug(f"load file {i:02d}/{len(t)} t={_t} : {filename}")
d = RegularGridDataset(filename, x_name, y_name, indexs=indexs)
if heigth is not None:
d.add_uv(heigth)
new.datasets.append((t, d))
new.datasets.append((_t, d))
return new

def shift_files(self, t, filename, heigth=None, **rgd_kwargs):
@@ -2276,6 +2278,7 @@ def shift_files(self, t, filename, heigth=None, **rgd_kwargs):
if heigth is not None:
d.add_uv(heigth)
self.datasets.append((t, d))
logger.debug(f"shift and adding i={len(self.datasets)} t={t} : {filename}")

def interp(self, grid_name, t, lons, lats, method="bilinear"):
"""
@@ -2436,6 +2439,7 @@ def advect(
else:
mask_particule += isnan(x) + isnan(y)
while True:
logger.debug(f"advect : t={t}")
if (backward and t <= t1) or (not backward and t >= t1):
t0, u0, v0, m0 = t1, u1, v1, m1
t1, d1 = generator.__next__()
@@ -2462,25 +2466,21 @@ def advect(
yield t, x, y

def get_next_time_step(self, t_init):
first = True
for i, (t, dataset) in enumerate(self.datasets):
if t < t_init:
continue
if first:
first = False
yield self.datasets[i - 1]

logger.debug(f"i={i}, t={t}, dataset={dataset}")
yield t, dataset

def get_previous_time_step(self, t_init):
first = True
i = len(self.datasets)
for t, dataset in reversed(self.datasets):
i -= 1
if t > t_init:
continue
if first:
first = False
yield self.datasets[i + 1]

logger.debug(f"i={i}, t={t}, dataset={dataset}")
yield t, dataset


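With the first / yield self.datasets[i - 1] (or [i + 1]) branches removed, both methods are now plain generators over the stored (time, dataset) pairs: get_next_time_step starts at the first grid whose time is >= t_init, get_previous_time_step walks backwards from the last grid whose time is <= t_init. A minimal usage sketch, assuming a GridCollection c built with from_netcdf_list and a hypothetical start day t0 present in the collection:

t0 = 20000  # hypothetical julian day

# Forward in time from t0 (first yielded grid has t >= t0).
for k, (t, dataset) in enumerate(c.get_next_time_step(t0)):
    print("forward ", k, t, dataset)
    if k == 3:
        break

# Backward in time from t0 (first yielded grid has t <= t0).
for k, (t, dataset) in enumerate(c.get_previous_time_step(t0)):
    print("backward", k, t, dataset)
    if k == 3:
        break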
18 changes: 9 additions & 9 deletions src/py_eddy_tracker/observations/groups.py
@@ -87,11 +87,9 @@ def advect(x, y, c, t0, n_days):
return t, x, y


def particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs):
def particle_candidate(c, eddies, step_mesh, t_start, i_target, pct, **kwargs):
"""Select particles within eddies, advect them, return target observation and associated percentages
:param np.array(float) x: longitude of particles
:param np.array(float) y: latitude of particles
:param `~py_eddy_tracker.dataset.grid.GridCollection` c: GridCollection with speed for particles
:param GroupEddiesObservations eddies: GroupEddiesObservations considered
:param int t_start: julian day of the advection
@@ -102,24 +100,26 @@ def particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs):
"""
# Obs from initial time
m_start = eddies.time == t_start

e = eddies.extract_with_mask(m_start)

# to be able to get global index
translate_start = where(m_start)[0]
# Identify particle in eddies (only in core)
i_start = e.contains(x, y, intern=True)
m = i_start != -1

x, y, i_start = x[m], y[m], i_start[m]
# Advect
x, y, i_start = e.create_particles(step_mesh)

# Advection
t_end, x, y = advect(x, y, c, t_start, **kwargs)

# eddies at last date
m_end = eddies.time == t_end / 86400
e_end = eddies.extract_with_mask(m_end)

# to be able to get global index
translate_end = where(m_end)[0]

# Id eddies for each alive particle (in core and extern)
i_end = e_end.contains(x, y)

# compute matrix and fill target array
get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct)

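The main change in particle_candidate is that particle seeding moved inside the function: rather than receiving a pre-built x, y mesh and filtering it with contains, it now asks the extracted eddies for their own particles via e.create_particles(step_mesh). Roughly what that call replaces, reconstructed from the removed lines of the example and of this function (a sketch of the old pattern, not the actual create_particles implementation; the mesh bounds are the ones from the example above):

from numpy import arange, meshgrid

def seed_particles_old_way(e, step):
    """Old pattern: build a regular mesh, keep only points inside eddy cores."""
    x, y = meshgrid(arange(24, 36, step), arange(31, 36, step))
    x, y = x.reshape(-1), y.reshape(-1)
    i_start = e.contains(x, y, intern=True)  # -1 for points outside every core
    m = i_start != -1
    return x[m], y[m], i_start[m]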