dev2main merge #45

Merged
merged 30 commits from dev into main on Jun 12, 2024
Commits (30)
2da42ff  Update LiCSBAS15_mask_ts.py  (espiritocz, Mar 4, 2024)
8ec1b93  Update LiCSBAS15_mask_ts.py  (espiritocz, Mar 13, 2024)
cdc100b  add comment  (espiritocz, Mar 26, 2024)
1228179  Update LiCSBAS12_loop_closure.py  (espiritocz, Apr 4, 2024)
ad6bf4f  regular update  (Apr 16, 2024)
1026f7e  regular update  (Apr 17, 2024)
6a93635  regular update  (espiritocz, Apr 22, 2024)
a6874e3  regular update  (espiritocz, Apr 22, 2024)
1be1f3c  regular update  (espiritocz, Apr 22, 2024)
c7ad280  regular update  (Apr 23, 2024)
0e7fc8a  bugfix parallel singular  (espiritocz, Apr 24, 2024)
81657a3  Update LiCSBAS_inv_lib.py  (espiritocz, Apr 25, 2024)
375dee1  regular update  (Apr 26, 2024)
2f000bd  Update LiCSBAS_inv_lib.py  (espiritocz, Apr 29, 2024)
5f647e0  Merge branch 'dev' of https://github.com/comet-licsar/LiCSBAS into dev  (espiritocz, Apr 29, 2024)
e17fb45  Update LiCSBAS_inv_lib.py  (espiritocz, Apr 29, 2024)
380cff6  Update LiCSBAS_inv_lib.py  (espiritocz, Apr 29, 2024)
2f03899  Create LiCSBAS01_get_geotiff_ALOS.py  (espiritocz, May 2, 2024)
d2389d8  bugfix: --thres was inactive in L02to05  (espiritocz, May 10, 2024)
6bdc1f7  Update LiCSBAS02to05_unwrap.py  (espiritocz, May 10, 2024)
9ced5c2  Update LiCSBAS12_loop_closure.py  (espiritocz, May 30, 2024)
94790fc  Update LiCSBAS12_loop_closure.py  (espiritocz, May 30, 2024)
81518cc  Update LiCSBAS12_loop_closure.py  (espiritocz, May 30, 2024)
99e18db  regular update  (May 31, 2024)
bcbb719  regular update  (May 31, 2024)
cbb9ef7  bugfix (missing global variable)  (espiritocz, May 31, 2024)
7d06f01  Update LiCSBAS12_loop_closure.py  (espiritocz, Jun 1, 2024)
81ff4de  use of step 120  (espiritocz, Jun 3, 2024)
d929dd1  Update LiCSBAS01_get_geotiff.py  (espiritocz, Jun 6, 2024)
b7d319c  Update LiCSBAS12_loop_closure.py  (espiritocz, Jun 12, 2024)
146 changes: 85 additions & 61 deletions LiCSBAS_lib/LiCSBAS_inv_lib.py
@@ -8,6 +8,8 @@
=========
Changelog
=========
20240423 ML
- parallelised singular (with correct vel/vconst estimates)
20231101 Yasser Maghsoudi (and ML), Uni Leeds
- changed least squares function from np to scipy.sparse for faster NSBAS inversion
v1.5.2 20211122 Milan Lazecky, Uni Leeds
@@ -116,8 +118,8 @@ def invert_nsbas(unw, G, dt_cum, gamma, n_core, gpu, singular=False, only_sb=Fal
if n_core != 1:
global Gall, unw_tmp, mask ## for para_wrapper
# is multicore, let's not use any simplifications
only_sb = False
singular = False
#only_sb = False
#singular = False

if gpu:
only_sb = False
@@ -132,11 +134,11 @@ def invert_nsbas(unw, G, dt_cum, gamma, n_core, gpu, singular=False, only_sb=Fal
# (note 2: using G or Gall for full unw data leads to EXACTLY SAME result. but perhaps G is a tiny bit faster..)
if only_sb or singular:
result = np.zeros((G.shape[1], n_pt), dtype=np.float32)*np.nan

else:
# do the original NSBAS inversion
result = np.zeros((n_im+1, n_pt), dtype=np.float32)*np.nan #[inc, vel, const]

### Set matrix of NSBAS part (bottom)
Gbl = np.tril(np.ones((n_im, n_im-1), dtype=np.float32), k=-1) #lower tri matrix without diag
Gbr = -np.ones((n_im, 2), dtype=np.float32)
@@ -179,37 +181,51 @@ def invert_nsbas(unw, G, dt_cum, gamma, n_core, gpu, singular=False, only_sb=Fal
unw_tmp = np.concatenate((unw[~bool_pt_full, :], np.zeros((n_pt-n_pt_full, n_im), dtype=np.float32)), axis=1).transpose()
mask = (~np.isnan(unw_tmp))
unw_tmp[np.isnan(unw_tmp)] = 0

else:
#print('using the singular approach (faster and more suitable for non-linear gap filling)')
d = unw[~bool_pt_full, :].transpose()
m = result[:, ~bool_pt_full]
if n_core == 1:
if not singular:
result[:, ~bool_pt_full] = censored_lstsq_slow(Gall, unw_tmp, mask) #(n_im+1, n_pt)
else:
print('using low precision approach (but much faster)')
d = unw[~bool_pt_full, :].transpose()
m = result[:, ~bool_pt_full]
result[:, ~bool_pt_full] = singular_nsbas(d,G,m,dt_cum)

else:
print(' {} parallel processing'.format(n_core), flush=True)

#
args = [i for i in range(n_pt-n_pt_full)]
q = multi.get_context('fork')
p = q.Pool(n_core)
A = csc_array(Gall) # or csr?
_result = p.map(censored_lstsq_slow_para_wrapper, args) #list[n_pt][length]
if not singular:
A = csc_array(Gall) # or csr?
_result = p.map(censored_lstsq_slow_para_wrapper, args) #list[n_pt][length]
else:
from functools import partial
func = partial(singular_nsbas_onepoint, d, G, m, dt_cum)
_result = p.map(func, args)
result[:, ~bool_pt_full] = np.array(_result).T
#
if only_sb or singular:
# SB/singular-NSBAS result matrix: based on G only, need to calculate vel, setting vconst=0
inc = result
vel = result.sum(axis=0)/dt_cum[-1]
vconst = np.zeros_like(vel)
try:
### Cumulative displacement
cum = np.zeros((n_im, n_pt), dtype=np.float32)*np.nan
cum[1:, :] = np.cumsum(inc, axis=0)
#
## Fill 1st image with 0 at unnan points from 2nd images
bool_unnan_pt = ~np.isnan(cum[1, :])
cum[0, bool_unnan_pt] = 0
vel, vconst = calc_vel(cum.T, dt_cum)
except:
print('WARNING, some error getting cum/vel/vconst after non-NSBAS inversion')
print('rolling back to simplified vel estimate (note vconst=0)')
vel = result.sum(axis=0)/dt_cum[-1]
vconst = np.zeros_like(vel)
else:
# NSBAS result matrix: last 2 rows are vel and vconst
inc = result[:n_im-1, :]
vel = result[n_im-1, :]
vconst = result[n_im, :]

return inc, vel, vconst


@@ -221,56 +237,64 @@ def singular_nsbas(d,G,m,dt_cum):
#from scipy.optimize import curve_fit
#def func_vel(x, a):
# return a * x

#
for px in range(m.shape[1]):
if np.mod(px, 100) == 0:
print('\r Running {0:6}/{1:6}th point...'.format(px, m.shape[1]), end='', flush=True)
dpx = d[:,px]
mpx = m[:,px]
# first, work only with values without nans. check if it doesn't remove increments, if so, estimate the inc
okpx = ~np.isnan(dpx)
Gpx_ok = G[okpx,:]
dpx_ok = dpx[okpx]
badincs = np.sum(Gpx_ok,axis=0)==0

if not max(badincs):
# if actually all are fine, just run LS:
mpx = np.linalg.lstsq(Gpx_ok, dpx_ok, rcond=None)[0]
else:
# if there is at least one im with no related ifg:
mpx[~badincs] = np.linalg.lstsq(Gpx_ok[:,~badincs], dpx_ok, rcond=None)[0]
badinc_index = np.where(badincs)[0]
bi_prev = 0
s = []
t = []

# ensure the algorithm goes towards the end of the mpx line
for bi in np.append(badinc_index,len(mpx)):
group_mpx = mpx[bi_prev:bi]
#use at least 2 ifgs for the vel estimate
if group_mpx.size > 0:
group_time = dt_cum[bi_prev:bi+1]
s.append(group_mpx.sum())
t.append(group_time[-1] - group_time[0])
bi_prev = bi+1
s = np.array(s)
t = np.array(t)
# is only one value ok? maybe increase the threshold here:
if len(s)>0:
velpx = s.sum()/t.sum()
else:
velpx = np.nan # not sure what will happen. putting 0 may be safer
#if len(s) == 1:
# velpx = s[0]/t[0]
#else:
# velpx = curve_fit(func_vel, t, s)[0][0]
mpx[badincs] = (dt_cum[badinc_index+1]-dt_cum[badinc_index]) * velpx

m[:,px] = mpx
#if np.mod(px, 100) == 0:
# print('\r Running {0:6}/{1:6}th point...'.format(px, m.shape[1]), end='', flush=True)
#
m[:,px] = singular_nsbas_onepoint(d,G,m,dt_cum, px)

return m


def singular_nsbas_onepoint(d,G,m,dt_cum, i):
''' same as singular nsbas, should be used primarily'''
px = i
if np.mod(px, 100) == 0:
print('\r Running {0:6}/{1:6}th point...'.format(px, m.shape[1]), end='', flush=True)
dpx = d[:,px]
mpx = m[:,px]
# first, work only with values without nans. check if it doesn't remove increments, if so, estimate the inc
okpx = ~np.isnan(dpx)
Gpx_ok = G[okpx,:]
dpx_ok = dpx[okpx]
badincs = np.sum(Gpx_ok,axis=0)==0

if not max(badincs):
# if actually all are fine, just run LS:
mpx = np.linalg.lstsq(Gpx_ok, dpx_ok, rcond=None)[0]
else:
# if there is at least one im with no related ifg:
mpx[~badincs] = np.linalg.lstsq(Gpx_ok[:,~badincs], dpx_ok, rcond=None)[0]
badinc_index = np.where(badincs)[0]
bi_prev = 0
s = []
t = []

# ensure the algorithm goes towards the end of the mpx line
for bi in np.append(badinc_index,len(mpx)):
group_mpx = mpx[bi_prev:bi]
#use at least 2 ifgs for the vel estimate
if group_mpx.size > 0:
group_time = dt_cum[bi_prev:bi+1]
s.append(group_mpx.sum())
t.append(group_time[-1] - group_time[0])
bi_prev = bi+1
s = np.array(s)
t = np.array(t)
# is only one value ok? maybe increase the threshold here:
if len(s)>0:
velpx = s.sum()/t.sum() # mm/day
else:
velpx = np.nan # not sure what will happen. putting 0 may be safer
#if len(s) == 1:
# velpx = s[0]/t[0]
#else:
# velpx = curve_fit(func_vel, t, s)[0][0]
mpx[badincs] = (dt_cum[badinc_index+1]-dt_cum[badinc_index]) * velpx

return mpx


def censored_lstsq_slow_para_wrapper(i):
### Use global value
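Reviewer note on the LiCSBAS_inv_lib.py changes above: the singular NSBAS branch is no longer forced off when n_core != 1; instead the per-point solver was factored out into singular_nsbas_onepoint, and the pool maps a functools.partial that freezes the shared arrays (d, G, m, dt_cum) so each worker only receives a point index. The snippet below is a minimal sketch of that pattern, assuming a toy design matrix and a made-up solve_point helper; it is not the LiCSBAS API, only an illustration of the partial + fork-Pool idea.

```python
import multiprocessing as multi
from functools import partial

import numpy as np


def solve_point(d, G, i):
    """Least-squares solve for one pixel (column i of d), skipping NaN observations."""
    dpx = d[:, i]
    ok = ~np.isnan(dpx)          # in the real code this drops NaN interferograms
    mpx, *_ = np.linalg.lstsq(G[ok, :], dpx[ok], rcond=None)
    return mpx


if __name__ == '__main__':
    rng = np.random.default_rng(0)
    n_im, n_ifg, n_pt = 6, 12, 200

    # toy small-baseline design matrix: each ifg sums one or two consecutive increments
    G = np.zeros((n_ifg, n_im - 1), dtype=np.float32)
    for k in range(n_ifg):
        i0 = k % (n_im - 1)
        G[k, i0:min(i0 + 2, n_im - 1)] = 1

    true_m = rng.normal(size=(n_im - 1, n_pt)).astype(np.float32)   # synthetic increments
    d = G @ true_m + rng.normal(scale=0.01, size=(n_ifg, n_pt)).astype(np.float32)

    # freeze the shared arrays; only the integer point index is mapped, mirroring the PR's
    # p.map(partial(singular_nsbas_onepoint, d, G, m, dt_cum), args)
    func = partial(solve_point, d, G)
    with multi.get_context('fork').Pool(4) as pool:   # 'fork' as in the diff; not available on Windows
        result = np.array(pool.map(func, range(n_pt))).T            # (n_im-1, n_pt)

    print('max abs error:', float(np.abs(result - true_m).max()))
```

Run as a script this recovers the synthetic increments to well under the noise level; the real code additionally handles images with no usable interferogram (the badincs branch) and derives vel/vconst from the cumulative series afterwards.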
8 changes: 4 additions & 4 deletions bin/LiCSBAS01_get_geotiff.py
@@ -224,7 +224,7 @@ def main(argv=None):
else:
### Get available dates
print('Searching latest epoch for mli...', flush=True)
url = os.path.join(LiCSARweb, trackID, frameID, 'epochs')
url = os.path.join(LiCSARweb, trackID, frameID, 'epochs/')
response = requests.get(url)

response.encoding = response.apparent_encoding #avoid garble
@@ -269,7 +269,7 @@ def main(argv=None):

### Get available dates
print('\nDownload GACOS data', flush=True)
url = os.path.join(LiCSARweb, trackID, frameID, 'epochs')
url = os.path.join(LiCSARweb, trackID, frameID, 'epochs/')
response = requests.get(url)
response.encoding = response.apparent_encoding #avoid garble
html_doc = response.text
@@ -333,7 +333,7 @@ def main(argv=None):
#%% InSAR data
### Get available dates
print('\nDownload geotiff of InSAR products', flush=True)
url_ifgdir = os.path.join(LiCSARweb, trackID, frameID, 'interferograms')
url_ifgdir = os.path.join(LiCSARweb, trackID, frameID, 'interferograms/')
response = requests.get(url_ifgdir)

response.encoding = response.apparent_encoding #avoid garble
@@ -401,7 +401,7 @@ def main(argv=None):

### Get available dates
print('\nDownload MLI data', flush=True)
url = os.path.join(LiCSARweb, trackID, frameID, 'epochs')
url = os.path.join(LiCSARweb, trackID, frameID, 'epochs/')
response = requests.get(url)
response.encoding = response.apparent_encoding # avoid garble
html_doc = response.text
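Reviewer note on the LiCSBAS01_get_geotiff.py changes: the only edit is a trailing slash on the epochs and interferograms directory URLs. A likely motivation, which is my assumption rather than something stated in the diff, is that a directory URL without the trailing slash either triggers a redirect or makes relative links from the listing resolve one level too high. The sketch below only illustrates that resolution difference; the base URL and frame ID are invented for the example.

```python
import os
from urllib.parse import urljoin

# illustrative values only, not the real LiCSARweb server or a real frame ID
LiCSARweb = 'https://example.org/LiCSAR_products'
trackID, frameID = '017', '017A_00001_000000'

url_old = os.path.join(LiCSARweb, trackID, frameID, 'epochs')    # before this PR
url_new = os.path.join(LiCSARweb, trackID, frameID, 'epochs/')   # after this PR
# note: os.path.join only yields '/'-separated URLs on POSIX, as the original script assumes

# resolving a relative href scraped from the directory listing against each base:
print(urljoin(url_old, '20240101/'))  # .../017A_00001_000000/20240101/   ('epochs' is dropped)
print(urljoin(url_new, '20240101/'))  # .../017A_00001_000000/epochs/20240101/
```

I have not checked whether the script resolves links exactly this way; the point is only that directory URLs with and without the trailing slash do not behave the same.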