From 268e084bfa08fb7c363773caf1fffda755d94f28 Mon Sep 17 00:00:00 2001 From: rzinke Date: Thu, 28 Mar 2024 17:43:31 -0700 Subject: [PATCH 01/44] Start of GPS overhaul. Use only default UNR option. --- src/mintpy/objects/gps.py | 696 +++++++++++++++++++++++++++----------- src/mintpy/utils/plot.py | 2 +- src/mintpy/view.py | 4 +- 3 files changed, 498 insertions(+), 204 deletions(-) diff --git a/src/mintpy/objects/gps.py b/src/mintpy/objects/gps.py index 354e5a3e8..67395f00b 100644 --- a/src/mintpy/objects/gps.py +++ b/src/mintpy/objects/gps.py @@ -1,141 +1,238 @@ -"""Class / utilities for GPS download / operations.""" ############################################################ # Program is part of MintPy # # Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi # # Author: Zhang Yunjun, Jul 2018 # ############################################################ +# Utility scripts for GPS handling # Recommend import: # from mintpy.objects.gps import GPS +import os import csv -import datetime as dt import glob -import os -from urllib.request import urlretrieve - +import datetime as dt import numpy as np from pyproj import Geod +from urllib.request import urlretrieve +import pandas as pd +import zipfile +import matplotlib.pyplot as plt from mintpy.objects.coord import coordinate -from mintpy.utils import ptime, readfile, time_func, utils1 as ut +from mintpy.utils import ptime, time_func, readfile, utils1 as ut -UNR_SITE_LIST_FILE = 'http://geodesy.unr.edu/NGLStationPages/DataHoldings.txt' +supported_sources = ['UNR', 'ESESES'] -def dload_site_list(out_file=None, url=UNR_SITE_LIST_FILE, print_msg=True): - """download DataHoldings.txt. +UNR_site_list_file_url = 'http://geodesy.unr.edu/NGLStationPages/DataHoldings.txt' - Parameters: out_file - str, path to the local file. - Returns: out_file - str, path to the local file. - default: './GPS/DataHoldings.txt' +ESESES_site_list_file_url = 'http://garner.ucsd.edu/pub/measuresESESES_products/Velocities/ESESES_Velocities.txt' + + + +def dload_site_list(out_file=None, source='UNR', print_msg=True) -> str: + """Download single file with list of GPS site locations. """ - # output file path - if not out_file: - out_dir = os.path.abspath('./GPS') - out_file = os.path.join(out_dir, os.path.basename(url)) - else: - out_dir = os.path.abspath(os.path.dirname(out_file)) + # Check source is supported + assert source in supported_sources, \ + f'Source {source:s} not supported. Use one of {supported_sources}' + + # Determine URL + if source == 'UNR': + site_list_file_url = UNR_site_list_file_url + elif source == 'ESESES': + site_list_file_url = ESESES_site_list_file_url - # output file directory - if not os.path.isdir(out_dir): - if print_msg: - print('create directory:', out_dir) - os.makedirs(out_dir) + # Handle output file + if out_file is None: + out_file = os.path.basename(site_list_file_url) + # Report if requested if print_msg: - print(f'downloading site list from UNR Geod Lab: {url} to {out_file}') - urlretrieve(url, out_file) + print(f'Downloading site list from {source}: {site_list_file_url} to {out_file}') + + # Download file + urlretrieve(site_list_file_url, out_file) + return out_file -def search_gps(SNWE, start_date=None, end_date=None, site_list_file=None, min_num_solution=50, print_msg=True): - """Search available GPS sites within the geo bounding box from UNR website. - Parameters: SNWE - tuple of 4 float, indicating (South, North, West, East) in degrees - start_date - str in YYYYMMDD format - end_date - str in YYYYMMDD format - site_list_file - str. 
- min_num_solution - int, minimum number of solutions available - Returns: site_names - 1D np.array of string for GPS station names - site_lats - 1D np.array for lat - site_lons - 1D np.array for lon +def search_gps(SNWE, source='UNR', start_date=None, end_date=None, + site_list_file=None, min_num_solution=None, print_msg=True): + """Search available GPS sites within the geo bounding box from UNR website + Parameters: SNWE : tuple of 4 float, indicating (South, North, West, East) in degrees + start_date : string in YYYYMMDD format + end_date : string in YYYYMMDD format + site_list_file : string. + min_num_solution : int, minimum number of solutions available + Returns: site_names : 1D np.array of string for GPS station names + site_lats : 1D np.array for lat + site_lons : 1D np.array for lon """ - # download site list file if it's not found in current directory + print('Searching!', source) + # Check start and end dates if provided + if start_date is not None: + start_date = dt.datetime.strptime(start_date, '%Y%m%d') + if end_date is not None: + end_date = dt.datetime.strptime(end_date, '%Y%m%d') + if start_date is not None and end_date is not None: + assert(start_date < end_date), 'Start date must be before end date' + + # Check file name if site_list_file is None: - site_list_file = os.path.basename(UNR_SITE_LIST_FILE) + if source == 'UNR': + print('Using UNR!') + site_list_file = os.path.basename(UNR_site_list_file_url) + elif source == 'ESESES': + site_list_file = os.path.basename(ESESES_site_list_file_url) + # Check whether site list file is in current directory if not os.path.isfile(site_list_file): + # Download file dload_site_list(site_list_file, print_msg=print_msg) - txt_data = np.loadtxt(site_list_file, - dtype=bytes, - skiprows=1, - usecols=(0,1,2,3,4,5,6,7,8,9,10)).astype(str) - site_names = txt_data[:, 0] - site_lats, site_lons = txt_data[:, 1:3].astype(np.float32).T - site_lons -= np.round(site_lons / (360.)) * 360. 
- t0s = np.array([dt.datetime.strptime(i, "%Y-%m-%d") for i in txt_data[:, 7].astype(str)]) - t1s = np.array([dt.datetime.strptime(i, "%Y-%m-%d") for i in txt_data[:, 8].astype(str)]) - num_solution = txt_data[:, 10].astype(np.int16) + # Parse data from file + if source == 'UNR': + site_data = read_UNR_station_list(site_list_file) + elif source == 'ESESES': + site_data = read_ESESES_station_list(site_list_file) + + if print_msg == True: + print('Loaded data for fields: {:s}'.\ + format(' '.join(list(site_data.columns)))) + + # Parse bounding box + lat_min, lat_max, lon_min, lon_max = SNWE + assert (lon_min < lon_max) and (lat_min < lat_max), \ + 'Check bounding box' + + if print_msg == True: + print('Cropping to') + print(f'lon range: {lon_min:.5f} to {lon_max:.5f}') + print(f'lat range: {lat_min:.5f} to {lat_max:.5f}') + + # Ensure lon values in (-180, 180] + site_data['lon'] = [lon - 360 if lon > 180 else lon \ + for lon in site_data['lon']] + + # Limit in space + drop_ndx = (site_data.lat < lat_min) \ + | (site_data.lat > lat_max) \ + | (site_data.lon < lon_min) \ + | (site_data.lon > lon_max) + site_data.drop(site_data[drop_ndx].index, inplace=True) + + # Limit in time + if start_date is not None: + if hasattr(site_data, 'start_date'): + drop_ndx = site_data.start_date > start_date + site_data.drop(site_data[drop_ndx].index, inplace=True) + else: + print('No date information available--date range not applied to GPS site selection') + + if end_date is not None: + if hasattr(site_data, 'end_date'): + drop_ndx = site_data.end_date < end_date + site_data.drop(site_data[drop_ndx].index, inplace=True) + else: + print('No date information available--date range not applied to GPS site selection') - # limit in space - idx = ((site_lats >= SNWE[0]) * (site_lats <= SNWE[1]) * - (site_lons >= SNWE[2]) * (site_lons <= SNWE[3])) + # Limit based on number of solutions + if hasattr(site_data, 'num_solution'): + drop_ndx = site_data.num_solution < min_num_solution + site_data.drop(site_data[drop_ndx].index, inplace=True) - # limit in time - t0 = ptime.date_list2vector([start_date])[0][0] if start_date else None - t1 = ptime.date_list2vector([end_date])[0][0] if end_date else None - if start_date: - idx *= t1s >= t0 - if end_date: - idx *= t0s <= t1 + # Final reporting + if print_msg == True: + print('{:d} stations available'.format(site_data.shape[0])) + + return (site_data.site.to_numpy(), + site_data.lat.to_numpy(), + site_data.lon.to_numpy()) + +def read_UNR_station_list(site_list_file, print_msg=True): + """Return names and lon/lat values for UNR GNSS stations. + """ + if print_msg == True: + print('Parsing UNR site list file') + + # Read file contents + site_data = pd.read_fwf(site_list_file, + widths=(4, 9, 12, 9, 14, 14, 14, 11, 11, 11, 7)) + + # Rename columns for uniformity + site_data.rename(columns={'Sta': 'site', + 'Lat(deg)': 'lat', 'Long(deg)': 'lon', + 'Dtbeg': 'start_date', 'Dtend': 'end_date', + 'NumSol': 'num_solution'}, inplace=True) + + # Format dates + site_data['start_date'] = [dt.datetime.strptime(date, '%Y-%m-%d') \ + for date in site_data.start_date] + site_data['end_date'] = [dt.datetime.strptime(date, '%Y-%m-%d') \ + for date in site_data.end_date] + + return site_data + +def read_ESESES_station_list(site_list_file, print_msg=True): + """Return names and lon/lat values for JPL GNSS stations. 
+ """ + if print_msg == True: + print('Parsing ESESES site list file') - # limit on number of solutions - if min_num_solution is not None: - idx *= num_solution >= min_num_solution + # Read file contents + site_data = pd.read_csv(site_list_file, header = 14, delim_whitespace=True) - return site_names[idx], site_lats[idx], site_lons[idx] + # Rename columns for uniformity + site_data.rename(columns={'Site': 'site', + 'Latitude': 'lat', 'Longitude': 'lon'}, inplace=True) + + return site_data def get_baseline_change(dates1, pos_x1, pos_y1, pos_z1, dates2, pos_x2, pos_y2, pos_z2): - """Calculate the baseline change between two GPS displacement time-series. - Parameters: dates1/2 - 1D np.array of datetime.datetime object - pos_x/y/z1/2 - 1D np.ndarray of displacement in meters in np.float32 - Returns: dates - 1D np.array of datetime.datetime object for the common dates - bases - 1D np.ndarray of displacement in meters in np.float32 for the common dates + """Calculate the baseline change between two GPS displacement time-series + Parameters: dates1/2 : 1D np.array of datetime.datetime object + pos_x/y/z1/2 : 1D np.ndarray of displacement in meters in float32 + Returns: dates : 1D np.array of datetime.datetime object for the common dates + bases : 1D np.ndarray of displacement in meters in float32 for the common + dates """ dates = np.array(sorted(list(set(dates1) & set(dates2)))) - bases = np.zeros(dates.shape, dtype=np.float64) - for i, date_str in enumerate(dates): - idx1 = np.where(dates1 == date_str)[0][0] - idx2 = np.where(dates2 == date_str)[0][0] + bases = np.zeros(dates.shape, dtype=float) + for i in range(len(dates)): + idx1 = np.where(dates1 == dates[i])[0][0] + idx2 = np.where(dates2 == dates[i])[0][0] basei = ((pos_x1[idx1] - pos_x2[idx2]) ** 2 + (pos_y1[idx1] - pos_y2[idx2]) ** 2 + (pos_z1[idx1] - pos_z2[idx2]) ** 2) ** 0.5 bases[i] = basei bases -= bases[0] - bases = np.array(bases, dtype=np.float32) + bases = np.array(bases, dtype=float) + return dates, bases -def get_gps_los_obs(meta, obs_type, site_names, start_date, end_date, gps_comp='enu2los', - horz_az_angle=-90., model=None, print_msg=True, redo=False): +def get_gps_los_obs(meta, obs_type, site_names, start_date, end_date, source='UNR', + gps_comp='enu2los', horz_az_angle=-90., model=None, + print_msg=True,redo=False): """Get the GPS LOS observations given the query info. - Parameters: meta - dict, dictionary of metadata of the InSAR file - obs_type - str, GPS observation data type, displacement or velocity. - site_names - list of str, GPS sites, output of search_gps() - start_date - str, date in YYYYMMDD format - end_date - str, date in YYYYMMDD format - gps_comp - str, flag of projecting 2/3D GPS into LOS - e.g. enu2los, hz2los, up2los + Parameters: meta - dict, dictionary of metadata of the InSAR file + obs_type - str, GPS observation data type, displacement or velocity. + site_names - list of str, GPS sites, output of search_gps() + start_date - str, date in YYYYMMDD format + end_date - str, date in YYYYMMDD format + gps_comp - str, flag of projecting 2/3D GPS into LOS + e.g. enu2los, hz2los, up2los horz_az_angle - float, azimuth angle of the horizontal motion in degree - measured from the north with anti-clockwise as positive + measured from the north with anti-clockwise as positive model - dict, time function model, e.g. 
{'polynomial': 1, 'periodic': [1.0, 0.5]} - print_msg - bool, print verbose info - redo - bool, ignore existing CSV file and re-calculate - Returns: site_obs - 1D np.ndarray(), GPS LOS velocity or displacement in m or m/yr + print_msg - bool, print verbose info + redo - bool, ignore existing CSV file and re-calculate + Returns: site_obs - 1D np.ndarray(), GPS LOS velocity or displacement in m or m/yr Examples: from mintpy.objects import gps from mintpy.utils import readfile, utils as ut meta = readfile.read_attribute('geo/geo_velocity.h5') @@ -173,7 +270,7 @@ def get_gps_los_obs(meta, obs_type, site_names, start_date, end_date, gps_comp=' if not redo and os.path.isfile(csv_file) and num_row >= num_site: # read from existing CSV file - vprint(f'read GPS observations from file: {csv_file}') + vprint('read GPS observations from file: {}'.format(csv_file)) fc = np.genfromtxt(csv_file, dtype=col_types, delimiter=',', names=True) site_obs = fc[col_names[obs_ind]] @@ -181,8 +278,8 @@ def get_gps_los_obs(meta, obs_type, site_names, start_date, end_date, gps_comp=' # in case the site_names are not consistent with the CSV file. if num_row != num_site: temp_names = fc[col_names[0]] - temp_obs = np.array(site_obs, dtype=np.float32) - site_obs = np.zeros(num_site, dtype=np.float32) * np.nan + temp_obs = np.array(site_obs, dtype=float) + site_obs = np.zeros(num_site, dtype=float) * np.nan for i, site_name in enumerate(site_names): if site_name in temp_names: site_obs[i] = temp_obs[temp_names == site_name][0] @@ -193,10 +290,12 @@ def get_gps_los_obs(meta, obs_type, site_names, start_date, end_date, gps_comp=' vprint('calculating GPS observation ...') # get geom_obj (meta / geom_file) - geom_file = ut.get_geometry_file(['incidenceAngle','azimuthAngle'], work_dir=file_dir, coord='geo') + geom_file = ut.get_geometry_file(['incidenceAngle','azimuthAngle'], + work_dir=file_dir, coord='geo') if geom_file: geom_obj = geom_file - vprint(f'use incidence / azimuth angle from file: {os.path.basename(geom_file)}') + vprint('use incidence / azimuth angle from file: {}'.\ + format(os.path.basename(geom_file))) else: geom_obj = meta vprint('use incidence / azimuth angle from metadata') @@ -204,10 +303,11 @@ def get_gps_los_obs(meta, obs_type, site_names, start_date, end_date, gps_comp=' # loop for calculation prog_bar = ptime.progressBar(maxValue=num_site, print_msg=print_msg) for i, site_name in enumerate(site_names): - prog_bar.update(i+1, suffix=f'{i+1}/{num_site} {site_name}') + prog_bar.update(i+1, suffix='{}/{} {}'.format(i+1, num_site, site_name)) # calculate gps data value - obj = GPS(site_name) + obj = GPS(site_name, source=source) + obj.open(print_msg=print_msg) vel, dis_ts = obj.get_gps_los_velocity( geom_obj, start_date=start_date, @@ -230,7 +330,7 @@ def get_gps_los_obs(meta, obs_type, site_names, start_date, end_date, gps_comp=' # data_list = [x for x in data_list if not np.isnan(x[-1])] # write to CSV file - vprint(f'write GPS observations to file: {csv_file}') + vprint('write GPS observations to file: {}'.format(csv_file)) with open(csv_file, 'w') as fc: fcw = csv.writer(fc) fcw.writerow(col_names) @@ -243,7 +343,6 @@ def get_gps_los_obs(meta, obs_type, site_names, start_date, end_date, gps_comp=' - #################################### Beginning of GPS-GSI utility functions ######################## def read_pos_file(fname): import codecs @@ -255,14 +354,15 @@ def read_pos_file(fname): ds = fc[:,2].astype(int) dates = [dt.datetime(year=y, month=m, day=d) for y,m,d in zip(ys, ms, ds)] - X = 
fc[:,4].astype(np.float64).tolist() - Y = fc[:,5].astype(np.float64).tolist() - Z = fc[:,6].astype(np.float64).tolist() + X = fc[:,4].astype(float64).tolist() + Y = fc[:,5].astype(float64).tolist() + Z = fc[:,6].astype(float64).tolist() + return dates, X, Y, Z def get_pos_years(gps_dir, site): - fnames = glob.glob(os.path.join(gps_dir, f'{site}.*.pos')) + fnames = glob.glob(os.path.join(gps_dir, '{}.*.pos'.format(site))) years = [os.path.basename(i).split('.')[1] for i in fnames] years = ptime.yy2yyyy(years) return years @@ -276,7 +376,7 @@ def read_GSI_F3(gps_dir, site, start_date=None, end_date=None): dates, X, Y, Z = [], [], [], [] for i in range(num_year): yeari = str(year0 + i) - fname = os.path.join(gps_dir, f'{site}.{yeari[2:]}.pos') + fname = os.path.join(gps_dir, '{}.{}.pos'.format(site, yeari[2:])) datesi, Xi, Yi, Zi = read_pos_file(fname) dates += datesi X += Xi @@ -294,99 +394,152 @@ def read_GSI_F3(gps_dir, site, start_date=None, end_date=None): flag[dates > date1] = False return dates[flag], X[flag], Y[flag], Z[flag] -#################################### End of GPS-GSI utility functions ############################## +#################################### End of GPS-GSI utility functions ############################## -#################################### Beginning of GPS-UNR class #################################### +#################################### Beginning of GPS class ######################################## class GPS: - """GPS class for GPS time-series of daily solution - - Example: - import matplotlib.pyplot as plt - from mintpy.objects.gps import GPS - from mintpy.utils import utils as ut - gps_obj = GPS(site='GV05', data_dir='~/insarlab/GPS') - gps_obj.open() - dis_los = ut.enu2los(gps_obj.dis_e, - gps_obj.dis_n, - gps_obj.dis_u) - dates = gps_obj.dates - plt.figure() - plt.scatter(dates, dis_los) - plt.show() + """GPS class for GPS time-series of daily solution. """ - def __init__(self, site, data_dir='./GPS', version='IGS14'): + def __init__(self, site: str, source='UNR', data_dir='./GPS', + version='IGS14'): + # Check inputs + assert source in supported_sources, \ + f'Source {source:s} not supported. 
Use one of {supported_sources}' + + # Record properties self.site = site - self.data_dir = os.path.abspath(data_dir) + self.source = source self.version = version - self.source = 'Nevada Geodetic Lab' - - # time-series data from Nevada Geodetic Lab - # example link: http://geodesy.unr.edu/gps_timeseries/tenv3/IGS08/1LSU.IGS08.tenv3 - # http://geodesy.unr.edu/gps_timeseries/tenv3/IGS14/CASU.tenv3 - if version == 'IGS08': - self.file = os.path.join(data_dir, f'{site}.{version}.tenv3') - elif version == 'IGS14': - self.file = os.path.join(data_dir, f'{site}.tenv3') - else: - raise ValueError(f'un-recognized GPS data version: {version}') - url_prefix = 'http://geodesy.unr.edu/gps_timeseries/tenv3' - self.file_url = os.path.join(url_prefix, version, os.path.basename(self.file)) - - # time-series plot from Nevada Geodetic Lab - # example link: http://geodesy.unr.edu/tsplots/IGS08/TimeSeries/CAMO.png - # http://geodesy.unr.edu/tsplots/IGS14/IGS14/TimeSeries/CASU.png - self.plot_file = os.path.join(data_dir, f'pic/{site}.png') - - url_prefix = 'http://geodesy.unr.edu/tsplots' - if version == 'IGS08': - url_prefix += f'/{version}' - elif version == 'IGS14': - url_prefix += '/{0}/{0}'.format(version) - self.plot_file_url = os.path.join(url_prefix, f'TimeSeries/{site}.png') - - # list of stations from Nevada Geodetic Lab - self.site_list_file = os.path.join(data_dir, 'DataHoldings.txt') - if not os.path.isfile(self.site_list_file): - dload_site_list(self.site_list_file) - site_names = np.loadtxt(self.site_list_file, dtype=bytes, skiprows=1, usecols=(0)).astype(str) - if site not in site_names: - raise ValueError(f'Site {site} NOT found in file: {UNR_SITE_LIST_FILE}') - - # directories for data files and plot files - for fdir in [data_dir, os.path.dirname(self.plot_file)]: - if not os.path.isdir(fdir): - os.makedirs(fdir) - - def open(self, print_msg=True): - if not os.path.isfile(self.file): - self.dload_site() + # Create data directory if not exist + self.data_dir = data_dir + if not os.path.exists(self.data_dir): + os.mkdir(self.data_dir) + + return None + + + def open(self, file=None, print_msg=True): + """Read the lat/lon and displacement data of the station. + Download if necessary. + """ + # Download file if not present + if not hasattr(self, 'file'): + self.dload_site(print_msg=print_msg) + + # Retrieve data from file self.get_stat_lat_lon(print_msg=print_msg) self.read_displacement(print_msg=print_msg) + return None + + def dload_site(self, print_msg=True): - if print_msg: - print(f'downloading {self.site} from {self.file_url}') + """Download the station displacement data from the + specified source. + + Modifies: self.file : str for local file path/name + self.file_url : str for file URL - urlretrieve(self.file_url, self.file) - urlretrieve(self.plot_file_url, self.plot_file) + Returns: self.file + """ + if print_msg == True: + print(f"Downloading data for site {self.site:s} from {self.source:s}") + + # Download displacement data based on source + if self.source == 'UNR': + self.__download_unr_file__(print_msg=print_msg) + elif self.source == 'ESESES': + self.__download_eseses_file__(print_msg=print_msg) return self.file + def __download_unr_file__(self, print_msg): + """Download GPS displacement data from UNR. 
+ """ + # URL and file name specs + url_prefix = 'http://geodesy.unr.edu/gps_timeseries/tenv3' + if self.version == 'IGS08': + self.file = os.path.join(self.data_dir, + '{site:s}.{version:s}.tenv3'.\ + format(site=self.site)) + elif self.version == 'IGS14': + self.file = os.path.join(self.data_dir, + '{site:s}.tenv3'.\ + format(site=self.site)) + self.file_url = os.path.join(url_prefix, self.version, + os.path.basename(self.file)) + + # Download file if not present + if os.path.exists(self.file): + print(f'File {self.file} exists--reading') + else: + if print_msg == True: + print(f'... downloading {self.file_url:s} to {self.file:s}') + urlretrieve(self.file_url, self.file) + + return None + + def __download_eseses_file__(self, print_msg): + """Download GPS displacement data from ESESES. + """ + # URL and file name specs + url_prefix = 'http://garner.ucsd.edu/pub/measuresESESES_products/Timeseries/CurrentUntarred/Clean_TrendNeuTimeSeries_comb_20240320' + self.file = os.path.join(self.data_dir, + '{site:s}CleanTrend.neu.Z'.\ + format(site=self.site.lower())) + self.file_url = os.path.join(url_prefix, os.path.basename(self.file)) + + # Download file if not present + if os.path.exists(self.file): + print(f'File {self.file} exists--reading') + else: + if print_msg == True: + print(f'... downloading {self.file_url:s} to {self.file:s}') + urlretrieve(self.file_url, self.file) + + # Unzip file + with zipfile.ZipFile(self.file, 'r') as Zfile: + Zfile.extractall(self.data_dir) + + # Update file name + self.file = self.file.strip('.Z') + if print_msg == True: + print(f'... extracted to {self.file:s}') + + return None + + def get_stat_lat_lon(self, print_msg=True): - """Get station lat/lon""" - if print_msg: + """Get station lat/lon based on processing source. + Retrieve data from the displacement file. + """ + if print_msg == True: print('calculating station lat/lon') if not os.path.isfile(self.file): self.dload_site(print_msg=print_msg) + # Retrieve lat/lon based on processing source + if self.source == 'UNR': + self.__get_unr_lat_lon__() + elif self.source == 'ESESES': + self.__get_eseses_lat_lon__() + + if print_msg == True: + print(f'\t{self.site_lat:f}, {self.site_lon:f}') + + return self.site_lat, self.site_lon + + def __get_unr_lat_lon__(self): + """Get station lat/lon for UNR data. + """ data = np.loadtxt(self.file, dtype=bytes, skiprows=1).astype(str) ref_lon, ref_lat = float(data[0, 6]), 0. - e0, e_off, n0, n_off = data[0, 7:11].astype(np.float32) + e0, e_off, n0, n_off = data[0, 7:11].astype(float) e0 += e_off n0 += n_off @@ -394,28 +547,85 @@ def get_stat_lat_lon(self, print_msg=True): dist = np.sqrt(e0**2 + n0**2) g = Geod(ellps='WGS84') self.site_lon, self.site_lat = g.fwd(ref_lon, ref_lat, az, dist)[0:2] - return self.site_lat, self.site_lon - def read_displacement(self, start_date=None, end_date=None, print_msg=True, display=False): - """ Read GPS displacement time-series (defined by start/end_date). - Parameters: start/end_date - str in YYYYMMDD format - Returns: dates - 1D np.ndarray of datetime.datetime object - dis_e/n/u - 1D np.ndarray of displacement in meters in np.float32 - std_e/n/u - 1D np.ndarray of displacement STD in meters in np.float32 + return None + + @staticmethod + def lon_360to180(lon: float) -> float: + """Convert longitude in the range [0, 360) to + range (-180, 180]. """ - # download file if it's not exists. + if lon > 180: + lon -= 360 + return lon + + def __get_eseses_lat_lon__(self): + """Get station lat/lon for ESESES data. 
+ """ + with open(self.file, 'r') as data_file: + # Read raw file contents + lines = data_file.readlines() + + # Determine reference latitude + lat_line = [line for line in lines \ + if line.find('# Latitude') != -1] + lat_line = lat_line[0].strip('\n') + self.site_lat = float(lat_line.split()[-1]) + + # Determine reference longitude + lon_line = [line for line in lines \ + if line.find('# East Longitude') != -1] + lon_line = lon_line[0].strip('\n') + site_lon = float(lon_line.split()[-1]) + self.site_lon = self.lon_360to180(site_lon) + + return None + + + def read_displacement(self, start_date=None, end_date=None, print_msg=True, + display=False): + """Read GPS displacement time-series (defined by start/end_date) + Parameters: start/end_date : str in YYYYMMDD format + Returns: dates : 1D np.ndarray of datetime.datetime object + dis_e/n/u : 1D np.ndarray of displacement in meters in float32 + std_e/n/u : 1D np.ndarray of displacement STD in meters in float32 + """ + # Download file if it does not exist if not os.path.isfile(self.file): self.dload_site(print_msg=print_msg) - # read dates, dis_e, dis_n, dis_u - if print_msg: + # Read dates, dis_e, dis_n, dis_u + if print_msg == True: print('reading time and displacement in east/north/vertical direction') + + if self.source == 'UNR': + self.__read_unr_displacement__() + elif self.source == 'ESESES': + self.__read_eseses_displacement__() + + # Cut out the specified time range + self.__crop_to_date_range__(start_date, end_date) + + # Display if requested + if display == True: + self.display_data() + + return (self.dates, + self.dis_e, self.dis_n, self.dis_u, + self.std_e, self.std_n, self.std_u) + + def __read_unr_displacement__(self): + """Read GPS displacement time-series processed by UNR. + """ + # Read data from file data = np.loadtxt(self.file, dtype=bytes, skiprows=1).astype(str) - self.dates = np.array([dt.datetime.strptime(i, "%y%b%d") for i in data[:, 1]]) - #self.dates = np.array([ptime.decimal_year2datetime(i) for i in data[:, 2]]) + # Parse dates + self.dates = np.array([dt.datetime.strptime(i, "%y%b%d") \ + for i in data[:,1]]) self.date_list = [x.strftime('%Y%m%d') for x in self.dates] + # Parse displacement data (self.dis_e, self.dis_n, self.dis_u, @@ -423,8 +633,37 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, disp self.std_n, self.std_u) = data[:, (8,10,12,14,15,16)].astype(np.float32).T - # cut out the specified time range - t_flag = np.ones(len(self.dates), np.bool_) + return None + + def __read_eseses_displacement__(self): + """Read GPS displacement time-series processed by ESESES. + """ + # Read data from file + data = np.loadtxt(self.file, usecols=tuple(range(0,12))) + n_data = data.shape[0] + + # Parse dates + dates = [dt.datetime(int(data[i,1]), 1, 1) \ + + dt.timedelta(days=int(data[i,2])) \ + for i in range(n_data)] + self.dates = np.array(dates) + self.date_list = [date.strftime('%Y%m%d') for date in self.dates] + + # Parse displacement data + (self.dis_n, + self.dis_e, + self.dis_u, + self.std_n, + self.std_e, + self.std_u) = data[:, 3:9].astype(np.float32).T / 1000 + + return None + + def __crop_to_date_range__(self, start_date: str, end_date: str): + """Cut out the specified time range. 
+ start/end_date in format YYYYMMDD + """ + t_flag = np.ones(len(self.dates), bool) if start_date: t0 = ptime.date_list2vector([start_date])[0][0] t_flag[self.dates < t0] = 0 @@ -439,21 +678,34 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, disp self.std_n = self.std_n[t_flag] self.std_u = self.std_u[t_flag] - if display: - import matplotlib.pyplot as plt - _, ax = plt.subplots(nrows=3, ncols=1, sharex=True) - ax[0].scatter(self.dates, self.dis_e, s=2**2, label='East') - ax[1].scatter(self.dates, self.dis_n, s=2**2, label='North') - ax[2].scatter(self.dates, self.dis_u, s=2**2, label='Up') - plt.show() + return None - return (self.dates, - self.dis_e, self.dis_n, self.dis_u, - self.std_e, self.std_n, self.std_u) + + def display_data(self): + """Display displacement data. + """ + # Instantiate figure and axes + fig, ax = plt.subplots(nrows=3, ncols=1, sharex=True) + + # Plot data + ax[0].scatter(self.dates, self.dis_e, s=2**2, + c='k', label='East') + ax[1].scatter(self.dates, self.dis_n, s=2**2, + c='k', label='North') + ax[2].scatter(self.dates, self.dis_u, s=2**2, + c='k', label='Up') + + # Format plot + fig.suptitle(f'{self.site:s} ({self.source:s})') + + plt.show() + + return fig, ax ##################################### Utility Functions ################################### - def displacement_enu2los(self, inc_angle:float, az_angle:float, gps_comp='enu2los', horz_az_angle=-90.): + def displacement_enu2los(self, inc_angle:float, az_angle:float, gps_comp='enu2los', + horz_az_angle=-90., display=False, model=None): """Convert displacement in ENU to LOS direction. Parameters: inc_angle - float, LOS incidence angle in degree @@ -482,6 +734,22 @@ def displacement_enu2los(self, inc_angle:float, az_angle:float, gps_comp='enu2lo + (self.std_n * unit_vec[1])**2 + (self.std_u * unit_vec[2])**2 ) ** 0.5 + # display if requested + if display == True: + # Instantiate figure and axes + fig, ax = plt.subplots(sharex=True) + + # Plot LOS displacement + ax.scatter(self.dates, self.dis_los, s=2**2, + c='k', label='LOS') + + # Plot fit if model specified + if model is not None: + # specific time_func model + date_list = [dt.datetime.strftime(i, '%Y%m%d') for i in dates] + A = time_func.get_design_matrix4time_func(date_list, model=model) + estm_dis = np.dot(np.linalg.pinv(A), self.dis_los) + return self.dis_los, self.std_los @@ -557,16 +825,37 @@ def read_gps_los_displacement(self, geom_obj, start_date=None, end_date=None, re return dates, dis, std, site_lalo, ref_site_lalo - def get_gps_los_velocity(self, geom_obj, start_date=None, end_date=None, ref_site=None, - gps_comp='enu2los', horz_az_angle=-90., model=None): - - dates, dis = self.read_gps_los_displacement( - geom_obj, - start_date=start_date, - end_date=end_date, - ref_site=ref_site, - gps_comp=gps_comp, - horz_az_angle=horz_az_angle)[:2] + def get_gps_los_velocity(self, geom_obj, start_date=None, end_date=None, + ref_site=None, gps_comp='enu2los', + horz_az_angle=-90., model=None, + print_msg=True): + """Convert the three-component displacement data into LOS + velocity. + + Parameters: geom_obj : dict / str, metadata of InSAR file, or + geometry file path + start_date : string in YYYYMMDD format + end_date : string in YYYYMMDD format + ref_site : string, reference GPS site + gps_comp : string, GPS components used to convert to + LOS direction + horz_az_angle : float, fault azimuth angle used to convert + horizontal to fault-parallel + model : dict, time function model, e.g. 
+ {'polynomial': 1, 'periodic': [1.0, 0.5]} + Returns: dates : 1D np.array of datetime.datetime object + dis : 1D np.array of displacement in meters + std : 1D np.array of displacement uncertainty in meters + site_lalo : tuple of 2 float, lat/lon of GPS site + ref_site_lalo : tuple of 2 float, lat/lon of reference GPS site + """ + # Retrieve displacement data + dates, dis = self.read_gps_los_displacement(geom_obj, + start_date=start_date, + end_date=end_date, + ref_site=ref_site, + gps_comp=gps_comp, + horz_az_angle=horz_az_angle)[:2] # displacement -> velocity # if: @@ -588,7 +877,10 @@ def get_gps_los_velocity(self, geom_obj, start_date=None, end_date=None, ref_sit self.velocity = np.dot(np.linalg.pinv(A), dis)[1] else: self.velocity = np.nan + if print_msg == True: + print(f'Velocity calculation failed for site {self.site}') return self.velocity, dis -#################################### End of GPS-UNR class #################################### + +#################################### End of GPS class #################################### diff --git a/src/mintpy/utils/plot.py b/src/mintpy/utils/plot.py index 008a4a33c..11b3ae1dd 100644 --- a/src/mintpy/utils/plot.py +++ b/src/mintpy/utils/plot.py @@ -1136,7 +1136,7 @@ def plot_gps(ax, SNWE, inps, metadata=dict(), print_msg=True): SNWE = (south, north, west, east) # query for GNSS stations - site_names, site_lats, site_lons = gps.search_gps(SNWE, start_date, end_date) + site_names, site_lats, site_lons = gps.search_gps(SNWE, start_date=start_date, end_date=end_date) if site_names.size == 0: warnings.warn(f'No GNSS found within {SNWE} during {start_date} - {end_date}!') print(' continue without GNSS plots.') diff --git a/src/mintpy/view.py b/src/mintpy/view.py index 464fc919b..d4fdaaa8b 100644 --- a/src/mintpy/view.py +++ b/src/mintpy/view.py @@ -541,7 +541,9 @@ def extent2meshgrid(extent: tuple, ds_shape: list): # Reference (InSAR) data to a GNSS site coord = ut.coordinate(metadata) if inps.disp_gps and inps.gps_component and inps.ref_gps_site: - ref_site_lalo = GPS(site=inps.ref_gps_site).get_stat_lat_lon(print_msg=False) + ref_site_gps = GPS(site=inps.ref_gps_site) + ref_site_gps.open() + ref_site_lalo = ref_site_gps.get_stat_lat_lon(print_msg=False) y, x = coord.geo2radar(ref_site_lalo[0], ref_site_lalo[1])[0:2] ref_data = data[y - inps.pix_box[1], x - inps.pix_box[0]] data -= ref_data From 29f53708716e658d2dfbdf56db6cc15759578123 Mon Sep 17 00:00:00 2001 From: rzinke Date: Sun, 31 Mar 2024 21:12:29 -0700 Subject: [PATCH 02/44] Created objects enabling ingest of different GPS processing sources. --- src/mintpy/objects/gps.py | 583 ++++++++++++++++------------- src/mintpy/objects/insar_vs_gps.py | 13 +- src/mintpy/utils/arg_utils.py | 3 + src/mintpy/utils/plot.py | 3 +- src/mintpy/view.py | 6 +- 5 files changed, 351 insertions(+), 257 deletions(-) diff --git a/src/mintpy/objects/gps.py b/src/mintpy/objects/gps.py index 67395f00b..9ef5a75d2 100644 --- a/src/mintpy/objects/gps.py +++ b/src/mintpy/objects/gps.py @@ -61,16 +61,16 @@ def dload_site_list(out_file=None, source='UNR', print_msg=True) -> str: def search_gps(SNWE, source='UNR', start_date=None, end_date=None, site_list_file=None, min_num_solution=None, print_msg=True): """Search available GPS sites within the geo bounding box from UNR website - Parameters: SNWE : tuple of 4 float, indicating (South, North, West, East) in degrees - start_date : string in YYYYMMDD format - end_date : string in YYYYMMDD format - site_list_file : string. 
- min_num_solution : int, minimum number of solutions available - Returns: site_names : 1D np.array of string for GPS station names - site_lats : 1D np.array for lat - site_lons : 1D np.array for lon + Parameters: SNWE - tuple of 4 float, indicating (South, North, West, East) in degrees + source - str, program or institution that processed the GPS data + start_date - str, date in YYYYMMDD format + end_date - str, date in YYYYMMDD format + site_list_file - str + min_num_solution - int, minimum number of solutions available + Returns: site_names - 1D np.array of string, GPS station names + site_lats - 1D np.array, lat + site_lons - 1D np.array, lon """ - print('Searching!', source) # Check start and end dates if provided if start_date is not None: start_date = dt.datetime.strptime(start_date, '%Y%m%d') @@ -82,7 +82,6 @@ def search_gps(SNWE, source='UNR', start_date=None, end_date=None, # Check file name if site_list_file is None: if source == 'UNR': - print('Using UNR!') site_list_file = os.path.basename(UNR_site_list_file_url) elif source == 'ESESES': site_list_file = os.path.basename(ESESES_site_list_file_url) @@ -102,6 +101,9 @@ def search_gps(SNWE, source='UNR', start_date=None, end_date=None, print('Loaded data for fields: {:s}'.\ format(' '.join(list(site_data.columns)))) + # Ensure that station name is consistent + site_data['site'] = [site_data.iloc[i,:].site.upper() for i in range(site_data.shape[0])] + # Parse bounding box lat_min, lat_max, lon_min, lon_max = SNWE assert (lon_min < lon_max) and (lat_min < lat_max), \ @@ -113,8 +115,7 @@ def search_gps(SNWE, source='UNR', start_date=None, end_date=None, print(f'lat range: {lat_min:.5f} to {lat_max:.5f}') # Ensure lon values in (-180, 180] - site_data['lon'] = [lon - 360 if lon > 180 else lon \ - for lon in site_data['lon']] + site_data['lon'] = [lon - 360 if lon > 180 else lon for lon in site_data['lon']] # Limit in space drop_ndx = (site_data.lat < lat_min) \ @@ -151,7 +152,7 @@ def search_gps(SNWE, source='UNR', start_date=None, end_date=None, site_data.lat.to_numpy(), site_data.lon.to_numpy()) -def read_UNR_station_list(site_list_file, print_msg=True): +def read_UNR_station_list(site_list_file:str, print_msg=True) -> pd.DataFrame: """Return names and lon/lat values for UNR GNSS stations. """ if print_msg == True: @@ -175,14 +176,14 @@ def read_UNR_station_list(site_list_file, print_msg=True): return site_data -def read_ESESES_station_list(site_list_file, print_msg=True): +def read_ESESES_station_list(site_list_file:str, print_msg=True) -> pd.DataFrame: """Return names and lon/lat values for JPL GNSS stations. 
""" if print_msg == True: print('Parsing ESESES site list file') # Read file contents - site_data = pd.read_csv(site_list_file, header = 14, delim_whitespace=True) + site_data = pd.read_csv(site_list_file, header = 14, sep='\s+') # Rename columns for uniformity site_data.rename(columns={'Site': 'site', @@ -194,11 +195,12 @@ def read_ESESES_station_list(site_list_file, print_msg=True): def get_baseline_change(dates1, pos_x1, pos_y1, pos_z1, dates2, pos_x2, pos_y2, pos_z2): """Calculate the baseline change between two GPS displacement time-series - Parameters: dates1/2 : 1D np.array of datetime.datetime object - pos_x/y/z1/2 : 1D np.ndarray of displacement in meters in float32 - Returns: dates : 1D np.array of datetime.datetime object for the common dates - bases : 1D np.ndarray of displacement in meters in float32 for the common - dates + Parameters: dates1/2 - 1D np.array, datetime.datetime object + pos_x/y/z1/2 - 1D np.ndarray, displacement in meters in float32 + Returns: dates - 1D np.array, datetime.datetime object for the + common dates + bases - 1D np.ndarray, displacement in meters in float32 + for the common dates """ dates = np.array(sorted(list(set(dates1) & set(dates2)))) bases = np.zeros(dates.shape, dtype=float) @@ -323,12 +325,6 @@ def get_gps_los_obs(meta, obs_type, site_names, start_date, end_date, source='UN data_list.append([obj.site, obj.site_lon, obj.site_lat, dis, vel]) prog_bar.close() - # # discard invalid sites - # flag = np.isnan([x[-1] for x in data_list]) - # vprint('discard extra {} stations due to limited overlap/observations in time:'.format(np.sum(flag))) - # vprint(' {}'.format(np.array(data_list)[flag][:,0].tolist())) - # data_list = [x for x in data_list if not np.isnan(x[-1])] - # write to CSV file vprint('write GPS observations to file: {}'.format(csv_file)) with open(csv_file, 'w') as fc: @@ -402,27 +398,26 @@ def read_GSI_F3(gps_dir, site, start_date=None, end_date=None): #################################### Beginning of GPS class ######################################## class GPS: """GPS class for GPS time-series of daily solution. + + The GPS class is solely meant to be a parent class. Child classes, defined + below, support functions for downloading and parsing GPS position based on + the processing source (e.g., UNR, etc.). Use the `get_gps_obj_by_source` + method to determine appropriate child class. """ - def __init__(self, site: str, source='UNR', data_dir='./GPS', + def __init__(self, site: str, data_dir='./GPS', version='IGS14'): - # Check inputs - assert source in supported_sources, \ - f'Source {source:s} not supported. Use one of {supported_sources}' - # Record properties self.site = site - self.source = source self.version = version # Create data directory if not exist - self.data_dir = data_dir + self.data_dir = os.path.abspath(data_dir) if not os.path.exists(self.data_dir): os.mkdir(self.data_dir) return None - def open(self, file=None, print_msg=True): """Read the lat/lon and displacement data of the station. Download if necessary. @@ -437,118 +432,25 @@ def open(self, file=None, print_msg=True): return None - - def dload_site(self, print_msg=True): - """Download the station displacement data from the - specified source. 
- - Modifies: self.file : str for local file path/name - self.file_url : str for file URL - - Returns: self.file - """ - if print_msg == True: - print(f"Downloading data for site {self.site:s} from {self.source:s}") - - # Download displacement data based on source - if self.source == 'UNR': - self.__download_unr_file__(print_msg=print_msg) - elif self.source == 'ESESES': - self.__download_eseses_file__(print_msg=print_msg) - - return self.file - - def __download_unr_file__(self, print_msg): - """Download GPS displacement data from UNR. - """ - # URL and file name specs - url_prefix = 'http://geodesy.unr.edu/gps_timeseries/tenv3' - if self.version == 'IGS08': - self.file = os.path.join(self.data_dir, - '{site:s}.{version:s}.tenv3'.\ - format(site=self.site)) - elif self.version == 'IGS14': - self.file = os.path.join(self.data_dir, - '{site:s}.tenv3'.\ - format(site=self.site)) - self.file_url = os.path.join(url_prefix, self.version, - os.path.basename(self.file)) - - # Download file if not present - if os.path.exists(self.file): - print(f'File {self.file} exists--reading') - else: - if print_msg == True: - print(f'... downloading {self.file_url:s} to {self.file:s}') - urlretrieve(self.file_url, self.file) - - return None - - def __download_eseses_file__(self, print_msg): - """Download GPS displacement data from ESESES. + @staticmethod + def get_gps_obj_by_source(source:str): + """Return the appropriate GPS child class based on processing source. """ - # URL and file name specs - url_prefix = 'http://garner.ucsd.edu/pub/measuresESESES_products/Timeseries/CurrentUntarred/Clean_TrendNeuTimeSeries_comb_20240320' - self.file = os.path.join(self.data_dir, - '{site:s}CleanTrend.neu.Z'.\ - format(site=self.site.lower())) - self.file_url = os.path.join(url_prefix, os.path.basename(self.file)) - - # Download file if not present - if os.path.exists(self.file): - print(f'File {self.file} exists--reading') + if source == 'UNR': + return UNR_GPS + elif source == 'ESESES': + return ESESES_GPS else: - if print_msg == True: - print(f'... downloading {self.file_url:s} to {self.file:s}') - urlretrieve(self.file_url, self.file) - - # Unzip file - with zipfile.ZipFile(self.file, 'r') as Zfile: - Zfile.extractall(self.data_dir) - - # Update file name - self.file = self.file.strip('.Z') - if print_msg == True: - print(f'... extracted to {self.file:s}') - - return None + raise ValueError(f'{source:s} source not supported.') + def dload_site(self, print_msg=True): + raise NotImplementedError('Func. dload_site not implemented. Override with child class.') def get_stat_lat_lon(self, print_msg=True): - """Get station lat/lon based on processing source. - Retrieve data from the displacement file. - """ - if print_msg == True: - print('calculating station lat/lon') - if not os.path.isfile(self.file): - self.dload_site(print_msg=print_msg) - - # Retrieve lat/lon based on processing source - if self.source == 'UNR': - self.__get_unr_lat_lon__() - elif self.source == 'ESESES': - self.__get_eseses_lat_lon__() - - if print_msg == True: - print(f'\t{self.site_lat:f}, {self.site_lon:f}') - - return self.site_lat, self.site_lon - - def __get_unr_lat_lon__(self): - """Get station lat/lon for UNR data. - """ - data = np.loadtxt(self.file, dtype=bytes, skiprows=1).astype(str) - ref_lon, ref_lat = float(data[0, 6]), 0. - e0, e_off, n0, n_off = data[0, 7:11].astype(float) - e0 += e_off - n0 += n_off - - az = np.arctan2(e0, n0) / np.pi * 180. 
- dist = np.sqrt(e0**2 + n0**2) - g = Geod(ellps='WGS84') - self.site_lon, self.site_lat = g.fwd(ref_lon, ref_lat, az, dist)[0:2] + raise NotImplementedError('Func. get_stat_lat_lon not implemented. Override with child class.') - return None + def read_displacement(self, start_date=None, end_date=None, print_msg=True, display=False): + raise NotImplementedError('Func. read_displacement not implemented. Override with child class.') @staticmethod def lon_360to180(lon: float) -> float: @@ -559,106 +461,6 @@ def lon_360to180(lon: float) -> float: lon -= 360 return lon - def __get_eseses_lat_lon__(self): - """Get station lat/lon for ESESES data. - """ - with open(self.file, 'r') as data_file: - # Read raw file contents - lines = data_file.readlines() - - # Determine reference latitude - lat_line = [line for line in lines \ - if line.find('# Latitude') != -1] - lat_line = lat_line[0].strip('\n') - self.site_lat = float(lat_line.split()[-1]) - - # Determine reference longitude - lon_line = [line for line in lines \ - if line.find('# East Longitude') != -1] - lon_line = lon_line[0].strip('\n') - site_lon = float(lon_line.split()[-1]) - self.site_lon = self.lon_360to180(site_lon) - - return None - - - def read_displacement(self, start_date=None, end_date=None, print_msg=True, - display=False): - """Read GPS displacement time-series (defined by start/end_date) - Parameters: start/end_date : str in YYYYMMDD format - Returns: dates : 1D np.ndarray of datetime.datetime object - dis_e/n/u : 1D np.ndarray of displacement in meters in float32 - std_e/n/u : 1D np.ndarray of displacement STD in meters in float32 - """ - # Download file if it does not exist - if not os.path.isfile(self.file): - self.dload_site(print_msg=print_msg) - - # Read dates, dis_e, dis_n, dis_u - if print_msg == True: - print('reading time and displacement in east/north/vertical direction') - - if self.source == 'UNR': - self.__read_unr_displacement__() - elif self.source == 'ESESES': - self.__read_eseses_displacement__() - - # Cut out the specified time range - self.__crop_to_date_range__(start_date, end_date) - - # Display if requested - if display == True: - self.display_data() - - return (self.dates, - self.dis_e, self.dis_n, self.dis_u, - self.std_e, self.std_n, self.std_u) - - def __read_unr_displacement__(self): - """Read GPS displacement time-series processed by UNR. - """ - # Read data from file - data = np.loadtxt(self.file, dtype=bytes, skiprows=1).astype(str) - - # Parse dates - self.dates = np.array([dt.datetime.strptime(i, "%y%b%d") \ - for i in data[:,1]]) - self.date_list = [x.strftime('%Y%m%d') for x in self.dates] - - # Parse displacement data - (self.dis_e, - self.dis_n, - self.dis_u, - self.std_e, - self.std_n, - self.std_u) = data[:, (8,10,12,14,15,16)].astype(np.float32).T - - return None - - def __read_eseses_displacement__(self): - """Read GPS displacement time-series processed by ESESES. - """ - # Read data from file - data = np.loadtxt(self.file, usecols=tuple(range(0,12))) - n_data = data.shape[0] - - # Parse dates - dates = [dt.datetime(int(data[i,1]), 1, 1) \ - + dt.timedelta(days=int(data[i,2])) \ - for i in range(n_data)] - self.dates = np.array(dates) - self.date_list = [date.strftime('%Y%m%d') for date in self.dates] - - # Parse displacement data - (self.dis_n, - self.dis_e, - self.dis_u, - self.std_n, - self.std_e, - self.std_u) = data[:, 3:9].astype(np.float32).T / 1000 - - return None - def __crop_to_date_range__(self, start_date: str, end_date: str): """Cut out the specified time range. 
start/end_date in format YYYYMMDD @@ -681,21 +483,30 @@ def __crop_to_date_range__(self, start_date: str, end_date: str): return None - def display_data(self): + def display_data(self, marker_size=2, marker_color='k', plot_errors=True): """Display displacement data. """ # Instantiate figure and axes fig, ax = plt.subplots(nrows=3, ncols=1, sharex=True) - # Plot data - ax[0].scatter(self.dates, self.dis_e, s=2**2, - c='k', label='East') - ax[1].scatter(self.dates, self.dis_n, s=2**2, - c='k', label='North') - ax[2].scatter(self.dates, self.dis_u, s=2**2, - c='k', label='Up') + # Plot displacement data + ax[0].scatter(self.dates, self.dis_e, s=marker_size**2, c=marker_color) + ax[1].scatter(self.dates, self.dis_n, s=marker_size**2, c=marker_color) + ax[2].scatter(self.dates, self.dis_u, s=marker_size**2, c=marker_color) + + # Plot displacement errors + if plot_errors == True: + ax[0].errorbar(self.dates, self.dis_e, yerr=self.std_e, + linestyle='none', color=marker_color) + ax[1].errorbar(self.dates, self.dis_n, yerr=self.std_n, + linestyle='none', color=marker_color) + ax[2].errorbar(self.dates, self.dis_u, yerr=self.std_u, + linestyle='none', color=marker_color) # Format plot + ax[0].set_ylabel('East (m)') + ax[1].set_ylabel('North (m)') + ax[2].set_ylabel('Up (m)') fig.suptitle(f'{self.site:s} ({self.source:s})') plt.show() @@ -786,11 +597,12 @@ def read_gps_los_displacement(self, geom_obj, start_date=None, end_date=None, re """Read GPS displacement in LOS direction. Parameters: geom_obj - dict / str, metadata of InSAR file, or geometry file path - start_date - str in YYYYMMDD format - end_date - str in YYYYMMDD format + start_date - str, dates in YYYYMMDD format + end_date - str, dates in YYYYMMDD format ref_site - str, reference GPS site gps_comp - str, GPS components used to convert to LOS direction - horz_az_angle - float, fault azimuth angle used to convert horizontal to fault-parallel + horz_az_angle - float, fault azimuth angle used to convert horizontal + to fault-parallel Returns: dates - 1D np.array of datetime.datetime object dis/std - 1D np.array of displacement / uncertainty in meters site_lalo - tuple of 2 float, lat/lon of GPS site @@ -799,15 +611,21 @@ def read_gps_los_displacement(self, geom_obj, start_date=None, end_date=None, re # read GPS object inc_angle, az_angle = self.get_los_geometry(geom_obj) dates = self.read_displacement(start_date, end_date, print_msg=print_msg)[0] - dis, std = self.displacement_enu2los(inc_angle, az_angle, gps_comp=gps_comp, horz_az_angle=horz_az_angle) + dis, std = self.displacement_enu2los(inc_angle, az_angle, gps_comp=gps_comp, + horz_az_angle=horz_az_angle) site_lalo = self.get_stat_lat_lon(print_msg=print_msg) + # define GPS station object based on processing source + GPS = self.get_gps_obj_by_source(self.source) + # get LOS displacement relative to another GPS site if ref_site: ref_obj = GPS(site=ref_site, data_dir=self.data_dir) + ref_obj.open() ref_obj.read_displacement(start_date, end_date, print_msg=print_msg) inc_angle, az_angle = ref_obj.get_los_geometry(geom_obj) - ref_obj.displacement_enu2los(inc_angle, az_angle, gps_comp=gps_comp, horz_az_angle=horz_az_angle) + ref_obj.displacement_enu2los(inc_angle, az_angle, gps_comp=gps_comp, + horz_az_angle=horz_az_angle) ref_site_lalo = ref_obj.get_stat_lat_lon(print_msg=print_msg) # get relative LOS displacement on common dates @@ -883,4 +701,263 @@ def get_gps_los_velocity(self, geom_obj, start_date=None, end_date=None, return self.velocity, dis + +class UNR_GPS(GPS): + """GPS class for 
daily solutions processed by UNR NGL. + + This object will assign the attributes: + site - str, four-digit site code + site_lat/lon - float + dates - 1D np.ndarray + date_list - list + dis_e/n/u - 1D np.ndarray + std_e,n,u - 1D np.ndarray + + based on the specific formats of the data source, using the functions: + dload_site + get_stat_lat_lon + read_displacement + """ + source = 'UNR' + + def dload_site(self, print_msg=True) -> str: + """Download the station displacement data from the + specified source. + + Modifies: self.file - str, local file path/name + self.file_url - str, file URL + Returns: self.file - str, local file path/name + """ + if print_msg == True: + print(f"Downloading data for site {self.site:s} from UNR NGL source") + + # URL and file name specs + url_prefix = 'http://geodesy.unr.edu/gps_timeseries/tenv3' + if self.version == 'IGS08': + self.file = os.path.join(self.data_dir, + '{site:s}.{version:s}.tenv3'.\ + format(site=self.site)) + elif self.version == 'IGS14': + self.file = os.path.join(self.data_dir, + '{site:s}.tenv3'.\ + format(site=self.site)) + self.file_url = os.path.join(url_prefix, self.version, + os.path.basename(self.file)) + + # Download file if not present + if os.path.exists(self.file): + print(f'File {self.file} exists--reading') + else: + if print_msg == True: + print(f'... downloading {self.file_url:s} to {self.file:s}') + urlretrieve(self.file_url, self.file) + + return self.file + + def get_stat_lat_lon(self, print_msg=True) -> (str, str): + """Get station lat/lon based on processing source. + Retrieve data from the displacement file. + + Modifies: self.lat/lon - str + Returns: self.lat/lon - str + """ + if print_msg == True: + print('calculating station lat/lon') + + data = np.loadtxt(self.file, dtype=bytes, skiprows=1).astype(str) + ref_lon, ref_lat = float(data[0, 6]), 0. + e0, e_off, n0, n_off = data[0, 7:11].astype(float) + e0 += e_off + n0 += n_off + + az = np.arctan2(e0, n0) / np.pi * 180. 
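+        # combine the azimuth above with the offset length and shoot a WGS84
+        # forward geodesic from (ref_lon, 0.) to recover the site lat/lon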
+ dist = np.sqrt(e0**2 + n0**2) + g = Geod(ellps='WGS84') + self.site_lon, self.site_lat = g.fwd(ref_lon, ref_lat, az, dist)[0:2] + + if print_msg == True: + print(f'\t{self.site_lat:f}, {self.site_lon:f}') + + return self.site_lat, self.site_lon + + def read_displacement(self, start_date=None, end_date=None, print_msg=True, + display=False): + """Read GPS displacement time-series (defined by start/end_date) + Parameters: start/end_date - str, date in YYYYMMDD format + Returns: dates - 1D np.ndarray of datetime.datetime object + dis_e/n/u - 1D np.ndarray of displacement in meters in float32 + std_e/n/u - 1D np.ndarray of displacement STD in meters in float32 + """ + # Download file if it does not exist + if not os.path.isfile(self.file): + self.dload_site(print_msg=print_msg) + + # Read dates, dis_e, dis_n, dis_u + if print_msg == True: + print('reading time and displacement in east/north/vertical direction') + + # Read data from file + data = np.loadtxt(self.file, dtype=bytes, skiprows=1).astype(str) + + # Parse dates + self.dates = np.array([dt.datetime.strptime(i, "%y%b%d") \ + for i in data[:,1]]) + self.date_list = [date.strftime('%Y%m%d') for date in self.dates] + + # Parse displacement data + (self.dis_e, + self.dis_n, + self.dis_u, + self.std_e, + self.std_n, + self.std_u) = data[:, (8,10,12,14,15,16)].astype(np.float32).T + + # Cut out the specified time range + self.__crop_to_date_range__(start_date, end_date) + + # Display if requested + if display == True: + self.display_data() + + return (self.dates, + self.dis_e, self.dis_n, self.dis_u, + self.std_e, self.std_n, self.std_u) + + + +class ESESES_GPS(GPS): + """GPS class for daily solutions processed by ESESES. + + This object will assign the attributes: + site - str, four-digit site code + site_lat/lon - float + dates - 1D np.ndarray + date_list - list + dis_e/n/u - 1D np.ndarray + std_e,n,u - 1D np.ndarray + + based on the specific formats of the data source, using the functions: + dload_site + get_stat_lat_lon + read_displacement + """ + source = 'ESESES' + + def dload_site(self, print_msg=True) -> str: + """Download the station displacement data from the + specified source. + + Modifies: self.file - str, local file path/name + self.file_url - str, file URL + Returns: self.file - str, local file path/name + """ + if print_msg == True: + print(f'downloading data for site {self.site:s} from the ESESES source') + + # URL and file name specs + url_prefix = 'http://garner.ucsd.edu/pub/measuresESESES_products/Timeseries/CurrentUntarred/Clean_TrendNeuTimeSeries_comb_20240320' + self.file = os.path.join(self.data_dir, + '{site:s}CleanTrend.neu.Z'.\ + format(site=self.site.lower())) + self.file_url = os.path.join(url_prefix, os.path.basename(self.file)) + + # Download file if not present + if os.path.exists(self.file): + print(f'File {self.file} exists--reading') + else: + if print_msg == True: + print(f'... downloading {self.file_url:s} to {self.file:s}') + urlretrieve(self.file_url, self.file) + + # Unzip file + with zipfile.ZipFile(self.file, 'r') as Zfile: + Zfile.extractall(self.data_dir) + + # Update file name + self.file = self.file.strip('.Z') + if print_msg == True: + print(f'... extracted to {self.file:s}') + + return self.file + + + def get_stat_lat_lon(self, print_msg=True) -> (str, str): + """Get station lat/lon based on processing source. + Retrieve data from the displacement file. 
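+           For ESESES, these values are parsed from the '# Latitude' and
+           '# East Longitude' header lines of the time-series file.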
+ + Modifies: self.lat/lon - str + Returns: self.lat/lon - str + """ + if print_msg == True: + print('calculating station lat/lon') + + with open(self.file, 'r') as data_file: + # Read raw file contents + lines = data_file.readlines() + + # Determine reference latitude + lat_line = [line for line in lines \ + if line.find('# Latitude') != -1] + lat_line = lat_line[0].strip('\n') + self.site_lat = float(lat_line.split()[-1]) + + # Determine reference longitude + lon_line = [line for line in lines \ + if line.find('# East Longitude') != -1] + lon_line = lon_line[0].strip('\n') + site_lon = float(lon_line.split()[-1]) + self.site_lon = self.lon_360to180(site_lon) + + if print_msg == True: + print(f'\t{self.site_lat:f}, {self.site_lon:f}') + + return self.site_lat, self.site_lon + + def read_displacement(self, start_date=None, end_date=None, print_msg=True, + display=False): + """Read GPS displacement time-series (defined by start/end_date) + Parameters: start/end_date - str, date in YYYYMMDD format + Returns: dates - 1D np.ndarray of datetime.datetime object + dis_e/n/u - 1D np.ndarray of displacement in meters in float32 + std_e/n/u - 1D np.ndarray of displacement STD in meters in float32 + """ + # Download file if it does not exist + if not os.path.isfile(self.file): + self.dload_site(print_msg=print_msg) + + # Read dates, dis_e, dis_n, dis_u + if print_msg == True: + print('reading time and displacement in east/north/vertical direction') + + # read data from file + data = np.loadtxt(self.file, usecols=tuple(range(0,12))) + n_data = data.shape[0] + + # Parse dates + dates = [dt.datetime(int(data[i,1]), 1, 1) \ + + dt.timedelta(days=int(data[i,2])) \ + for i in range(n_data)] + self.dates = np.array(dates) + self.date_list = [date.strftime('%Y%m%d') for date in self.dates] + + # Parse displacement data + (self.dis_n, + self.dis_e, + self.dis_u, + self.std_n, + self.std_e, + self.std_u) = data[:, 3:9].astype(np.float32).T / 1000 + + # Cut out the specified time range + self.__crop_to_date_range__(start_date, end_date) + + # Display if requested + if display == True: + self.display_data() + + return (self.dates, + self.dis_e, self.dis_n, self.dis_u, + self.std_e, self.std_n, self.std_u) + + #################################### End of GPS class #################################### diff --git a/src/mintpy/objects/insar_vs_gps.py b/src/mintpy/objects/insar_vs_gps.py index d6d512cd1..5401ce35f 100644 --- a/src/mintpy/objects/insar_vs_gps.py +++ b/src/mintpy/objects/insar_vs_gps.py @@ -17,7 +17,7 @@ from mintpy.defaults.plot import * from mintpy.objects import giantTimeseries, timeseries -from mintpy.objects.gps import GPS +from mintpy.objects import gps from mintpy.utils import readfile, utils as ut @@ -28,6 +28,7 @@ class insar_vs_gps: geom_file : str, geometry HDF5 file temp_coh_file : str, temporal coherence HDF5 file site_names : list of str, GPS site names + gps_source : str, program or institution that processed the GPS data gps_dir : str, directory of the local GPS data files ref_site : str, common reference site in space for InSAR and GPS start/end_date : str, date in YYYYMMDD format for the start/end date @@ -60,12 +61,13 @@ class insar_vs_gps: """ def __init__(self, ts_file, geom_file, temp_coh_file, - site_names, gps_dir='./GPS', ref_site='GV01', + site_names, gps_source='UNR', gps_dir='./GPS', ref_site='GV01', start_date=None, end_date=None, min_ref_date=None): self.insar_file = ts_file self.geom_file = geom_file self.temp_coh_file = temp_coh_file self.site_names = site_names + 
self.gps_source = gps_source self.gps_dir = gps_dir self.ref_site = ref_site self.num_site = len(site_names) @@ -105,6 +107,13 @@ def open(self): return def read_gps(self): + # define GPS station object based on processing source + if self.gps_source == 'UNR': + GPS = gps.UNR_GPS + elif self.gps_source == 'ESESES': + GPS = gps.ESESES_GPS + + # read data for each GPS site for sname in self.site_names: site = {} site['name'] = sname diff --git a/src/mintpy/utils/arg_utils.py b/src/mintpy/utils/arg_utils.py index 3bf37faee..a2374742a 100644 --- a/src/mintpy/utils/arg_utils.py +++ b/src/mintpy/utils/arg_utils.py @@ -247,6 +247,9 @@ def add_figure_argument(parser, figsize_img=False): def add_gps_argument(parser): """Argument group parser for GPS options""" gps = parser.add_argument_group('GPS', 'GPS data to display') + gps.add_argument('--gps-source', dest='gps_source', default='UNR', + choices={'UNR', 'ESESES'}, + help='GPS processing source') gps.add_argument('--show-gps', dest='disp_gps', action='store_true', help='Show UNR GPS location within the coverage.') gps.add_argument('--mask-gps', dest='mask_gps', action='store_true', diff --git a/src/mintpy/utils/plot.py b/src/mintpy/utils/plot.py index 11b3ae1dd..713e3fc7d 100644 --- a/src/mintpy/utils/plot.py +++ b/src/mintpy/utils/plot.py @@ -1136,7 +1136,8 @@ def plot_gps(ax, SNWE, inps, metadata=dict(), print_msg=True): SNWE = (south, north, west, east) # query for GNSS stations - site_names, site_lats, site_lons = gps.search_gps(SNWE, start_date=start_date, end_date=end_date) + site_names, site_lats, site_lons = gps.search_gps(SNWE, source=inps.gps_source, + start_date=start_date, end_date=end_date) if site_names.size == 0: warnings.warn(f'No GNSS found within {SNWE} during {start_date} - {end_date}!') print(' continue without GNSS plots.') diff --git a/src/mintpy/view.py b/src/mintpy/view.py index d4fdaaa8b..f79202caa 100644 --- a/src/mintpy/view.py +++ b/src/mintpy/view.py @@ -22,7 +22,7 @@ from mintpy import subset, version from mintpy.multilook import multilook_data from mintpy.objects import TIMESERIES_KEY_NAMES, giantIfgramStack, ifgramStack -from mintpy.objects.gps import GPS +from mintpy.objects import gps from mintpy.utils import plot as pp, ptime, readfile, utils as ut @@ -541,6 +541,10 @@ def extent2meshgrid(extent: tuple, ds_shape: list): # Reference (InSAR) data to a GNSS site coord = ut.coordinate(metadata) if inps.disp_gps and inps.gps_component and inps.ref_gps_site: + # define GPS station object based on processing source + GPS = gps.GPS.get_gps_obj_by_source(inps.gps_source) + + # GPS reference site ref_site_gps = GPS(site=inps.ref_gps_site) ref_site_gps.open() ref_site_lalo = ref_site_gps.get_stat_lat_lon(print_msg=False) From 3da2035fa42691e5f71d0ad80fd9d889233d19fe Mon Sep 17 00:00:00 2001 From: rzinke Date: Mon, 1 Apr 2024 12:46:02 -0700 Subject: [PATCH 03/44] Changes to GPS for codacy and pre-commit compliance. 
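
A minimal usage sketch of the source-aware GPS API these patches introduce, using only
names visible in the diffs (gps.GPS.get_gps_obj_by_source, open, read_displacement; later
patches rename gps to gnss). The site code, the date range, and network access to the UNR
archive are assumptions for illustration only:

    from mintpy.objects import gps

    # pick the child class matching the processing source ('UNR' or 'ESESES')
    GPS = gps.GPS.get_gps_obj_by_source('UNR')

    # one station: download the data file if needed, then read lat/lon and the ENU series
    obj = GPS(site='GV01', data_dir='./GPS')      # site code is illustrative
    obj.open(print_msg=False)
    print(obj.site_lat, obj.site_lon, obj.dates.size)
    print(obj.dis_e[:5], obj.std_e[:5])           # east displacement [m] and its std
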
--- src/mintpy/objects/gps.py | 121 +++++++++++++++++++++++++------------- 1 file changed, 80 insertions(+), 41 deletions(-) diff --git a/src/mintpy/objects/gps.py b/src/mintpy/objects/gps.py index 9ef5a75d2..15a4812dd 100644 --- a/src/mintpy/objects/gps.py +++ b/src/mintpy/objects/gps.py @@ -14,13 +14,13 @@ import datetime as dt import numpy as np from pyproj import Geod -from urllib.request import urlretrieve +from urllib.request import urlretrieve, urlopen import pandas as pd import zipfile import matplotlib.pyplot as plt -from mintpy.objects.coord import coordinate from mintpy.utils import ptime, time_func, readfile, utils1 as ut +from mintpy.objects.coord import coordinate supported_sources = ['UNR', 'ESESES'] @@ -53,6 +53,7 @@ def dload_site_list(out_file=None, source='UNR', print_msg=True) -> str: print(f'Downloading site list from {source}: {site_list_file_url} to {out_file}') # Download file + #nosec urlretrieve(site_list_file_url, out_file) return out_file @@ -204,7 +205,7 @@ def get_baseline_change(dates1, pos_x1, pos_y1, pos_z1, """ dates = np.array(sorted(list(set(dates1) & set(dates2)))) bases = np.zeros(dates.shape, dtype=float) - for i in range(len(dates)): + for i, date in enumerate(dates): idx1 = np.where(dates1 == dates[i])[0][0] idx2 = np.where(dates2 == dates[i])[0][0] basei = ((pos_x1[idx1] - pos_x2[idx2]) ** 2 @@ -219,7 +220,7 @@ def get_baseline_change(dates1, pos_x1, pos_y1, pos_z1, def get_gps_los_obs(meta, obs_type, site_names, start_date, end_date, source='UNR', gps_comp='enu2los', horz_az_angle=-90., model=None, - print_msg=True,redo=False): + print_msg=True, redo=False): """Get the GPS LOS observations given the query info. Parameters: meta - dict, dictionary of metadata of the InSAR file @@ -227,6 +228,7 @@ def get_gps_los_obs(meta, obs_type, site_names, start_date, end_date, source='UN site_names - list of str, GPS sites, output of search_gps() start_date - str, date in YYYYMMDD format end_date - str, date in YYYYMMDD format + source - str, program or institution that processed the GPS data gps_comp - str, flag of projecting 2/3D GPS into LOS e.g. 
enu2los, hz2los, up2los horz_az_angle - float, azimuth angle of the horizontal motion in degree @@ -308,7 +310,8 @@ def get_gps_los_obs(meta, obs_type, site_names, start_date, end_date, source='UN prog_bar.update(i+1, suffix='{}/{} {}'.format(i+1, num_site, site_name)) # calculate gps data value - obj = GPS(site_name, source=source) + GPSclass = GPS.get_gps_obj_by_source(source) + obj = GPSclass(site_name) obj.open(print_msg=print_msg) vel, dis_ts = obj.get_gps_los_velocity( geom_obj, @@ -350,9 +353,9 @@ def read_pos_file(fname): ds = fc[:,2].astype(int) dates = [dt.datetime(year=y, month=m, day=d) for y,m,d in zip(ys, ms, ds)] - X = fc[:,4].astype(float64).tolist() - Y = fc[:,5].astype(float64).tolist() - Z = fc[:,6].astype(float64).tolist() + X = fc[:,4].astype(float).tolist() + Y = fc[:,5].astype(float).tolist() + Z = fc[:,6].astype(float).tolist() return dates, X, Y, Z @@ -411,22 +414,32 @@ def __init__(self, site: str, data_dir='./GPS', self.site = site self.version = version - # Create data directory if not exist + # create data directory if not exist self.data_dir = os.path.abspath(data_dir) if not os.path.exists(self.data_dir): os.mkdir(self.data_dir) + # variables to be filled by child classes + self.dates = None + self.date_list = None + self.dis_e = None + self.dis_n = None + self.dis_u = None + self.std_e = None + self.std_n = None + self.std_u = None + return None def open(self, file=None, print_msg=True): """Read the lat/lon and displacement data of the station. Download if necessary. """ - # Download file if not present + # download file if not present if not hasattr(self, 'file'): self.dload_site(print_msg=print_msg) - # Retrieve data from file + # retrieve data from file self.get_stat_lat_lon(print_msg=print_msg) self.read_displacement(print_msg=print_msg) @@ -486,15 +499,15 @@ def __crop_to_date_range__(self, start_date: str, end_date: str): def display_data(self, marker_size=2, marker_color='k', plot_errors=True): """Display displacement data. 
""" - # Instantiate figure and axes + # instantiate figure and axes fig, ax = plt.subplots(nrows=3, ncols=1, sharex=True) - # Plot displacement data + # plot displacement data ax[0].scatter(self.dates, self.dis_e, s=marker_size**2, c=marker_color) ax[1].scatter(self.dates, self.dis_n, s=marker_size**2, c=marker_color) ax[2].scatter(self.dates, self.dis_u, s=marker_size**2, c=marker_color) - # Plot displacement errors + # plot displacement errors if plot_errors == True: ax[0].errorbar(self.dates, self.dis_e, yerr=self.std_e, linestyle='none', color=marker_color) @@ -503,7 +516,7 @@ def display_data(self, marker_size=2, marker_color='k', plot_errors=True): ax[2].errorbar(self.dates, self.dis_u, yerr=self.std_u, linestyle='none', color=marker_color) - # Format plot + # format plot ax[0].set_ylabel('East (m)') ax[1].set_ylabel('North (m)') ax[2].set_ylabel('Up (m)') @@ -547,19 +560,20 @@ def displacement_enu2los(self, inc_angle:float, az_angle:float, gps_comp='enu2lo # display if requested if display == True: - # Instantiate figure and axes + # instantiate figure and axes fig, ax = plt.subplots(sharex=True) - # Plot LOS displacement + # plot LOS displacement ax.scatter(self.dates, self.dis_los, s=2**2, c='k', label='LOS') - # Plot fit if model specified + # plot fit if model specified if model is not None: # specific time_func model - date_list = [dt.datetime.strftime(i, '%Y%m%d') for i in dates] - A = time_func.get_design_matrix4time_func(date_list, model=model) + A = time_func.get_design_matrix4time_func(self.date_list, model=model) estm_dis = np.dot(np.linalg.pinv(A), self.dis_los) + ax.plot(self.dates, estm_dis, 'b', label='model') + ax.legend() return self.dis_los, self.std_los @@ -736,7 +750,7 @@ def dload_site(self, print_msg=True) -> str: if self.version == 'IGS08': self.file = os.path.join(self.data_dir, '{site:s}.{version:s}.tenv3'.\ - format(site=self.site)) + format(site=self.site, version=version)) elif self.version == 'IGS14': self.file = os.path.join(self.data_dir, '{site:s}.tenv3'.\ @@ -744,12 +758,13 @@ def dload_site(self, print_msg=True) -> str: self.file_url = os.path.join(url_prefix, self.version, os.path.basename(self.file)) - # Download file if not present + # download file if not present if os.path.exists(self.file): print(f'File {self.file} exists--reading') else: if print_msg == True: print(f'... 
downloading {self.file_url:s} to {self.file:s}') + #nosec urlretrieve(self.file_url, self.file) return self.file @@ -788,23 +803,22 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, dis_e/n/u - 1D np.ndarray of displacement in meters in float32 std_e/n/u - 1D np.ndarray of displacement STD in meters in float32 """ - # Download file if it does not exist + # download file if it does not exist if not os.path.isfile(self.file): self.dload_site(print_msg=print_msg) - # Read dates, dis_e, dis_n, dis_u + # read dates, dis_e, dis_n, dis_u if print_msg == True: print('reading time and displacement in east/north/vertical direction') - # Read data from file + # read data from file data = np.loadtxt(self.file, dtype=bytes, skiprows=1).astype(str) # Parse dates self.dates = np.array([dt.datetime.strptime(i, "%y%b%d") \ for i in data[:,1]]) - self.date_list = [date.strftime('%Y%m%d') for date in self.dates] - # Parse displacement data + # parse displacement data (self.dis_e, self.dis_n, self.dis_u, @@ -812,10 +826,13 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, self.std_n, self.std_u) = data[:, (8,10,12,14,15,16)].astype(np.float32).T - # Cut out the specified time range + # cut out the specified time range self.__crop_to_date_range__(start_date, end_date) - # Display if requested + # formulate date list + self.date_list = [date.strftime('%Y%m%d') for date in self.dates] + + # display if requested if display == True: self.display_data() @@ -854,26 +871,46 @@ def dload_site(self, print_msg=True) -> str: if print_msg == True: print(f'downloading data for site {self.site:s} from the ESESES source') - # URL and file name specs - url_prefix = 'http://garner.ucsd.edu/pub/measuresESESES_products/Timeseries/CurrentUntarred/Clean_TrendNeuTimeSeries_comb_20240320' + # determine proper URL + url_fmt = 'http://garner.ucsd.edu/pub/measuresESESES_products/Timeseries/CurrentUntarred/Clean_TrendNeuTimeSeries_comb_{:d}' + + # start with today and check back in time + today = int(dt.date.today().strftime('%Y%m%d')) + day_lim = 21 + for days_ago in range(day_lim): + # formulate URL based on date + url_prefix = url_fmt.format(today - days_ago) + + # check if page exists + try: + urlopen(url_prefix) + break + except: + if days_ago == day_lim - 1: + raise FileNotFoundError('The ESESES source repository cannot be found.') + else: + pass + + # file name and full url self.file = os.path.join(self.data_dir, '{site:s}CleanTrend.neu.Z'.\ format(site=self.site.lower())) self.file_url = os.path.join(url_prefix, os.path.basename(self.file)) - # Download file if not present + # download file if not present if os.path.exists(self.file): print(f'File {self.file} exists--reading') else: if print_msg == True: print(f'... downloading {self.file_url:s} to {self.file:s}') + #nosec urlretrieve(self.file_url, self.file) - # Unzip file + # unzip file with zipfile.ZipFile(self.file, 'r') as Zfile: Zfile.extractall(self.data_dir) - # Update file name + # update file name self.file = self.file.strip('.Z') if print_msg == True: print(f'... 
extracted to {self.file:s}') @@ -921,11 +958,11 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, dis_e/n/u - 1D np.ndarray of displacement in meters in float32 std_e/n/u - 1D np.ndarray of displacement STD in meters in float32 """ - # Download file if it does not exist + # download file if it does not exist if not os.path.isfile(self.file): self.dload_site(print_msg=print_msg) - # Read dates, dis_e, dis_n, dis_u + # read dates, dis_e, dis_n, dis_u if print_msg == True: print('reading time and displacement in east/north/vertical direction') @@ -933,14 +970,13 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, data = np.loadtxt(self.file, usecols=tuple(range(0,12))) n_data = data.shape[0] - # Parse dates + # parse dates dates = [dt.datetime(int(data[i,1]), 1, 1) \ + dt.timedelta(days=int(data[i,2])) \ for i in range(n_data)] self.dates = np.array(dates) - self.date_list = [date.strftime('%Y%m%d') for date in self.dates] - # Parse displacement data + # parse displacement data (self.dis_n, self.dis_e, self.dis_u, @@ -948,10 +984,13 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, self.std_e, self.std_u) = data[:, 3:9].astype(np.float32).T / 1000 - # Cut out the specified time range + # cut out the specified time range self.__crop_to_date_range__(start_date, end_date) - # Display if requested + # formulate date list + self.date_list = [date.strftime('%Y%m%d') for date in self.dates] + + # display if requested if display == True: self.display_data() From 7e5900210964d4db4f3af696115ea943425ca504 Mon Sep 17 00:00:00 2001 From: rzinke Date: Mon, 1 Apr 2024 13:58:12 -0700 Subject: [PATCH 04/44] More changes to GPS for codacy and pre-commit compliance. --- src/mintpy/objects/gps.py | 25 +++++++++++-------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/src/mintpy/objects/gps.py b/src/mintpy/objects/gps.py index 15a4812dd..1a96788c4 100644 --- a/src/mintpy/objects/gps.py +++ b/src/mintpy/objects/gps.py @@ -34,27 +34,26 @@ def dload_site_list(out_file=None, source='UNR', print_msg=True) -> str: """Download single file with list of GPS site locations. """ - # Check source is supported + # check source is supported assert source in supported_sources, \ f'Source {source:s} not supported. 
Use one of {supported_sources}' - # Determine URL + # determine URL if source == 'UNR': site_list_file_url = UNR_site_list_file_url elif source == 'ESESES': site_list_file_url = ESESES_site_list_file_url - # Handle output file + # handle output file if out_file is None: out_file = os.path.basename(site_list_file_url) - # Report if requested + # report if requested if print_msg: print(f'Downloading site list from {source}: {site_list_file_url} to {out_file}') - # Download file - #nosec - urlretrieve(site_list_file_url, out_file) + # download file + urlretrieve(site_list_file_url, out_file) #nosec return out_file @@ -206,8 +205,8 @@ def get_baseline_change(dates1, pos_x1, pos_y1, pos_z1, dates = np.array(sorted(list(set(dates1) & set(dates2)))) bases = np.zeros(dates.shape, dtype=float) for i, date in enumerate(dates): - idx1 = np.where(dates1 == dates[i])[0][0] - idx2 = np.where(dates2 == dates[i])[0][0] + idx1 = np.where(dates1 == date)[0][0] + idx2 = np.where(dates2 == date)[0][0] basei = ((pos_x1[idx1] - pos_x2[idx2]) ** 2 + (pos_y1[idx1] - pos_y2[idx2]) ** 2 + (pos_z1[idx1] - pos_z2[idx2]) ** 2) ** 0.5 @@ -750,7 +749,7 @@ def dload_site(self, print_msg=True) -> str: if self.version == 'IGS08': self.file = os.path.join(self.data_dir, '{site:s}.{version:s}.tenv3'.\ - format(site=self.site, version=version)) + format(site=self.site, version=self.version)) elif self.version == 'IGS14': self.file = os.path.join(self.data_dir, '{site:s}.tenv3'.\ @@ -764,8 +763,7 @@ def dload_site(self, print_msg=True) -> str: else: if print_msg == True: print(f'... downloading {self.file_url:s} to {self.file:s}') - #nosec - urlretrieve(self.file_url, self.file) + urlretrieve(self.file_url, self.file) #nosec return self.file @@ -903,8 +901,7 @@ def dload_site(self, print_msg=True) -> str: else: if print_msg == True: print(f'... downloading {self.file_url:s} to {self.file:s}') - #nosec - urlretrieve(self.file_url, self.file) + urlretrieve(self.file_url, self.file) #nosec # unzip file with zipfile.ZipFile(self.file, 'r') as Zfile: From ef11a9a5ef53d9e6b229ed6cb77a88e87629cdc3 Mon Sep 17 00:00:00 2001 From: rzinke Date: Mon, 1 Apr 2024 14:19:34 -0700 Subject: [PATCH 05/44] More changes to GPS for codacy and pre-commit compliance again. 
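
The hunk below replaces integer arithmetic on YYYYMMDD values with datetime.timedelta when
stepping back through candidate ESESES directory dates. A short illustration of why (the
dates are an arbitrary example, not taken from the data):

    import datetime as dt

    # integer YYYYMMDD arithmetic does not roll over month/year boundaries
    print(20240401 - 3)                                       # 20240398 -- not a real date

    # timedelta arithmetic does
    today = dt.date(2024, 4, 1)
    print((today - dt.timedelta(days=3)).strftime('%Y%m%d'))  # 20240329
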
--- src/mintpy/objects/gps.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/src/mintpy/objects/gps.py b/src/mintpy/objects/gps.py index 1a96788c4..ea028f8da 100644 --- a/src/mintpy/objects/gps.py +++ b/src/mintpy/objects/gps.py @@ -870,21 +870,24 @@ def dload_site(self, print_msg=True) -> str: print(f'downloading data for site {self.site:s} from the ESESES source') # determine proper URL - url_fmt = 'http://garner.ucsd.edu/pub/measuresESESES_products/Timeseries/CurrentUntarred/Clean_TrendNeuTimeSeries_comb_{:d}' + url_fmt = 'http://garner.ucsd.edu/pub/measuresESESES_products/Timeseries/CurrentUntarred/Clean_TrendNeuTimeSeries_comb_{:s}' # start with today and check back in time - today = int(dt.date.today().strftime('%Y%m%d')) + today = dt.date.today() day_lim = 21 - for days_ago in range(day_lim): + for days in range(day_lim): + # formulate "days ago" + days_ago = dt.timedelta(days=days) + # formulate URL based on date - url_prefix = url_fmt.format(today - days_ago) + url_prefix = url_fmt.format((today - days_ago).strftime('%Y%m%d')) # check if page exists try: - urlopen(url_prefix) + urlopen(url_prefix) #nosec break - except: - if days_ago == day_lim - 1: + except Exception as excp: + if days_ago.days == (day_lim - 1): raise FileNotFoundError('The ESESES source repository cannot be found.') else: pass From f978a74d14cbd0a64010983b6f8b67831ad76783 Mon Sep 17 00:00:00 2001 From: rzinke Date: Mon, 1 Apr 2024 14:24:46 -0700 Subject: [PATCH 06/44] More changes to GPS for codacy and pre-commit compliance again. --- src/mintpy/objects/gps.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/mintpy/objects/gps.py b/src/mintpy/objects/gps.py index ea028f8da..2d1928119 100644 --- a/src/mintpy/objects/gps.py +++ b/src/mintpy/objects/gps.py @@ -560,7 +560,7 @@ def displacement_enu2los(self, inc_angle:float, az_angle:float, gps_comp='enu2lo # display if requested if display == True: # instantiate figure and axes - fig, ax = plt.subplots(sharex=True) + _, ax = plt.subplots(sharex=True) # plot LOS displacement ax.scatter(self.dates, self.dis_los, s=2**2, @@ -886,7 +886,7 @@ def dload_site(self, print_msg=True) -> str: try: urlopen(url_prefix) #nosec break - except Exception as excp: + except Exception: if days_ago.days == (day_lim - 1): raise FileNotFoundError('The ESESES source repository cannot be found.') else: From 528f5b881c4bcaa95c5be2929e061f2bbf68d3e3 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 21:36:11 +0000 Subject: [PATCH 07/44] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/mintpy/objects/gps.py | 16 ++++++++-------- src/mintpy/objects/insar_vs_gps.py | 3 +-- src/mintpy/view.py | 8 ++++++-- 3 files changed, 15 insertions(+), 12 deletions(-) diff --git a/src/mintpy/objects/gps.py b/src/mintpy/objects/gps.py index 2d1928119..723a4f590 100644 --- a/src/mintpy/objects/gps.py +++ b/src/mintpy/objects/gps.py @@ -8,20 +8,20 @@ # from mintpy.objects.gps import GPS -import os import csv -import glob import datetime as dt -import numpy as np -from pyproj import Geod -from urllib.request import urlretrieve, urlopen -import pandas as pd +import glob +import os import zipfile +from urllib.request import urlopen, urlretrieve + import matplotlib.pyplot as plt +import numpy as np +import pandas as pd +from pyproj import Geod -from mintpy.utils import ptime, time_func, readfile, utils1 as ut from 
mintpy.objects.coord import coordinate - +from mintpy.utils import ptime, readfile, time_func, utils1 as ut supported_sources = ['UNR', 'ESESES'] diff --git a/src/mintpy/objects/insar_vs_gps.py b/src/mintpy/objects/insar_vs_gps.py index 5401ce35f..8b8b21f08 100644 --- a/src/mintpy/objects/insar_vs_gps.py +++ b/src/mintpy/objects/insar_vs_gps.py @@ -16,8 +16,7 @@ from scipy.interpolate import griddata from mintpy.defaults.plot import * -from mintpy.objects import giantTimeseries, timeseries -from mintpy.objects import gps +from mintpy.objects import giantTimeseries, gps, timeseries from mintpy.utils import readfile, utils as ut diff --git a/src/mintpy/view.py b/src/mintpy/view.py index f79202caa..cf790dd6e 100644 --- a/src/mintpy/view.py +++ b/src/mintpy/view.py @@ -21,8 +21,12 @@ from mintpy import subset, version from mintpy.multilook import multilook_data -from mintpy.objects import TIMESERIES_KEY_NAMES, giantIfgramStack, ifgramStack -from mintpy.objects import gps +from mintpy.objects import ( + TIMESERIES_KEY_NAMES, + giantIfgramStack, + gps, + ifgramStack, +) from mintpy.utils import plot as pp, ptime, readfile, utils as ut From 7182d1a6336e65e0e5a9908a6274c90954850651 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 21:46:29 +0000 Subject: [PATCH 08/44] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/mintpy/objects/gps.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/mintpy/objects/gps.py b/src/mintpy/objects/gps.py index 723a4f590..93cf37ffe 100644 --- a/src/mintpy/objects/gps.py +++ b/src/mintpy/objects/gps.py @@ -146,7 +146,7 @@ def search_gps(SNWE, source='UNR', start_date=None, end_date=None, # Final reporting if print_msg == True: - print('{:d} stations available'.format(site_data.shape[0])) + print(f'{site_data.shape[0]:d} stations available') return (site_data.site.to_numpy(), site_data.lat.to_numpy(), @@ -183,7 +183,7 @@ def read_ESESES_station_list(site_list_file:str, print_msg=True) -> pd.DataFrame print('Parsing ESESES site list file') # Read file contents - site_data = pd.read_csv(site_list_file, header = 14, sep='\s+') + site_data = pd.read_csv(site_list_file, header = 14, sep=r'\s+') # Rename columns for uniformity site_data.rename(columns={'Site': 'site', @@ -273,7 +273,7 @@ def get_gps_los_obs(meta, obs_type, site_names, start_date, end_date, source='UN if not redo and os.path.isfile(csv_file) and num_row >= num_site: # read from existing CSV file - vprint('read GPS observations from file: {}'.format(csv_file)) + vprint(f'read GPS observations from file: {csv_file}') fc = np.genfromtxt(csv_file, dtype=col_types, delimiter=',', names=True) site_obs = fc[col_names[obs_ind]] @@ -306,7 +306,7 @@ def get_gps_los_obs(meta, obs_type, site_names, start_date, end_date, source='UN # loop for calculation prog_bar = ptime.progressBar(maxValue=num_site, print_msg=print_msg) for i, site_name in enumerate(site_names): - prog_bar.update(i+1, suffix='{}/{} {}'.format(i+1, num_site, site_name)) + prog_bar.update(i+1, suffix=f'{i+1}/{num_site} {site_name}') # calculate gps data value GPSclass = GPS.get_gps_obj_by_source(source) @@ -328,7 +328,7 @@ def get_gps_los_obs(meta, obs_type, site_names, start_date, end_date, source='UN prog_bar.close() # write to CSV file - vprint('write GPS observations to file: {}'.format(csv_file)) + vprint(f'write GPS observations to file: {csv_file}') with open(csv_file, 'w') 
as fc: fcw = csv.writer(fc) fcw.writerow(col_names) @@ -360,7 +360,7 @@ def read_pos_file(fname): def get_pos_years(gps_dir, site): - fnames = glob.glob(os.path.join(gps_dir, '{}.*.pos'.format(site))) + fnames = glob.glob(os.path.join(gps_dir, f'{site}.*.pos')) years = [os.path.basename(i).split('.')[1] for i in fnames] years = ptime.yy2yyyy(years) return years @@ -374,7 +374,7 @@ def read_GSI_F3(gps_dir, site, start_date=None, end_date=None): dates, X, Y, Z = [], [], [], [] for i in range(num_year): yeari = str(year0 + i) - fname = os.path.join(gps_dir, '{}.{}.pos'.format(site, yeari[2:])) + fname = os.path.join(gps_dir, f'{site}.{yeari[2:]}.pos') datesi, Xi, Yi, Zi = read_pos_file(fname) dates += datesi X += Xi @@ -928,7 +928,7 @@ def get_stat_lat_lon(self, print_msg=True) -> (str, str): if print_msg == True: print('calculating station lat/lon') - with open(self.file, 'r') as data_file: + with open(self.file) as data_file: # Read raw file contents lines = data_file.readlines() From f91808520861f596fa72da124f231db915d38b3d Mon Sep 17 00:00:00 2001 From: rzinke Date: Mon, 1 Apr 2024 16:28:25 -0700 Subject: [PATCH 09/44] Made print statement optional --- src/mintpy/objects/gps.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/mintpy/objects/gps.py b/src/mintpy/objects/gps.py index 93cf37ffe..32c15fefd 100644 --- a/src/mintpy/objects/gps.py +++ b/src/mintpy/objects/gps.py @@ -759,7 +759,8 @@ def dload_site(self, print_msg=True) -> str: # download file if not present if os.path.exists(self.file): - print(f'File {self.file} exists--reading') + if print_msg == True: + print(f'File {self.file} exists--reading') else: if print_msg == True: print(f'... downloading {self.file_url:s} to {self.file:s}') @@ -900,7 +901,8 @@ def dload_site(self, print_msg=True) -> str: # download file if not present if os.path.exists(self.file): - print(f'File {self.file} exists--reading') + if print_msg == True: + print(f'File {self.file} exists--reading') else: if print_msg == True: print(f'... 
downloading {self.file_url:s} to {self.file:s}') From 52dbebb63beb4a17da4e85add1b0b43ee7a9ef0f Mon Sep 17 00:00:00 2001 From: rzinke Date: Tue, 2 Apr 2024 18:32:32 -0700 Subject: [PATCH 10/44] Updated source specification for search_gps --- src/mintpy/objects/gps.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/mintpy/objects/gps.py b/src/mintpy/objects/gps.py index 32c15fefd..7f9ee2d41 100644 --- a/src/mintpy/objects/gps.py +++ b/src/mintpy/objects/gps.py @@ -89,7 +89,7 @@ def search_gps(SNWE, source='UNR', start_date=None, end_date=None, # Check whether site list file is in current directory if not os.path.isfile(site_list_file): # Download file - dload_site_list(site_list_file, print_msg=print_msg) + dload_site_list(site_list_file, source=source, print_msg=print_msg) # Parse data from file if source == 'UNR': From e38323842bfd40c4f981609b509839472bd03dea Mon Sep 17 00:00:00 2001 From: rzinke Date: Wed, 3 Apr 2024 09:32:23 -0700 Subject: [PATCH 11/44] Changed GPS to GNSS for generalization --- src/mintpy/objects/{gps.py => gnss.py} | 246 ++++++++++++------------- src/mintpy/objects/insar_vs_gps.py | 130 ++++++------- src/mintpy/utils/arg_utils.py | 53 +++--- src/mintpy/utils/plot.py | 156 ++++++++-------- src/mintpy/view.py | 28 +-- 5 files changed, 307 insertions(+), 306 deletions(-) rename src/mintpy/objects/{gps.py => gnss.py} (82%) diff --git a/src/mintpy/objects/gps.py b/src/mintpy/objects/gnss.py similarity index 82% rename from src/mintpy/objects/gps.py rename to src/mintpy/objects/gnss.py index 7f9ee2d41..2704ce430 100644 --- a/src/mintpy/objects/gps.py +++ b/src/mintpy/objects/gnss.py @@ -3,9 +3,9 @@ # Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi # # Author: Zhang Yunjun, Jul 2018 # ############################################################ -# Utility scripts for GPS handling +# Utility scripts for GNSS handling # Recommend import: -# from mintpy.objects.gps import GPS +# from mintpy.objects import gnss import csv @@ -23,16 +23,12 @@ from mintpy.objects.coord import coordinate from mintpy.utils import ptime, readfile, time_func, utils1 as ut -supported_sources = ['UNR', 'ESESES'] - -UNR_site_list_file_url = 'http://geodesy.unr.edu/NGLStationPages/DataHoldings.txt' - -ESESES_site_list_file_url = 'http://garner.ucsd.edu/pub/measuresESESES_products/Velocities/ESESES_Velocities.txt' +supported_sources = ['UNR', 'ESESES'] def dload_site_list(out_file=None, source='UNR', print_msg=True) -> str: - """Download single file with list of GPS site locations. + """Download single file with list of GNSS site locations. 
""" # check source is supported assert source in supported_sources, \ @@ -40,8 +36,10 @@ def dload_site_list(out_file=None, source='UNR', print_msg=True) -> str: # determine URL if source == 'UNR': + UNR_site_list_file_url = 'http://geodesy.unr.edu/NGLStationPages/DataHoldings.txt' site_list_file_url = UNR_site_list_file_url elif source == 'ESESES': + ESESES_site_list_file_url = 'http://garner.ucsd.edu/pub/measuresESESES_products/Velocities/ESESES_Velocities.txt' site_list_file_url = ESESES_site_list_file_url # handle output file @@ -53,25 +51,28 @@ def dload_site_list(out_file=None, source='UNR', print_msg=True) -> str: print(f'Downloading site list from {source}: {site_list_file_url} to {out_file}') # download file - urlretrieve(site_list_file_url, out_file) #nosec + if not os.path.exists(out_file): + if print_msg: + print(f'Downloading site list from {source:s}: {site_list_file_url:s} to {out_file:s}') + urlretrieve(site_list_file_url, out_file) #nosec return out_file -def search_gps(SNWE, source='UNR', start_date=None, end_date=None, +def search_gnss(SNWE, source='UNR', start_date=None, end_date=None, site_list_file=None, min_num_solution=None, print_msg=True): - """Search available GPS sites within the geo bounding box from UNR website + """Search available GNSS sites within the geo bounding box from UNR website Parameters: SNWE - tuple of 4 float, indicating (South, North, West, East) in degrees - source - str, program or institution that processed the GPS data + source - str, program or institution that processed the GNSS data start_date - str, date in YYYYMMDD format end_date - str, date in YYYYMMDD format site_list_file - str min_num_solution - int, minimum number of solutions available - Returns: site_names - 1D np.array of string, GPS station names + Returns: site_names - 1D np.array of string, GNSS station names site_lats - 1D np.array, lat site_lons - 1D np.array, lon """ - # Check start and end dates if provided + # check start and end dates if provided if start_date is not None: start_date = dt.datetime.strptime(start_date, '%Y%m%d') if end_date is not None: @@ -79,19 +80,19 @@ def search_gps(SNWE, source='UNR', start_date=None, end_date=None, if start_date is not None and end_date is not None: assert(start_date < end_date), 'Start date must be before end date' - # Check file name + # check file name if site_list_file is None: - if source == 'UNR': - site_list_file = os.path.basename(UNR_site_list_file_url) - elif source == 'ESESES': - site_list_file = os.path.basename(ESESES_site_list_file_url) + if source == 'Generic': + raise ValueError('Site list file must be specified for generic inputs') + else: + site_list_file = dload_site_list(source=source, print_msg=print_msg) - # Check whether site list file is in current directory + # check whether site list file is in current directory if not os.path.isfile(site_list_file): # Download file dload_site_list(site_list_file, source=source, print_msg=print_msg) - # Parse data from file + # parse data from file if source == 'UNR': site_data = read_UNR_station_list(site_list_file) elif source == 'ESESES': @@ -101,13 +102,12 @@ def search_gps(SNWE, source='UNR', start_date=None, end_date=None, print('Loaded data for fields: {:s}'.\ format(' '.join(list(site_data.columns)))) - # Ensure that station name is consistent + # ensure that station name is consistent site_data['site'] = [site_data.iloc[i,:].site.upper() for i in range(site_data.shape[0])] - # Parse bounding box + # parse bounding box lat_min, lat_max, lon_min, lon_max = SNWE - 
assert (lon_min < lon_max) and (lat_min < lat_max), \ - 'Check bounding box' + assert (lon_min < lon_max) and (lat_min < lat_max), 'Check bounding box' if print_msg == True: print('Cropping to') @@ -117,34 +117,34 @@ def search_gps(SNWE, source='UNR', start_date=None, end_date=None, # Ensure lon values in (-180, 180] site_data['lon'] = [lon - 360 if lon > 180 else lon for lon in site_data['lon']] - # Limit in space + # limit in space drop_ndx = (site_data.lat < lat_min) \ | (site_data.lat > lat_max) \ | (site_data.lon < lon_min) \ | (site_data.lon > lon_max) site_data.drop(site_data[drop_ndx].index, inplace=True) - # Limit in time + # limit in time if start_date is not None: if hasattr(site_data, 'start_date'): drop_ndx = site_data.start_date > start_date site_data.drop(site_data[drop_ndx].index, inplace=True) else: - print('No date information available--date range not applied to GPS site selection') + print('No date information available--date range not applied to GNSS site selection') if end_date is not None: if hasattr(site_data, 'end_date'): drop_ndx = site_data.end_date < end_date site_data.drop(site_data[drop_ndx].index, inplace=True) else: - print('No date information available--date range not applied to GPS site selection') + print('No date information available--date range not applied to GNSS site selection') - # Limit based on number of solutions + # limit based on number of solutions if hasattr(site_data, 'num_solution'): drop_ndx = site_data.num_solution < min_num_solution site_data.drop(site_data[drop_ndx].index, inplace=True) - # Final reporting + # final reporting if print_msg == True: print(f'{site_data.shape[0]:d} stations available') @@ -194,7 +194,7 @@ def read_ESESES_station_list(site_list_file:str, print_msg=True) -> pd.DataFrame def get_baseline_change(dates1, pos_x1, pos_y1, pos_z1, dates2, pos_x2, pos_y2, pos_z2): - """Calculate the baseline change between two GPS displacement time-series + """Calculate the baseline change between two GNSS displacement time-series Parameters: dates1/2 - 1D np.array, datetime.datetime object pos_x/y/z1/2 - 1D np.ndarray, displacement in meters in float32 Returns: dates - 1D np.array, datetime.datetime object for the @@ -217,32 +217,32 @@ def get_baseline_change(dates1, pos_x1, pos_y1, pos_z1, return dates, bases -def get_gps_los_obs(meta, obs_type, site_names, start_date, end_date, source='UNR', - gps_comp='enu2los', horz_az_angle=-90., model=None, +def get_gnss_los_obs(meta, obs_type, site_names, start_date, end_date, source='UNR', + gnss_comp='enu2los', horz_az_angle=-90., model=None, print_msg=True, redo=False): - """Get the GPS LOS observations given the query info. + """Get the GNSS LOS observations given the query info. Parameters: meta - dict, dictionary of metadata of the InSAR file - obs_type - str, GPS observation data type, displacement or velocity. - site_names - list of str, GPS sites, output of search_gps() + obs_type - str, GNSS observation data type, displacement or velocity. + site_names - list of str, GNSS sites, output of search_gnss() start_date - str, date in YYYYMMDD format end_date - str, date in YYYYMMDD format - source - str, program or institution that processed the GPS data - gps_comp - str, flag of projecting 2/3D GPS into LOS + source - str, program or institution that processed the GNSS data + gnss_comp - str, flag of projecting 2/3D GNSS into LOS e.g. 
enu2los, hz2los, up2los horz_az_angle - float, azimuth angle of the horizontal motion in degree measured from the north with anti-clockwise as positive model - dict, time function model, e.g. {'polynomial': 1, 'periodic': [1.0, 0.5]} print_msg - bool, print verbose info redo - bool, ignore existing CSV file and re-calculate - Returns: site_obs - 1D np.ndarray(), GPS LOS velocity or displacement in m or m/yr - Examples: from mintpy.objects import gps + Returns: site_obs - 1D np.ndarray(), GNSS LOS velocity or displacement in m or m/yr + Examples: from mintpy.objects import gnss from mintpy.utils import readfile, utils as ut meta = readfile.read_attribute('geo/geo_velocity.h5') SNWE = ut.four_corners(meta) - site_names = gps.search_gps(SNWE, start_date='20150101', end_date='20190619') - vel = gps.get_gps_los_obs(meta, 'velocity', site_names, start_date='20150101', end_date='20190619') - dis = gps.get_gps_los_obs(meta, 'displacement', site_names, start_date='20150101', end_date='20190619') + site_names = gnss.search_gnss(SNWE, start_date='20150101', end_date='20190619') + vel = gnss.get_gnss_los_obs(meta, 'velocity', site_names, start_date='20150101', end_date='20190619') + dis = gnss.get_gnss_los_obs(meta, 'displacement', site_names, start_date='20150101', end_date='20190619') """ vprint = print if print_msg else lambda *args, **kwargs: None num_site = len(site_names) @@ -253,16 +253,16 @@ def get_gps_los_obs(meta, obs_type, site_names, start_date, end_date, source='UN raise ValueError(f'un-supported obs_type: {obs_type}') obs_ind = 3 if obs_type.lower() == 'displacement' else 4 - # GPS CSV file info + # GNSS CSV file info file_dir = os.path.dirname(meta['FILE_PATH']) - csv_file = os.path.join(file_dir, f'gps_{gps_comp}') - csv_file += f'{horz_az_angle:.0f}' if gps_comp == 'horz' else '' + csv_file = os.path.join(file_dir, f'gnss_{gnss_comp}') + csv_file += f'{horz_az_angle:.0f}' if gnss_comp == 'horz' else '' csv_file += '.csv' col_names = ['Site', 'Lon', 'Lat', 'Displacement', 'Velocity'] col_types = ['U10'] + ['f8'] * (len(col_names) - 1) - vprint(f'default GPS observation file name: {csv_file}') + vprint(f'default GNSS observation file name: {csv_file}') - # skip re-calculate GPS if: + # skip re-calculate GNSS if: # 1. redo is False AND # 2. csv_file exists (equivalent to num_row > 0) AND # 3. 
num_row >= num_site @@ -273,7 +273,7 @@ def get_gps_los_obs(meta, obs_type, site_names, start_date, end_date, source='UN if not redo and os.path.isfile(csv_file) and num_row >= num_site: # read from existing CSV file - vprint(f'read GPS observations from file: {csv_file}') + vprint(f'read GNSS observations from file: {csv_file}') fc = np.genfromtxt(csv_file, dtype=col_types, delimiter=',', names=True) site_obs = fc[col_names[obs_ind]] @@ -290,7 +290,7 @@ def get_gps_los_obs(meta, obs_type, site_names, start_date, end_date, source='UN else: # calculate and save to CSV file data_list = [] - vprint('calculating GPS observation ...') + vprint('calculating GNSS observation ...') # get geom_obj (meta / geom_file) geom_file = ut.get_geometry_file(['incidenceAngle','azimuthAngle'], @@ -308,15 +308,15 @@ def get_gps_los_obs(meta, obs_type, site_names, start_date, end_date, source='UN for i, site_name in enumerate(site_names): prog_bar.update(i+1, suffix=f'{i+1}/{num_site} {site_name}') - # calculate gps data value - GPSclass = GPS.get_gps_obj_by_source(source) - obj = GPSclass(site_name) + # calculate GNSS data value + GNSSclass = GNSS.get_gnss_obj_by_source(source) + obj = GNSSclass(site_name) obj.open(print_msg=print_msg) - vel, dis_ts = obj.get_gps_los_velocity( + vel, dis_ts = obj.get_gnss_los_velocity( geom_obj, start_date=start_date, end_date=end_date, - gps_comp=gps_comp, + gnss_comp=gnss_comp, horz_az_angle=horz_az_angle, model=model) @@ -328,7 +328,7 @@ def get_gps_los_obs(meta, obs_type, site_names, start_date, end_date, source='UN prog_bar.close() # write to CSV file - vprint(f'write GPS observations to file: {csv_file}') + vprint(f'write GNSS observations to file: {csv_file}') with open(csv_file, 'w') as fc: fcw = csv.writer(fc) fcw.writerow(col_names) @@ -341,7 +341,7 @@ def get_gps_los_obs(meta, obs_type, site_names, start_date, end_date, source='UN -#################################### Beginning of GPS-GSI utility functions ######################## +#################################### Beginning of GNSS-GSI utility functions ######################## def read_pos_file(fname): import codecs fcp = codecs.open(fname, encoding = 'cp1252') @@ -359,14 +359,14 @@ def read_pos_file(fname): return dates, X, Y, Z -def get_pos_years(gps_dir, site): - fnames = glob.glob(os.path.join(gps_dir, f'{site}.*.pos')) +def get_pos_years(gnss_dir, site): + fnames = glob.glob(os.path.join(gnss_dir, f'{site}.*.pos')) years = [os.path.basename(i).split('.')[1] for i in fnames] years = ptime.yy2yyyy(years) return years -def read_GSI_F3(gps_dir, site, start_date=None, end_date=None): +def read_GSI_F3(gnss_dir, site, start_date=None, end_date=None): year0 = int(start_date[0:4]) year1 = int(end_date[0:4]) num_year = year1 - year0 + 1 @@ -374,7 +374,7 @@ def read_GSI_F3(gps_dir, site, start_date=None, end_date=None): dates, X, Y, Z = [], [], [], [] for i in range(num_year): yeari = str(year0 + i) - fname = os.path.join(gps_dir, f'{site}.{yeari[2:]}.pos') + fname = os.path.join(gnss_dir, f'{site}.{yeari[2:]}.pos') datesi, Xi, Yi, Zi = read_pos_file(fname) dates += datesi X += Xi @@ -393,22 +393,21 @@ def read_GSI_F3(gps_dir, site, start_date=None, end_date=None): return dates[flag], X[flag], Y[flag], Z[flag] -#################################### End of GPS-GSI utility functions ############################## +#################################### End of GNSS-GSI utility functions ############################## -#################################### Beginning of GPS class ######################################## 
-class GPS: - """GPS class for GPS time-series of daily solution. +#################################### Beginning of GNSS class ######################################## +class GNSS: + """GNSS class for time-series of daily solution. - The GPS class is solely meant to be a parent class. Child classes, defined - below, support functions for downloading and parsing GPS position based on - the processing source (e.g., UNR, etc.). Use the `get_gps_obj_by_source` + The GNSS class is solely meant to be a parent class. Child classes, defined + below, support functions for downloading and parsing GNSS position based on + the processing source (e.g., UNR, etc.). Use the `get_gnss_obj_by_source` method to determine appropriate child class. """ - def __init__(self, site: str, data_dir='./GPS', - version='IGS14'): + def __init__(self, site: str, data_dir='./GNSS', version='IGS14'): # Record properties self.site = site self.version = version @@ -445,13 +444,13 @@ def open(self, file=None, print_msg=True): return None @staticmethod - def get_gps_obj_by_source(source:str): - """Return the appropriate GPS child class based on processing source. + def get_gnss_obj_by_source(source:str): + """Return the appropriate GNSS child class based on processing source. """ if source == 'UNR': - return UNR_GPS + return UNR_GNSS elif source == 'ESESES': - return ESESES_GPS + return ESESES_GNSS else: raise ValueError(f'{source:s} source not supported.') @@ -527,14 +526,14 @@ def display_data(self, marker_size=2, marker_color='k', plot_errors=True): ##################################### Utility Functions ################################### - def displacement_enu2los(self, inc_angle:float, az_angle:float, gps_comp='enu2los', + def displacement_enu2los(self, inc_angle:float, az_angle:float, gnss_comp='enu2los', horz_az_angle=-90., display=False, model=None): """Convert displacement in ENU to LOS direction. 
Parameters: inc_angle - float, LOS incidence angle in degree az_angle - float, LOS aziuth angle in degree from the north, defined as positive in clock-wise direction - gps_comp - str, GPS components used to convert to LOS direction + gnss_comp - str, GNSS components used to convert to LOS direction horz_az_angle - float, fault azimuth angle used to convert horizontal to fault-parallel measured from the north with anti-clockwise as positive Returns: dis_los - 1D np.array for displacement in LOS direction @@ -544,7 +543,7 @@ def displacement_enu2los(self, inc_angle:float, az_angle:float, gps_comp='enu2lo unit_vec = ut.get_unit_vector4component_of_interest( los_inc_angle=inc_angle, los_az_angle=az_angle, - comp=gps_comp.lower(), + comp=gnss_comp.lower(), horz_az_angle=horz_az_angle, ) @@ -591,8 +590,10 @@ def get_los_geometry(self, geom_obj, print_msg=False): y = max(0, y); y = min(int(atr['LENGTH'])-1, y) x = max(0, x); x = min(int(atr['WIDTH'])-1, x) box = (x, y, x+1, y+1) - inc_angle = readfile.read(geom_obj, datasetName='incidenceAngle', box=box, print_msg=print_msg)[0][0,0] - az_angle = readfile.read(geom_obj, datasetName='azimuthAngle', box=box, print_msg=print_msg)[0][0,0] + inc_angle = readfile.read(geom_obj, datasetName='incidenceAngle', box=box, + print_msg=print_msg)[0][0,0] + az_angle = readfile.read(geom_obj, datasetName='azimuthAngle', box=box, + print_msg=print_msg)[0][0,0] elif isinstance(geom_obj, dict): # use mean inc/az_angle from metadata @@ -605,39 +606,39 @@ def get_los_geometry(self, geom_obj, print_msg=False): return inc_angle, az_angle - def read_gps_los_displacement(self, geom_obj, start_date=None, end_date=None, ref_site=None, - gps_comp='enu2los', horz_az_angle=-90., print_msg=False): - """Read GPS displacement in LOS direction. + def read_gnss_los_displacement(self, geom_obj, start_date=None, end_date=None, ref_site=None, + gnss_comp='enu2los', horz_az_angle=-90., print_msg=False): + """Read GNSS displacement in LOS direction. 
Parameters: geom_obj - dict / str, metadata of InSAR file, or geometry file path start_date - str, dates in YYYYMMDD format end_date - str, dates in YYYYMMDD format - ref_site - str, reference GPS site - gps_comp - str, GPS components used to convert to LOS direction + ref_site - str, reference GNSS site + gnss_comp - str, GNSS components used to convert to LOS direction horz_az_angle - float, fault azimuth angle used to convert horizontal to fault-parallel Returns: dates - 1D np.array of datetime.datetime object dis/std - 1D np.array of displacement / uncertainty in meters - site_lalo - tuple of 2 float, lat/lon of GPS site - ref_site_lalo - tuple of 2 float, lat/lon of reference GPS site + site_lalo - tuple of 2 float, lat/lon of GNSS site + ref_site_lalo - tuple of 2 float, lat/lon of reference GNSS site """ - # read GPS object + # read GNSS object inc_angle, az_angle = self.get_los_geometry(geom_obj) dates = self.read_displacement(start_date, end_date, print_msg=print_msg)[0] - dis, std = self.displacement_enu2los(inc_angle, az_angle, gps_comp=gps_comp, + dis, std = self.displacement_enu2los(inc_angle, az_angle, gnss_comp=gnss_comp, horz_az_angle=horz_az_angle) site_lalo = self.get_stat_lat_lon(print_msg=print_msg) - # define GPS station object based on processing source - GPS = self.get_gps_obj_by_source(self.source) + # define GNSS station object based on processing source + GNSS = self.get_gnss_obj_by_source(self.source) - # get LOS displacement relative to another GPS site + # get LOS displacement relative to another GNSS site if ref_site: - ref_obj = GPS(site=ref_site, data_dir=self.data_dir) + ref_obj = GNSS(site=ref_site, data_dir=self.data_dir) ref_obj.open() ref_obj.read_displacement(start_date, end_date, print_msg=print_msg) inc_angle, az_angle = ref_obj.get_los_geometry(geom_obj) - ref_obj.displacement_enu2los(inc_angle, az_angle, gps_comp=gps_comp, + ref_obj.displacement_enu2los(inc_angle, az_angle, gnss_comp=gnss_comp, horz_az_angle=horz_az_angle) ref_site_lalo = ref_obj.get_stat_lat_lon(print_msg=print_msg) @@ -656,36 +657,36 @@ def read_gps_los_displacement(self, geom_obj, start_date=None, end_date=None, re return dates, dis, std, site_lalo, ref_site_lalo - def get_gps_los_velocity(self, geom_obj, start_date=None, end_date=None, - ref_site=None, gps_comp='enu2los', + def get_gnss_los_velocity(self, geom_obj, start_date=None, end_date=None, + ref_site=None, gnss_comp='enu2los', horz_az_angle=-90., model=None, print_msg=True): """Convert the three-component displacement data into LOS velocity. - Parameters: geom_obj : dict / str, metadata of InSAR file, or + Parameters: geom_obj - dict / str, metadata of InSAR file, or geometry file path - start_date : string in YYYYMMDD format - end_date : string in YYYYMMDD format - ref_site : string, reference GPS site - gps_comp : string, GPS components used to convert to + start_date - str, YYYYMMDD format + end_date - str, YYYYMMDD format + ref_site - str, reference GNSS site + gnss_comp - str, GNSS components used to convert to LOS direction - horz_az_angle : float, fault azimuth angle used to convert + horz_az_angle - float, fault azimuth angle used to convert horizontal to fault-parallel - model : dict, time function model, e.g. + model - dict, time function model, e.g. 
{'polynomial': 1, 'periodic': [1.0, 0.5]} - Returns: dates : 1D np.array of datetime.datetime object - dis : 1D np.array of displacement in meters - std : 1D np.array of displacement uncertainty in meters - site_lalo : tuple of 2 float, lat/lon of GPS site - ref_site_lalo : tuple of 2 float, lat/lon of reference GPS site + Returns: dates - 1D np.array, datetime.datetime object + dis - 1D np.array, displacement in meters + std - 1D np.array, displacement uncertainty in meters + site_lalo - tuple of 2 float, lat/lon of GNSS site + ref_site_lalo - tuple of 2 float, lat/lon of reference GNSS site """ - # Retrieve displacement data - dates, dis = self.read_gps_los_displacement(geom_obj, + # retrieve displacement data + dates, dis = self.read_gnss_los_displacement(geom_obj, start_date=start_date, end_date=end_date, ref_site=ref_site, - gps_comp=gps_comp, + gnss_comp=gnss_comp, horz_az_angle=horz_az_angle)[:2] # displacement -> velocity @@ -715,8 +716,8 @@ def get_gps_los_velocity(self, geom_obj, start_date=None, end_date=None, -class UNR_GPS(GPS): - """GPS class for daily solutions processed by UNR NGL. +class UNR_GNSS(GNSS): + """GNSS class for daily solutions processed by UNR NGL. This object will assign the attributes: site - str, four-digit site code @@ -734,8 +735,7 @@ class UNR_GPS(GPS): source = 'UNR' def dload_site(self, print_msg=True) -> str: - """Download the station displacement data from the - specified source. + """Download the station displacement data from the specified source. Modifies: self.file - str, local file path/name self.file_url - str, file URL @@ -745,7 +745,7 @@ def dload_site(self, print_msg=True) -> str: print(f"Downloading data for site {self.site:s} from UNR NGL source") # URL and file name specs - url_prefix = 'http://geodesy.unr.edu/gps_timeseries/tenv3' + url_prefix = 'http://geodesy.unr.edu/gnss_timeseries/tenv3' if self.version == 'IGS08': self.file = os.path.join(self.data_dir, '{site:s}.{version:s}.tenv3'.\ @@ -760,7 +760,7 @@ def dload_site(self, print_msg=True) -> str: # download file if not present if os.path.exists(self.file): if print_msg == True: - print(f'File {self.file} exists--reading') + print(f'file {self.file} exists--reading') else: if print_msg == True: print(f'... downloading {self.file_url:s} to {self.file:s}') @@ -796,7 +796,7 @@ def get_stat_lat_lon(self, print_msg=True) -> (str, str): def read_displacement(self, start_date=None, end_date=None, print_msg=True, display=False): - """Read GPS displacement time-series (defined by start/end_date) + """Read GNSS displacement time-series (defined by start/end_date) Parameters: start/end_date - str, date in YYYYMMDD format Returns: dates - 1D np.ndarray of datetime.datetime object dis_e/n/u - 1D np.ndarray of displacement in meters in float32 @@ -841,8 +841,8 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, -class ESESES_GPS(GPS): - """GPS class for daily solutions processed by ESESES. +class ESESES_GNSS(GNSS): + """GNSS class for daily solutions processed by ESESES. This object will assign the attributes: site - str, four-digit site code @@ -902,7 +902,7 @@ def dload_site(self, print_msg=True) -> str: # download file if not present if os.path.exists(self.file): if print_msg == True: - print(f'File {self.file} exists--reading') + print(f'file {self.file} exists--reading') else: if print_msg == True: print(f'... 
downloading {self.file_url:s} to {self.file:s}') @@ -954,7 +954,7 @@ def get_stat_lat_lon(self, print_msg=True) -> (str, str): def read_displacement(self, start_date=None, end_date=None, print_msg=True, display=False): - """Read GPS displacement time-series (defined by start/end_date) + """Read GNSS displacement time-series (defined by start/end_date) Parameters: start/end_date - str, date in YYYYMMDD format Returns: dates - 1D np.ndarray of datetime.datetime object dis_e/n/u - 1D np.ndarray of displacement in meters in float32 @@ -1001,4 +1001,4 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, self.std_e, self.std_n, self.std_u) -#################################### End of GPS class #################################### +#################################### End of GNSS class #################################### diff --git a/src/mintpy/objects/insar_vs_gps.py b/src/mintpy/objects/insar_vs_gps.py index 8b8b21f08..b0db7547a 100644 --- a/src/mintpy/objects/insar_vs_gps.py +++ b/src/mintpy/objects/insar_vs_gps.py @@ -1,11 +1,11 @@ -"""Class for comparing InSAR with GPS.""" +"""Class for comparing InSAR with GNSS.""" ############################################################ # Program is part of MintPy # # Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi # # Author: Zhang Yunjun, 2018 # ############################################################ # Recommend import: -# from mintpy.objects.insar_vs_gps import insar_vs_gps +# from mintpy.objects.insar_vs_gnss import insar_vs_gnss import datetime as dt @@ -16,34 +16,34 @@ from scipy.interpolate import griddata from mintpy.defaults.plot import * -from mintpy.objects import giantTimeseries, gps, timeseries +from mintpy.objects import giantTimeseries, gnss, timeseries from mintpy.utils import readfile, utils as ut -############################## beginning of insar_vs_gps class ############################## -class insar_vs_gps: - """ Comparing InSAR time-series with GPS time-series in LOS direction +############################## beginning of insar_vs_gnss class ############################## +class insar_vs_gnss: + """ Comparing InSAR time-series with GNSS time-series in LOS direction Parameters: ts_file : str, time-series HDF5 file geom_file : str, geometry HDF5 file temp_coh_file : str, temporal coherence HDF5 file - site_names : list of str, GPS site names - gps_source : str, program or institution that processed the GPS data - gps_dir : str, directory of the local GPS data files - ref_site : str, common reference site in space for InSAR and GPS + site_names : list of str, GNSS site names + gnss_source : str, program or institution that processed the GNSS data + gnss_dir : str, directory of the local GNSS data files + ref_site : str, common reference site in space for InSAR and GNSS start/end_date : str, date in YYYYMMDD format for the start/end date min_ref_date : str, date in YYYYMMDD format for the earliest common - reference date between InSAR and GPS + reference date between InSAR and GNSS Returns: ds : dict, each element has the following components: 'GV03': { 'name': 'GV03', 'lat': -0.7977926892712729, 'lon': -91.13294444114553, - 'gps_datetime': array([datetime.datetime(2014, 11, 1, 0, 0), + 'gnss_datetime': array([datetime.datetime(2014, 11, 1, 0, 0), datetime.datetime(2014, 11, 2, 0, 0), ..., datetime.datetime(2018, 6, 25, 0, 0)], dtype=object), - 'gps_dis': array([-2.63673663e-02, ..., 6.43612206e-01], dtype=float32), - 'gps_std': array([0.00496152, ..., 0.00477411], dtype=float32), + 'gnss_dis': 
array([-2.63673663e-02, ..., 6.43612206e-01], dtype=float32), + 'gnss_std': array([0.00496152, ..., 0.00477411], dtype=float32), 'reference_site': 'GV01', 'insar_datetime': array([datetime.datetime(2014, 12, 13, 0, 0), datetime.datetime(2014, 12, 25, 0, 0), @@ -51,8 +51,8 @@ class insar_vs_gps: datetime.datetime(2018, 6, 19, 0, 0)], dtype=object), 'insar_dis_linear': array([-0.01476493, ..., 0.62273948]), 'temp_coh': 0.9961861392598478, - 'gps_std_mean': 0.004515478, - 'comm_dis_gps': array([-0.02635017, ..., 0.61315614], dtype=float32), + 'gnss_std_mean': 0.004515478, + 'comm_dis_gnss': array([-0.02635017, ..., 0.61315614], dtype=float32), 'comm_dis_insar': array([-0.01476493, ..., 0.60640174], dtype=float32), 'r_square': 0.9993494518609801, 'dis_rmse': 0.008023425326946351 @@ -60,14 +60,14 @@ class insar_vs_gps: """ def __init__(self, ts_file, geom_file, temp_coh_file, - site_names, gps_source='UNR', gps_dir='./GPS', ref_site='GV01', + site_names, gnss_source='UNR', gnss_dir='./GNSS', ref_site='GV01', start_date=None, end_date=None, min_ref_date=None): self.insar_file = ts_file self.geom_file = geom_file self.temp_coh_file = temp_coh_file self.site_names = site_names - self.gps_source = gps_source - self.gps_dir = gps_dir + self.gnss_source = gnss_source + self.gnss_dir = gnss_dir self.ref_site = ref_site self.num_site = len(site_names) self.ds = {} @@ -87,7 +87,7 @@ def open(self): ts_obj.open(print_msg=False) self.metadata = dict(ts_obj.metadata) self.num_date = ts_obj.numDate - # remove time info from insar_datetime to be consistent with gps_datetime + # remove time info from insar_datetime to be consistent with gnss_datetime self.insar_datetime = np.array([i.replace(hour=0, minute=0, second=0, microsecond=0) for i in ts_obj.times]) @@ -100,42 +100,42 @@ def open(self): msg = f'min_ref_date {self.min_ref_date} does NOT exist in InSAR file: {self.insar_file}' raise ValueError(msg) - self.read_gps() + self.read_gnss() self.read_insar() self.calculate_rmse() return - def read_gps(self): - # define GPS station object based on processing source - if self.gps_source == 'UNR': - GPS = gps.UNR_GPS - elif self.gps_source == 'ESESES': - GPS = gps.ESESES_GPS + def read_gnss(self): + # define GNSS station object based on processing source + if self.gnss_source == 'UNR': + GNSS = gnss.UNR_GNSS + elif self.gnss_source == 'ESESES': + GNSS = gnss.ESESES_GNSS - # read data for each GPS site + # read data for each GNSS site for sname in self.site_names: site = {} site['name'] = sname - gps_obj = GPS(sname, data_dir=self.gps_dir) - gps_obj.open(print_msg=False) - site['lat'] = gps_obj.site_lat - site['lon'] = gps_obj.site_lon + gnss_obj = GNSS(sname, data_dir=self.gnss_dir) + gnss_obj.open(print_msg=False) + site['lat'] = gnss_obj.site_lat + site['lon'] = gnss_obj.site_lon - dates, dis, dis_std = gps_obj.read_gps_los_displacement( + dates, dis, dis_std = gnss_obj.read_gnss_los_displacement( self.geom_file, start_date=self.start_date, end_date=self.end_date, ref_site=self.ref_site, - gps_comp='enu2los', + gnss_comp='enu2los', )[0:3] - site['gps_datetime'] = dates - site['gps_dis'] = dis - site['gps_std'] = dis_std + site['gnss_datetime'] = dates + site['gnss_dis'] = dis + site['gnss_std'] = dis_std site['reference_site'] = self.ref_site self.ds[sname] = site - sys.stdout.write(f'\rreading GPS {sname}') + sys.stdout.write(f'\rreading GNSS {sname}') sys.stdout.flush() print() return @@ -184,33 +184,33 @@ def read_insar(self): site[self.insar_dis_name] = insar_dis[i,:] - insar_dis_ref site['temp_coh'] = 
temp_coh[i] - # 2.4 reference insar and gps to a common date - print('reference insar and gps to a common date') + # 2.4 reference insar and gnss to a common date + print('reference insar and gnss to a common date') for i in range(self.num_site): site = self.ds[self.site_names[i]] - gps_date = site['gps_datetime'] + gnss_date = site['gnss_datetime'] insar_date = site['insar_datetime'] # find common reference date ref_date = dt.datetime.strptime(self.min_ref_date, "%Y%m%d") ref_idx = insar_date.tolist().index(ref_date) while ref_idx < self.num_date: - if insar_date[ref_idx] not in gps_date: + if insar_date[ref_idx] not in gnss_date: ref_idx += 1 else: break if ref_idx == self.num_date: - msg = f"InSAR and GPS do not share ANY date for site: {site['name']}" + msg = f"InSAR and GNSS do not share ANY date for site: {site['name']}" raise RuntimeError(msg) comm_date = insar_date[ref_idx] # reference insar in time site[self.insar_dis_name] -= site[self.insar_dis_name][ref_idx] - # reference gps dis/std in time - ref_idx_gps = np.where(gps_date == comm_date)[0][0] - site['gps_dis'] -= site['gps_dis'][ref_idx_gps] - site['gps_std'] = np.sqrt(site['gps_std']**2 + site['gps_std'][ref_idx_gps]**2) - site['gps_std_mean'] = np.mean(site['gps_std']) + # reference gnss dis/std in time + ref_idx_gnss = np.where(gnss_date == comm_date)[0][0] + site['gnss_dis'] -= site['gnss_dis'][ref_idx_gnss] + site['gnss_std'] = np.sqrt(site['gnss_std']**2 + site['gnss_std'][ref_idx_gnss]**2) + site['gnss_std_mean'] = np.mean(site['gnss_std']) return @@ -218,23 +218,23 @@ def calculate_rmse(self): ## 3. calculate RMSE for i in range(self.num_site): site = self.ds[self.site_names[i]] - gps_date = site['gps_datetime'] + gnss_date = site['gnss_datetime'] insar_date = site['insar_datetime'] - comm_dates = np.array(sorted(list(set(gps_date) & set(insar_date)))) + comm_dates = np.array(sorted(list(set(gnss_date) & set(insar_date)))) num_comm_date = len(comm_dates) # get displacement at common dates comm_dis_insar = np.zeros(num_comm_date, np.float32) - comm_dis_gps = np.zeros(num_comm_date, np.float32) + comm_dis_gnss = np.zeros(num_comm_date, np.float32) for j in range(num_comm_date): - idx1 = np.where(gps_date == comm_dates[j])[0][0] + idx1 = np.where(gnss_date == comm_dates[j])[0][0] idx2 = np.where(insar_date == comm_dates[j])[0][0] - comm_dis_gps[j] = site['gps_dis'][idx1] + comm_dis_gnss[j] = site['gnss_dis'][idx1] comm_dis_insar[j] = site[self.insar_dis_name][idx2] - site['comm_dis_gps'] = comm_dis_gps + site['comm_dis_gnss'] = comm_dis_gnss site['comm_dis_insar'] = comm_dis_insar - site['r_square'] = stats.linregress(comm_dis_gps, comm_dis_insar)[2] - site['dis_rmse'] = np.sqrt(np.sum(np.square(comm_dis_gps - comm_dis_insar)) / (num_comm_date - 1)) + site['r_square'] = stats.linregress(comm_dis_gnss, comm_dis_insar)[2] + site['dis_rmse'] = np.sqrt(np.sum(np.square(comm_dis_gnss - comm_dis_insar)) / (num_comm_date - 1)) #print('site: {}, RMSE: {:.1f} cm'.format(self.site_names[i], dis_rmse*100.)) @@ -245,12 +245,12 @@ def sort_by_velocity(ds): for sname in site_names: site = ds[sname] # design matrix - yr_diff = np.array([i.year + (i.timetuple().tm_yday - 1) / 365.25 for i in site['gps_datetime']]) + yr_diff = np.array([i.year + (i.timetuple().tm_yday - 1) / 365.25 for i in site['gnss_datetime']]) yr_diff -= yr_diff[0] - A = np.ones([len(site['gps_datetime']), 2], dtype=np.float32) + A = np.ones([len(site['gnss_datetime']), 2], dtype=np.float32) A[:, 0] = yr_diff # LS estimation - ts = np.array(site['gps_dis']) + ts = 
np.array(site['gnss_dis']) ts -= ts[0] X = np.dot(np.linalg.pinv(A), ts)[0] site_vel[sname] = X @@ -272,14 +272,14 @@ def print_stats(ds): return def plot_one_site(ax, site, offset=0.): - # GPS - ax.errorbar(site['gps_datetime'], - site['gps_dis']-offset, - yerr=site['gps_std']*3., + # GNSS + ax.errorbar(site['gnss_datetime'], + site['gnss_dis']-offset, + yerr=site['gnss_std']*3., ms=marker_size*0.2, lw=0, alpha=1., fmt='-o', elinewidth=edge_width*0.5, ecolor='C0', capsize=marker_size*0.25, markeredgewidth=edge_width*0.5, - label='GPS', zorder=1) + label='GNSS', zorder=1) # InSAR ecolor = 'gray' if site['temp_coh'] < 0.7 else 'C1' insar_dis_name = [i for i in site.keys() if i.startswith('insar_dis')][0] @@ -298,4 +298,4 @@ def plot_one_site(ax, site, offset=0.): color='k', fontsize=font_size) return ax -############################## end of insar_vs_gps class #################################### +############################## end of insar_vs_gnss class #################################### diff --git a/src/mintpy/utils/arg_utils.py b/src/mintpy/utils/arg_utils.py index a2374742a..ca336782b 100644 --- a/src/mintpy/utils/arg_utils.py +++ b/src/mintpy/utils/arg_utils.py @@ -244,40 +244,41 @@ def add_figure_argument(parser, figsize_img=False): return parser -def add_gps_argument(parser): - """Argument group parser for GPS options""" - gps = parser.add_argument_group('GPS', 'GPS data to display') - gps.add_argument('--gps-source', dest='gps_source', default='UNR', +def add_gnss_argument(parser): + """Argument group parser for GNSS options""" + gnss = parser.add_argument_group('GNSS', 'GNSS data to display') + gnss.add_argument('--gnss-source', dest='gnss_source', default='UNR', choices={'UNR', 'ESESES'}, - help='GPS processing source') - gps.add_argument('--show-gps', dest='disp_gps', action='store_true', - help='Show UNR GPS location within the coverage.') - gps.add_argument('--mask-gps', dest='mask_gps', action='store_true', - help='Mask out GPS stations not coincident with valid data pixels') - gps.add_argument('--gps-label', dest='disp_gps_label', action='store_true', - help='Show GPS site name') - gps.add_argument('--gps-ms', dest='gps_marker_size', type=float, default=6, - help='Plot GPS value as scatter in size of ms**2 (default: %(default)s).') - gps.add_argument('--gps-comp', dest='gps_component', + help='GNSS processing source') + gnss.add_argument('--show-gnss', dest='disp_gnss', action='store_true', + help='Show UNR GNSS location within the coverage.') + gnss.add_argument('--mask-gnss', dest='mask_gnss', action='store_true', + help='Mask out GNSS stations not coincident with valid data pixels') + gnss.add_argument('--gnss-label', dest='disp_gnss_label', action='store_true', + help='Show GNSS site name') + gnss.add_argument('--gnss-ms', dest='gnss_marker_size', type=float, default=6, + help='Plot GNSS value as scatter in size of ms**2 (default: %(default)s).') + gnss.add_argument('--gnss-comp', dest='gnss_component', choices={'enu2los', 'hz2los', 'up2los', 'horz', 'vert'}, - help='Plot GPS in color indicating deformation velocity direction') - gps.add_argument('--gps-redo', dest='gps_redo', action='store_true', - help='Re-calculate GPS observations in LOS direction, ' + help='Plot GNSS in color indicating deformation velocity direction') + gnss.add_argument('--gnss-redo', dest='gnss_redo', action='store_true', + help='Re-calculate GNSS observations in LOS direction, ' 'instead of read from existing CSV file.') - gps.add_argument('--ref-gps', dest='ref_gps_site', type=str, 
help='Reference GPS site') - gps.add_argument('--ex-gps', dest='ex_gps_sites', type=str, nargs='*', - help='Exclude GPS sites, require --gps-comp.') - - gps.add_argument('--gps-start-date', dest='gps_start_date', type=str, metavar='YYYYMMDD', - help='start date of GPS data, default is date of the 1st SAR acquisition') - gps.add_argument('--gps-end-date', dest='gps_end_date', type=str, metavar='YYYYMMDD', - help='start date of GPS data, default is date of the last SAR acquisition') - gps.add_argument('--horz-az','--hz-az', dest='horz_az_angle', type=float, default=-90., + gnss.add_argument('--ref-gnss', dest='ref_gnss_site', type=str, help='Reference GNSS site') + gnss.add_argument('--ex-gnss', dest='ex_gnss_sites', type=str, nargs='*', + help='Exclude GNSS sites, require --gnss-comp.') + + gnss.add_argument('--gnss-start-date', dest='gnss_start_date', type=str, metavar='YYYYMMDD', + help='start date of GNSS data, default is date of the 1st SAR acquisition') + gnss.add_argument('--gnss-end-date', dest='gnss_end_date', type=str, metavar='YYYYMMDD', + help='start date of GNSS data, default is date of the last SAR acquisition') + gnss.add_argument('--horz-az','--hz-az', dest='horz_az_angle', type=float, default=-90., help='Azimuth angle (anti-clockwise from the north) of the horizontal movement in degrees\n' 'E.g.: -90. for east direction [default]\n' ' 0. for north direction\n' 'Set to the azimuth angle of the strike-slip fault to ' 'show the fault-parallel displacement.') + return parser diff --git a/src/mintpy/utils/plot.py b/src/mintpy/utils/plot.py index 713e3fc7d..ddc1c9526 100644 --- a/src/mintpy/utils/plot.py +++ b/src/mintpy/utils/plot.py @@ -1106,7 +1106,7 @@ def plot_timeseries_rms(rms_file, cutoff=3, out_fig=None, disp_fig=True, ############################################### GNSS ############################################### -def plot_gps(ax, SNWE, inps, metadata=dict(), print_msg=True): +def plot_gnss(ax, SNWE, inps, metadata=dict(), print_msg=True): """Plot GNSS as scatters on top of the input matplotlib.axes. 
Parameters: ax - matplotlib.axes object @@ -1116,7 +1116,7 @@ def plot_gps(ax, SNWE, inps, metadata=dict(), print_msg=True): Returns: ax - matplotlib.axes object """ - from mintpy.objects import gps + from mintpy.objects import gnss vprint = print if print_msg else lambda *args, **kwargs: None vmin, vmax = inps.vlim @@ -1126,8 +1126,8 @@ def plot_gps(ax, SNWE, inps, metadata=dict(), print_msg=True): atr['UNIT'] = 'm' unit_fac = scale_data2disp_unit(metadata=atr, disp_unit=inps.disp_unit)[2] - start_date = inps.gps_start_date if inps.gps_start_date else metadata.get('START_DATE', None) - end_date = inps.gps_end_date if inps.gps_end_date else metadata.get('END_DATE', None) + start_date = inps.gnss_start_date if inps.gnss_start_date else metadata.get('START_DATE', None) + end_date = inps.gnss_end_date if inps.gnss_end_date else metadata.get('END_DATE', None) # pre-query: convert UTM to lat/lon for query if 'UTM_ZONE' in metadata.keys(): @@ -1136,7 +1136,7 @@ def plot_gps(ax, SNWE, inps, metadata=dict(), print_msg=True): SNWE = (south, north, west, east) # query for GNSS stations - site_names, site_lats, site_lons = gps.search_gps(SNWE, source=inps.gps_source, + site_names, site_lats, site_lons = gnss.search_gnss(SNWE, source=inps.gnss_source, start_date=start_date, end_date=end_date) if site_names.size == 0: warnings.warn(f'No GNSS found within {SNWE} during {start_date} - {end_date}!') @@ -1148,7 +1148,7 @@ def plot_gps(ax, SNWE, inps, metadata=dict(), print_msg=True): site_lats, site_lons = ut0.latlon2utm(site_lats, site_lons) # mask out stations not coincident with InSAR data - if inps.mask_gps and inps.msk is not None: + if inps.mask_gnss and inps.msk is not None: msk = inps.msk if inps.msk.ndim == 2 else np.prod(inps.msk, axis=-1) coord = coordinate(metadata) site_ys, site_xs = coord.geo2radar(site_lats, site_lons)[0:2] @@ -1159,49 +1159,49 @@ def plot_gps(ax, SNWE, inps, metadata=dict(), print_msg=True): site_lons = site_lons[flag] # check if site_names.size == 0: - raise ValueError('No GNSS left after --mask-gps!') + raise ValueError('No GNSS left after --mask-gnss!') - if inps.ref_gps_site and inps.ref_gps_site not in site_names: - raise ValueError(f'input reference GPS site "{inps.ref_gps_site}" not available!') + if inps.ref_gnss_site and inps.ref_gnss_site not in site_names: + raise ValueError(f'input reference GNSS site "{inps.ref_gnss_site}" not available!') k = metadata['FILE_TYPE'] - if inps.gps_component and k not in ['velocity', 'timeseries', 'displacement']: - inps.gps_component = None - vprint(f'WARNING: --gps-comp is not implemented for {k} file yet, set --gps-comp = None and continue') + if inps.gnss_component and k not in ['velocity', 'timeseries', 'displacement']: + inps.gnss_component = None + vprint(f'WARNING: --gnss-comp is not implemented for {k} file yet, set --gnss-comp = None and continue') - plot_kwargs = dict(s=inps.gps_marker_size**2, edgecolors='k', lw=0.5, zorder=10) - if inps.gps_component: - # plot GPS velocity/displacement along LOS direction + plot_kwargs = dict(s=inps.gnss_marker_size**2, edgecolors='k', lw=0.5, zorder=10) + if inps.gnss_component: + # plot GNSS velocity/displacement along LOS direction vprint('-'*30) - msg = 'plotting GPS ' + msg = 'plotting GNSS ' msg += 'velocity' if k == 'velocity' else 'displacement' - msg += f' in IGS14 reference frame in {inps.gps_component} direction' - msg += f' with respect to {inps.ref_gps_site} ...' if inps.ref_gps_site else ' ...' 
+ msg += f' in IGS14 reference frame in {inps.gnss_component} direction' + msg += f' with respect to {inps.ref_gnss_site} ...' if inps.ref_gnss_site else ' ...' vprint(msg) - vprint(f'number of available GPS stations: {len(site_names)}') + vprint(f'number of available GNSS stations: {len(site_names)}') vprint(f'start date: {start_date}') vprint(f'end date: {end_date}') - vprint(f'components projection: {inps.gps_component}') + vprint(f'components projection: {inps.gnss_component}') - # get GPS LOS observations + # get GNSS LOS observations # save absolute value to support both spatially relative and absolute comparison # without compromising the re-usability of the CSV file obs_type = 'velocity' if k == 'velocity' else 'displacement' - site_obs = gps.get_gps_los_obs( + site_obs = gnss.get_gnss_los_obs( meta=metadata, obs_type=obs_type, site_names=site_names, start_date=start_date, end_date=end_date, - gps_comp=inps.gps_component, + gnss_comp=inps.gnss_component, horz_az_angle=inps.horz_az_angle, print_msg=print_msg, - redo=inps.gps_redo, + redo=inps.gnss_redo, ) - # reference GPS - if inps.ref_gps_site: - ref_ind = site_names.tolist().index(inps.ref_gps_site) + # reference GNSS + if inps.ref_gnss_site: + ref_ind = site_names.tolist().index(inps.ref_gnss_site) # plot label of the reference site #ax.annotate(site_names[ref_ind], xy=(site_lons[ref_ind], site_lats[ref_ind]), fontsize=inps.font_size) # update value @@ -1213,8 +1213,8 @@ def plot_gps(ax, SNWE, inps, metadata=dict(), print_msg=True): site_obs *= unit_fac # exclude sites - if inps.ex_gps_sites: - ex_flag = np.array([x in inps.ex_gps_sites for x in site_names], dtype=np.bool_) + if inps.ex_gnss_sites: + ex_flag = np.array([x in inps.ex_gnss_sites for x in site_names], dtype=np.bool_) if np.sum(ex_flag) > 0: vprint(f'ignore the following specified stations:\n {site_names[ex_flag]}') site_names = site_names[~ex_flag] @@ -1234,43 +1234,43 @@ def plot_gps(ax, SNWE, inps, metadata=dict(), print_msg=True): ax.scatter(lon, lat, color=color, **plot_kwargs) else: - # plot GPS locations only - vprint('showing GPS locations') + # plot GNSS locations only + vprint('showing GNSS locations') ax.scatter(site_lons, site_lats, color='w', **plot_kwargs) - # plot GPS label - if inps.disp_gps_label: + # plot GNSS label + if inps.disp_gnss_label: for site_name, lat, lon in zip(site_names, site_lats, site_lons): ax.annotate(site_name, xy=(lon, lat), fontsize=inps.font_size) return ax -def plot_insar_vs_gps_scatter(vel_file, csv_file='gps_enu2los.csv', msk_file=None, ref_gps_site=None, cutoff=5, - fig_size=[4, 4], xname='InSAR', vlim=None, ex_gps_sites=[], display=True): - """Scatter plot to compare the velocities between SAR/InSAR and GPS. +def plot_insar_vs_gnss_scatter(vel_file, csv_file='gnss_enu2los.csv', msk_file=None, ref_gnss_site=None, cutoff=5, + fig_size=[4, 4], xname='InSAR', vlim=None, ex_gnss_sites=[], display=True): + """Scatter plot to compare the velocities between SAR/InSAR and GNSS. - Parameters: vel_file - str, path of InSAR LOS velocity HDF5 file. - csv_file - str, path of GNSS CSV file, generated after running view.py --gps-comp - msk_file - str, path of InSAR mask file. - ref_gps_site - str, reference GNSS site name - cutoff - float, threshold in terms of med abs dev (MAD) for outlier detection - xname - str, xaxis label - vlim - list of 2 float, display value range in the unit of cm/yr + Parameters: vel_file - str, path of InSAR LOS velocity HDF5 file. 
+ csv_file - str, path of GNSS CSV file, generated after running view.py --gnss-comp + msk_file - str, path of InSAR mask file. + ref_gnss_site - str, reference GNSS site name + cutoff - float, threshold in terms of med abs dev (MAD) for outlier detection + xname - str, xaxis label + vlim - list of 2 float, display value range in the unit of cm/yr Default is None to grab from data - If set, the range will be used to prune the SAR and GPS observations - ex_gps_sites - list of str, exclude GNSS sites for analysis and plotting. - Returns: sites - list of str, GNSS site names used for comparison - insar_obs - 1D np.ndarray in float32, InSAR velocity in cm/yr - gps_obs - 1D np.ndarray in float32, GNSS velocity in cm/yr + If set, the range will be used to prune the SAR and GNSS observations + ex_gnss_sites - list of str, exclude GNSS sites for analysis and plotting. + Returns: sites - list of str, GNSS site names used for comparison + insar_obs - 1D np.ndarray in float32, InSAR velocity in cm/yr + gnss_obs - 1D np.ndarray in float32, GNSS velocity in cm/yr Example: from mintpy.utils import plot as pp - csv_file = os.path.join(work_dir, 'geo/gps_enu2los.csv') + csv_file = os.path.join(work_dir, 'geo/gnss_enu2los.csv') vel_file = os.path.join(work_dir, 'geo/geo_velocity.h5') msk_file = os.path.join(work_dir, 'geo/geo_maskTempCoh.h5') - pp.plot_insar_vs_gps_scatter( + pp.plot_insar_vs_gnss_scatter( vel_file, - ref_gps_site='CACT', + ref_gnss_site='CACT', csv_file=csv_file, msk_file=msk_file, vlim=[-2.5, 2], @@ -1280,25 +1280,25 @@ def plot_insar_vs_gps_scatter(vel_file, csv_file='gps_enu2los.csv', msk_file=Non disp_unit = 'cm/yr' unit_fac = 100. - # read GPS velocity from CSV file (generated by gps.get_gps_los_obs()) + # read GNSS velocity from CSV file (generated by gnss.get_gnss_los_obs()) col_names = ['Site', 'Lon', 'Lat', 'Displacement', 'Velocity'] num_col = len(col_names) col_types = ['U10'] + ['f8'] * (num_col - 1) - print(f'read GPS velocity from file: {csv_file}') + print(f'read GNSS velocity from file: {csv_file}') fc = np.genfromtxt(csv_file, dtype=col_types, delimiter=',', names=True) sites = fc['Site'] lats = fc['Lat'] lons = fc['Lon'] - gps_obs = fc[col_names[-1]] * unit_fac + gnss_obs = fc[col_names[-1]] * unit_fac - if ex_gps_sites: - ex_flag = np.array([x in ex_gps_sites for x in sites], dtype=np.bool_) + if ex_gnss_sites: + ex_flag = np.array([x in ex_gnss_sites for x in sites], dtype=np.bool_) if np.sum(ex_flag) > 0: sites = sites[~ex_flag] lats = lats[~ex_flag] lons = lons[~ex_flag] - gps_obs = gps_obs[~ex_flag] + gnss_obs = gnss_obs[~ex_flag] # read InSAR velocity print(f'read InSAR velocity from file: {vel_file}') @@ -1319,46 +1319,46 @@ def plot_insar_vs_gps_scatter(vel_file, csv_file='gps_enu2los.csv', msk_file=Non prog_bar.update(i+1, suffix=f'{i+1}/{num_site} {sites[i]}') prog_bar.close() - off_med = np.nanmedian(insar_obs - gps_obs) - print(f'median offset between InSAR and GPS [before common referencing]: {off_med:.2f} cm/year') + off_med = np.nanmedian(insar_obs - gnss_obs) + print(f'median offset between InSAR and GNSS [before common referencing]: {off_med:.2f} cm/year') # reference site - if ref_gps_site: - print(f'referencing both InSAR and GPS data to site: {ref_gps_site}') - ref_ind = sites.tolist().index(ref_gps_site) - gps_obs -= gps_obs[ref_ind] + if ref_gnss_site: + print(f'referencing both InSAR and GNSS data to site: {ref_gnss_site}') + ref_ind = sites.tolist().index(ref_gnss_site) + gnss_obs -= gnss_obs[ref_ind] insar_obs -= insar_obs[ref_ind] # remove NaN 
value - print(f'removing sites with NaN values in GPS or {xname}') - flag = np.multiply(~np.isnan(insar_obs), ~np.isnan(gps_obs)) + print(f'removing sites with NaN values in GNSS or {xname}') + flag = np.multiply(~np.isnan(insar_obs), ~np.isnan(gnss_obs)) if vlim is not None: print(f'pruning sites with value range: {vlim} {disp_unit}') - flag *= gps_obs >= vlim[0] - flag *= gps_obs <= vlim[1] + flag *= gnss_obs >= vlim[0] + flag *= gnss_obs <= vlim[1] flag *= insar_obs >= vlim[0] flag *= insar_obs <= vlim[1] - gps_obs = gps_obs[flag] + gnss_obs = gnss_obs[flag] insar_obs = insar_obs[flag] sites = sites[flag] # stats - print(f'GPS min/max: {np.nanmin(gps_obs):.2f} / {np.nanmax(gps_obs):.2f}') + print(f'GNSS min/max: {np.nanmin(gnss_obs):.2f} / {np.nanmax(gnss_obs):.2f}') print(f'InSAR min/max: {np.nanmin(insar_obs):.2f} / {np.nanmax(insar_obs):.2f}') - rmse = np.sqrt(np.sum((insar_obs - gps_obs)**2) / (gps_obs.size - 1)) - r2 = stats.linregress(insar_obs, gps_obs)[2] + rmse = np.sqrt(np.sum((insar_obs - gnss_obs)**2) / (gnss_obs.size - 1)) + r2 = stats.linregress(insar_obs, gnss_obs)[2] print(f'RMSE = {rmse:.2f} {disp_unit}') print(f'R^2 = {r2:.2f}') # preliminary outlier detection - diff_mad = ut0.median_abs_deviation(abs(insar_obs - gps_obs), center=0) + diff_mad = ut0.median_abs_deviation(abs(insar_obs - gnss_obs), center=0) print(f'Preliminary outliers detection: abs(InSAR - GNSS) > med abs dev ({diff_mad:.2f}) * {cutoff}') print('Site: InSAR GNSS') - for site_name, insar_val, gps_val in zip(sites, insar_obs, gps_obs): - if abs(insar_val - gps_val) > diff_mad * cutoff: - print(f'{site_name:s}: {insar_val:5.1f}, {gps_val:5.1f} {disp_unit}') + for site_name, insar_val, gnss_val in zip(sites, insar_obs, gnss_obs): + if abs(insar_val - gnss_val) > diff_mad * cutoff: + print(f'{site_name:s}: {insar_val:5.1f}, {gnss_val:5.1f} {disp_unit}') # plot if display: @@ -1370,7 +1370,7 @@ def plot_insar_vs_gps_scatter(vel_file, csv_file='gps_enu2los.csv', msk_file=Non fig, ax = plt.subplots(figsize=fig_size) ax.plot((vlim[0], vlim[1]), (vlim[0], vlim[1]), 'k--') - ax.plot(insar_obs, gps_obs, '.', ms=15) + ax.plot(insar_obs, gnss_obs, '.', ms=15) # axis format ax.set_xlim(vlim) @@ -1381,12 +1381,12 @@ def plot_insar_vs_gps_scatter(vel_file, csv_file='gps_enu2los.csv', msk_file=Non fig.tight_layout() # output - out_fig = f'{xname.lower()}_vs_gps_scatter.pdf' + out_fig = f'{xname.lower()}_vs_gnss_scatter.pdf' plt.savefig(out_fig, bbox_inches='tight', transparent=True, dpi=300) print('save figure to file', out_fig) plt.show() - return sites, insar_obs, gps_obs + return sites, insar_obs, gnss_obs def plot_colorbar(inps, im, cax): diff --git a/src/mintpy/view.py b/src/mintpy/view.py index cf790dd6e..090fab3eb 100644 --- a/src/mintpy/view.py +++ b/src/mintpy/view.py @@ -24,7 +24,7 @@ from mintpy.objects import ( TIMESERIES_KEY_NAMES, giantIfgramStack, - gps, + gnss, ifgramStack, ) from mintpy.utils import plot as pp, ptime, readfile, utils as ut @@ -544,19 +544,19 @@ def extent2meshgrid(extent: tuple, ds_shape: list): # Reference (InSAR) data to a GNSS site coord = ut.coordinate(metadata) - if inps.disp_gps and inps.gps_component and inps.ref_gps_site: - # define GPS station object based on processing source - GPS = gps.GPS.get_gps_obj_by_source(inps.gps_source) - - # GPS reference site - ref_site_gps = GPS(site=inps.ref_gps_site) - ref_site_gps.open() - ref_site_lalo = ref_site_gps.get_stat_lat_lon(print_msg=False) + if inps.disp_gnss and inps.gnss_component and inps.ref_gnss_site: + # define GNSS station 
object based on processing source + GNSS = gnss.GNSS.get_gnss_obj_by_source(inps.gnss_source) + + # GNSS reference site + ref_site_gnss = GNSS(site=inps.ref_gnss_site) + ref_site_gnss.open() + ref_site_lalo = ref_site_gnss.get_stat_lat_lon(print_msg=False) y, x = coord.geo2radar(ref_site_lalo[0], ref_site_lalo[1])[0:2] ref_data = data[y - inps.pix_box[1], x - inps.pix_box[0]] data -= ref_data vprint('referencing InSAR data to the pixel nearest to GNSS station: ' - f'{inps.ref_gps_site} at [{ref_site_lalo[0]:.6f}, {ref_site_lalo[1]:.6f}] ' + f'{inps.ref_gnss_site} at [{ref_site_lalo[0]:.6f}, {ref_site_lalo[1]:.6f}] ' f'by substrating {ref_data:.3f} {inps.disp_unit}') # do not show the original InSAR reference point inps.disp_ref_pixel = False @@ -636,9 +636,9 @@ def extent2meshgrid(extent: tuple, ds_shape: list): mec='k', mew=1.) vprint('plot points of interest') - # Show UNR GPS stations - if inps.disp_gps: - ax = pp.plot_gps(ax, SNWE, inps, metadata, print_msg=inps.print_msg) + # Show UNR GNSS stations + if inps.disp_gnss: + ax = pp.plot_gnss(ax, SNWE, inps, metadata, print_msg=inps.print_msg) # Status bar if inps.dem_file: @@ -1694,7 +1694,7 @@ def plot(self): # Multiple Subplots else: # warn single-subplot options - opt_names = ['--show-gps', '--coastline', '--lalo-label', '--lalo-step', '--scalebar', + opt_names = ['--show-gnss', '--coastline', '--lalo-label', '--lalo-step', '--scalebar', '--pts-yx', '--pts-lalo', '--pts-file'] opt_names = list(set(opt_names) & set(self.argv)) for opt_name in opt_names: From dd8542fa6042d90805c165444e410edf8b537648 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 3 Apr 2024 16:57:32 +0000 Subject: [PATCH 12/44] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/mintpy/objects/gnss.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index 2704ce430..557c235fb 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -23,7 +23,6 @@ from mintpy.objects.coord import coordinate from mintpy.utils import ptime, readfile, time_func, utils1 as ut - supported_sources = ['UNR', 'ESESES'] From 96c72138fe9fcb1930125c01be99b13702734bb6 Mon Sep 17 00:00:00 2001 From: rzinke Date: Wed, 3 Apr 2024 09:59:34 -0700 Subject: [PATCH 13/44] attempt fix circle_ci error in plot_insar_vs_gnss_scatter --- src/mintpy/utils/plot.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/mintpy/utils/plot.py b/src/mintpy/utils/plot.py index ddc1c9526..699782932 100644 --- a/src/mintpy/utils/plot.py +++ b/src/mintpy/utils/plot.py @@ -1285,7 +1285,7 @@ def plot_insar_vs_gnss_scatter(vel_file, csv_file='gnss_enu2los.csv', msk_file=N num_col = len(col_names) col_types = ['U10'] + ['f8'] * (num_col - 1) - print(f'read GNSS velocity from file: {csv_file}') + print(f'read GNSS velocity from file: {csv_file:s}') fc = np.genfromtxt(csv_file, dtype=col_types, delimiter=',', names=True) sites = fc['Site'] lats = fc['Lat'] @@ -1301,7 +1301,7 @@ def plot_insar_vs_gnss_scatter(vel_file, csv_file='gnss_enu2los.csv', msk_file=N gnss_obs = gnss_obs[~ex_flag] # read InSAR velocity - print(f'read InSAR velocity from file: {vel_file}') + print(f'read InSAR velocity from file: {vel_file:s}') atr = readfile.read_attribute(vel_file) length, width = int(atr['LENGTH']), int(atr['WIDTH']) ys, xs = coordinate(atr).geo2radar(lats, lons)[:2] From 34c51078238a9497ac3c85d7784ca97c5b91152d Mon Sep 
17 00:00:00 2001 From: rzinke Date: Wed, 3 Apr 2024 10:06:06 -0700 Subject: [PATCH 14/44] fix dangerous default value in plot_insar_vs_gnss_scatter --- src/mintpy/utils/plot.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/mintpy/utils/plot.py b/src/mintpy/utils/plot.py index 699782932..045734df0 100644 --- a/src/mintpy/utils/plot.py +++ b/src/mintpy/utils/plot.py @@ -1247,7 +1247,7 @@ def plot_gnss(ax, SNWE, inps, metadata=dict(), print_msg=True): def plot_insar_vs_gnss_scatter(vel_file, csv_file='gnss_enu2los.csv', msk_file=None, ref_gnss_site=None, cutoff=5, - fig_size=[4, 4], xname='InSAR', vlim=None, ex_gnss_sites=[], display=True): + fig_size=[4, 4], xname='InSAR', vlim=None, ex_gnss_sites=None, display=True): """Scatter plot to compare the velocities between SAR/InSAR and GNSS. Parameters: vel_file - str, path of InSAR LOS velocity HDF5 file. @@ -1292,8 +1292,8 @@ def plot_insar_vs_gnss_scatter(vel_file, csv_file='gnss_enu2los.csv', msk_file=N lons = fc['Lon'] gnss_obs = fc[col_names[-1]] * unit_fac - if ex_gnss_sites: - ex_flag = np.array([x in ex_gnss_sites for x in sites], dtype=np.bool_) + if ex_gnss_sites is not None: + ex_flag = np.array([site in ex_gnss_sites for site in sites], dtype=np.bool_) if np.sum(ex_flag) > 0: sites = sites[~ex_flag] lats = lats[~ex_flag] From b3ddcc6c28c3788e6376f39fe27d09cd4fd62582 Mon Sep 17 00:00:00 2001 From: rzinke Date: Wed, 3 Apr 2024 10:51:19 -0700 Subject: [PATCH 15/44] no mutable values as inputs in plot_insar_vs_gnss_scatter --- src/mintpy/utils/plot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/mintpy/utils/plot.py b/src/mintpy/utils/plot.py index 045734df0..9dd206df8 100644 --- a/src/mintpy/utils/plot.py +++ b/src/mintpy/utils/plot.py @@ -1247,7 +1247,7 @@ def plot_gnss(ax, SNWE, inps, metadata=dict(), print_msg=True): def plot_insar_vs_gnss_scatter(vel_file, csv_file='gnss_enu2los.csv', msk_file=None, ref_gnss_site=None, cutoff=5, - fig_size=[4, 4], xname='InSAR', vlim=None, ex_gnss_sites=None, display=True): + fig_size=(4, 4), xname='InSAR', vlim=None, ex_gnss_sites=None, display=True): """Scatter plot to compare the velocities between SAR/InSAR and GNSS. Parameters: vel_file - str, path of InSAR LOS velocity HDF5 file. 
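Note on patches 14 and 15 above: in Python, default argument values are evaluated once, at function-definition time, so a mutable default such as ex_gnss_sites=[] or fig_size=[4, 4] is a single object shared by every call and can silently accumulate or leak state between calls. The two patches adopt the usual remedies: a None sentinel for the list argument and an immutable tuple for the figure size. The sketch below is a minimal, standalone illustration of the pitfall and the sentinel idiom; it is generic example code (hypothetical function names), not part of MintPy.

    # Generic illustration (not MintPy code): mutable default vs. None-sentinel default.
    def append_bad(item, bucket=[]):      # one list, created at "def" time, reused by every call
        bucket.append(item)
        return bucket

    def append_good(item, bucket=None):   # None sentinel: build a fresh list on each call
        if bucket is None:
            bucket = []
        bucket.append(item)
        return bucket

    print(append_bad('A'))    # ['A']
    print(append_bad('B'))    # ['A', 'B']  <- state leaked from the previous call
    print(append_good('A'))   # ['A']
    print(append_good('B'))   # ['B']

In plot_insar_vs_gnss_scatter the same idiom shows up after patch 14 as ex_gnss_sites=None paired with an "is not None" guard, and patch 15 replaces the fig_size list default with the immutable tuple (4, 4).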
From 6afac2e3ea83eef54bd8ea8382aa0eba0fa5600f Mon Sep 17 00:00:00 2001 From: rzinke Date: Wed, 3 Apr 2024 10:59:08 -0700 Subject: [PATCH 16/44] gps to gnss args in cli calls --- src/mintpy/cli/tsview.py | 8 ++++---- src/mintpy/cli/view.py | 10 +++++----- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/src/mintpy/cli/tsview.py b/src/mintpy/cli/tsview.py index b93c64d88..ae2589344 100755 --- a/src/mintpy/cli/tsview.py +++ b/src/mintpy/cli/tsview.py @@ -103,7 +103,7 @@ def create_parser(subparsers=None): parser = arg_utils.add_data_disp_argument(parser) parser = arg_utils.add_dem_argument(parser) parser = arg_utils.add_figure_argument(parser, figsize_img=True) - parser = arg_utils.add_gps_argument(parser) + parser = arg_utils.add_gnss_argument(parser) parser = arg_utils.add_mask_argument(parser) parser = arg_utils.add_map_argument(parser) parser = arg_utils.add_memory_argument(parser) @@ -124,9 +124,9 @@ def cmd_line_parse(iargs=None): # use sys.argv[1:] for command line call inps.argv = iargs if iargs else sys.argv[1:] - # check: --gps-comp option (not implemented for tsview yet) - if inps.gps_component: - msg = f'--gps-comp is not supported for {os.path.basename(__file__)}' + # check: --gnss-comp option (not implemented for tsview yet) + if inps.gnss_component: + msg = f'--gnss-comp is not supported for {os.path.basename(__file__)}' raise NotImplementedError(msg) # check: --label option (same number as input files) diff --git a/src/mintpy/cli/view.py b/src/mintpy/cli/view.py index a1e8caf43..b1264821a 100755 --- a/src/mintpy/cli/view.py +++ b/src/mintpy/cli/view.py @@ -35,9 +35,9 @@ view.py ifgramStack.h5 'coherence*20171010*' #all coherence related with 20171010 # GPS (for one subplot in geo-coordinates only) - view.py geo_velocity_msk.h5 velocity --show-gps --gps-label #show locations of available GPS - view.py geo_velocity_msk.h5 velocity --show-gps --gps-comp enu2los --ref-gps GV01 - view.py geo_timeseries_ERA5_ramp_demErr.h5 20180619 --ref-date 20141213 --show-gps --gps-comp enu2los --ref-gps GV01 + view.py geo_velocity_msk.h5 velocity --show-gnss --gnss-label #show locations of available GPS + view.py geo_velocity_msk.h5 velocity --show-gnss --gnss-comp enu2los --ref-gnss GV01 + view.py geo_timeseries_ERA5_ramp_demErr.h5 20180619 --ref-date 20141213 --show-gnss --gnss-comp enu2los --ref-gnss GV01 # Faults view.py filt_dense_offsets.bil range --faultline simple_fault_confident.lonlat @@ -83,7 +83,7 @@ def create_parser(subparsers=None): parser = arg_utils.add_data_disp_argument(parser) parser = arg_utils.add_dem_argument(parser) parser = arg_utils.add_figure_argument(parser) - parser = arg_utils.add_gps_argument(parser) + parser = arg_utils.add_gnss_argument(parser) parser = arg_utils.add_mask_argument(parser) parser = arg_utils.add_map_argument(parser) parser = arg_utils.add_memory_argument(parser) @@ -154,7 +154,7 @@ def cmd_line_parse(iargs=None): print('WARNING: --cbar-ext is NOT compatible with --dem-blend, ignore --cbar-ext and continue.') # check: conflicted options (geo-only options if inpput file is in radar-coordinates) - geo_opt_names = ['--coord', '--show-gps', '--coastline', '--lalo-label', '--lalo-step', '--scalebar', '--faultline'] + geo_opt_names = ['--coord', '--show-gnss', '--coastline', '--lalo-label', '--lalo-step', '--scalebar', '--faultline'] geo_opt_names = list(set(geo_opt_names) & set(inps.argv)) if geo_opt_names and 'Y_FIRST' not in readfile.read_attribute(inps.file).keys(): for opt_name in geo_opt_names: From 
c74c26a08c4bf3d80b10dbff2558c2ba4e2150ce Mon Sep 17 00:00:00 2001 From: rzinke Date: Wed, 3 Apr 2024 13:29:14 -0700 Subject: [PATCH 17/44] Added JPL-SIDESHOW and Generic source capabilities --- src/mintpy/objects/gnss.py | 394 ++++++++++++++++++++++++++++++++++--- 1 file changed, 370 insertions(+), 24 deletions(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index 557c235fb..e39e3ef9d 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -23,7 +23,8 @@ from mintpy.objects.coord import coordinate from mintpy.utils import ptime, readfile, time_func, utils1 as ut -supported_sources = ['UNR', 'ESESES'] + +supported_sources = ['UNR', 'ESESES', 'JPL-SIDESHOW', 'Generic'] def dload_site_list(out_file=None, source='UNR', print_msg=True) -> str: @@ -40,15 +41,14 @@ def dload_site_list(out_file=None, source='UNR', print_msg=True) -> str: elif source == 'ESESES': ESESES_site_list_file_url = 'http://garner.ucsd.edu/pub/measuresESESES_products/Velocities/ESESES_Velocities.txt' site_list_file_url = ESESES_site_list_file_url + elif source == 'JPL-SIDESHOW': + JPL_SIDESHOW_site_list_file_url = 'https://sideshow.jpl.nasa.gov/post/tables/table2.html' + site_list_file_url = JPL_SIDESHOW_site_list_file_url # handle output file if out_file is None: out_file = os.path.basename(site_list_file_url) - # report if requested - if print_msg: - print(f'Downloading site list from {source}: {site_list_file_url} to {out_file}') - # download file if not os.path.exists(out_file): if print_msg: @@ -96,6 +96,10 @@ def search_gnss(SNWE, source='UNR', start_date=None, end_date=None, site_data = read_UNR_station_list(site_list_file) elif source == 'ESESES': site_data = read_ESESES_station_list(site_list_file) + elif source == 'JPL-SIDESHOW': + site_data = read_JPL_SIDESHOW_station_list(site_list_file) + elif source == 'Generic': + site_data = read_Generic_station_list(site_list_file) if print_msg == True: print('Loaded data for fields: {:s}'.\ @@ -190,6 +194,91 @@ def read_ESESES_station_list(site_list_file:str, print_msg=True) -> pd.DataFrame return site_data +def read_JPL_SIDESHOW_station_list(site_list_file:str, print_msg=True) -> pd.DataFrame: + """Return names and lon/lat values for JPL-SIDESHOW GNSS stations. 
+ """ + if print_msg == True: + print('Parsing JPL-SIDESHOW site list file') + + # read file contents + with open(site_list_file, 'r') as site_list: + lines = site_list.readlines() + + # find lines containing position and velocity data + line_len = 8 # number of entries in a data line + name_len = 4 # number of letters in a station name + data_lines = [line.strip('\n') for line in lines if (len(line.split()) == line_len) \ + and (len(line.split()[0]) == name_len)] + n_data_lines = len(data_lines) + + # transform format from (POS \n VEL) to (POS VEL) + pos_lines = data_lines[0::2] + vel_lines = data_lines[1::2] + combo_lines = list(zip(pos_lines, vel_lines)) + + # empty lists + sites = [] + lats = [] + lons = [] + elevs = [] + Nvels = [] + Evels = [] + Uvels = [] + Nerrs = [] + Eerrs = [] + Uerrs = [] + + # parse data + for line in combo_lines: + pos_info, vel_info = line + + # parse line values + site, _, lat, lon, elev, _, _, _ = pos_info.split() + _ , _, Nvel, Evel, Uvel, Nerr, Eerr, Uerr = vel_info.split() + + # format data + sites.append(site) + lats.append(float(lat)) + lons.append(float(lon)) + elevs.append(float(elev)) + Nvels.append(float(Nvel)) + Evels.append(float(Evel)) + Uvels.append(float(Uvel)) + Nerrs.append(float(Nerr)) + Eerrs.append(float(Eerr)) + Uerrs.append(float(Uerr)) + + # format data frame + data = {'site': sites, + 'lat': lats, 'lon': lons, 'elev': elevs, + 'vel_n': Nvels, 'vel_e': Evels, 'vel_u': Uvels, + 'err_n': Nerrs, 'err_e': Eerrs, 'err_u': Uerrs} + site_data = pd.DataFrame(data) + + return site_data + +def read_Generic_station_list(site_list_file:str, print_msg=True) -> pd.DataFrame: + """Return names and lon/lat values for GNSS stations processed by an + otherwise-unsupported source. + + The user must format the station position data in a file named + GenericList.txt The file should have three, nine, or eleven space- + separated columns: + + site lat lon [vel_e vel_n vel_u err_e err_n err_u] [start_date end_date] + + where site is the four-digit, alphanumeric (uppercase) site code; and + lat/lon are in decimal degrees. If included, vel should be in units of + m/yr; and dates should be in format YYYYMMDD. + """ + if print_msg == True: + print('Parsing JPL-SIDESHOW site list file') + + # read file contents + site_data = pd.read_csv(site_list_file, delimiter=' ', names=('site', 'lat', 'lon')) + + return site_data + def get_baseline_change(dates1, pos_x1, pos_y1, pos_z1, dates2, pos_x2, pos_y2, pos_z2): @@ -254,12 +343,12 @@ def get_gnss_los_obs(meta, obs_type, site_names, start_date, end_date, source='U # GNSS CSV file info file_dir = os.path.dirname(meta['FILE_PATH']) - csv_file = os.path.join(file_dir, f'gnss_{gnss_comp}') + csv_file = os.path.join(file_dir, f'gnss_{gnss_comp:s}') csv_file += f'{horz_az_angle:.0f}' if gnss_comp == 'horz' else '' csv_file += '.csv' col_names = ['Site', 'Lon', 'Lat', 'Displacement', 'Velocity'] col_types = ['U10'] + ['f8'] * (len(col_names) - 1) - vprint(f'default GNSS observation file name: {csv_file}') + vprint(f'default GNSS observation file name: {csv_file:s}') # skip re-calculate GNSS if: # 1. 
redo is False AND @@ -272,7 +361,7 @@ def get_gnss_los_obs(meta, obs_type, site_names, start_date, end_date, source='U if not redo and os.path.isfile(csv_file) and num_row >= num_site: # read from existing CSV file - vprint(f'read GNSS observations from file: {csv_file}') + vprint(f'read GNSS observations from file: {csv_file:s}') fc = np.genfromtxt(csv_file, dtype=col_types, delimiter=',', names=True) site_obs = fc[col_names[obs_ind]] @@ -305,7 +394,7 @@ def get_gnss_los_obs(meta, obs_type, site_names, start_date, end_date, source='U # loop for calculation prog_bar = ptime.progressBar(maxValue=num_site, print_msg=print_msg) for i, site_name in enumerate(site_names): - prog_bar.update(i+1, suffix=f'{i+1}/{num_site} {site_name}') + prog_bar.update(i+1, suffix=f'{i+1}/{num_site} {site_name:s}') # calculate GNSS data value GNSSclass = GNSS.get_gnss_obj_by_source(source) @@ -327,7 +416,7 @@ def get_gnss_los_obs(meta, obs_type, site_names, start_date, end_date, source='U prog_bar.close() # write to CSV file - vprint(f'write GNSS observations to file: {csv_file}') + vprint(f'write GNSS observations to file: {csv_file:s}') with open(csv_file, 'w') as fc: fcw = csv.writer(fc) fcw.writerow(col_names) @@ -359,7 +448,7 @@ def read_pos_file(fname): def get_pos_years(gnss_dir, site): - fnames = glob.glob(os.path.join(gnss_dir, f'{site}.*.pos')) + fnames = glob.glob(os.path.join(gnss_dir, f'{site:s}.*.pos')) years = [os.path.basename(i).split('.')[1] for i in fnames] years = ptime.yy2yyyy(years) return years @@ -373,7 +462,7 @@ def read_GSI_F3(gnss_dir, site, start_date=None, end_date=None): dates, X, Y, Z = [], [], [], [] for i in range(num_year): yeari = str(year0 + i) - fname = os.path.join(gnss_dir, f'{site}.{yeari[2:]}.pos') + fname = os.path.join(gnss_dir, f'{site:d}.{yeari[2:]:s}.pos') datesi, Xi, Yi, Zi = read_pos_file(fname) dates += datesi X += Xi @@ -450,6 +539,10 @@ def get_gnss_obj_by_source(source:str): return UNR_GNSS elif source == 'ESESES': return ESESES_GNSS + elif source == 'JPL-SIDESHOW': + return JPL_SIDESHOW_GNSS + elif source == 'Generic': + return Generic_GNSS else: raise ValueError(f'{source:s} source not supported.') @@ -709,7 +802,7 @@ def get_gnss_los_velocity(self, geom_obj, start_date=None, end_date=None, else: self.velocity = np.nan if print_msg == True: - print(f'Velocity calculation failed for site {self.site}') + print(f'Velocity calculation failed for site {self.site:s}') return self.velocity, dis @@ -744,7 +837,7 @@ def dload_site(self, print_msg=True) -> str: print(f"Downloading data for site {self.site:s} from UNR NGL source") # URL and file name specs - url_prefix = 'http://geodesy.unr.edu/gnss_timeseries/tenv3' + url_prefix = 'http://geodesy.unr.edu/gps_timeseries/tenv3' if self.version == 'IGS08': self.file = os.path.join(self.data_dir, '{site:s}.{version:s}.tenv3'.\ @@ -759,7 +852,7 @@ def dload_site(self, print_msg=True) -> str: # download file if not present if os.path.exists(self.file): if print_msg == True: - print(f'file {self.file} exists--reading') + print(f'file {self.file:s} exists--reading') else: if print_msg == True: print(f'... downloading {self.file_url:s} to {self.file:s}') @@ -767,12 +860,12 @@ def dload_site(self, print_msg=True) -> str: return self.file - def get_stat_lat_lon(self, print_msg=True) -> (str, str): + def get_stat_lat_lon(self, print_msg=True) -> (float, float): """Get station lat/lon based on processing source. Retrieve data from the displacement file. 
- Modifies: self.lat/lon - str - Returns: self.lat/lon - str + Modifies: self.lat/lon - float + Returns: self.lat/lon - float """ if print_msg == True: print('calculating station lat/lon') @@ -839,7 +932,6 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, self.std_e, self.std_n, self.std_u) - class ESESES_GNSS(GNSS): """GNSS class for daily solutions processed by ESESES. @@ -901,7 +993,7 @@ def dload_site(self, print_msg=True) -> str: # download file if not present if os.path.exists(self.file): if print_msg == True: - print(f'file {self.file} exists--reading') + print(f'file {self.file:s} exists--reading') else: if print_msg == True: print(f'... downloading {self.file_url:s} to {self.file:s}') @@ -918,13 +1010,12 @@ def dload_site(self, print_msg=True) -> str: return self.file - - def get_stat_lat_lon(self, print_msg=True) -> (str, str): + def get_stat_lat_lon(self, print_msg=True) -> (float, float): """Get station lat/lon based on processing source. Retrieve data from the displacement file. - Modifies: self.lat/lon - str - Returns: self.lat/lon - str + Modifies: self.lat/lon - float + Returns: self.lat/lon - float """ if print_msg == True: print('calculating station lat/lon') @@ -1000,4 +1091,259 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, self.std_e, self.std_n, self.std_u) +class JPL_SIDESHOW_GNSS(GNSS): + """GNSS class for daily solutions processed by JPL-SIDESHOW. + + This object will assign the attributes: + site - str, four-digit site code + site_lat/lon - float + dates - 1D np.ndarray + date_list - list + dis_e/n/u - 1D np.ndarray + std_e,n,u - 1D np.ndarray + + based on the specific formats of the data source, using the functions: + dload_site + get_stat_lat_lon + read_displacement + """ + source = 'JPL-SIDESHOW' + + def dload_site(self, print_msg=True) -> str: + """Download the station displacement data from the + specified source. + + Modifies: self.file - str, local file path/name + self.file_url - str, file URL + Returns: self.file - str, local file path/name + """ + if print_msg == True: + print(f'downloading data for site {self.site:s} from the JPL-SIDESHOW source') + + # URL and file name specs + url_prefix = 'https://sideshow.jpl.nasa.gov/pub/JPL_GPS_Timeseries/repro2018a/post/point/' + self.file = os.path.join(self.data_dir, f'{self.site:s}.series') + self.file_url = os.path.join(url_prefix, os.path.basename(self.file)) + + # download file if not present + if os.path.exists(self.file): + if print_msg == True: + print(f'file {self.file:s} exists--reading') + else: + if print_msg == True: + print(f'... downloading {self.file_url:s} to {self.file:s}') + urlretrieve(self.file_url, self.file) #nosec + + return self.file + + def get_stat_lat_lon(self, print_msg=True) -> (float, float): + """Get station lat/lon based on processing source. + Retrieve data from the displacement file. 
+ + Modifies: self.lat/lon - float + Returns: self.lat/lon - float + """ + if print_msg == True: + print('calculating station lat/lon') + + # need to refer to the site list + site_list_file = dload_site_list(source='JPL-SIDESHOW') + + # find site in site list file + with open(site_list_file, 'r') as site_list: + for line in site_list: + if (line[:4] == self.site) and (line[5:8] == 'POS'): + site_lat, site_lon = line.split()[2:4] + + # format + self.site_lat = float(site_lat) + self.site_lon = float(site_lon) + + if print_msg == True: + print(f'\t{self.site_lat:f}, {self.site_lon:f}') + + return self.site_lat, self.site_lon + + def read_displacement(self, start_date=None, end_date=None, print_msg=True, + display=False): + """Read GNSS displacement time-series (defined by start/end_date) + Parameters: start/end_date - str, date in YYYYMMDD format + Returns: dates - 1D np.ndarray of datetime.datetime object + dis_e/n/u - 1D np.ndarray of displacement in meters in float32 + std_e/n/u - 1D np.ndarray of displacement STD in meters in float32 + """ + # download file if it does not exist + if not os.path.isfile(self.file): + self.dload_site(print_msg=print_msg) + + # read dates, dis_e, dis_n, dis_u + if print_msg == True: + print('reading time and displacement in east/north/vertical direction') + + # read data from file + data = np.loadtxt(self.file) + n_data = data.shape[0] + + # parse dates + self.dates = np.array([dt.datetime(*data[i,-6:].astype(int)) for i in range(n_data)]) + + # parse displacement data + (self.dis_e, + self.dis_n, + self.dis_u, + self.std_e, + self.std_n, + self.std_u) = data[:, 1:7].astype(float).T + + # cut out the specified time range + self.__crop_to_date_range__(start_date, end_date) + + # formulate date list + self.date_list = [date.strftime('%Y%m%d') for date in self.dates] + + # display if requested + if display == True: + self.display_data() + + return (self.dates, + self.dis_e, self.dis_n, self.dis_u, + self.std_e, self.std_n, self.std_u) + + +class Generic_GNSS(GNSS): + """GNSS class for daily solutions of an otherwise-unsupported source. + The user should format the station position data in a file called + .dat The file should have seven space-separated columns: + + date dis_e dis_n dis_u std_e std_n std_u + + where date is in the format or T; and + displacement values are in meters. + + For the generic type, it is necessary to have an accompanying file with + the site reference coordinates in the current folder. The specifications + for the GenericList.txt file are given above. + + This object will assign the attributes: + site - str, four-digit site code + site_lat/lon - float + dates - 1D np.ndarray + date_list - list + dis_e/n/u - 1D np.ndarray + std_e,n,u - 1D np.ndarray + + based on the specific formats of the data source, using the functions: + dload_site + get_stat_lat_lon + read_displacement + """ + source = 'Generic' + + def dload_site(self, print_msg=True) -> str: + """Download the station displacement data from the + specified source. 
+ + Modifies: self.file - str, local file path/name + self.file_url - str, file URL + Returns: self.file - str, local file path/name + """ + if print_msg == True: + print(f'reading data for site {self.site:s}') + + # URL and file name specs + self.file = os.path.join(self.data_dir, f'{self.site:s}.txt') + self.file_url = '' + + # download file if not present + if print_msg == True: + print(f'reading file {self.file:s}') + + return self.file + + def get_stat_lat_lon(self, print_msg=True) -> (str, str): + """Get station lat/lon based on processing source. + Retrieve data from the displacement file. + + Modifies: self.lat/lon - str + Returns: self.lat/lon - str + """ + if print_msg == True: + print('calculating station lat/lon') + + # need to refer to the site list + site_list_file = 'GenericList.txt' + + # find site in site list file + with open(site_list_file, 'r') as site_list: + for line in site_list: + if line[:4] == self.site: + site_lat, site_lon = line.split()[1:3] + + # format + self.site_lat = float(site_lat) + self.site_lon = float(site_lon) + + if print_msg == True: + print(f'\t{self.site_lat:f}, {self.site_lon:f}') + + return self.site_lat, self.site_lon + + def read_displacement(self, start_date=None, end_date=None, print_msg=True, + display=False): + """Read GNSS displacement time-series (defined by start/end_date) + Parameters: start/end_date - str, date in YYYYMMDD format + Returns: dates - 1D np.ndarray of datetime.datetime object + dis_e/n/u - 1D np.ndarray of displacement in meters in float32 + std_e/n/u - 1D np.ndarray of displacement STD in meters in float32 + """ + # download file if it does not exist + if not os.path.isfile(self.file): + self.dload_site(print_msg=print_msg) + + # read dates, dis_e, dis_n, dis_u + if print_msg == True: + print('reading time and displacement in east/north/vertical direction') + + # parse dates + with open(self.file, 'r') as data_file: + lines = data_file.readlines() + self.dates = [] + for line in lines: + date = line.split()[0] + date_len = len(date) + + # format + if date_len == 8: + datetime = dt.datetime.strptime(date, '%Y%m%d') + elif date_len == 15: + datetime = dt.datetime.strptime(date, '%Y%m%dT%H%M%S') + else: + raise ValueError('Date/time format not recognized') + + self.dates.append(datetime) + self.dates = np.array(self.dates) + + # parse displacement data + (self.dis_e, + self.dis_n, + self.dis_u, + self.std_e, + self.std_n, + self.std_u) = np.loadtxt(self.file, usecols=tuple(range(1,7))).T + + # cut out the specified time range + self.__crop_to_date_range__(start_date, end_date) + + # formulate date list + self.date_list = [date.strftime('%Y%m%d') for date in self.dates] + + # display if requested + if display == True: + self.display_data() + + return (self.dates, + self.dis_e, self.dis_n, self.dis_u, + self.std_e, self.std_n, self.std_u) + + #################################### End of GNSS class #################################### From 0cb2d6834ade18a88fafcda14be5c4087c2d6197 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 3 Apr 2024 20:32:16 +0000 Subject: [PATCH 18/44] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/mintpy/objects/gnss.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index e39e3ef9d..6d6cb34db 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -23,7 +23,6 @@ from mintpy.objects.coord import 
coordinate from mintpy.utils import ptime, readfile, time_func, utils1 as ut - supported_sources = ['UNR', 'ESESES', 'JPL-SIDESHOW', 'Generic'] From 29d58e78dd4d9ce2a215e045bc9a9acee7fe88c2 Mon Sep 17 00:00:00 2001 From: rzinke Date: Wed, 3 Apr 2024 13:35:56 -0700 Subject: [PATCH 19/44] codacy --- src/mintpy/objects/gnss.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index 6d6cb34db..b30e6f777 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -208,7 +208,6 @@ def read_JPL_SIDESHOW_station_list(site_list_file:str, print_msg=True) -> pd.Dat name_len = 4 # number of letters in a station name data_lines = [line.strip('\n') for line in lines if (len(line.split()) == line_len) \ and (len(line.split()[0]) == name_len)] - n_data_lines = len(data_lines) # transform format from (POS \n VEL) to (POS VEL) pos_lines = data_lines[0::2] From a52d22be6cb7c3046b0023bd39953a45136f5328 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 3 Apr 2024 20:36:48 +0000 Subject: [PATCH 20/44] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/mintpy/objects/gnss.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index b30e6f777..f20a7f8c9 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -200,7 +200,7 @@ def read_JPL_SIDESHOW_station_list(site_list_file:str, print_msg=True) -> pd.Dat print('Parsing JPL-SIDESHOW site list file') # read file contents - with open(site_list_file, 'r') as site_list: + with open(site_list_file) as site_list: lines = site_list.readlines() # find lines containing position and velocity data @@ -1148,7 +1148,7 @@ def get_stat_lat_lon(self, print_msg=True) -> (float, float): site_list_file = dload_site_list(source='JPL-SIDESHOW') # find site in site list file - with open(site_list_file, 'r') as site_list: + with open(site_list_file) as site_list: for line in site_list: if (line[:4] == self.site) and (line[5:8] == 'POS'): site_lat, site_lon = line.split()[2:4] @@ -1272,7 +1272,7 @@ def get_stat_lat_lon(self, print_msg=True) -> (str, str): site_list_file = 'GenericList.txt' # find site in site list file - with open(site_list_file, 'r') as site_list: + with open(site_list_file) as site_list: for line in site_list: if line[:4] == self.site: site_lat, site_lon = line.split()[1:3] @@ -1303,7 +1303,7 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, print('reading time and displacement in east/north/vertical direction') # parse dates - with open(self.file, 'r') as data_file: + with open(self.file) as data_file: lines = data_file.readlines() self.dates = [] for line in lines: From 9c04a5d9207be8ddd49e9d6c69fd0752980eed18 Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Mon, 15 Apr 2024 15:12:22 +0800 Subject: [PATCH 21/44] arg_utils: re-organize GNSS options into smaller groups --- src/mintpy/objects/gnss.py | 2 +- src/mintpy/utils/arg_utils.py | 57 +++++++++++++++++++---------------- src/mintpy/utils/plot.py | 12 +++++--- src/mintpy/view.py | 7 +---- 4 files changed, 41 insertions(+), 37 deletions(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index f20a7f8c9..854c368f2 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -57,7 +57,7 @@ def dload_site_list(out_file=None, source='UNR', print_msg=True) -> str: 
return out_file -def search_gnss(SNWE, source='UNR', start_date=None, end_date=None, +def search_gnss(SNWE, start_date=None, end_date=None, source='UNR', site_list_file=None, min_num_solution=None, print_msg=True): """Search available GNSS sites within the geo bounding box from UNR website Parameters: SNWE - tuple of 4 float, indicating (South, North, West, East) in degrees diff --git a/src/mintpy/utils/arg_utils.py b/src/mintpy/utils/arg_utils.py index ca336782b..e2dc4e0f2 100644 --- a/src/mintpy/utils/arg_utils.py +++ b/src/mintpy/utils/arg_utils.py @@ -247,37 +247,42 @@ def add_figure_argument(parser, figsize_img=False): def add_gnss_argument(parser): """Argument group parser for GNSS options""" gnss = parser.add_argument_group('GNSS', 'GNSS data to display') - gnss.add_argument('--gnss-source', dest='gnss_source', default='UNR', - choices={'UNR', 'ESESES'}, - help='GNSS processing source') gnss.add_argument('--show-gnss', dest='disp_gnss', action='store_true', - help='Show UNR GNSS location within the coverage.') - gnss.add_argument('--mask-gnss', dest='mask_gnss', action='store_true', - help='Mask out GNSS stations not coincident with valid data pixels') - gnss.add_argument('--gnss-label', dest='disp_gnss_label', action='store_true', - help='Show GNSS site name') - gnss.add_argument('--gnss-ms', dest='gnss_marker_size', type=float, default=6, - help='Plot GNSS value as scatter in size of ms**2 (default: %(default)s).') + help='Show UNR GNSS location within the coverage.') + gnss.add_argument('--gnss-source', dest='gnss_source', default='UNR', + choices={'UNR', 'ESESES'}, + help='Source of the GNSS displacement solution (default: %(default)s).') + + # compare GNSS with InSAR gnss.add_argument('--gnss-comp', dest='gnss_component', - choices={'enu2los', 'hz2los', 'up2los', 'horz', 'vert'}, - help='Plot GNSS in color indicating deformation velocity direction') - gnss.add_argument('--gnss-redo', dest='gnss_redo', action='store_true', - help='Re-calculate GNSS observations in LOS direction, ' - 'instead of read from existing CSV file.') - gnss.add_argument('--ref-gnss', dest='ref_gnss_site', type=str, help='Reference GNSS site') - gnss.add_argument('--ex-gnss', dest='ex_gnss_sites', type=str, nargs='*', - help='Exclude GNSS sites, require --gnss-comp.') + choices={'enu2los', 'hz2los', 'up2los', 'horz', 'vert'}, + help='Plot GNSS in color indicating deformation velocity direction (default: %(default)s).') + gnss.add_argument('--ref-gnss', dest='ref_gnss_site', type=str, metavar='SITE_NAME', + help='Reference GNSS site') + gnss.add_argument('--ex-gnss', dest='ex_gnss_sites', type=str, nargs='*', metavar='SITE_NAME', + help='Exclude GNSS sites, require --gnss-comp.') gnss.add_argument('--gnss-start-date', dest='gnss_start_date', type=str, metavar='YYYYMMDD', - help='start date of GNSS data, default is date of the 1st SAR acquisition') + help='start date of GNSS data, default: the 1st SAR acquisition') gnss.add_argument('--gnss-end-date', dest='gnss_end_date', type=str, metavar='YYYYMMDD', - help='start date of GNSS data, default is date of the last SAR acquisition') - gnss.add_argument('--horz-az','--hz-az', dest='horz_az_angle', type=float, default=-90., - help='Azimuth angle (anti-clockwise from the north) of the horizontal movement in degrees\n' - 'E.g.: -90. for east direction [default]\n' - ' 0. 
for north direction\n' - 'Set to the azimuth angle of the strike-slip fault to ' - 'show the fault-parallel displacement.') + help='end date of GNSS data, default: the last SAR acquisition') + gnss.add_argument('--horz-az','--hz-az', dest='horz_az_angle', type=float, default=-90., metavar='NUM', + help='Azimuth angle (anti-clockwise from the north) of the horizontal movement in degrees\n' + 'E.g.: -90. for east direction [default]\n' + ' 0. for north direction\n' + 'Set to the azimuth angle of the strike-slip fault to ' + 'show the fault-parallel displacement.') + gnss.add_argument('--gnss-redo', dest='gnss_redo', action='store_true', + help='Re-calculate GNSS observations in LOS direction, ' + 'instead of read from existing CSV file.') + + # plot style + gnss.add_argument('--gnss-label', dest='disp_gnss_label', action='store_true', + help='Show GNSS site name') + gnss.add_argument('--mask-gnss', dest='mask_gnss', action='store_true', + help='Mask out GNSS stations not coincident with valid data pixels') + gnss.add_argument('--gnss-ms', dest='gnss_marker_size', type=float, default=6, metavar='NUM', + help='Plot GNSS value as scatter in size of ms**2 (default: %(default)s).') return parser diff --git a/src/mintpy/utils/plot.py b/src/mintpy/utils/plot.py index 9dd206df8..4e602e58f 100644 --- a/src/mintpy/utils/plot.py +++ b/src/mintpy/utils/plot.py @@ -1136,8 +1136,12 @@ def plot_gnss(ax, SNWE, inps, metadata=dict(), print_msg=True): SNWE = (south, north, west, east) # query for GNSS stations - site_names, site_lats, site_lons = gnss.search_gnss(SNWE, source=inps.gnss_source, - start_date=start_date, end_date=end_date) + site_names, site_lats, site_lons = gnss.search_gnss( + SNWE, + start_date=start_date, + end_date=end_date, + source=inps.gnss_source, + ) if site_names.size == 0: warnings.warn(f'No GNSS found within {SNWE} during {start_date} - {end_date}!') print(' continue without GNSS plots.') @@ -1257,8 +1261,8 @@ def plot_insar_vs_gnss_scatter(vel_file, csv_file='gnss_enu2los.csv', msk_file=N cutoff - float, threshold in terms of med abs dev (MAD) for outlier detection xname - str, xaxis label vlim - list of 2 float, display value range in the unit of cm/yr - Default is None to grab from data - If set, the range will be used to prune the SAR and GNSS observations + Default is None to grab from data + If set, the range will be used to prune the SAR and GNSS observations ex_gnss_sites - list of str, exclude GNSS sites for analysis and plotting. 
Returns: sites - list of str, GNSS site names used for comparison insar_obs - 1D np.ndarray in float32, InSAR velocity in cm/yr diff --git a/src/mintpy/view.py b/src/mintpy/view.py index 090fab3eb..2d8917134 100644 --- a/src/mintpy/view.py +++ b/src/mintpy/view.py @@ -545,13 +545,8 @@ def extent2meshgrid(extent: tuple, ds_shape: list): # Reference (InSAR) data to a GNSS site coord = ut.coordinate(metadata) if inps.disp_gnss and inps.gnss_component and inps.ref_gnss_site: - # define GNSS station object based on processing source GNSS = gnss.GNSS.get_gnss_obj_by_source(inps.gnss_source) - - # GNSS reference site - ref_site_gnss = GNSS(site=inps.ref_gnss_site) - ref_site_gnss.open() - ref_site_lalo = ref_site_gnss.get_stat_lat_lon(print_msg=False) + ref_site_lalo = GNSS(site=inps.ref_gnss_site).get_stat_lat_lon(print_msg=False) y, x = coord.geo2radar(ref_site_lalo[0], ref_site_lalo[1])[0:2] ref_data = data[y - inps.pix_box[1], x - inps.pix_box[0]] data -= ref_data From e5ba8e324e2ec729e2f3fb0c8be7fceee96ca35a Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Mon, 15 Apr 2024 16:12:56 +0800 Subject: [PATCH 22/44] arg_utils: add *gps* option names for backward compatibility + utils.arg_utils.add_gnss_argument(): add *gps* names as an alternative to the new *gnss* names, for backward compatibility. + objects.gnss.py: - add GNSS_SITE_LIST_URLS to simplify dload_site_list() - move get_gnss_class() outside of class definition for easy re-use. --- src/mintpy/objects/gnss.py | 68 +++++++++++++++++------------------ src/mintpy/utils/arg_utils.py | 24 ++++++------- src/mintpy/view.py | 2 +- 3 files changed, 45 insertions(+), 49 deletions(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index 854c368f2..011b7d5ea 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -1,7 +1,7 @@ ############################################################ # Program is part of MintPy # # Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi # -# Author: Zhang Yunjun, Jul 2018 # +# Author: Zhang Yunjun, Robert Zinke, Jul 2018 # ############################################################ # Utility scripts for GNSS handling # Recommend import: @@ -23,42 +23,40 @@ from mintpy.objects.coord import coordinate from mintpy.utils import ptime, readfile, time_func, utils1 as ut -supported_sources = ['UNR', 'ESESES', 'JPL-SIDESHOW', 'Generic'] +GNSS_SITE_LIST_URLS = { + 'UNR' : 'http://geodesy.unr.edu/NGLStationPages/DataHoldings.txt', + 'ESESES' : 'http://garner.ucsd.edu/pub/measuresESESES_products/Velocities/ESESES_Velocities.txt', + 'JPL-SIDESHOW' : 'https://sideshow.jpl.nasa.gov/post/tables/table2.html', + 'Generic' : None, +} +GNSS_SOURCES = list(GNSS_SITE_LIST_URLS.keys()) def dload_site_list(out_file=None, source='UNR', print_msg=True) -> str: """Download single file with list of GNSS site locations. """ # check source is supported - assert source in supported_sources, \ - f'Source {source:s} not supported. Use one of {supported_sources}' + assert source in GNSS_SOURCES, \ + f'{source:s} GNSS is NOT supported! Use one of {GNSS_SOURCES}.' 
# determine URL - if source == 'UNR': - UNR_site_list_file_url = 'http://geodesy.unr.edu/NGLStationPages/DataHoldings.txt' - site_list_file_url = UNR_site_list_file_url - elif source == 'ESESES': - ESESES_site_list_file_url = 'http://garner.ucsd.edu/pub/measuresESESES_products/Velocities/ESESES_Velocities.txt' - site_list_file_url = ESESES_site_list_file_url - elif source == 'JPL-SIDESHOW': - JPL_SIDESHOW_site_list_file_url = 'https://sideshow.jpl.nasa.gov/post/tables/table2.html' - site_list_file_url = JPL_SIDESHOW_site_list_file_url + site_list_url = GNSS_SITE_LIST_URLS[source] # handle output file if out_file is None: - out_file = os.path.basename(site_list_file_url) + out_file = os.path.basename(site_list_url) # download file if not os.path.exists(out_file): if print_msg: - print(f'Downloading site list from {source:s}: {site_list_file_url:s} to {out_file:s}') - urlretrieve(site_list_file_url, out_file) #nosec + print(f'Downloading site list from {source:s}: {site_list_url:s} to {out_file:s}') + urlretrieve(site_list_url, out_file) #nosec return out_file def search_gnss(SNWE, start_date=None, end_date=None, source='UNR', - site_list_file=None, min_num_solution=None, print_msg=True): + site_list_file=None, min_num_solution=None, print_msg=True): """Search available GNSS sites within the geo bounding box from UNR website Parameters: SNWE - tuple of 4 float, indicating (South, North, West, East) in degrees source - str, program or institution that processed the GNSS data @@ -395,8 +393,7 @@ def get_gnss_los_obs(meta, obs_type, site_names, start_date, end_date, source='U prog_bar.update(i+1, suffix=f'{i+1}/{num_site} {site_name:s}') # calculate GNSS data value - GNSSclass = GNSS.get_gnss_obj_by_source(source) - obj = GNSSclass(site_name) + obj = get_gnss_class(source)(site_name) obj.open(print_msg=print_msg) vel, dis_ts = obj.get_gnss_los_velocity( geom_obj, @@ -482,6 +479,20 @@ def read_GSI_F3(gnss_dir, site, start_date=None, end_date=None): #################################### End of GNSS-GSI utility functions ############################## +def get_gnss_class(source:str): + """Return the appropriate GNSS child class based on processing source. + """ + if source == 'UNR': + return UNR_GNSS + elif source == 'ESESES': + return ESESES_GNSS + elif source == 'JPL-SIDESHOW': + return JPL_SIDESHOW_GNSS + elif source == 'Generic': + return Generic_GNSS + else: + raise ValueError(f'{source:s} source not supported.') + #################################### Beginning of GNSS class ######################################## class GNSS: @@ -489,7 +500,7 @@ class GNSS: The GNSS class is solely meant to be a parent class. Child classes, defined below, support functions for downloading and parsing GNSS position based on - the processing source (e.g., UNR, etc.). Use the `get_gnss_obj_by_source` + the processing source (e.g., UNR, etc.). Use the `get_gnss_class` method to determine appropriate child class. """ @@ -529,21 +540,6 @@ def open(self, file=None, print_msg=True): return None - @staticmethod - def get_gnss_obj_by_source(source:str): - """Return the appropriate GNSS child class based on processing source. - """ - if source == 'UNR': - return UNR_GNSS - elif source == 'ESESES': - return ESESES_GNSS - elif source == 'JPL-SIDESHOW': - return JPL_SIDESHOW_GNSS - elif source == 'Generic': - return Generic_GNSS - else: - raise ValueError(f'{source:s} source not supported.') - def dload_site(self, print_msg=True): raise NotImplementedError('Func. dload_site not implemented. 
Override with child class.') @@ -720,7 +716,7 @@ def read_gnss_los_displacement(self, geom_obj, start_date=None, end_date=None, r site_lalo = self.get_stat_lat_lon(print_msg=print_msg) # define GNSS station object based on processing source - GNSS = self.get_gnss_obj_by_source(self.source) + GNSS = get_gnss_class(self.source) # get LOS displacement relative to another GNSS site if ref_site: diff --git a/src/mintpy/utils/arg_utils.py b/src/mintpy/utils/arg_utils.py index e2dc4e0f2..c6a62f8f3 100644 --- a/src/mintpy/utils/arg_utils.py +++ b/src/mintpy/utils/arg_utils.py @@ -247,24 +247,24 @@ def add_figure_argument(parser, figsize_img=False): def add_gnss_argument(parser): """Argument group parser for GNSS options""" gnss = parser.add_argument_group('GNSS', 'GNSS data to display') - gnss.add_argument('--show-gnss', dest='disp_gnss', action='store_true', + gnss.add_argument('--show-gnss','--show-gps', dest='disp_gnss', action='store_true', help='Show UNR GNSS location within the coverage.') - gnss.add_argument('--gnss-source', dest='gnss_source', default='UNR', + gnss.add_argument('--gnss-source','--gps-source', dest='gnss_source', default='UNR', choices={'UNR', 'ESESES'}, help='Source of the GNSS displacement solution (default: %(default)s).') # compare GNSS with InSAR - gnss.add_argument('--gnss-comp', dest='gnss_component', + gnss.add_argument('--gnss-comp','--gps-comp', dest='gnss_component', choices={'enu2los', 'hz2los', 'up2los', 'horz', 'vert'}, - help='Plot GNSS in color indicating deformation velocity direction (default: %(default)s).') - gnss.add_argument('--ref-gnss', dest='ref_gnss_site', type=str, metavar='SITE_NAME', + help='Plot GNSS in color indicating deformation velocity in (default: %(default)s).') + gnss.add_argument('--ref-gnss','--ref-gps', dest='ref_gnss_site', type=str, metavar='SITE_NAME', help='Reference GNSS site') - gnss.add_argument('--ex-gnss', dest='ex_gnss_sites', type=str, nargs='*', metavar='SITE_NAME', + gnss.add_argument('--ex-gnss','--ex-gps', dest='ex_gnss_sites', type=str, nargs='*', metavar='SITE_NAME', help='Exclude GNSS sites, require --gnss-comp.') - gnss.add_argument('--gnss-start-date', dest='gnss_start_date', type=str, metavar='YYYYMMDD', + gnss.add_argument('--gnss-start-date','--gps-start-date', dest='gnss_start_date', type=str, metavar='YYYYMMDD', help='start date of GNSS data, default: the 1st SAR acquisition') - gnss.add_argument('--gnss-end-date', dest='gnss_end_date', type=str, metavar='YYYYMMDD', + gnss.add_argument('--gnss-end-date','--gps-end-date', dest='gnss_end_date', type=str, metavar='YYYYMMDD', help='end date of GNSS data, default: the last SAR acquisition') gnss.add_argument('--horz-az','--hz-az', dest='horz_az_angle', type=float, default=-90., metavar='NUM', help='Azimuth angle (anti-clockwise from the north) of the horizontal movement in degrees\n' @@ -272,16 +272,16 @@ def add_gnss_argument(parser): ' 0. 
for north direction\n' 'Set to the azimuth angle of the strike-slip fault to ' 'show the fault-parallel displacement.') - gnss.add_argument('--gnss-redo', dest='gnss_redo', action='store_true', + gnss.add_argument('--gnss-redo','--gps-redo', dest='gnss_redo', action='store_true', help='Re-calculate GNSS observations in LOS direction, ' 'instead of read from existing CSV file.') # plot style - gnss.add_argument('--gnss-label', dest='disp_gnss_label', action='store_true', + gnss.add_argument('--gnss-label','--gps-label', dest='disp_gnss_label', action='store_true', help='Show GNSS site name') - gnss.add_argument('--mask-gnss', dest='mask_gnss', action='store_true', + gnss.add_argument('--mask-gnss','--mask-gps', dest='mask_gnss', action='store_true', help='Mask out GNSS stations not coincident with valid data pixels') - gnss.add_argument('--gnss-ms', dest='gnss_marker_size', type=float, default=6, metavar='NUM', + gnss.add_argument('--gnss-ms','--gps-ms', dest='gnss_marker_size', type=float, default=6, metavar='NUM', help='Plot GNSS value as scatter in size of ms**2 (default: %(default)s).') return parser diff --git a/src/mintpy/view.py b/src/mintpy/view.py index 2d8917134..023028c38 100644 --- a/src/mintpy/view.py +++ b/src/mintpy/view.py @@ -545,7 +545,7 @@ def extent2meshgrid(extent: tuple, ds_shape: list): # Reference (InSAR) data to a GNSS site coord = ut.coordinate(metadata) if inps.disp_gnss and inps.gnss_component and inps.ref_gnss_site: - GNSS = gnss.GNSS.get_gnss_obj_by_source(inps.gnss_source) + GNSS = gnss.get_gnss_class(inps.gnss_source) ref_site_lalo = GNSS(site=inps.ref_gnss_site).get_stat_lat_lon(print_msg=False) y, x = coord.geo2radar(ref_site_lalo[0], ref_site_lalo[1])[0:2] ref_data = data[y - inps.pix_box[1], x - inps.pix_box[0]] From b038b0072ad8f2b6963c38be0343e95e9706490f Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Mon, 15 Apr 2024 22:13:16 +0800 Subject: [PATCH 23/44] fix bug introduced in the previous 2 commits --- src/mintpy/objects/gnss.py | 31 ++++++++++++++++--------------- src/mintpy/view.py | 5 +++-- 2 files changed, 19 insertions(+), 17 deletions(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index 011b7d5ea..f4d60c448 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -302,8 +302,8 @@ def get_baseline_change(dates1, pos_x1, pos_y1, pos_z1, def get_gnss_los_obs(meta, obs_type, site_names, start_date, end_date, source='UNR', - gnss_comp='enu2los', horz_az_angle=-90., model=None, - print_msg=True, redo=False): + gnss_comp='enu2los', horz_az_angle=-90., model=None, + print_msg=True, redo=False): """Get the GNSS LOS observations given the query info. 
Parameters: meta - dict, dictionary of metadata of the InSAR file @@ -393,21 +393,22 @@ def get_gnss_los_obs(meta, obs_type, site_names, start_date, end_date, source='U prog_bar.update(i+1, suffix=f'{i+1}/{num_site} {site_name:s}') # calculate GNSS data value - obj = get_gnss_class(source)(site_name) - obj.open(print_msg=print_msg) - vel, dis_ts = obj.get_gnss_los_velocity( + gnss_obj = get_gnss_class(source)(site_name) + gnss_obj.open(print_msg=False) + vel, dis_ts = gnss_obj.get_gnss_los_velocity( geom_obj, start_date=start_date, end_date=end_date, gnss_comp=gnss_comp, horz_az_angle=horz_az_angle, - model=model) + model=model, + ) # ignore time-series if the estimated velocity is nan dis = np.nan if np.isnan(vel) else dis_ts[-1] - dis_ts[0] # save data to list - data_list.append([obj.site, obj.site_lon, obj.site_lat, dis, vel]) + data_list.append([gnss_obj.site, gnss_obj.site_lon, gnss_obj.site_lat, dis, vel]) prog_bar.close() # write to CSV file @@ -744,9 +745,9 @@ def read_gnss_los_displacement(self, geom_obj, start_date=None, end_date=None, r def get_gnss_los_velocity(self, geom_obj, start_date=None, end_date=None, - ref_site=None, gnss_comp='enu2los', - horz_az_angle=-90., model=None, - print_msg=True): + ref_site=None, gnss_comp='enu2los', + horz_az_angle=-90., model=None, + print_msg=True): """Convert the three-component displacement data into LOS velocity. @@ -769,11 +770,11 @@ def get_gnss_los_velocity(self, geom_obj, start_date=None, end_date=None, """ # retrieve displacement data dates, dis = self.read_gnss_los_displacement(geom_obj, - start_date=start_date, - end_date=end_date, - ref_site=ref_site, - gnss_comp=gnss_comp, - horz_az_angle=horz_az_angle)[:2] + start_date=start_date, + end_date=end_date, + ref_site=ref_site, + gnss_comp=gnss_comp, + horz_az_angle=horz_az_angle)[:2] # displacement -> velocity # if: diff --git a/src/mintpy/view.py b/src/mintpy/view.py index 023028c38..b9c24951e 100644 --- a/src/mintpy/view.py +++ b/src/mintpy/view.py @@ -545,8 +545,9 @@ def extent2meshgrid(extent: tuple, ds_shape: list): # Reference (InSAR) data to a GNSS site coord = ut.coordinate(metadata) if inps.disp_gnss and inps.gnss_component and inps.ref_gnss_site: - GNSS = gnss.get_gnss_class(inps.gnss_source) - ref_site_lalo = GNSS(site=inps.ref_gnss_site).get_stat_lat_lon(print_msg=False) + gnss_obj = gnss.get_gnss_class(inps.gnss_source)(site=inps.ref_gnss_site) + gnss_obj.open(print_msg=False) + ref_site_lalo = gnss_obj.get_stat_lat_lon(print_msg=False) y, x = coord.geo2radar(ref_site_lalo[0], ref_site_lalo[1])[0:2] ref_data = data[y - inps.pix_box[1], x - inps.pix_box[0]] data -= ref_data From b7a7e057b303a58f0e1ad02c31850df370224d6f Mon Sep 17 00:00:00 2001 From: rzinke Date: Mon, 15 Apr 2024 14:55:22 -0700 Subject: [PATCH 24/44] Changed pandas functions to numpy. Set source-based folder names to default. Changed Generic to upper case. Added options for JPL-SIDESHOW and GENERIC in arg_utils choices. 
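
Illustrative usage sketch (not part of the patch itself): the snippet below assumes
the refactored module layout proposed in this patch series; the bounding box values
and the site index are made up purely for demonstration, and exact argument names
or defaults may still change in later commits.

    from mintpy.objects import gnss

    # query sites from the UNR site list within a (S, N, W, E) bounding box;
    # the site list file is downloaded to the current directory if missing
    site_names, site_lats, site_lons = gnss.search_gnss(
        SNWE=(31.9, 34.2, -118.2, -115.2),   # example values only
        source='UNR',
        start_date='20150101',
        end_date='20200101',
    )

    # daily position files are stored in a source-based folder, e.g. ./GNSS-UNR
    gnss_obj = gnss.get_gnss_class('UNR')(site=site_names[0])
    gnss_obj.open()   # download (if needed) and read the displacement time series

    # for the new GENERIC source, supply your own ./GenericList.txt site list
    # (columns: SITE lat lon [...]) plus per-site position files as described
    # in the GENERIC_GNSS docstrings below
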
--- src/mintpy/objects/gnss.py | 344 ++++++++++++++++++++++------------ src/mintpy/utils/arg_utils.py | 2 +- 2 files changed, 226 insertions(+), 120 deletions(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index f4d60c448..d209da5ff 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -17,7 +17,6 @@ import matplotlib.pyplot as plt import numpy as np -import pandas as pd from pyproj import Geod from mintpy.objects.coord import coordinate @@ -27,7 +26,7 @@ 'UNR' : 'http://geodesy.unr.edu/NGLStationPages/DataHoldings.txt', 'ESESES' : 'http://garner.ucsd.edu/pub/measuresESESES_products/Velocities/ESESES_Velocities.txt', 'JPL-SIDESHOW' : 'https://sideshow.jpl.nasa.gov/post/tables/table2.html', - 'Generic' : None, + 'GENERIC' : None, } GNSS_SOURCES = list(GNSS_SITE_LIST_URLS.keys()) @@ -55,6 +54,108 @@ def dload_site_list(out_file=None, source='UNR', print_msg=True) -> str: return out_file +def crop_site_data_by_index(site_data: dict, idx: np.ndarray) -> dict: + """Remove GNSS sites by index from a site_data dictionary. + + Parameters: site_data - dict of ndarray, table-like object with GNSS sites + loaded from the search_gnss function + idx - ndarray of bool, indices to keep/exclude + Returns: site_data - dict of ndarray, site_data cropped by index + """ + for key in site_data: + site_data[key] = site_data[key][idx] + + return site_data + +def crop_site_data_in_space(site_data: dict, SNWE: tuple, print_msg=False) -> dict: + """Remove GNSS sites by geographic location. + + Parameters: site_data - dict of ndarray, table-like object with GNSS sites + loaded from the search_gnss function + SNWE - tuple of 4 float, indicating (South, North, West, East) in degrees + Returns: site_data - dict of ndarray, cropped site_data + """ + # parse bounding box + lat_min, lat_max, lon_min, lon_max = SNWE + assert (lon_min < lon_max) and (lat_min < lat_max), 'Check bounding box' + + if print_msg == True: + print('cropping to') + print(f'lon range: {lon_min:.5f} to {lon_max:.5f}') + print(f'lat range: {lat_min:.5f} to {lat_max:.5f}') + + # limit in space + idx = (site_data['lat'] >= lat_min) \ + & (site_data['lat'] <= lat_max) \ + & (site_data['lon'] >= lon_min) \ + & (site_data['lon'] <= lon_max) + site_data = crop_site_data_by_index(site_data, idx) + + if print_msg == True: + print('... {:d} sites remaining'.format(len(site_data['site']))) + + return site_data + +def crop_site_data_in_time(site_data: dict, start_date: None | str, end_date: None | str, + print_msg=True) -> dict: + """Remove GNSS sites by start/end date. 
+ + Parameters: site_data - dict of ndarray, table-like object with GNSS sites + loaded from the search_gnss function + start_date - str, date in YYYYMMDD format + end_date - str, date in YYYYMMDD format + Returns: site_data - dict of ndarray, cropped site_data + """ + # check start and end dates if provided + if start_date is not None: + start_date = dt.datetime.strptime(start_date, '%Y%m%d') + if end_date is not None: + end_date = dt.datetime.strptime(end_date, '%Y%m%d') + if start_date is not None and end_date is not None: + assert(start_date < end_date), 'start date must be before end date' + + if print_msg == True: + print(f'cropping by date range {start_date} to {end_date}') + + # crop by start date + if start_date is not None: + if 'start_date' in site_data.keys(): + idx = site_data['start_date'] <= start_date + site_data = crop_site_data_by_index(site_data, idx) + + if end_date is not None: + if 'start_date' in site_data.keys(): + idx = site_data['end_date'] >= end_date + site_data = crop_site_data_by_index(site_data, idx) + + if print_msg == True: + print('... {:d} sites remaining'.format(len(site_data['site']))) + + return site_data + +def crop_site_data_by_num_solutions(site_data: dict, min_num_solution: None | int, + print_msg=True) -> dict: + """Remove GNSS sites based on a minimum number of solutions. + + Parameters: site_data - dict of ndarray, table-like object with GNSS sites + loaded from the search_gnss function + min_num_solution - int, minimum number of positions + Returns: site_data - dict of ndarray, cropped site_data + """ + if print_msg == True: + print(f'cropping data by min num solutions: {min_num_solution}') + + if min_num_solution is not None: + if 'num_solution' in site_data.keys(): + idx = site_data.num_solution >= min_num_solution + site_data = crop_site_data_by_index(site_data, idx) + + if print_msg == True: + print('... 
{:d} sites remaining'.format(len(site_data['site']))) + + return site_data + + def search_gnss(SNWE, start_date=None, end_date=None, source='UNR', site_list_file=None, min_num_solution=None, print_msg=True): """Search available GNSS sites within the geo bounding box from UNR website @@ -68,18 +169,10 @@ def search_gnss(SNWE, start_date=None, end_date=None, source='UNR', site_lats - 1D np.array, lat site_lons - 1D np.array, lon """ - # check start and end dates if provided - if start_date is not None: - start_date = dt.datetime.strptime(start_date, '%Y%m%d') - if end_date is not None: - end_date = dt.datetime.strptime(end_date, '%Y%m%d') - if start_date is not None and end_date is not None: - assert(start_date < end_date), 'Start date must be before end date' - # check file name if site_list_file is None: - if source == 'Generic': - raise ValueError('Site list file must be specified for generic inputs') + if source == 'GENERIC': + raise ValueError('site list file must be specified for generic inputs') else: site_list_file = dload_site_list(source=source, print_msg=print_msg) @@ -95,107 +188,85 @@ def search_gnss(SNWE, start_date=None, end_date=None, source='UNR', site_data = read_ESESES_station_list(site_list_file) elif source == 'JPL-SIDESHOW': site_data = read_JPL_SIDESHOW_station_list(site_list_file) - elif source == 'Generic': - site_data = read_Generic_station_list(site_list_file) + elif source == 'GENERIC': + site_data = read_GENERIC_station_list(site_list_file) if print_msg == True: - print('Loaded data for fields: {:s}'.\ - format(' '.join(list(site_data.columns)))) - - # ensure that station name is consistent - site_data['site'] = [site_data.iloc[i,:].site.upper() for i in range(site_data.shape[0])] + print('loaded {:d} sites with fields: {:s}'.\ + format(len(site_data['site']), ' '.join(site_data.keys()))) - # parse bounding box - lat_min, lat_max, lon_min, lon_max = SNWE - assert (lon_min < lon_max) and (lat_min < lat_max), 'Check bounding box' - - if print_msg == True: - print('Cropping to') - print(f'lon range: {lon_min:.5f} to {lon_max:.5f}') - print(f'lat range: {lat_min:.5f} to {lat_max:.5f}') - - # Ensure lon values in (-180, 180] - site_data['lon'] = [lon - 360 if lon > 180 else lon for lon in site_data['lon']] + # ensure that site data formatting is consistent + site_data['site'] = np.array([site.upper() for site in site_data['site']]) + site_data['lat'] = site_data['lat'].astype(float) + site_data['lon'] = site_data['lon'].astype(float) + site_data['lon'][site_data['lon'] > 180] -= 360 # ensure lon values in (-180, 180] # limit in space - drop_ndx = (site_data.lat < lat_min) \ - | (site_data.lat > lat_max) \ - | (site_data.lon < lon_min) \ - | (site_data.lon > lon_max) - site_data.drop(site_data[drop_ndx].index, inplace=True) + site_data = crop_site_data_in_space(site_data, SNWE, print_msg=print_msg) # limit in time - if start_date is not None: - if hasattr(site_data, 'start_date'): - drop_ndx = site_data.start_date > start_date - site_data.drop(site_data[drop_ndx].index, inplace=True) - else: - print('No date information available--date range not applied to GNSS site selection') - - if end_date is not None: - if hasattr(site_data, 'end_date'): - drop_ndx = site_data.end_date < end_date - site_data.drop(site_data[drop_ndx].index, inplace=True) - else: - print('No date information available--date range not applied to GNSS site selection') + site_data = crop_site_data_in_time(site_data, start_date, end_date, print_msg=print_msg) # limit based on number of solutions - if 
hasattr(site_data, 'num_solution'): - drop_ndx = site_data.num_solution < min_num_solution - site_data.drop(site_data[drop_ndx].index, inplace=True) - - # final reporting - if print_msg == True: - print(f'{site_data.shape[0]:d} stations available') + site_data = crop_site_data_by_num_solutions(site_data, min_num_solution, print_msg=print_msg) - return (site_data.site.to_numpy(), - site_data.lat.to_numpy(), - site_data.lon.to_numpy()) + return (site_data['site'], + site_data['lat'], + site_data['lon']) -def read_UNR_station_list(site_list_file:str, print_msg=True) -> pd.DataFrame: +def read_UNR_station_list(site_list_file:str, print_msg=True) -> np.ndarray: """Return names and lon/lat values for UNR GNSS stations. """ if print_msg == True: - print('Parsing UNR site list file') + print('parsing UNR site list file') - # Read file contents - site_data = pd.read_fwf(site_list_file, - widths=(4, 9, 12, 9, 14, 14, 14, 11, 11, 11, 7)) - - # Rename columns for uniformity - site_data.rename(columns={'Sta': 'site', - 'Lat(deg)': 'lat', 'Long(deg)': 'lon', - 'Dtbeg': 'start_date', 'Dtend': 'end_date', - 'NumSol': 'num_solution'}, inplace=True) - - # Format dates - site_data['start_date'] = [dt.datetime.strptime(date, '%Y-%m-%d') \ - for date in site_data.start_date] - site_data['end_date'] = [dt.datetime.strptime(date, '%Y-%m-%d') \ - for date in site_data.end_date] + # read file contents + txt_data = np.loadtxt(site_list_file, + dtype=bytes, + skiprows=1, + usecols=(0,1,2,3,4,5,6,7,8,9,10)).astype(str) + + # write data to dictionary + site_data = { + 'site': txt_data[:,0], + 'lat': txt_data[:,1], + 'lon': txt_data[:,2], + 'start_date': txt_data[:,7], + 'end_date': txt_data[:,8], + 'num_solution': txt_data[:,10], + } + + # format dates + site_data['start_date'] = np.array([dt.datetime.strptime(date, '%Y-%m-%d') \ + for date in site_data['start_date']]) + site_data['end_date'] = np.array([dt.datetime.strptime(date, '%Y-%m-%d') \ + for date in site_data['end_date']]) return site_data -def read_ESESES_station_list(site_list_file:str, print_msg=True) -> pd.DataFrame: +def read_ESESES_station_list(site_list_file:str, print_msg=True) -> np.ndarray: """Return names and lon/lat values for JPL GNSS stations. """ if print_msg == True: - print('Parsing ESESES site list file') + print('parsing ESESES site list file') - # Read file contents - site_data = pd.read_csv(site_list_file, header = 14, sep=r'\s+') + # read file contents + txt_data = np.loadtxt(site_list_file, skiprows=17, dtype=str) - # Rename columns for uniformity - site_data.rename(columns={'Site': 'site', - 'Latitude': 'lat', 'Longitude': 'lon'}, inplace=True) + # write data to dictionary + site_data = { + 'site': txt_data[:,0], + 'lon': txt_data[:,1], + 'lat': txt_data[:,2], + } return site_data -def read_JPL_SIDESHOW_station_list(site_list_file:str, print_msg=True) -> pd.DataFrame: +def read_JPL_SIDESHOW_station_list(site_list_file:str, print_msg=True) -> np.ndarray: """Return names and lon/lat values for JPL-SIDESHOW GNSS stations. 
""" if print_msg == True: - print('Parsing JPL-SIDESHOW site list file') + print('parsing JPL-SIDESHOW site list file') # read file contents with open(site_list_file) as site_list: @@ -244,16 +315,15 @@ def read_JPL_SIDESHOW_station_list(site_list_file:str, print_msg=True) -> pd.Dat Eerrs.append(float(Eerr)) Uerrs.append(float(Uerr)) - # format data frame - data = {'site': sites, - 'lat': lats, 'lon': lons, 'elev': elevs, - 'vel_n': Nvels, 'vel_e': Evels, 'vel_u': Uvels, - 'err_n': Nerrs, 'err_e': Eerrs, 'err_u': Uerrs} - site_data = pd.DataFrame(data) + # write data to dictionary + site_data = {'site': np.array(sites), + 'lat': np.array(lats), + 'lon': np.array(lons), + } return site_data -def read_Generic_station_list(site_list_file:str, print_msg=True) -> pd.DataFrame: +def read_GENERIC_station_list(site_list_file:str, print_msg=True) -> np.ndarray: """Return names and lon/lat values for GNSS stations processed by an otherwise-unsupported source. @@ -261,17 +331,24 @@ def read_Generic_station_list(site_list_file:str, print_msg=True) -> pd.DataFram GenericList.txt The file should have three, nine, or eleven space- separated columns: - site lat lon [vel_e vel_n vel_u err_e err_n err_u] [start_date end_date] + SITE lat lon [vel_e vel_n vel_u err_e err_n err_u] [start_date end_date] where site is the four-digit, alphanumeric (uppercase) site code; and lat/lon are in decimal degrees. If included, vel should be in units of m/yr; and dates should be in format YYYYMMDD. """ if print_msg == True: - print('Parsing JPL-SIDESHOW site list file') + print('Parsing GENERIC site list file') # read file contents - site_data = pd.read_csv(site_list_file, delimiter=' ', names=('site', 'lat', 'lon')) + txt_data = np.loadtxt(site_list_file, dtype=str) + + # write data to dictionary + site_data = { + 'site': txt_data[:,0], + 'lon': txt_data[:,1], + 'lat': txt_data[:,2], + } return site_data @@ -489,8 +566,8 @@ def get_gnss_class(source:str): return ESESES_GNSS elif source == 'JPL-SIDESHOW': return JPL_SIDESHOW_GNSS - elif source == 'Generic': - return Generic_GNSS + elif source == 'GENERIC': + return GENERIC_GNSS else: raise ValueError(f'{source:s} source not supported.') @@ -504,16 +581,13 @@ class GNSS: the processing source (e.g., UNR, etc.). Use the `get_gnss_class` method to determine appropriate child class. """ + source = 'none' - def __init__(self, site: str, data_dir='./GNSS', version='IGS14'): + def __init__(self, site: str, data_dir=None, version='IGS14'): # Record properties self.site = site self.version = version - - # create data directory if not exist - self.data_dir = os.path.abspath(data_dir) - if not os.path.exists(self.data_dir): - os.mkdir(self.data_dir) + self.data_dir = self.__format_data_dir__(data_dir) # variables to be filled by child classes self.dates = None @@ -527,6 +601,24 @@ def __init__(self, site: str, data_dir='./GNSS', version='IGS14'): return None + def __format_data_dir__(self, data_dir) -> str: + """Check formatting of GNSS data directory and ensure that directory + exists. + + Parameters: data_dir - None or str, data directory with GNSS position files + Returns: data_dir - str, full path to data directory + """ + # format data directory name based on processing source + if data_dir is None: + data_dir = f'GNSS-{self.source:s}' + data_dir = os.path.abspath(data_dir) + + # ensure directory exists + if not os.path.exists(data_dir): + os.mkdir(data_dir) + + return data_dir + def open(self, file=None, print_msg=True): """Read the lat/lon and displacement data of the station. 
Download if necessary. @@ -581,6 +673,7 @@ def __crop_to_date_range__(self, start_date: str, end_date: str): return None + ##################################### Utility Functions ################################### def display_data(self, marker_size=2, marker_color='k', plot_errors=True): """Display displacement data. """ @@ -612,7 +705,6 @@ def display_data(self, marker_size=2, marker_color='k', plot_errors=True): return fig, ax - ##################################### Utility Functions ################################### def displacement_enu2los(self, inc_angle:float, az_angle:float, gnss_comp='enu2los', horz_az_angle=-90., display=False, model=None): """Convert displacement in ENU to LOS direction. @@ -865,16 +957,8 @@ def get_stat_lat_lon(self, print_msg=True) -> (float, float): if print_msg == True: print('calculating station lat/lon') - data = np.loadtxt(self.file, dtype=bytes, skiprows=1).astype(str) - ref_lon, ref_lat = float(data[0, 6]), 0. - e0, e_off, n0, n_off = data[0, 7:11].astype(float) - e0 += e_off - n0 += n_off - - az = np.arctan2(e0, n0) / np.pi * 180. - dist = np.sqrt(e0**2 + n0**2) - g = Geod(ellps='WGS84') - self.site_lon, self.site_lat = g.fwd(ref_lon, ref_lat, az, dist)[0:2] + data = np.loadtxt(self.file, dtype=bytes, skiprows=1) + self.site_lat, self.site_lon = data[0,20:22].astype(float) if print_msg == True: print(f'\t{self.site_lat:f}, {self.site_lon:f}') @@ -1205,7 +1289,7 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, self.std_e, self.std_n, self.std_u) -class Generic_GNSS(GNSS): +class GENERIC_GNSS(GNSS): """GNSS class for daily solutions of an otherwise-unsupported source. The user should format the station position data in a file called .dat The file should have seven space-separated columns: @@ -1232,11 +1316,17 @@ class Generic_GNSS(GNSS): get_stat_lat_lon read_displacement """ - source = 'Generic' + source = 'GENERIC' def dload_site(self, print_msg=True) -> str: - """Download the station displacement data from the - specified source. + """Read displacement data from a GENERIC the station file. + In this case, the site data must already be downloaded and located in + the directory specified on instantiation (e.g., GNSS-GENERIC). + The file name convention should be: + + .txt + + where the site name is in all caps. Modifies: self.file - str, local file path/name self.file_url - str, file URL @@ -1257,7 +1347,15 @@ def dload_site(self, print_msg=True) -> str: def get_stat_lat_lon(self, print_msg=True) -> (str, str): """Get station lat/lon based on processing source. - Retrieve data from the displacement file. + Retrieve data from the site list file, which should be located in the + current directory. + The file should be called "GenericList.txt" and should consist of + three columns: + + + + where site is a four-digit site code in all caps. Lat/lon should be in + decimal degrees. 
Modifies: self.lat/lon - str Returns: self.lat/lon - str @@ -1286,6 +1384,14 @@ def get_stat_lat_lon(self, print_msg=True) -> (str, str): def read_displacement(self, start_date=None, end_date=None, print_msg=True, display=False): """Read GNSS displacement time-series (defined by start/end_date) + The position file for a GENERIC site must consist of seven columns: + + + + date is in format YYYYMMDD or YYYYMMDD:HHMMSS + displacements are in meters + if standard deviations or uncertainties are not availabe, fill columns with zeros + Parameters: start/end_date - str, date in YYYYMMDD format Returns: dates - 1D np.ndarray of datetime.datetime object dis_e/n/u - 1D np.ndarray of displacement in meters in float32 @@ -1311,7 +1417,7 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, if date_len == 8: datetime = dt.datetime.strptime(date, '%Y%m%d') elif date_len == 15: - datetime = dt.datetime.strptime(date, '%Y%m%dT%H%M%S') + datetime = dt.datetime.strptime(date, '%Y%m%d:%H%M%S') else: raise ValueError('Date/time format not recognized') diff --git a/src/mintpy/utils/arg_utils.py b/src/mintpy/utils/arg_utils.py index c6a62f8f3..f2ee402bf 100644 --- a/src/mintpy/utils/arg_utils.py +++ b/src/mintpy/utils/arg_utils.py @@ -250,7 +250,7 @@ def add_gnss_argument(parser): gnss.add_argument('--show-gnss','--show-gps', dest='disp_gnss', action='store_true', help='Show UNR GNSS location within the coverage.') gnss.add_argument('--gnss-source','--gps-source', dest='gnss_source', default='UNR', - choices={'UNR', 'ESESES'}, + choices={'UNR', 'ESESES', 'JPL-SIDESHOW', 'GENERIC'}, help='Source of the GNSS displacement solution (default: %(default)s).') # compare GNSS with InSAR From 50d4892c9f70fd5324e73767e2b676e9b92796c0 Mon Sep 17 00:00:00 2001 From: rzinke Date: Mon, 15 Apr 2024 14:59:19 -0700 Subject: [PATCH 25/44] Removed unnecessary Geod module import from gnss.py --- src/mintpy/objects/gnss.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index d209da5ff..e8d35c2a7 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -17,7 +17,6 @@ import matplotlib.pyplot as plt import numpy as np -from pyproj import Geod from mintpy.objects.coord import coordinate from mintpy.utils import ptime, readfile, time_func, utils1 as ut From 0681888a730b572e56a2be9ea39d652115d8d2d6 Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Fri, 19 Apr 2024 00:43:53 +0800 Subject: [PATCH 26/44] refactor `search_gnss()` + read_JPL_SIDESHOW_station_list(): use `np.loadtxt` for simplicity + search_gnss(): move crop in space/time/# code into here, for simplicity and readability --- src/mintpy/objects/gnss.py | 378 ++++++++++------------------------ src/mintpy/utils/arg_utils.py | 2 +- 2 files changed, 110 insertions(+), 270 deletions(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index e8d35c2a7..9162811e3 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -30,299 +30,142 @@ GNSS_SOURCES = list(GNSS_SITE_LIST_URLS.keys()) -def dload_site_list(out_file=None, source='UNR', print_msg=True) -> str: - """Download single file with list of GNSS site locations. - """ - # check source is supported - assert source in GNSS_SOURCES, \ - f'{source:s} GNSS is NOT supported! Use one of {GNSS_SOURCES}.' 
- - # determine URL - site_list_url = GNSS_SITE_LIST_URLS[source] - - # handle output file - if out_file is None: - out_file = os.path.basename(site_list_url) - - # download file - if not os.path.exists(out_file): - if print_msg: - print(f'Downloading site list from {source:s}: {site_list_url:s} to {out_file:s}') - urlretrieve(site_list_url, out_file) #nosec - return out_file - - -def crop_site_data_by_index(site_data: dict, idx: np.ndarray) -> dict: - """Remove GNSS sites by index from a site_data dictionary. - - Parameters: site_data - dict of ndarray, table-like object with GNSS sites - loaded from the search_gnss function - idx - ndarray of bool, indices to keep/exclude - Returns: site_data - dict of ndarray, site_data cropped by index - """ - for key in site_data: - site_data[key] = site_data[key][idx] - - return site_data - -def crop_site_data_in_space(site_data: dict, SNWE: tuple, print_msg=False) -> dict: - """Remove GNSS sites by geographic location. - - Parameters: site_data - dict of ndarray, table-like object with GNSS sites - loaded from the search_gnss function - SNWE - tuple of 4 float, indicating (South, North, West, East) in degrees - Returns: site_data - dict of ndarray, cropped site_data - """ - # parse bounding box - lat_min, lat_max, lon_min, lon_max = SNWE - assert (lon_min < lon_max) and (lat_min < lat_max), 'Check bounding box' - - if print_msg == True: - print('cropping to') - print(f'lon range: {lon_min:.5f} to {lon_max:.5f}') - print(f'lat range: {lat_min:.5f} to {lat_max:.5f}') - - # limit in space - idx = (site_data['lat'] >= lat_min) \ - & (site_data['lat'] <= lat_max) \ - & (site_data['lon'] >= lon_min) \ - & (site_data['lon'] <= lon_max) - site_data = crop_site_data_by_index(site_data, idx) - - if print_msg == True: - print('... {:d} sites remaining'.format(len(site_data['site']))) - - return site_data - -def crop_site_data_in_time(site_data: dict, start_date: None | str, end_date: None | str, - print_msg=True) -> dict: - """Remove GNSS sites by start/end date. - - Parameters: site_data - dict of ndarray, table-like object with GNSS sites - loaded from the search_gnss function - start_date - str, date in YYYYMMDD format - end_date - str, date in YYYYMMDD format - Returns: site_data - dict of ndarray, cropped site_data - """ - # check start and end dates if provided - if start_date is not None: - start_date = dt.datetime.strptime(start_date, '%Y%m%d') - if end_date is not None: - end_date = dt.datetime.strptime(end_date, '%Y%m%d') - if start_date is not None and end_date is not None: - assert(start_date < end_date), 'start date must be before end date' - - if print_msg == True: - print(f'cropping by date range {start_date} to {end_date}') - - # crop by start date - if start_date is not None: - if 'start_date' in site_data.keys(): - idx = site_data['start_date'] <= start_date - site_data = crop_site_data_by_index(site_data, idx) - - if end_date is not None: - if 'start_date' in site_data.keys(): - idx = site_data['end_date'] >= end_date - site_data = crop_site_data_by_index(site_data, idx) - - if print_msg == True: - print('... {:d} sites remaining'.format(len(site_data['site']))) - - return site_data - -def crop_site_data_by_num_solutions(site_data: dict, min_num_solution: None | int, - print_msg=True) -> dict: - """Remove GNSS sites based on a minimum number of solutions. 
- - Parameters: site_data - dict of ndarray, table-like object with GNSS sites - loaded from the search_gnss function - min_num_solution - int, minimum number of positions - Returns: site_data - dict of ndarray, cropped site_data - """ - if print_msg == True: - print(f'cropping data by min num solutions: {min_num_solution}') +######################################### Search GNSS ############################################### - if min_num_solution is not None: - if 'num_solution' in site_data.keys(): - idx = site_data.num_solution >= min_num_solution - site_data = crop_site_data_by_index(site_data, idx) +def search_gnss(SNWE, start_date=None, end_date=None, source='UNR', site_list_file=None, + min_num_solution=None, print_msg=True): + """Search available GNSS sites within the geo bounding box for a given GNSS source. - if print_msg == True: - print('... {:d} sites remaining'.format(len(site_data['site']))) - - return site_data - - -def search_gnss(SNWE, start_date=None, end_date=None, source='UNR', - site_list_file=None, min_num_solution=None, print_msg=True): - """Search available GNSS sites within the geo bounding box from UNR website Parameters: SNWE - tuple of 4 float, indicating (South, North, West, East) in degrees source - str, program or institution that processed the GNSS data start_date - str, date in YYYYMMDD format end_date - str, date in YYYYMMDD format - site_list_file - str + site_list_file - str, site list file name min_num_solution - int, minimum number of solutions available - Returns: site_names - 1D np.array of string, GNSS station names - site_lats - 1D np.array, lat - site_lons - 1D np.array, lon + Returns: site_names - 1D np.ndarray in string, GNSS station names + site_lats - 1D np.ndarray in float32, latitude + site_lons - 1D np.ndarray in float32, longitude """ - # check file name + vprint = print if print_msg else lambda *args, **kwargs: None + + # check: site_list_file name if site_list_file is None: if source == 'GENERIC': - raise ValueError('site list file must be specified for generic inputs') + raise ValueError('Site list file must be specified for GENERIC GNSS source!') else: - site_list_file = dload_site_list(source=source, print_msg=print_msg) + site_list_file = os.path.basename(GNSS_SITE_LIST_URLS[source]) - # check whether site list file is in current directory + # download site_list_file (if it does not exist in current directory) if not os.path.isfile(site_list_file): - # Download file dload_site_list(site_list_file, source=source, print_msg=print_msg) - # parse data from file + # read site_list_file if source == 'UNR': - site_data = read_UNR_station_list(site_list_file) + sites = read_UNR_station_list(site_list_file) elif source == 'ESESES': - site_data = read_ESESES_station_list(site_list_file) + sites = read_ESESES_station_list(site_list_file) elif source == 'JPL-SIDESHOW': - site_data = read_JPL_SIDESHOW_station_list(site_list_file) + sites = read_JPL_SIDESHOW_station_list(site_list_file) elif source == 'GENERIC': - site_data = read_GENERIC_station_list(site_list_file) - - if print_msg == True: - print('loaded {:d} sites with fields: {:s}'.\ - format(len(site_data['site']), ' '.join(site_data.keys()))) + sites = read_GENERIC_station_list(site_list_file) # ensure that site data formatting is consistent - site_data['site'] = np.array([site.upper() for site in site_data['site']]) - site_data['lat'] = site_data['lat'].astype(float) - site_data['lon'] = site_data['lon'].astype(float) - site_data['lon'][site_data['lon'] > 180] -= 360 # ensure lon values 
in (-180, 180] + sites['site'] = np.array([site.upper() for site in sites['site']]) + sites['lon'][sites['lon'] > 180] -= 360 # ensure lon values in (-180, 180] + vprint(f'load {len(sites["site"]):d} sites with fields: {" ".join(sites.keys())}') # limit in space - site_data = crop_site_data_in_space(site_data, SNWE, print_msg=print_msg) + idx = ((sites['lat'] >= SNWE[0]) * (sites['lat'] <= SNWE[1]) * + (sites['lon'] >= SNWE[2]) * (sites['lon'] <= SNWE[3])) + vprint(f'keep sites within SNWE of {SNWE}: [{np.sum(idx)}]') # limit in time - site_data = crop_site_data_in_time(site_data, start_date, end_date, print_msg=print_msg) + if start_date and 'end_date' in sites.keys(): + start_dt = ptime.date_list2vector([start_date])[0][0] + idx *= sites['end_date'] >= start_dt + vprint(f'keep sites with end_date >= {start_date}: [{np.sum(idx)}]') + if end_date and 'start_date' in sites.keys(): + end_dt = ptime.date_list2vector([end_date])[0][0] + idx *= sites['start_date'] <= end_dt + vprint(f'keep sites with start_date <= {end_date}: [{np.sum(idx)}]') # limit based on number of solutions - site_data = crop_site_data_by_num_solutions(site_data, min_num_solution, print_msg=print_msg) + if min_num_solution is not None and 'num_solution' in site_data.keys(): + idx *= sites['num_solution'] >= min_num_solution + vprint(f'keep sites with # of solutions >= {min_num_solution}: [{np.sum(idx)}]') - return (site_data['site'], - site_data['lat'], - site_data['lon']) + return sites['site'][idx], sites['lat'][idx], sites['lon'][idx] -def read_UNR_station_list(site_list_file:str, print_msg=True) -> np.ndarray: - """Return names and lon/lat values for UNR GNSS stations. + +def dload_site_list(out_file=None, source='UNR', print_msg=True) -> str: + """Download single file with list of GNSS site locations. """ - if print_msg == True: - print('parsing UNR site list file') - - # read file contents - txt_data = np.loadtxt(site_list_file, - dtype=bytes, - skiprows=1, - usecols=(0,1,2,3,4,5,6,7,8,9,10)).astype(str) - - # write data to dictionary - site_data = { - 'site': txt_data[:,0], - 'lat': txt_data[:,1], - 'lon': txt_data[:,2], - 'start_date': txt_data[:,7], - 'end_date': txt_data[:,8], - 'num_solution': txt_data[:,10], - } - - # format dates - site_data['start_date'] = np.array([dt.datetime.strptime(date, '%Y-%m-%d') \ - for date in site_data['start_date']]) - site_data['end_date'] = np.array([dt.datetime.strptime(date, '%Y-%m-%d') \ - for date in site_data['end_date']]) - - return site_data - -def read_ESESES_station_list(site_list_file:str, print_msg=True) -> np.ndarray: - """Return names and lon/lat values for JPL GNSS stations. + # check source is supported + assert source in GNSS_SOURCES, \ + f'{source:s} GNSS is NOT supported! Use one of {GNSS_SOURCES}.' + + # determine URL + site_list_url = GNSS_SITE_LIST_URLS[source] + + # handle output file + if out_file is None: + out_file = os.path.basename(site_list_url) + + # download file + if not os.path.exists(out_file): + if print_msg: + print(f'Downloading site list from {source:s}: {site_list_url:s} to {out_file:s}') + urlretrieve(site_list_url, out_file) #nosec + + return out_file + + +def read_UNR_station_list(site_list_file:str): + """Return names and lon/lat values for UNR GNSS stations. 
""" - if print_msg == True: - print('parsing ESESES site list file') + fc = np.loadtxt(site_list_file, dtype=str, skiprows=1, usecols=(0,1,2,3,4,5,6,7,8,9,10)) + sites = { + 'site' : fc[:,0], + 'lat' : fc[:,1].astype(np.float32), + 'lon' : fc[:,2].astype(np.float32), + 'start_date' : fc[:,7], + 'end_date' : fc[:,8], + 'num_solution' : fc[:,10].astype(np.int16), + } - # read file contents - txt_data = np.loadtxt(site_list_file, skiprows=17, dtype=str) + # re-format dates + sites['start_date'] = np.array([dt.datetime.strptime(x, '%Y-%m-%d') for x in sites['start_date']]) + sites['end_date'] = np.array([dt.datetime.strptime(x, '%Y-%m-%d') for x in sites['end_date']]) - # write data to dictionary - site_data = { - 'site': txt_data[:,0], - 'lon': txt_data[:,1], - 'lat': txt_data[:,2], - } + return sites - return site_data -def read_JPL_SIDESHOW_station_list(site_list_file:str, print_msg=True) -> np.ndarray: +def read_ESESES_station_list(site_list_file:str): + """Return names and lon/lat values for JPL GNSS stations. + """ + fc = np.loadtxt(site_list_file, skiprows=17, dtype=str) + sites = { + 'site' : fc[:,0], + 'lon' : fc[:,1].astype(np.float32), + 'lat' : fc[:,2].astype(np.float32), + } + return sites + + +def read_JPL_SIDESHOW_station_list(site_list_file:str): """Return names and lon/lat values for JPL-SIDESHOW GNSS stations. """ - if print_msg == True: - print('parsing JPL-SIDESHOW site list file') - - # read file contents - with open(site_list_file) as site_list: - lines = site_list.readlines() - - # find lines containing position and velocity data - line_len = 8 # number of entries in a data line - name_len = 4 # number of letters in a station name - data_lines = [line.strip('\n') for line in lines if (len(line.split()) == line_len) \ - and (len(line.split()[0]) == name_len)] - - # transform format from (POS \n VEL) to (POS VEL) - pos_lines = data_lines[0::2] - vel_lines = data_lines[1::2] - combo_lines = list(zip(pos_lines, vel_lines)) - - # empty lists - sites = [] - lats = [] - lons = [] - elevs = [] - Nvels = [] - Evels = [] - Uvels = [] - Nerrs = [] - Eerrs = [] - Uerrs = [] - - # parse data - for line in combo_lines: - pos_info, vel_info = line - - # parse line values - site, _, lat, lon, elev, _, _, _ = pos_info.split() - _ , _, Nvel, Evel, Uvel, Nerr, Eerr, Uerr = vel_info.split() - - # format data - sites.append(site) - lats.append(float(lat)) - lons.append(float(lon)) - elevs.append(float(elev)) - Nvels.append(float(Nvel)) - Evels.append(float(Evel)) - Uvels.append(float(Uvel)) - Nerrs.append(float(Nerr)) - Eerrs.append(float(Eerr)) - Uerrs.append(float(Uerr)) - - # write data to dictionary - site_data = {'site': np.array(sites), - 'lat': np.array(lats), - 'lon': np.array(lons), - } - - return site_data - -def read_GENERIC_station_list(site_list_file:str, print_msg=True) -> np.ndarray: + fc = np.loadtxt(fname, comments='<', skiprows=9, dtype=str) + sites = { + 'site' : fc[::2, 0], + 'lat' : fc[::2, 2].astype(np.float32), + 'lon' : fc[::2, 3].astype(np.float32), + } + return sites + + +def read_GENERIC_station_list(site_list_file:str): """Return names and lon/lat values for GNSS stations processed by an otherwise-unsupported source. 
@@ -330,27 +173,24 @@ def read_GENERIC_station_list(site_list_file:str, print_msg=True) -> np.ndarray: GenericList.txt The file should have three, nine, or eleven space- separated columns: - SITE lat lon [vel_e vel_n vel_u err_e err_n err_u] [start_date end_date] + SITE lat lon [vel_e vel_n vel_u err_e err_n err_u] [start_date end_date] where site is the four-digit, alphanumeric (uppercase) site code; and lat/lon are in decimal degrees. If included, vel should be in units of m/yr; and dates should be in format YYYYMMDD. """ - if print_msg == True: - print('Parsing GENERIC site list file') + fc = np.loadtxt(site_list_file, dtype=str) + sites = { + 'site' : fc[:,0], + 'lon' : fc[:,1].astype(np.float32), + 'lat' : fc[:,2].astype(np.float32), + } - # read file contents - txt_data = np.loadtxt(site_list_file, dtype=str) + return sites - # write data to dictionary - site_data = { - 'site': txt_data[:,0], - 'lon': txt_data[:,1], - 'lat': txt_data[:,2], - } - return site_data +######################################### Utils Functions ########################################### def get_baseline_change(dates1, pos_x1, pos_y1, pos_z1, dates2, pos_x2, pos_y2, pos_z2): @@ -400,7 +240,7 @@ def get_gnss_los_obs(meta, obs_type, site_names, start_date, end_date, source='U from mintpy.utils import readfile, utils as ut meta = readfile.read_attribute('geo/geo_velocity.h5') SNWE = ut.four_corners(meta) - site_names = gnss.search_gnss(SNWE, start_date='20150101', end_date='20190619') + site_names = gnss.search_gnss(SNWE, start_date='20150101', end_date='20190619')[0] vel = gnss.get_gnss_los_obs(meta, 'velocity', site_names, start_date='20150101', end_date='20190619') dis = gnss.get_gnss_los_obs(meta, 'displacement', site_names, start_date='20150101', end_date='20190619') """ @@ -1225,7 +1065,7 @@ def get_stat_lat_lon(self, print_msg=True) -> (float, float): print('calculating station lat/lon') # need to refer to the site list - site_list_file = dload_site_list(source='JPL-SIDESHOW') + site_list_file = os.path.basename(GNSS_SITE_LIST_URLS['JPL-SIDESHOW']) # find site in site list file with open(site_list_file) as site_list: diff --git a/src/mintpy/utils/arg_utils.py b/src/mintpy/utils/arg_utils.py index f2ee402bf..26642fb09 100644 --- a/src/mintpy/utils/arg_utils.py +++ b/src/mintpy/utils/arg_utils.py @@ -249,7 +249,7 @@ def add_gnss_argument(parser): gnss = parser.add_argument_group('GNSS', 'GNSS data to display') gnss.add_argument('--show-gnss','--show-gps', dest='disp_gnss', action='store_true', help='Show UNR GNSS location within the coverage.') - gnss.add_argument('--gnss-source','--gps-source', dest='gnss_source', default='UNR', + gnss.add_argument('--gnss-source','--gnss-src','--gps-source', dest='gnss_source', default='UNR', choices={'UNR', 'ESESES', 'JPL-SIDESHOW', 'GENERIC'}, help='Source of the GNSS displacement solution (default: %(default)s).') From a9683b7eb24d6f59f876a20369784f96a06b7090 Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Fri, 19 Apr 2024 00:47:46 +0800 Subject: [PATCH 27/44] codacy fix --- src/mintpy/objects/gnss.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index 9162811e3..54532843f 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -91,7 +91,7 @@ def search_gnss(SNWE, start_date=None, end_date=None, source='UNR', site_list_fi vprint(f'keep sites with start_date <= {end_date}: [{np.sum(idx)}]') # limit based on number of solutions - if min_num_solution is not None and 
'num_solution' in site_data.keys(): + if min_num_solution is not None and 'num_solution' in sites.keys(): idx *= sites['num_solution'] >= min_num_solution vprint(f'keep sites with # of solutions >= {min_num_solution}: [{np.sum(idx)}]') @@ -156,7 +156,7 @@ def read_ESESES_station_list(site_list_file:str): def read_JPL_SIDESHOW_station_list(site_list_file:str): """Return names and lon/lat values for JPL-SIDESHOW GNSS stations. """ - fc = np.loadtxt(fname, comments='<', skiprows=9, dtype=str) + fc = np.loadtxt(site_list_file, comments='<', skiprows=9, dtype=str) sites = { 'site' : fc[::2, 0], 'lat' : fc[::2, 2].astype(np.float32), From bc6567c69f832d9c6aac6a45f5abad0ef143d6fd Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Sun, 21 Apr 2024 17:07:18 +0800 Subject: [PATCH 28/44] refactor GNSS_UNR + objects.gnss: - search_gnss(): set back the min_num_solution default value to 50 - use os.path.isfile() to check the existence of file, instead of os.path.exists() - rename get_gnss_los_obs() to get_los_obs() - simplify GNSS.__crop_to_date_range__() and display_data() - rename display_data() to plot() - call self.open() when needed in the member functions, so that it can be removed on the user side, to simplify the usage - fix two issues: 1) add __init__() in the child class, define self.file; 2) retry downloading if failed; 3) retry downloading if loadtxt() failed on existing data file, due to download interuption in the previous run - rename child class names, to be consistent with the folder name + utils.plot.plot_gnss(): print the nearest GNSS site to the current reference point, to faciltate the --ref-gnss option setup --- src/mintpy/objects/gnss.py | 577 +++++++++++++++-------------- src/mintpy/objects/insar_vs_gps.py | 7 +- src/mintpy/utils/plot.py | 19 +- src/mintpy/view.py | 3 +- 4 files changed, 316 insertions(+), 290 deletions(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index 54532843f..c1b6bf476 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -34,7 +34,7 @@ ######################################### Search GNSS ############################################### def search_gnss(SNWE, start_date=None, end_date=None, source='UNR', site_list_file=None, - min_num_solution=None, print_msg=True): + min_num_solution=50, print_msg=True): """Search available GNSS sites within the geo bounding box for a given GNSS source. Parameters: SNWE - tuple of 4 float, indicating (South, North, West, East) in degrees @@ -95,6 +95,9 @@ def search_gnss(SNWE, start_date=None, end_date=None, source='UNR', site_list_fi idx *= sites['num_solution'] >= min_num_solution vprint(f'keep sites with # of solutions >= {min_num_solution}: [{np.sum(idx)}]') + # print remaining site names + vprint(sites['site'][idx]) + return sites['site'][idx], sites['lat'][idx], sites['lon'][idx] @@ -102,8 +105,7 @@ def dload_site_list(out_file=None, source='UNR', print_msg=True) -> str: """Download single file with list of GNSS site locations. """ # check source is supported - assert source in GNSS_SOURCES, \ - f'{source:s} GNSS is NOT supported! Use one of {GNSS_SOURCES}.' + assert source in GNSS_SOURCES, f'{source:s} GNSS is NOT supported! Use one of {GNSS_SOURCES}.' 
# determine URL site_list_url = GNSS_SITE_LIST_URLS[source] @@ -113,9 +115,9 @@ def dload_site_list(out_file=None, source='UNR', print_msg=True) -> str: out_file = os.path.basename(site_list_url) # download file - if not os.path.exists(out_file): + if not os.path.isfile(out_file): if print_msg: - print(f'Downloading site list from {source:s}: {site_list_url:s} to {out_file:s}') + print(f'downloading site list from {source:s}: {site_list_url:s} to {out_file:s}') urlretrieve(site_list_url, out_file) #nosec return out_file @@ -192,34 +194,8 @@ def read_GENERIC_station_list(site_list_file:str): ######################################### Utils Functions ########################################### -def get_baseline_change(dates1, pos_x1, pos_y1, pos_z1, - dates2, pos_x2, pos_y2, pos_z2): - """Calculate the baseline change between two GNSS displacement time-series - Parameters: dates1/2 - 1D np.array, datetime.datetime object - pos_x/y/z1/2 - 1D np.ndarray, displacement in meters in float32 - Returns: dates - 1D np.array, datetime.datetime object for the - common dates - bases - 1D np.ndarray, displacement in meters in float32 - for the common dates - """ - dates = np.array(sorted(list(set(dates1) & set(dates2)))) - bases = np.zeros(dates.shape, dtype=float) - for i, date in enumerate(dates): - idx1 = np.where(dates1 == date)[0][0] - idx2 = np.where(dates2 == date)[0][0] - basei = ((pos_x1[idx1] - pos_x2[idx2]) ** 2 - + (pos_y1[idx1] - pos_y2[idx2]) ** 2 - + (pos_z1[idx1] - pos_z2[idx2]) ** 2) ** 0.5 - bases[i] = basei - bases -= bases[0] - bases = np.array(bases, dtype=float) - - return dates, bases - - -def get_gnss_los_obs(meta, obs_type, site_names, start_date, end_date, source='UNR', - gnss_comp='enu2los', horz_az_angle=-90., model=None, - print_msg=True, redo=False): +def get_los_obs(meta, obs_type, site_names, start_date, end_date, source='UNR', gnss_comp='enu2los', + horz_az_angle=-90., model=None, print_msg=True, redo=False): """Get the GNSS LOS observations given the query info. Parameters: meta - dict, dictionary of metadata of the InSAR file @@ -232,7 +208,7 @@ def get_gnss_los_obs(meta, obs_type, site_names, start_date, end_date, source='U e.g. enu2los, hz2los, up2los horz_az_angle - float, azimuth angle of the horizontal motion in degree measured from the north with anti-clockwise as positive - model - dict, time function model, e.g. {'polynomial': 1, 'periodic': [1.0, 0.5]} + model - dict, time function model, e.g. 
{'polynomial': 1, 'periodic': [1.0, 0.5]} print_msg - bool, print verbose info redo - bool, ignore existing CSV file and re-calculate Returns: site_obs - 1D np.ndarray(), GNSS LOS velocity or displacement in m or m/yr @@ -241,16 +217,14 @@ def get_gnss_los_obs(meta, obs_type, site_names, start_date, end_date, source='U meta = readfile.read_attribute('geo/geo_velocity.h5') SNWE = ut.four_corners(meta) site_names = gnss.search_gnss(SNWE, start_date='20150101', end_date='20190619')[0] - vel = gnss.get_gnss_los_obs(meta, 'velocity', site_names, start_date='20150101', end_date='20190619') - dis = gnss.get_gnss_los_obs(meta, 'displacement', site_names, start_date='20150101', end_date='20190619') + vel = gnss.get_los_obs(meta, 'velocity', site_names, start_date='20150101', end_date='20190619') + dis = gnss.get_los_obs(meta, 'displacement', site_names, start_date='20150101', end_date='20190619') """ vprint = print if print_msg else lambda *args, **kwargs: None num_site = len(site_names) # obs_type --> obs_ind - obs_types = ['displacement', 'velocity'] - if obs_type not in obs_types: - raise ValueError(f'un-supported obs_type: {obs_type}') + assert obs_type in ['displacement', 'velocity'], f'un-supported obs_type: {obs_type}' obs_ind = 3 if obs_type.lower() == 'displacement' else 4 # GNSS CSV file info @@ -311,7 +285,7 @@ def get_gnss_los_obs(meta, obs_type, site_names, start_date, end_date, source='U # calculate GNSS data value gnss_obj = get_gnss_class(source)(site_name) gnss_obj.open(print_msg=False) - vel, dis_ts = gnss_obj.get_gnss_los_velocity( + vel, dis_ts = gnss_obj.get_los_velocity( geom_obj, start_date=start_date, end_date=end_date, @@ -340,8 +314,51 @@ def get_gnss_los_obs(meta, obs_type, site_names, start_date, end_date, source='U return site_obs +def get_baseline_change(dates1, pos_x1, pos_y1, pos_z1, + dates2, pos_x2, pos_y2, pos_z2): + """Calculate the baseline change between two GNSS displacement time-series. + + Parameters: dates1/2 - 1D np.ndarray, datetime.datetime object + pos_x/y/z1 - 1D np.ndarray in float32, displacement in meters of the 1st site + pos_x/y/z2 - 1D np.ndarray in float32, displacement in meters of the 2nd site + Returns: dates - 1D np.ndarray in dt.datetime object for the common dates + bases - 1D np.ndarray in float32, baseline in meters for the common dates + """ + dates = np.array(sorted(list(set(dates1) & set(dates2)))) + bases = np.zeros(dates.shape, dtype=float) + + for i, date in enumerate(dates): + idx1 = np.where(dates1 == date)[0][0] + idx2 = np.where(dates2 == date)[0][0] + basei = ((pos_x1[idx1] - pos_x2[idx2]) ** 2 + + (pos_y1[idx1] - pos_y2[idx2]) ** 2 + + (pos_z1[idx1] - pos_z2[idx2]) ** 2) ** 0.5 + bases[i] = basei + + bases -= bases[0] + bases = np.array(bases, dtype=float) + + return dates, bases + + +def get_gnss_class(source:str): + """Return the appropriate GNSS child class based on processing source. 
+ """ + if source == 'UNR': + return GNSS_UNR + elif source == 'ESESES': + return GNSS_ESESES + elif source == 'JPL-SIDESHOW': + return GNSS_JPL_SIDESHOW + elif source == 'GENERIC': + return GNSS_GENERIC + else: + raise ValueError(f'GNSS source {source:s} is NOT supported!') + + + +#################################### GNSS-GSI utility functions ##################################### -#################################### Beginning of GNSS-GSI utility functions ######################## def read_pos_file(fname): import codecs fcp = codecs.open(fname, encoding = 'cp1252') @@ -393,40 +410,27 @@ def read_GSI_F3(gnss_dir, site, start_date=None, end_date=None): return dates[flag], X[flag], Y[flag], Z[flag] -#################################### End of GNSS-GSI utility functions ############################## - - -def get_gnss_class(source:str): - """Return the appropriate GNSS child class based on processing source. - """ - if source == 'UNR': - return UNR_GNSS - elif source == 'ESESES': - return ESESES_GNSS - elif source == 'JPL-SIDESHOW': - return JPL_SIDESHOW_GNSS - elif source == 'GENERIC': - return GENERIC_GNSS - else: - raise ValueError(f'{source:s} source not supported.') +#################################### GNSS parent/child classes ###################################### -#################################### Beginning of GNSS class ######################################## class GNSS: - """GNSS class for time-series of daily solution. + """GNSS parent class for time-series of daily solution. The GNSS class is solely meant to be a parent class. Child classes, defined below, support functions for downloading and parsing GNSS position based on the processing source (e.g., UNR, etc.). Use the `get_gnss_class` method to determine appropriate child class. """ - source = 'none' - def __init__(self, site: str, data_dir=None, version='IGS14'): - # Record properties + def __init__(self, site: str, data_dir=None, version='IGS14', source='UNR'): + # site info self.site = site + self.source = source self.version = version + + # site info [local] self.data_dir = self.__format_data_dir__(data_dir) + self.file = None # variables to be filled by child classes self.dates = None @@ -438,106 +442,107 @@ def __init__(self, site: str, data_dir=None, version='IGS14'): self.std_n = None self.std_u = None - return None - - def __format_data_dir__(self, data_dir) -> str: - """Check formatting of GNSS data directory and ensure that directory - exists. - - Parameters: data_dir - None or str, data directory with GNSS position files - Returns: data_dir - str, full path to data directory - """ - # format data directory name based on processing source - if data_dir is None: - data_dir = f'GNSS-{self.source:s}' - data_dir = os.path.abspath(data_dir) - - # ensure directory exists - if not os.path.exists(data_dir): - os.mkdir(data_dir) - - return data_dir def open(self, file=None, print_msg=True): """Read the lat/lon and displacement data of the station. Download if necessary. """ # download file if not present - if not hasattr(self, 'file'): + if not os.path.isfile(self.file): self.dload_site(print_msg=print_msg) # retrieve data from file - self.get_stat_lat_lon(print_msg=print_msg) + self.get_stat_lat_lon() self.read_displacement(print_msg=print_msg) - return None def dload_site(self, print_msg=True): - raise NotImplementedError('Func. dload_site not implemented. Override with child class.') + """Download GNSS site data file.""" + raise NotImplementedError('dload_site() is NOT implemented. 
Override with child class.') def get_stat_lat_lon(self, print_msg=True): - raise NotImplementedError('Func. get_stat_lat_lon not implemented. Override with child class.') + """Get the GNSS site latitude & longitude into: + Returns: site_lat/lon - float, site latitude/longitude in degree + """ + raise NotImplementedError('get_stat_lat_lon() is NOT implemented. Override with child class.') def read_displacement(self, start_date=None, end_date=None, print_msg=True, display=False): - raise NotImplementedError('Func. read_displacement not implemented. Override with child class.') + """Get the GNSS time/displacement(Std) into: + Returns: dates - 1D np.ndarray in datetime.datetime object + date_list - list(str), date in YYYYMMDD format + dis_e/n/u - 1D np.ndarray in float32, displacement in meters + std_e/n/u - 1D np.ndarray in float32, displacement STD in meters + """ + raise NotImplementedError('read_displacement() is NOT implemented. Override with child class.') + - @staticmethod - def lon_360to180(lon: float) -> float: - """Convert longitude in the range [0, 360) to - range (-180, 180]. + def __format_data_dir__(self, data_dir) -> str: + """Check formatting of GNSS data directory and ensure that directory exists. + + Parameters: data_dir - None or str, data directory with GNSS position files + Returns: data_dir - str, full path to data directory """ - if lon > 180: - lon -= 360 - return lon + # format data directory name based on processing source + if data_dir is None: + data_dir = f'GNSS-{self.source:s}' + data_dir = os.path.abspath(data_dir) + + # ensure directory exists + if not os.path.exists(data_dir): + print('create directory:', data_dir) + os.mkdir(data_dir) + + return data_dir + def __crop_to_date_range__(self, start_date: str, end_date: str): - """Cut out the specified time range. - start/end_date in format YYYYMMDD + """Crop the time-series given the start/end_date in format YYYYMMDD. """ - t_flag = np.ones(len(self.dates), bool) + flag = np.ones(len(self.dates), dtype=bool) if start_date: t0 = ptime.date_list2vector([start_date])[0][0] - t_flag[self.dates < t0] = 0 + flag[self.dates < t0] = 0 if end_date: t1 = ptime.date_list2vector([end_date])[0][0] - t_flag[self.dates > t1] = 0 - self.dates = self.dates[t_flag] - self.dis_e = self.dis_e[t_flag] - self.dis_n = self.dis_n[t_flag] - self.dis_u = self.dis_u[t_flag] - self.std_e = self.std_e[t_flag] - self.std_n = self.std_n[t_flag] - self.std_u = self.std_u[t_flag] + flag[self.dates > t1] = 0 - return None + self.dates = self.dates[flag] + self.dis_e = self.dis_e[flag] + self.dis_n = self.dis_n[flag] + self.dis_u = self.dis_u[flag] + self.std_e = self.std_e[flag] + self.std_n = self.std_n[flag] + self.std_u = self.std_u[flag] ##################################### Utility Functions ################################### - def display_data(self, marker_size=2, marker_color='k', plot_errors=True): - """Display displacement data. + def plot(self, marker_size=2, marker_color='k', plot_error_bar=True): + """Plot the displacement time-series. 
""" + if self.dis_e is None: + self.open() + # instantiate figure and axes fig, ax = plt.subplots(nrows=3, ncols=1, sharex=True) # plot displacement data - ax[0].scatter(self.dates, self.dis_e, s=marker_size**2, c=marker_color) - ax[1].scatter(self.dates, self.dis_n, s=marker_size**2, c=marker_color) - ax[2].scatter(self.dates, self.dis_u, s=marker_size**2, c=marker_color) + kwargs = dict(s=marker_size**2, c=marker_color) + ax[0].scatter(self.dates, self.dis_e, **kwargs) + ax[1].scatter(self.dates, self.dis_n, **kwargs) + ax[2].scatter(self.dates, self.dis_u, **kwargs) # plot displacement errors - if plot_errors == True: - ax[0].errorbar(self.dates, self.dis_e, yerr=self.std_e, - linestyle='none', color=marker_color) - ax[1].errorbar(self.dates, self.dis_n, yerr=self.std_n, - linestyle='none', color=marker_color) - ax[2].errorbar(self.dates, self.dis_u, yerr=self.std_u, - linestyle='none', color=marker_color) + if plot_error_bar: + kwargs = dict(linestyle='none', color=marker_color) + ax[0].errorbar(self.dates, self.dis_e, yerr=self.std_e, **kwargs) + ax[1].errorbar(self.dates, self.dis_n, yerr=self.std_n, **kwargs) + ax[2].errorbar(self.dates, self.dis_u, yerr=self.std_u, **kwargs) # format plot - ax[0].set_ylabel('East (m)') - ax[1].set_ylabel('North (m)') - ax[2].set_ylabel('Up (m)') + for i, label in enumerate(['East', 'North', 'Up']): + ax[i].set_ylabel(f'{label} [m]') fig.suptitle(f'{self.site:s} ({self.source:s})') + fig.tight_layout() plt.show() @@ -554,9 +559,12 @@ def displacement_enu2los(self, inc_angle:float, az_angle:float, gnss_comp='enu2l gnss_comp - str, GNSS components used to convert to LOS direction horz_az_angle - float, fault azimuth angle used to convert horizontal to fault-parallel measured from the north with anti-clockwise as positive - Returns: dis_los - 1D np.array for displacement in LOS direction - std_los - 1D np.array for displacement standard deviation in LOS direction + Returns: dis_los - 1D np.ndarray for displacement in LOS direction + std_los - 1D np.ndarray for displacement standard deviation in LOS direction """ + if self.dis_e is None: + self.open() + # get unit vector for the component of interest unit_vec = ut.get_unit_vector4component_of_interest( los_inc_angle=inc_angle, @@ -569,19 +577,19 @@ def displacement_enu2los(self, inc_angle:float, az_angle:float, gnss_comp='enu2l self.dis_los = ( self.dis_e * unit_vec[0] + self.dis_n * unit_vec[1] + self.dis_u * unit_vec[2]) + # assuming ENU component are independent with each other self.std_los = ( (self.std_e * unit_vec[0])**2 + (self.std_n * unit_vec[1])**2 + (self.std_u * unit_vec[2])**2 ) ** 0.5 # display if requested - if display == True: + if display: # instantiate figure and axes _, ax = plt.subplots(sharex=True) # plot LOS displacement - ax.scatter(self.dates, self.dis_los, s=2**2, - c='k', label='LOS') + ax.scatter(self.dates, self.dis_los, s=2**2, c='k', label='LOS') # plot fit if model specified if model is not None: @@ -596,7 +604,7 @@ def displacement_enu2los(self, inc_angle:float, az_angle:float, gnss_comp='enu2l def get_los_geometry(self, geom_obj, print_msg=False): """Get the Line-of-Sight geometry info in incidence and azimuth angle in degrees.""" - lat, lon = self.get_stat_lat_lon(print_msg=print_msg) + lat, lon = self.get_stat_lat_lon() # get LOS geometry if isinstance(geom_obj, str): @@ -607,11 +615,9 @@ def get_los_geometry(self, geom_obj, print_msg=False): # check against image boundary y = max(0, y); y = min(int(atr['LENGTH'])-1, y) x = max(0, x); x = min(int(atr['WIDTH'])-1, x) - 
box = (x, y, x+1, y+1) - inc_angle = readfile.read(geom_obj, datasetName='incidenceAngle', box=box, - print_msg=print_msg)[0][0,0] - az_angle = readfile.read(geom_obj, datasetName='azimuthAngle', box=box, - print_msg=print_msg)[0][0,0] + kwargs = dict(box=(x,y,x+1,y+1), print_msg=print_msg) + inc_angle = readfile.read(geom_obj, datasetName='incidenceAngle', **kwargs)[0][0,0] + az_angle = readfile.read(geom_obj, datasetName='azimuthAngle', **kwargs)[0][0,0] elif isinstance(geom_obj, dict): # use mean inc/az_angle from metadata @@ -624,9 +630,9 @@ def get_los_geometry(self, geom_obj, print_msg=False): return inc_angle, az_angle - def read_gnss_los_displacement(self, geom_obj, start_date=None, end_date=None, ref_site=None, - gnss_comp='enu2los', horz_az_angle=-90., print_msg=False): - """Read GNSS displacement in LOS direction. + def get_los_displacement(self, geom_obj, start_date=None, end_date=None, ref_site=None, + gnss_comp='enu2los', horz_az_angle=-90., print_msg=False): + """Get GNSS displacement in LOS direction. Parameters: geom_obj - dict / str, metadata of InSAR file, or geometry file path start_date - str, dates in YYYYMMDD format @@ -635,35 +641,37 @@ def read_gnss_los_displacement(self, geom_obj, start_date=None, end_date=None, r gnss_comp - str, GNSS components used to convert to LOS direction horz_az_angle - float, fault azimuth angle used to convert horizontal to fault-parallel - Returns: dates - 1D np.array of datetime.datetime object - dis/std - 1D np.array of displacement / uncertainty in meters + Returns: dates - 1D np.ndarray of datetime.datetime object + dis/std - 1D np.ndarray of displacement / uncertainty in meters site_lalo - tuple of 2 float, lat/lon of GNSS site ref_site_lalo - tuple of 2 float, lat/lon of reference GNSS site """ # read GNSS object - inc_angle, az_angle = self.get_los_geometry(geom_obj) + site_lalo = self.get_stat_lat_lon() dates = self.read_displacement(start_date, end_date, print_msg=print_msg)[0] - dis, std = self.displacement_enu2los(inc_angle, az_angle, gnss_comp=gnss_comp, - horz_az_angle=horz_az_angle) - site_lalo = self.get_stat_lat_lon(print_msg=print_msg) - - # define GNSS station object based on processing source - GNSS = get_gnss_class(self.source) + inc_angle, az_angle = self.get_los_geometry(geom_obj) + dis, std = self.displacement_enu2los( + inc_angle, az_angle, + gnss_comp=gnss_comp, + horz_az_angle=horz_az_angle, + ) # get LOS displacement relative to another GNSS site if ref_site: - ref_obj = GNSS(site=ref_site, data_dir=self.data_dir) - ref_obj.open() + ref_obj = get_gnss_class(self.source)(site=ref_site, data_dir=self.data_dir) + ref_site_lalo = ref_obj.get_stat_lat_lon() ref_obj.read_displacement(start_date, end_date, print_msg=print_msg) inc_angle, az_angle = ref_obj.get_los_geometry(geom_obj) - ref_obj.displacement_enu2los(inc_angle, az_angle, gnss_comp=gnss_comp, - horz_az_angle=horz_az_angle) - ref_site_lalo = ref_obj.get_stat_lat_lon(print_msg=print_msg) + ref_obj.displacement_enu2los( + inc_angle, az_angle, + gnss_comp=gnss_comp, + horz_az_angle=horz_az_angle, + ) # get relative LOS displacement on common dates dates = np.array(sorted(list(set(self.dates) & set(ref_obj.dates)))) - dis = np.zeros(dates.shape, np.float32) - std = np.zeros(dates.shape, np.float32) + dis = np.zeros(dates.shape, dtype=np.float32) + std = np.zeros(dates.shape, dtype=np.float32) for i, date_i in enumerate(dates): idx1 = np.where(self.dates == date_i)[0][0] idx2 = np.where(ref_obj.dates == date_i)[0][0] @@ -675,42 +683,36 @@ def 
read_gnss_los_displacement(self, geom_obj, start_date=None, end_date=None, r return dates, dis, std, site_lalo, ref_site_lalo - def get_gnss_los_velocity(self, geom_obj, start_date=None, end_date=None, - ref_site=None, gnss_comp='enu2los', - horz_az_angle=-90., model=None, - print_msg=True): - """Convert the three-component displacement data into LOS - velocity. - - Parameters: geom_obj - dict / str, metadata of InSAR file, or - geometry file path - start_date - str, YYYYMMDD format - end_date - str, YYYYMMDD format - ref_site - str, reference GNSS site - gnss_comp - str, GNSS components used to convert to - LOS direction - horz_az_angle - float, fault azimuth angle used to convert - horizontal to fault-parallel - model - dict, time function model, e.g. + def get_los_velocity(self, geom_obj, start_date=None, end_date=None, ref_site=None, + gnss_comp='enu2los', horz_az_angle=-90., model=None, print_msg=True): + """Convert the three-component displacement data into LOS velocity. + + Parameters: geom_obj - dict / str, metadata of InSAR file, or + geometry file path + start_date - str, YYYYMMDD format + end_date - str, YYYYMMDD format + ref_site - str, reference GNSS site + gnss_comp - str, GNSS components used to convert to LOS direction + horz_az_angle - float, fault azimuth angle used to convert horizontal + to fault-parallel + model - dict, time function model, e.g. {'polynomial': 1, 'periodic': [1.0, 0.5]} - Returns: dates - 1D np.array, datetime.datetime object - dis - 1D np.array, displacement in meters - std - 1D np.array, displacement uncertainty in meters - site_lalo - tuple of 2 float, lat/lon of GNSS site - ref_site_lalo - tuple of 2 float, lat/lon of reference GNSS site + Returns: dates - 1D np.ndarray, datetime.datetime object + dis - 1D np.ndarray, displacement in meters """ # retrieve displacement data - dates, dis = self.read_gnss_los_displacement(geom_obj, - start_date=start_date, - end_date=end_date, - ref_site=ref_site, - gnss_comp=gnss_comp, - horz_az_angle=horz_az_angle)[:2] + dates, dis = self.get_los_displacement( + geom_obj, + start_date=start_date, + end_date=end_date, + ref_site=ref_site, + gnss_comp=gnss_comp, + horz_az_angle=horz_az_angle, + )[:2] # displacement -> velocity - # if: - # 1. num of observations > 2 AND - # 2. time overlap > 1/4 + # if 1. num of observations > 2 AND + # 2. time overlap > 1/4 dis2vel = True if len(dates) <= 2: dis2vel = False @@ -727,160 +729,175 @@ def get_gnss_los_velocity(self, geom_obj, start_date=None, end_date=None, self.velocity = np.dot(np.linalg.pinv(A), dis)[1] else: self.velocity = np.nan - if print_msg == True: + if print_msg: print(f'Velocity calculation failed for site {self.site:s}') return self.velocity, dis - -class UNR_GNSS(GNSS): - """GNSS class for daily solutions processed by UNR NGL. +class GNSS_UNR(GNSS): + """GNSS child class for daily solutions processed by Nevada Geodetic Lab + at University of Nevada, Reno (UNR). 
This object will assign the attributes: - site - str, four-digit site code - site_lat/lon - float - dates - 1D np.ndarray - date_list - list - dis_e/n/u - 1D np.ndarray - std_e,n,u - 1D np.ndarray - - based on the specific formats of the data source, using the functions: - dload_site - get_stat_lat_lon - read_displacement + site - str, four-digit site code + site_lat/lon - float + dates - 1D np.ndarray + date_list - list + dis_e/n/u - 1D np.ndarray + std_e,n,u - 1D np.ndarray + + Based on the specific formats of the data source, using the functions: + dload_site() + get_stat_lat_lon() + read_displacement() """ - source = 'UNR' + def __init__(self, site: str, data_dir=None, version='IGS14'): + super().__init__(site=site, data_dir=data_dir, version=version, source='UNR') + # get local file name + if version == 'IGS08': + self.file = os.path.join(self.data_dir, f'{self.site:s}.{version:s}.tenv3') + elif version == 'IGS14': + self.file = os.path.join(self.data_dir, f'{self.site:s}.tenv3') + else: + raise ValueError(f'Un-supported GNSS versoin: {version}!') - def dload_site(self, print_msg=True) -> str: + + def dload_site(self, overwrite=False, print_msg=True) -> str: """Download the station displacement data from the specified source. Modifies: self.file - str, local file path/name self.file_url - str, file URL Returns: self.file - str, local file path/name """ - if print_msg == True: - print(f"Downloading data for site {self.site:s} from UNR NGL source") + vprint = print if print_msg else lambda *args, **kwargs: None # URL and file name specs + # example link: http://geodesy.unr.edu/gps_timeseries/tenv3/IGS08/1LSU.IGS08.tenv3 + # http://geodesy.unr.edu/gps_timeseries/tenv3/IGS14/CASU.tenv3 url_prefix = 'http://geodesy.unr.edu/gps_timeseries/tenv3' - if self.version == 'IGS08': - self.file = os.path.join(self.data_dir, - '{site:s}.{version:s}.tenv3'.\ - format(site=self.site, version=self.version)) - elif self.version == 'IGS14': - self.file = os.path.join(self.data_dir, - '{site:s}.tenv3'.\ - format(site=self.site)) - self.file_url = os.path.join(url_prefix, self.version, - os.path.basename(self.file)) + self.file_url = os.path.join(url_prefix, self.version, os.path.basename(self.file)) # download file if not present - if os.path.exists(self.file): - if print_msg == True: - print(f'file {self.file:s} exists--reading') - else: - if print_msg == True: - print(f'... downloading {self.file_url:s} to {self.file:s}') - urlretrieve(self.file_url, self.file) #nosec + if overwrite or not os.path.isfile(self.file): + vprint(f"downloading site {self.site:s} from UNR NGL to {self.file:s}") + # urlretrieve(self.file_url, self.file) + # retry on download fail + # https://stackoverflow.com/questions/31529151 + total_tries = 3 + remain_tries = total_tries + while remain_tries > 0 : + try: + urlretrieve(self.file_url, self.file) + vprint(f'successfully downloaded: {self.file_url}') + time.sleep(0.1) + except: + vprint(f'error downloading {self.file_url} on trial no. {total_tries-remain_tries}') + remain_tries -= 1 + continue + else: + break return self.file - def get_stat_lat_lon(self, print_msg=True) -> (float, float): - """Get station lat/lon based on processing source. - Retrieve data from the displacement file. + + def get_stat_lat_lon(self) -> (float, float): + """Get station lat/lon from the displacement file. 
Modifies: self.lat/lon - float Returns: self.lat/lon - float """ - if print_msg == True: - print('calculating station lat/lon') - - data = np.loadtxt(self.file, dtype=bytes, skiprows=1) - self.site_lat, self.site_lon = data[0,20:22].astype(float) - - if print_msg == True: - print(f'\t{self.site_lat:f}, {self.site_lon:f}') + # download file if it does not exist + if not os.path.isfile(self.file): + self.dload_site(print_msg=print_msg) + data = np.loadtxt(self.file, dtype=bytes, skiprows=1, max_rows=10) + self.site_lat, self.site_lon = data[0, 20:22].astype(float) return self.site_lat, self.site_lon + def read_displacement(self, start_date=None, end_date=None, print_msg=True, display=False): """Read GNSS displacement time-series (defined by start/end_date) - Parameters: start/end_date - str, date in YYYYMMDD format - Returns: dates - 1D np.ndarray of datetime.datetime object - dis_e/n/u - 1D np.ndarray of displacement in meters in float32 - std_e/n/u - 1D np.ndarray of displacement STD in meters in float32 + Parameters: start_date - str, start date in YYYYMMDD format + end_date - str, end_date in YYYYMMDD format + Returns: dates - 1D np.ndarray in datetime.datetime object + dis_e/n/u - 1D np.ndarray in float32, displacement in meters + std_e/n/u - 1D np.ndarray in float32, displacement STD in meters """ + vprint = print if print_msg else lambda *args, **kwargs: None + # download file if it does not exist if not os.path.isfile(self.file): self.dload_site(print_msg=print_msg) # read dates, dis_e, dis_n, dis_u - if print_msg == True: - print('reading time and displacement in east/north/vertical direction') - - # read data from file - data = np.loadtxt(self.file, dtype=bytes, skiprows=1).astype(str) - - # Parse dates - self.dates = np.array([dt.datetime.strptime(i, "%y%b%d") \ - for i in data[:,1]]) - - # parse displacement data + vprint('reading time and displacement in east/north/vertical direction') + try: + fc = np.loadtxt(self.file, dtype=bytes, skiprows=1).astype(str) + except: + msg = 'Error occurred while reading, probably due to interuptions during previous downloading. ' + msg += 'Remove the file and re-download.' + print(msg) + self.dload_site(overwrite=True, print_msg=print_msg) + fc = np.loadtxt(self.file, dtype=bytes, skiprows=1).astype(str) + + self.dates = np.array([dt.datetime.strptime(x, "%y%b%d") for x in fc[:, 1]]) (self.dis_e, self.dis_n, self.dis_u, self.std_e, self.std_n, - self.std_u) = data[:, (8,10,12,14,15,16)].astype(np.float32).T + self.std_u) = fc[:, (8,10,12,14,15,16)].astype(np.float32).T # cut out the specified time range self.__crop_to_date_range__(start_date, end_date) # formulate date list - self.date_list = [date.strftime('%Y%m%d') for date in self.dates] + self.date_list = [x.strftime('%Y%m%d') for x in self.dates] # display if requested - if display == True: - self.display_data() + if display: + self.plot() return (self.dates, self.dis_e, self.dis_n, self.dis_u, self.std_e, self.std_n, self.std_u) -class ESESES_GNSS(GNSS): - """GNSS class for daily solutions processed by ESESES. +class GNSS_ESESES(GNSS): + """GNSS child class for daily solutions processed for the Enhanced Solid + Earth Science ESDR System (ESESES) project by JPL and SOPAC. 
This object will assign the attributes: - site - str, four-digit site code - site_lat/lon - float - dates - 1D np.ndarray - date_list - list - dis_e/n/u - 1D np.ndarray - std_e,n,u - 1D np.ndarray - - based on the specific formats of the data source, using the functions: - dload_site - get_stat_lat_lon - read_displacement + site - str, four-digit site code + site_lat/lon - float + dates - 1D np.ndarray + date_list - list + dis_e/n/u - 1D np.ndarray + std_e,n,u - 1D np.ndarray + + Based on the specific formats of the data source, using the functions: + dload_site() + get_stat_lat_lon() + read_displacement() """ source = 'ESESES' def dload_site(self, print_msg=True) -> str: - """Download the station displacement data from the - specified source. + """Download the station displacement data from the specified source. Modifies: self.file - str, local file path/name self.file_url - str, file URL Returns: self.file - str, local file path/name """ - if print_msg == True: + if print_msg: + print(f"downloading site {self.site:s} from UNR NGL to {self.file:s}") print(f'downloading data for site {self.site:s} from the ESESES source') # determine proper URL - url_fmt = 'http://garner.ucsd.edu/pub/measuresESESES_products/Timeseries/CurrentUntarred/Clean_TrendNeuTimeSeries_comb_{:s}' + url_fmt = 'http://garner.ucsd.edu/pub/measuresESESES_products/Timeseries' + url_fmt += '/CurrentUntarred/Clean_TrendNeuTimeSeries_comb_{:s}' # start with today and check back in time today = dt.date.today() @@ -909,7 +926,7 @@ def dload_site(self, print_msg=True) -> str: self.file_url = os.path.join(url_prefix, os.path.basename(self.file)) # download file if not present - if os.path.exists(self.file): + if os.path.isfile(self.file): if print_msg == True: print(f'file {self.file:s} exists--reading') else: @@ -928,7 +945,7 @@ def dload_site(self, print_msg=True) -> str: return self.file - def get_stat_lat_lon(self, print_msg=True) -> (float, float): + def get_stat_lat_lon(self) -> (float, float): """Get station lat/lon based on processing source. Retrieve data from the displacement file. @@ -952,8 +969,9 @@ def get_stat_lat_lon(self, print_msg=True) -> (float, float): lon_line = [line for line in lines \ if line.find('# East Longitude') != -1] lon_line = lon_line[0].strip('\n') - site_lon = float(lon_line.split()[-1]) - self.site_lon = self.lon_360to180(site_lon) + self.site_lon = float(lon_line.split()[-1]) + # ensure longitude in the range of (-180, 180] + self.site_lon -= 0 if self.site_lon <= 180 else 360 if print_msg == True: print(f'\t{self.site_lat:f}, {self.site_lon:f}') @@ -1002,14 +1020,14 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, # display if requested if display == True: - self.display_data() + self.plot() return (self.dates, self.dis_e, self.dis_n, self.dis_u, self.std_e, self.std_n, self.std_u) -class JPL_SIDESHOW_GNSS(GNSS): +class GNSS_JPL_SIDESHOW(GNSS): """GNSS class for daily solutions processed by JPL-SIDESHOW. 
This object will assign the attributes: @@ -1044,7 +1062,7 @@ def dload_site(self, print_msg=True) -> str: self.file_url = os.path.join(url_prefix, os.path.basename(self.file)) # download file if not present - if os.path.exists(self.file): + if os.path.isfile(self.file): if print_msg == True: print(f'file {self.file:s} exists--reading') else: @@ -1054,7 +1072,7 @@ def dload_site(self, print_msg=True) -> str: return self.file - def get_stat_lat_lon(self, print_msg=True) -> (float, float): + def get_stat_lat_lon(self) -> (float, float): """Get station lat/lon based on processing source. Retrieve data from the displacement file. @@ -1121,14 +1139,14 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, # display if requested if display == True: - self.display_data() + self.plot() return (self.dates, self.dis_e, self.dis_n, self.dis_u, self.std_e, self.std_n, self.std_u) -class GENERIC_GNSS(GNSS): +class GNSS_GENERIC(GNSS): """GNSS class for daily solutions of an otherwise-unsupported source. The user should format the station position data in a file called .dat The file should have seven space-separated columns: @@ -1184,7 +1202,7 @@ def dload_site(self, print_msg=True) -> str: return self.file - def get_stat_lat_lon(self, print_msg=True) -> (str, str): + def get_stat_lat_lon(self) -> (str, str): """Get station lat/lon based on processing source. Retrieve data from the site list file, which should be located in the current directory. @@ -1279,11 +1297,8 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, # display if requested if display == True: - self.display_data() + self.plot() return (self.dates, self.dis_e, self.dis_n, self.dis_u, self.std_e, self.std_n, self.std_u) - - -#################################### End of GNSS class #################################### diff --git a/src/mintpy/objects/insar_vs_gps.py b/src/mintpy/objects/insar_vs_gps.py index b0db7547a..848e3e868 100644 --- a/src/mintpy/objects/insar_vs_gps.py +++ b/src/mintpy/objects/insar_vs_gps.py @@ -107,10 +107,7 @@ def open(self): def read_gnss(self): # define GNSS station object based on processing source - if self.gnss_source == 'UNR': - GNSS = gnss.UNR_GNSS - elif self.gnss_source == 'ESESES': - GNSS = gnss.ESESES_GNSS + GNSS = gnss.get_gnss_class(self.gnss_source) # read data for each GNSS site for sname in self.site_names: @@ -121,7 +118,7 @@ def read_gnss(self): site['lat'] = gnss_obj.site_lat site['lon'] = gnss_obj.site_lon - dates, dis, dis_std = gnss_obj.read_gnss_los_displacement( + dates, dis, dis_std = gnss_obj.get_los_displacement( self.geom_file, start_date=self.start_date, end_date=self.end_date, diff --git a/src/mintpy/utils/plot.py b/src/mintpy/utils/plot.py index b323c1b84..0ec1a710c 100644 --- a/src/mintpy/utils/plot.py +++ b/src/mintpy/utils/plot.py @@ -1119,6 +1119,7 @@ def plot_gnss(ax, SNWE, inps, metadata=dict(), print_msg=True): metadata - dict, mintpy metadata Returns: ax - matplotlib.axes object """ + from pyproj import Geod from mintpy.objects import gnss vprint = print if print_msg else lambda *args, **kwargs: None @@ -1151,6 +1152,20 @@ def plot_gnss(ax, SNWE, inps, metadata=dict(), print_msg=True): print(' continue without GNSS plots.') return ax + # print the nearest GNSS to the current reference point + # to facilitate the --ref-gnss option setup + if inps.ref_lalo: + site_dist = Geod(ellps='WGS84').inv( + np.tile(inps.ref_lalo[1], site_names.size), + np.tile(inps.ref_lalo[0], site_names.size), + site_lons, + site_lats, + )[2] + n_ind = 
np.argmin(site_dist) + msg = 'nearest GNSS site (potential --ref-gnss choice): ' + msg += f'{site_names[n_ind]} at [{site_lats[n_ind]}, {site_lons[n_ind]}]' + print(msg) + # post-query: convert lat/lon to UTM for plotting if 'UTM_ZONE' in metadata.keys(): site_lats, site_lons = ut0.latlon2utm(metadata, site_lats, site_lons) @@ -1195,7 +1210,7 @@ def plot_gnss(ax, SNWE, inps, metadata=dict(), print_msg=True): # save absolute value to support both spatially relative and absolute comparison # without compromising the re-usability of the CSV file obs_type = 'velocity' if k == 'velocity' else 'displacement' - site_obs = gnss.get_gnss_los_obs( + site_obs = gnss.get_los_obs( meta=metadata, obs_type=obs_type, site_names=site_names, @@ -1288,7 +1303,7 @@ def plot_insar_vs_gnss_scatter(vel_file, csv_file='gnss_enu2los.csv', msk_file=N disp_unit = 'cm/yr' unit_fac = 100. - # read GNSS velocity from CSV file (generated by gnss.get_gnss_los_obs()) + # read GNSS velocity from CSV file (generated by gnss.get_los_obs()) col_names = ['Site', 'Lon', 'Lat', 'Displacement', 'Velocity'] num_col = len(col_names) col_types = ['U10'] + ['f8'] * (num_col - 1) diff --git a/src/mintpy/view.py b/src/mintpy/view.py index b9c24951e..7c39e68bb 100644 --- a/src/mintpy/view.py +++ b/src/mintpy/view.py @@ -546,8 +546,7 @@ def extent2meshgrid(extent: tuple, ds_shape: list): coord = ut.coordinate(metadata) if inps.disp_gnss and inps.gnss_component and inps.ref_gnss_site: gnss_obj = gnss.get_gnss_class(inps.gnss_source)(site=inps.ref_gnss_site) - gnss_obj.open(print_msg=False) - ref_site_lalo = gnss_obj.get_stat_lat_lon(print_msg=False) + ref_site_lalo = gnss_obj.get_stat_lat_lon() y, x = coord.geo2radar(ref_site_lalo[0], ref_site_lalo[1])[0:2] ref_data = data[y - inps.pix_box[1], x - inps.pix_box[0]] data -= ref_data From de77ca68b7c632651c98740d9b003e5c3c70f0ce Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Sun, 21 Apr 2024 17:22:36 +0800 Subject: [PATCH 29/44] use site instead of site and stat(ion) + rename get_stat_lat_lon() to get_site_lat_lon() + rename read_*_station_list() to read_*_site_list() --- src/mintpy/objects/gnss.py | 86 +++++++++++++++++++------------------- src/mintpy/view.py | 2 +- 2 files changed, 44 insertions(+), 44 deletions(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index c1b6bf476..3a8f847be 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -62,13 +62,13 @@ def search_gnss(SNWE, start_date=None, end_date=None, source='UNR', site_list_fi # read site_list_file if source == 'UNR': - sites = read_UNR_station_list(site_list_file) + sites = read_UNR_site_list(site_list_file) elif source == 'ESESES': - sites = read_ESESES_station_list(site_list_file) + sites = read_ESESES_site_list(site_list_file) elif source == 'JPL-SIDESHOW': - sites = read_JPL_SIDESHOW_station_list(site_list_file) + sites = read_JPL_SIDESHOW_site_list(site_list_file) elif source == 'GENERIC': - sites = read_GENERIC_station_list(site_list_file) + sites = read_GENERIC_site_list(site_list_file) # ensure that site data formatting is consistent sites['site'] = np.array([site.upper() for site in sites['site']]) @@ -123,7 +123,7 @@ def dload_site_list(out_file=None, source='UNR', print_msg=True) -> str: return out_file -def read_UNR_station_list(site_list_file:str): +def read_UNR_site_list(site_list_file:str): """Return names and lon/lat values for UNR GNSS stations. 
""" fc = np.loadtxt(site_list_file, dtype=str, skiprows=1, usecols=(0,1,2,3,4,5,6,7,8,9,10)) @@ -143,7 +143,7 @@ def read_UNR_station_list(site_list_file:str): return sites -def read_ESESES_station_list(site_list_file:str): +def read_ESESES_site_list(site_list_file:str): """Return names and lon/lat values for JPL GNSS stations. """ fc = np.loadtxt(site_list_file, skiprows=17, dtype=str) @@ -155,7 +155,7 @@ def read_ESESES_station_list(site_list_file:str): return sites -def read_JPL_SIDESHOW_station_list(site_list_file:str): +def read_JPL_SIDESHOW_site_list(site_list_file:str): """Return names and lon/lat values for JPL-SIDESHOW GNSS stations. """ fc = np.loadtxt(site_list_file, comments='<', skiprows=9, dtype=str) @@ -167,7 +167,7 @@ def read_JPL_SIDESHOW_station_list(site_list_file:str): return sites -def read_GENERIC_station_list(site_list_file:str): +def read_GENERIC_site_list(site_list_file:str): """Return names and lon/lat values for GNSS stations processed by an otherwise-unsupported source. @@ -452,7 +452,7 @@ def open(self, file=None, print_msg=True): self.dload_site(print_msg=print_msg) # retrieve data from file - self.get_stat_lat_lon() + self.get_site_lat_lon() self.read_displacement(print_msg=print_msg) @@ -460,11 +460,11 @@ def dload_site(self, print_msg=True): """Download GNSS site data file.""" raise NotImplementedError('dload_site() is NOT implemented. Override with child class.') - def get_stat_lat_lon(self, print_msg=True): + def get_site_lat_lon(self, print_msg=False): """Get the GNSS site latitude & longitude into: Returns: site_lat/lon - float, site latitude/longitude in degree """ - raise NotImplementedError('get_stat_lat_lon() is NOT implemented. Override with child class.') + raise NotImplementedError('get_site_lat_lon() is NOT implemented. 
Override with child class.') def read_displacement(self, start_date=None, end_date=None, print_msg=True, display=False): """Get the GNSS time/displacement(Std) into: @@ -604,7 +604,7 @@ def displacement_enu2los(self, inc_angle:float, az_angle:float, gnss_comp='enu2l def get_los_geometry(self, geom_obj, print_msg=False): """Get the Line-of-Sight geometry info in incidence and azimuth angle in degrees.""" - lat, lon = self.get_stat_lat_lon() + lat, lon = self.get_site_lat_lon() # get LOS geometry if isinstance(geom_obj, str): @@ -647,7 +647,7 @@ def get_los_displacement(self, geom_obj, start_date=None, end_date=None, ref_sit ref_site_lalo - tuple of 2 float, lat/lon of reference GNSS site """ # read GNSS object - site_lalo = self.get_stat_lat_lon() + site_lalo = self.get_site_lat_lon() dates = self.read_displacement(start_date, end_date, print_msg=print_msg)[0] inc_angle, az_angle = self.get_los_geometry(geom_obj) dis, std = self.displacement_enu2los( @@ -659,7 +659,7 @@ def get_los_displacement(self, geom_obj, start_date=None, end_date=None, ref_sit # get LOS displacement relative to another GNSS site if ref_site: ref_obj = get_gnss_class(self.source)(site=ref_site, data_dir=self.data_dir) - ref_site_lalo = ref_obj.get_stat_lat_lon() + ref_site_lalo = ref_obj.get_site_lat_lon() ref_obj.read_displacement(start_date, end_date, print_msg=print_msg) inc_angle, az_angle = ref_obj.get_los_geometry(geom_obj) ref_obj.displacement_enu2los( @@ -749,7 +749,7 @@ class GNSS_UNR(GNSS): Based on the specific formats of the data source, using the functions: dload_site() - get_stat_lat_lon() + get_site_lat_lon() read_displacement() """ def __init__(self, site: str, data_dir=None, version='IGS14'): @@ -801,7 +801,7 @@ def dload_site(self, overwrite=False, print_msg=True) -> str: return self.file - def get_stat_lat_lon(self) -> (float, float): + def get_site_lat_lon(self, print_msg=False) -> (float, float): """Get station lat/lon from the displacement file. Modifies: self.lat/lon - float @@ -879,7 +879,7 @@ class GNSS_ESESES(GNSS): Based on the specific formats of the data source, using the functions: dload_site() - get_stat_lat_lon() + get_site_lat_lon() read_displacement() """ source = 'ESESES' @@ -945,7 +945,7 @@ def dload_site(self, print_msg=True) -> str: return self.file - def get_stat_lat_lon(self) -> (float, float): + def get_site_lat_lon(self, print_msg=False) -> (float, float): """Get station lat/lon based on processing source. Retrieve data from the displacement file. @@ -1031,17 +1031,17 @@ class GNSS_JPL_SIDESHOW(GNSS): """GNSS class for daily solutions processed by JPL-SIDESHOW. This object will assign the attributes: - site - str, four-digit site code - site_lat/lon - float - dates - 1D np.ndarray - date_list - list - dis_e/n/u - 1D np.ndarray - std_e,n,u - 1D np.ndarray - - based on the specific formats of the data source, using the functions: - dload_site - get_stat_lat_lon - read_displacement + site - str, four-digit site code + site_lat/lon - float + dates - 1D np.ndarray + date_list - list + dis_e/n/u - 1D np.ndarray + std_e,n,u - 1D np.ndarray + + Based on the specific formats of the data source, using the functions: + dload_site() + get_site_lat_lon() + read_displacement() """ source = 'JPL-SIDESHOW' @@ -1072,14 +1072,14 @@ def dload_site(self, print_msg=True) -> str: return self.file - def get_stat_lat_lon(self) -> (float, float): + def get_site_lat_lon(self, print_msg=False) -> (float, float): """Get station lat/lon based on processing source. Retrieve data from the displacement file. 
Modifies: self.lat/lon - float Returns: self.lat/lon - float """ - if print_msg == True: + if print_msg: print('calculating station lat/lon') # need to refer to the site list @@ -1161,17 +1161,17 @@ class GNSS_GENERIC(GNSS): for the GenericList.txt file are given above. This object will assign the attributes: - site - str, four-digit site code - site_lat/lon - float - dates - 1D np.ndarray - date_list - list - dis_e/n/u - 1D np.ndarray - std_e,n,u - 1D np.ndarray - - based on the specific formats of the data source, using the functions: - dload_site - get_stat_lat_lon - read_displacement + site - str, four-digit site code + site_lat/lon - float + dates - 1D np.ndarray + date_list - list + dis_e/n/u - 1D np.ndarray + std_e,n,u - 1D np.ndarray + + Based on the specific formats of the data source, using the functions: + dload_site() + get_site_lat_lon() + read_displacement() """ source = 'GENERIC' @@ -1202,7 +1202,7 @@ def dload_site(self, print_msg=True) -> str: return self.file - def get_stat_lat_lon(self) -> (str, str): + def get_site_lat_lon(self) -> (str, str): """Get station lat/lon based on processing source. Retrieve data from the site list file, which should be located in the current directory. diff --git a/src/mintpy/view.py b/src/mintpy/view.py index 7c39e68bb..cfa566631 100644 --- a/src/mintpy/view.py +++ b/src/mintpy/view.py @@ -546,7 +546,7 @@ def extent2meshgrid(extent: tuple, ds_shape: list): coord = ut.coordinate(metadata) if inps.disp_gnss and inps.gnss_component and inps.ref_gnss_site: gnss_obj = gnss.get_gnss_class(inps.gnss_source)(site=inps.ref_gnss_site) - ref_site_lalo = gnss_obj.get_stat_lat_lon() + ref_site_lalo = gnss_obj.get_site_lat_lon() y, x = coord.geo2radar(ref_site_lalo[0], ref_site_lalo[1])[0:2] ref_data = data[y - inps.pix_box[1], x - inps.pix_box[0]] data -= ref_data From 9bd488babcf75e1c9c9a5652c22a292a93a68e8a Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Mon, 22 Apr 2024 00:18:43 +0800 Subject: [PATCH 30/44] refactor GNSS_ESESES/JPL-SIDESHOW/GENERIC [WIP] + objects.gnss.py - get_los_obs(): use different CSV file name for different sources - add get_ESESES_url_prefix() to speedup the ESESES downloading / calculation - rename file_url to url for member var - add a new member var: url_prefix - move dload_site() to the parent class for better reuse. 
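  A rough sketch of the shared bounded-retry download step referred to above,
  using generic names (url, out_file); the example URL is taken from the UNR
  hunks below, and the real parent-class method additionally handles the
  ESESES *.Z extraction:

      from urllib.request import urlretrieve

      url = 'http://geodesy.unr.edu/gps_timeseries/tenv3/IGS14/CASU.tenv3'
      out_file = 'CASU.tenv3'
      remain_tries = total_tries = 5        # default number of tries in dload_site()
      while remain_tries > 0:
          try:
              urlretrieve(url, out_file)    # may raise on network / HTTP errors
          except Exception:
              remain_tries -= 1             # count the failed try, then retry
              continue
          else:
              break                         # success: stop retrying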
+ utils.plot.plot_gnss(): specify the missing gnss_source arg + utils.ptime.get_date_str_format(): add YYYYMMDD:HHMMSS format --- src/mintpy/objects/gnss.py | 530 +++++++++++++++---------------------- src/mintpy/utils/plot.py | 1 + src/mintpy/utils/ptime.py | 3 + 3 files changed, 220 insertions(+), 314 deletions(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index 3a8f847be..a952e3883 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -73,7 +73,7 @@ def search_gnss(SNWE, start_date=None, end_date=None, source='UNR', site_list_fi # ensure that site data formatting is consistent sites['site'] = np.array([site.upper() for site in sites['site']]) sites['lon'][sites['lon'] > 180] -= 360 # ensure lon values in (-180, 180] - vprint(f'load {len(sites["site"]):d} sites with fields: {" ".join(sites.keys())}') + vprint(f'load {len(sites["site"]):d} GNSS sites with fields: {" ".join(sites.keys())}') # limit in space idx = ((sites['lat'] >= SNWE[0]) * (sites['lat'] <= SNWE[1]) * @@ -231,7 +231,7 @@ def get_los_obs(meta, obs_type, site_names, start_date, end_date, source='UNR', file_dir = os.path.dirname(meta['FILE_PATH']) csv_file = os.path.join(file_dir, f'gnss_{gnss_comp:s}') csv_file += f'{horz_az_angle:.0f}' if gnss_comp == 'horz' else '' - csv_file += '.csv' + csv_file += f'_{source.upper()}.csv' col_names = ['Site', 'Lon', 'Lat', 'Displacement', 'Velocity'] col_types = ['U10'] + ['f8'] * (len(col_names) - 1) vprint(f'default GNSS observation file name: {csv_file:s}') @@ -271,20 +271,21 @@ def get_los_obs(meta, obs_type, site_names, start_date, end_date, source='UNR', work_dir=file_dir, coord='geo') if geom_file: geom_obj = geom_file - vprint('use incidence / azimuth angle from file: {}'.\ - format(os.path.basename(geom_file))) + vprint(f'use incidence / azimuth angle from file: {os.path.basename(geom_file)}') else: geom_obj = meta vprint('use incidence / azimuth angle from metadata') + # get url_prefix [to speed up downloading for ESESES] + url_prefix = get_ESESES_url_prefix() if source == 'ESESES' else None + # loop for calculation prog_bar = ptime.progressBar(maxValue=num_site, print_msg=print_msg) for i, site_name in enumerate(site_names): prog_bar.update(i+1, suffix=f'{i+1}/{num_site} {site_name:s}') # calculate GNSS data value - gnss_obj = get_gnss_class(source)(site_name) - gnss_obj.open(print_msg=False) + gnss_obj = get_gnss_class(source)(site_name, url_prefix=url_prefix) vel, dis_ts = gnss_obj.get_los_velocity( geom_obj, start_date=start_date, @@ -356,6 +357,41 @@ def get_gnss_class(source:str): raise ValueError(f'GNSS source {source:s} is NOT supported!') +def get_ESESES_url_prefix(): + """Get the url prefix for the ESESES source, which updates regularly. + [Poor design of ESESES website]. 
+ """ + print('searching for ESESES url_prefix ...') + # url prefix format + url_fmt = 'http://garner.ucsd.edu/pub/measuresESESES_products/Timeseries' + url_fmt += '/CurrentUntarred/Clean_TrendNeuTimeSeries_comb_{:s}' + + # start with today and check back in time + today = dt.date.today() + max_day = 21 + num_day = 0 + while num_day < max_day: + # formulate URL based on date + day_str = (today - dt.timedelta(days=num_day)).strftime('%Y%m%d') + url_prefix = url_fmt.format(day_str) + + # check if page exists + try: + urlopen(url_prefix) + print(f'{url_prefix} [YES!]') + except: + if num_day == max_day - 1: + raise FileNotFoundError(f'The ESESES repository {url_fmt} CANNOT be found!') + else: + num_day += 1 + print(f'{url_prefix} [no]') + continue + else: + break + + return url_prefix + + #################################### GNSS-GSI utility functions ##################################### @@ -420,19 +456,36 @@ class GNSS: below, support functions for downloading and parsing GNSS position based on the processing source (e.g., UNR, etc.). Use the `get_gnss_class` method to determine appropriate child class. + + The parent class/object will assign the following attributes: + source - str, GNSS solution source + version - str, GNSS solution version + url_prefix - str, GNSS data file url prefix + + The chile class/object will assign the following attributes: + file - str, path of the local data file + url - str, path of the remote data file + site - str, four-digit site code + site_lat/lon - float, site latitude/longitude in degree + dates - 1D np.ndarray, dt.datetime object + date_list - list(str), dates in YYYYMMDD format + dis_e/n/u - 1D np.ndarray, displacement in meters + std_e,n,u - 1D np.ndarray, displacement STD in meters """ - def __init__(self, site: str, data_dir=None, version='IGS14', source='UNR'): + def __init__(self, site: str, data_dir=None, version='IGS14', source='UNR', url_prefix=None): # site info self.site = site self.source = source self.version = version + self.url_prefix = url_prefix + self.url = None - # site info [local] + # local file info self.data_dir = self.__format_data_dir__(data_dir) self.file = None - # variables to be filled by child classes + # displacement data self.dates = None self.date_list = None self.dis_e = None @@ -456,9 +509,44 @@ def open(self, file=None, print_msg=True): self.read_displacement(print_msg=print_msg) - def dload_site(self, print_msg=True): - """Download GNSS site data file.""" - raise NotImplementedError('dload_site() is NOT implemented. Override with child class.') + def dload_site(self, overwrite=False, total_tries=5, print_msg=True): + """Download GNSS site data file. + + Parameters: overwrite - bool, overwrite existing data file + total_tries - int, number of tries to download if failed + print_msg - bool, verbose print out msg + Returns: self.file - str, path to the local data file + """ + vprint = print if print_msg else lambda *args, **kwargs: None + + # download + if self.url and overwrite or not os.path.isfile(self.file): + vprint(f"downloading site {self.site:s} from {self.source} to {self.file:s}") + # retry on download fail + # https://stackoverflow.com/questions/31529151 + remain_tries = total_tries + while remain_tries > 0 : + try: + urlretrieve(self.url, self.file) + vprint(f'successfully downloaded: {self.url}') + except: + vprint(f'error downloading {self.url} on trial no. 
{total_tries-remain_tries}') + remain_tries -= 1 + continue + else: + break + + # uncompress the downloaded *.z file [for ESESES only] + if self.source == 'ESESES' and self.file.endswith('.Z'): + with zipfile.ZipFile(self.file, 'r') as fz: + fz.extractall(self.data_dir) + + # update file name + self.file = self.file.strip('.Z') + vprint(f'... extracted to {self.file:s}') + + return self.file + def get_site_lat_lon(self, print_msg=False): """Get the GNSS site latitude & longitude into: @@ -484,7 +572,7 @@ def __format_data_dir__(self, data_dir) -> str: """ # format data directory name based on processing source if data_dir is None: - data_dir = f'GNSS-{self.source:s}' + data_dir = f'GNSS-{self.source.upper():s}' data_dir = os.path.abspath(data_dir) # ensure directory exists @@ -739,22 +827,18 @@ class GNSS_UNR(GNSS): """GNSS child class for daily solutions processed by Nevada Geodetic Lab at University of Nevada, Reno (UNR). - This object will assign the attributes: - site - str, four-digit site code - site_lat/lon - float - dates - 1D np.ndarray - date_list - list - dis_e/n/u - 1D np.ndarray - std_e,n,u - 1D np.ndarray - - Based on the specific formats of the data source, using the functions: - dload_site() - get_site_lat_lon() - read_displacement() + Website: http://geodesy.unr.edu/NGLStationPages/GlobalStationList """ - def __init__(self, site: str, data_dir=None, version='IGS14'): - super().__init__(site=site, data_dir=data_dir, version=version, source='UNR') - # get local file name + def __init__(self, site: str, data_dir=None, version='IGS14', url_prefix=None): + super().__init__( + site=site, + data_dir=data_dir, + version=version, + source='UNR', + url_prefix=url_prefix, + ) + + # get file if version == 'IGS08': self.file = os.path.join(self.data_dir, f'{self.site:s}.{version:s}.tenv3') elif version == 'IGS14': @@ -762,43 +846,12 @@ def __init__(self, site: str, data_dir=None, version='IGS14'): else: raise ValueError(f'Un-supported GNSS versoin: {version}!') - - def dload_site(self, overwrite=False, print_msg=True) -> str: - """Download the station displacement data from the specified source. - - Modifies: self.file - str, local file path/name - self.file_url - str, file URL - Returns: self.file - str, local file path/name - """ - vprint = print if print_msg else lambda *args, **kwargs: None - - # URL and file name specs - # example link: http://geodesy.unr.edu/gps_timeseries/tenv3/IGS08/1LSU.IGS08.tenv3 - # http://geodesy.unr.edu/gps_timeseries/tenv3/IGS14/CASU.tenv3 - url_prefix = 'http://geodesy.unr.edu/gps_timeseries/tenv3' - self.file_url = os.path.join(url_prefix, self.version, os.path.basename(self.file)) - - # download file if not present - if overwrite or not os.path.isfile(self.file): - vprint(f"downloading site {self.site:s} from UNR NGL to {self.file:s}") - # urlretrieve(self.file_url, self.file) - # retry on download fail - # https://stackoverflow.com/questions/31529151 - total_tries = 3 - remain_tries = total_tries - while remain_tries > 0 : - try: - urlretrieve(self.file_url, self.file) - vprint(f'successfully downloaded: {self.file_url}') - time.sleep(0.1) - except: - vprint(f'error downloading {self.file_url} on trial no. 
{total_tries-remain_tries}') - remain_tries -= 1 - continue - else: - break - - return self.file + # get url + # examples: http://geodesy.unr.edu/gps_timeseries/tenv3/IGS08/1LSU.IGS08.tenv3 + # http://geodesy.unr.edu/gps_timeseries/tenv3/IGS14/CASU.tenv3 + if not self.url_prefix: + self.url_prefix = f'http://geodesy.unr.edu/gps_timeseries/tenv3/{self.version}' + self.url = os.path.join(self.url_prefix, os.path.basename(self.file)) def get_site_lat_lon(self, print_msg=False) -> (float, float): @@ -816,8 +869,7 @@ def get_site_lat_lon(self, print_msg=False) -> (float, float): return self.site_lat, self.site_lon - def read_displacement(self, start_date=None, end_date=None, print_msg=True, - display=False): + def read_displacement(self, start_date=None, end_date=None, print_msg=True, display=False): """Read GNSS displacement time-series (defined by start/end_date) Parameters: start_date - str, start date in YYYYMMDD format end_date - str, end_date in YYYYMMDD format @@ -869,81 +921,26 @@ class GNSS_ESESES(GNSS): """GNSS child class for daily solutions processed for the Enhanced Solid Earth Science ESDR System (ESESES) project by JPL and SOPAC. - This object will assign the attributes: - site - str, four-digit site code - site_lat/lon - float - dates - 1D np.ndarray - date_list - list - dis_e/n/u - 1D np.ndarray - std_e,n,u - 1D np.ndarray - - Based on the specific formats of the data source, using the functions: - dload_site() - get_site_lat_lon() - read_displacement() + Website: https://cddis.nasa.gov/Data_and_Derived_Products/GNSS/ESESES_products.html + http://garner.ucsd.edu/pub/measuresESESES_products/ """ - source = 'ESESES' - - def dload_site(self, print_msg=True) -> str: - """Download the station displacement data from the specified source. - - Modifies: self.file - str, local file path/name - self.file_url - str, file URL - Returns: self.file - str, local file path/name - """ - if print_msg: - print(f"downloading site {self.site:s} from UNR NGL to {self.file:s}") - print(f'downloading data for site {self.site:s} from the ESESES source') - - # determine proper URL - url_fmt = 'http://garner.ucsd.edu/pub/measuresESESES_products/Timeseries' - url_fmt += '/CurrentUntarred/Clean_TrendNeuTimeSeries_comb_{:s}' - - # start with today and check back in time - today = dt.date.today() - day_lim = 21 - for days in range(day_lim): - # formulate "days ago" - days_ago = dt.timedelta(days=days) - - # formulate URL based on date - url_prefix = url_fmt.format((today - days_ago).strftime('%Y%m%d')) - - # check if page exists - try: - urlopen(url_prefix) #nosec - break - except Exception: - if days_ago.days == (day_lim - 1): - raise FileNotFoundError('The ESESES source repository cannot be found.') - else: - pass - - # file name and full url - self.file = os.path.join(self.data_dir, - '{site:s}CleanTrend.neu.Z'.\ - format(site=self.site.lower())) - self.file_url = os.path.join(url_prefix, os.path.basename(self.file)) - - # download file if not present - if os.path.isfile(self.file): - if print_msg == True: - print(f'file {self.file:s} exists--reading') - else: - if print_msg == True: - print(f'... 
downloading {self.file_url:s} to {self.file:s}') - urlretrieve(self.file_url, self.file) #nosec + def __init__(self, site: str, data_dir=None, version='IGS14', url_prefix=None): + super().__init__( + site=site, + data_dir=data_dir, + version=version, + source='ESESES', + url_prefix=url_prefix, + ) - # unzip file - with zipfile.ZipFile(self.file, 'r') as Zfile: - Zfile.extractall(self.data_dir) + # get file + self.file = os.path.join(self.data_dir, f'{self.site.lower():s}CleanTrend.neu.Z') - # update file name - self.file = self.file.strip('.Z') - if print_msg == True: - print(f'... extracted to {self.file:s}') + # get url + if not self.url_prefix: + self.url_prefix = get_ESESES_url_prefix() + self.url = os.path.join(self.url_prefix, os.path.basename(self.file)) - return self.file def get_site_lat_lon(self, print_msg=False) -> (float, float): """Get station lat/lon based on processing source. @@ -952,56 +949,56 @@ def get_site_lat_lon(self, print_msg=False) -> (float, float): Modifies: self.lat/lon - float Returns: self.lat/lon - float """ - if print_msg == True: - print('calculating station lat/lon') + # download file if it does not exist + if not os.path.isfile(self.file): + self.dload_site(print_msg=print_msg) - with open(self.file) as data_file: - # Read raw file contents - lines = data_file.readlines() + # use the uncompressed data file + if self.file.endswith('.Z'): + self.file = self.file[:-2] - # Determine reference latitude - lat_line = [line for line in lines \ - if line.find('# Latitude') != -1] - lat_line = lat_line[0].strip('\n') + with open(self.file) as f: + lines = f.readlines() + + # latitude + lat_line = [x for x in lines if x.startswith('# Latitude')][0].strip('\n') self.site_lat = float(lat_line.split()[-1]) - # Determine reference longitude - lon_line = [line for line in lines \ - if line.find('# East Longitude') != -1] - lon_line = lon_line[0].strip('\n') + # longitude + lon_line = [x for x in lines if x.startswith('# East Longitude')][0].strip('\n') self.site_lon = float(lon_line.split()[-1]) # ensure longitude in the range of (-180, 180] self.site_lon -= 0 if self.site_lon <= 180 else 360 - if print_msg == True: - print(f'\t{self.site_lat:f}, {self.site_lon:f}') - return self.site_lat, self.site_lon - def read_displacement(self, start_date=None, end_date=None, print_msg=True, - display=False): - """Read GNSS displacement time-series (defined by start/end_date) + + def read_displacement(self, start_date=None, end_date=None, print_msg=True, display=False): + """Read GNSS displacement time-series (defined by start/end_date). 
+ Parameters: start/end_date - str, date in YYYYMMDD format Returns: dates - 1D np.ndarray of datetime.datetime object dis_e/n/u - 1D np.ndarray of displacement in meters in float32 std_e/n/u - 1D np.ndarray of displacement STD in meters in float32 """ + vprint = print if print_msg else lambda *args, **kwargs: None + # download file if it does not exist if not os.path.isfile(self.file): self.dload_site(print_msg=print_msg) - # read dates, dis_e, dis_n, dis_u - if print_msg == True: - print('reading time and displacement in east/north/vertical direction') + # use the uncompressed data file + if self.file.endswith('.Z'): + self.file = self.file[:-2] - # read data from file - data = np.loadtxt(self.file, usecols=tuple(range(0,12))) - n_data = data.shape[0] + # read data file + vprint('reading time and displacement in east/north/vertical direction') + fc = np.loadtxt(self.file, usecols=tuple(range(0,12))) + num_solution = fc.shape[0] # parse dates - dates = [dt.datetime(int(data[i,1]), 1, 1) \ - + dt.timedelta(days=int(data[i,2])) \ - for i in range(n_data)] + dates = [dt.datetime(int(fc[i, 1]), 1, 1) + dt.timedelta(days=int(fc[i, 2])) + for i in range(num_solution)] self.dates = np.array(dates) # parse displacement data @@ -1010,7 +1007,7 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, self.dis_u, self.std_n, self.std_e, - self.std_u) = data[:, 3:9].astype(np.float32).T / 1000 + self.std_u) = fc[:, 3:9].astype(np.float32).T / 1000 # cut out the specified time range self.__crop_to_date_range__(start_date, end_date) @@ -1019,7 +1016,7 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, self.date_list = [date.strftime('%Y%m%d') for date in self.dates] # display if requested - if display == True: + if display: self.plot() return (self.dates, @@ -1030,47 +1027,25 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, class GNSS_JPL_SIDESHOW(GNSS): """GNSS class for daily solutions processed by JPL-SIDESHOW. - This object will assign the attributes: - site - str, four-digit site code - site_lat/lon - float - dates - 1D np.ndarray - date_list - list - dis_e/n/u - 1D np.ndarray - std_e,n,u - 1D np.ndarray - - Based on the specific formats of the data source, using the functions: - dload_site() - get_site_lat_lon() - read_displacement() + Website: """ - source = 'JPL-SIDESHOW' - - def dload_site(self, print_msg=True) -> str: - """Download the station displacement data from the - specified source. - - Modifies: self.file - str, local file path/name - self.file_url - str, file URL - Returns: self.file - str, local file path/name - """ - if print_msg == True: - print(f'downloading data for site {self.site:s} from the JPL-SIDESHOW source') + def __init__(self, site: str, data_dir=None, version='IGS14', url_prefix=None): + super().__init__( + site=site, + data_dir=data_dir, + version=version, + source='JPL-SIDESHOW', + url_prefix=url_prefix, + ) - # URL and file name specs - url_prefix = 'https://sideshow.jpl.nasa.gov/pub/JPL_GPS_Timeseries/repro2018a/post/point/' + # get file self.file = os.path.join(self.data_dir, f'{self.site:s}.series') - self.file_url = os.path.join(url_prefix, os.path.basename(self.file)) - # download file if not present - if os.path.isfile(self.file): - if print_msg == True: - print(f'file {self.file:s} exists--reading') - else: - if print_msg == True: - print(f'... 
downloading {self.file_url:s} to {self.file:s}') - urlretrieve(self.file_url, self.file) #nosec + # get url + if not self.url_prefix: + self.url_prefix = 'https://sideshow.jpl.nasa.gov/pub/JPL_GPS_Timeseries/repro2018a/post/point' + self.url = os.path.join(self.url_prefix, os.path.basename(self.file)) - return self.file def get_site_lat_lon(self, print_msg=False) -> (float, float): """Get station lat/lon based on processing source. @@ -1079,9 +1054,6 @@ def get_site_lat_lon(self, print_msg=False) -> (float, float): Modifies: self.lat/lon - float Returns: self.lat/lon - float """ - if print_msg: - print('calculating station lat/lon') - # need to refer to the site list site_list_file = os.path.basename(GNSS_SITE_LIST_URLS['JPL-SIDESHOW']) @@ -1100,8 +1072,8 @@ def get_site_lat_lon(self, print_msg=False) -> (float, float): return self.site_lat, self.site_lon - def read_displacement(self, start_date=None, end_date=None, print_msg=True, - display=False): + + def read_displacement(self, start_date=None, end_date=None, print_msg=True, display=False): """Read GNSS displacement time-series (defined by start/end_date) Parameters: start/end_date - str, date in YYYYMMDD format Returns: dates - 1D np.ndarray of datetime.datetime object @@ -1129,7 +1101,7 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, self.dis_u, self.std_e, self.std_n, - self.std_u) = data[:, 1:7].astype(float).T + self.std_u) = data[:, 1:7].astype(np.float32).T # cut out the specified time range self.__crop_to_date_range__(start_date, end_date) @@ -1148,106 +1120,51 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, class GNSS_GENERIC(GNSS): """GNSS class for daily solutions of an otherwise-unsupported source. - The user should format the station position data in a file called - .dat The file should have seven space-separated columns: - - date dis_e dis_n dis_u std_e std_n std_u - - where date is in the format or T; and - displacement values are in meters. - For the generic type, it is necessary to have an accompanying file with - the site reference coordinates in the current folder. The specifications - for the GenericList.txt file are given above. + Required local files: + 1. GenericList.txt: site list file, with the following 3, 9 or 11 + space-separated columns: - This object will assign the attributes: - site - str, four-digit site code - site_lat/lon - float - dates - 1D np.ndarray - date_list - list - dis_e/n/u - 1D np.ndarray - std_e,n,u - 1D np.ndarray - - Based on the specific formats of the data source, using the functions: - dload_site() - get_site_lat_lon() - read_displacement() - """ - source = 'GENERIC' + SITE lat lon [vel_e vel_n vel_u err_e err_n err_u] [start_date end_date] - def dload_site(self, print_msg=True) -> str: - """Read displacement data from a GENERIC the station file. - In this case, the site data must already be downloaded and located in - the directory specified on instantiation (e.g., GNSS-GENERIC). - The file name convention should be: + where site is the four-digit, alphanumeric (uppercase) site code; and + lat/lon are in decimal degrees. If included, vel should be in units of + m/yr; and dates should be in format YYYYMMDD. - .txt + 2. .txt: data file for each site, with 7 space-separated columns: - where the site name is in all caps. 
+ date dis_e dis_n dis_u std_e std_n std_u - Modifies: self.file - str, local file path/name - self.file_url - str, file URL - Returns: self.file - str, local file path/name - """ - if print_msg == True: - print(f'reading data for site {self.site:s}') + where date is in the format or T or :, + displacement values are in meters. + """ + def __init__(self, site: str, data_dir=None, version='IGS14', url_prefix=None): + super().__init__( + site=site, + data_dir=data_dir, + version=version, + source='GENERIC', + url_prefix=url_prefix, + ) - # URL and file name specs + # get file self.file = os.path.join(self.data_dir, f'{self.site:s}.txt') - self.file_url = '' - # download file if not present - if print_msg == True: - print(f'reading file {self.file:s}') + # get url + self.url_prefix = '' + self.url = '' - return self.file - def get_site_lat_lon(self) -> (str, str): + def get_site_lat_lon(self, print_msg=False) -> (str, str): """Get station lat/lon based on processing source. - Retrieve data from the site list file, which should be located in the - current directory. - The file should be called "GenericList.txt" and should consist of - three columns: - - - - where site is a four-digit site code in all caps. Lat/lon should be in - decimal degrees. - - Modifies: self.lat/lon - str - Returns: self.lat/lon - str """ - if print_msg == True: - print('calculating station lat/lon') - - # need to refer to the site list - site_list_file = 'GenericList.txt' - - # find site in site list file - with open(site_list_file) as site_list: - for line in site_list: - if line[:4] == self.site: - site_lat, site_lon = line.split()[1:3] - - # format - self.site_lat = float(site_lat) - self.site_lon = float(site_lon) - - if print_msg == True: - print(f'\t{self.site_lat:f}, {self.site_lon:f}') + sites = read_GENERIC_site_list('GenericList.txt') + ind = sites['site'].tolist().index(self.site) + return sites['lat'][ind], sites['lon'][ind] - return self.site_lat, self.site_lon - - def read_displacement(self, start_date=None, end_date=None, print_msg=True, - display=False): - """Read GNSS displacement time-series (defined by start/end_date) - The position file for a GENERIC site must consist of seven columns: - - - date is in format YYYYMMDD or YYYYMMDD:HHMMSS - displacements are in meters - if standard deviations or uncertainties are not availabe, fill columns with zeros + def read_displacement(self, start_date=None, end_date=None, print_msg=True, display=False): + """Read GNSS displacement time-series (defined by start/end_date). 
Parameters: start/end_date - str, date in YYYYMMDD format Returns: dates - 1D np.ndarray of datetime.datetime object @@ -1261,25 +1178,10 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, # read dates, dis_e, dis_n, dis_u if print_msg == True: print('reading time and displacement in east/north/vertical direction') + fc = np.loadtxt(self.file) # parse dates - with open(self.file) as data_file: - lines = data_file.readlines() - self.dates = [] - for line in lines: - date = line.split()[0] - date_len = len(date) - - # format - if date_len == 8: - datetime = dt.datetime.strptime(date, '%Y%m%d') - elif date_len == 15: - datetime = dt.datetime.strptime(date, '%Y%m%d:%H%M%S') - else: - raise ValueError('Date/time format not recognized') - - self.dates.append(datetime) - self.dates = np.array(self.dates) + self.dates = np.array(ptime.date_list2vector(fc[:, 0])) # parse displacement data (self.dis_e, @@ -1287,7 +1189,7 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, self.dis_u, self.std_e, self.std_n, - self.std_u) = np.loadtxt(self.file, usecols=tuple(range(1,7))).T + self.std_u) = fc[:, tuple(range(1,7))].astype(np.float32).T # cut out the specified time range self.__crop_to_date_range__(start_date, end_date) @@ -1296,7 +1198,7 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, self.date_list = [date.strftime('%Y%m%d') for date in self.dates] # display if requested - if display == True: + if display: self.plot() return (self.dates, diff --git a/src/mintpy/utils/plot.py b/src/mintpy/utils/plot.py index 0ec1a710c..effdead1b 100644 --- a/src/mintpy/utils/plot.py +++ b/src/mintpy/utils/plot.py @@ -1216,6 +1216,7 @@ def plot_gnss(ax, SNWE, inps, metadata=dict(), print_msg=True): site_names=site_names, start_date=start_date, end_date=end_date, + source=inps.gnss_source, gnss_comp=inps.gnss_component, horz_az_angle=inps.horz_az_angle, print_msg=print_msg, diff --git a/src/mintpy/utils/ptime.py b/src/mintpy/utils/ptime.py index a667ee969..09b3196ee 100644 --- a/src/mintpy/utils/ptime.py +++ b/src/mintpy/utils/ptime.py @@ -61,6 +61,9 @@ def get_date_str_format(date_str): elif len(re.findall(r'\d{4}-\d{2}-\d{2}', date_str)) > 0: date_str_format = '%Y-%m-%d' + elif len(re.findall(r'\d{8}:\d{6}', date_str)) > 0: + date_str_format = '%Y%m%d:%H%M%S' + elif len(re.findall(r'\d{8}T\d{6}', date_str)) > 0: date_str_format = '%Y%m%dT%H%M%S' From f3d49227a5d8b3ce679908ba9c4b66718f3c88b9 Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Mon, 22 Apr 2024 00:25:38 +0800 Subject: [PATCH 31/44] bugfix for GNSS_ESESES.read_displacement() --- src/mintpy/objects/gnss.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index a952e3883..b3bab4e4e 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -992,8 +992,9 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, disp self.file = self.file[:-2] # read data file + # use the first 9 cols only, as some epoches miss 10-13 cols: CorrNE/NU/EU, Chi-Squared vprint('reading time and displacement in east/north/vertical direction') - fc = np.loadtxt(self.file, usecols=tuple(range(0,12))) + fc = np.loadtxt(self.file, usecols=tuple(range(0,9))) num_solution = fc.shape[0] # parse dates From 9a7ed8ba6441317520972827bb7cd443507b848e Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Mon, 22 Apr 2024 11:30:21 +0800 Subject: [PATCH 32/44] GNSS_ESESES: add dload_site() and move the 
url_prefix searching into it --- src/mintpy/objects/gnss.py | 32 ++++++++++++++++++++------------ 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index b3bab4e4e..bbc35ca8c 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -518,8 +518,6 @@ def dload_site(self, overwrite=False, total_tries=5, print_msg=True): Returns: self.file - str, path to the local data file """ vprint = print if print_msg else lambda *args, **kwargs: None - - # download if self.url and overwrite or not os.path.isfile(self.file): vprint(f"downloading site {self.site:s} from {self.source} to {self.file:s}") # retry on download fail @@ -535,16 +533,6 @@ def dload_site(self, overwrite=False, total_tries=5, print_msg=True): continue else: break - - # uncompress the downloaded *.z file [for ESESES only] - if self.source == 'ESESES' and self.file.endswith('.Z'): - with zipfile.ZipFile(self.file, 'r') as fz: - fz.extractall(self.data_dir) - - # update file name - self.file = self.file.strip('.Z') - vprint(f'... extracted to {self.file:s}') - return self.file @@ -936,11 +924,31 @@ def __init__(self, site: str, data_dir=None, version='IGS14', url_prefix=None): # get file self.file = os.path.join(self.data_dir, f'{self.site.lower():s}CleanTrend.neu.Z') + # get url + # moved to GNSS_ESESES.dload_site() to avoid searching url_prefix + # when downloading is not needed. + + + def dload_site(self, overwrite=False, total_tries=5, print_msg=True): + """Download GNSS data file. + """ # get url if not self.url_prefix: self.url_prefix = get_ESESES_url_prefix() self.url = os.path.join(self.url_prefix, os.path.basename(self.file)) + # call parent class to download + super().dload_site(overwrite=overwrite, print_msg=print_msg) + + # uncompress the downloaded *.z file [for ESESES only] + with zipfile.ZipFile(self.file, 'r') as fz: + fz.extractall(self.data_dir) + self.file = self.file.strip('.Z') # update file name + if print_msg: + print(f'... extracted to {self.file:s}') + + return self.file + def get_site_lat_lon(self, print_msg=False) -> (float, float): """Get station lat/lon based on processing source. From c4449524fc99955061743894895ffd84ee870556 Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Mon, 22 Apr 2024 11:57:53 +0800 Subject: [PATCH 33/44] pass testing of all sources except for generic --- src/mintpy/objects/gnss.py | 34 +++++++++++++--------------------- 1 file changed, 13 insertions(+), 21 deletions(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index bbc35ca8c..47fddd4dc 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -482,7 +482,7 @@ def __init__(self, site: str, data_dir=None, version='IGS14', source='UNR', url_ self.url = None # local file info - self.data_dir = self.__format_data_dir__(data_dir) + self.data_dir = self._format_data_dir_(data_dir) self.file = None # displacement data @@ -552,7 +552,7 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, disp raise NotImplementedError('read_displacement() is NOT implemented. Override with child class.') - def __format_data_dir__(self, data_dir) -> str: + def _format_data_dir_(self, data_dir) -> str: """Check formatting of GNSS data directory and ensure that directory exists. 
Parameters: data_dir - None or str, data directory with GNSS position files @@ -571,8 +571,9 @@ def __format_data_dir__(self, data_dir) -> str: return data_dir - def __crop_to_date_range__(self, start_date: str, end_date: str): - """Crop the time-series given the start/end_date in format YYYYMMDD. + def _crop_to_date_range_(self, start_date: str, end_date: str): + """Crop the time-series given the start/end_date in format YYYYMMDD, + and create date_list from dates. """ flag = np.ones(len(self.dates), dtype=bool) if start_date: @@ -590,6 +591,9 @@ def __crop_to_date_range__(self, start_date: str, end_date: str): self.std_n = self.std_n[flag] self.std_u = self.std_u[flag] + # create member var: date_list + self.date_list = [x.strftime('%Y%m%d') for x in self.dates] + ##################################### Utility Functions ################################### def plot(self, marker_size=2, marker_color='k', plot_error_bar=True): @@ -806,7 +810,7 @@ def get_los_velocity(self, geom_obj, start_date=None, end_date=None, ref_site=No else: self.velocity = np.nan if print_msg: - print(f'Velocity calculation failed for site {self.site:s}') + print(f'\nVelocity calculation failed for site {self.site:s}') return self.velocity, dis @@ -891,10 +895,7 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, disp self.std_u) = fc[:, (8,10,12,14,15,16)].astype(np.float32).T # cut out the specified time range - self.__crop_to_date_range__(start_date, end_date) - - # formulate date list - self.date_list = [x.strftime('%Y%m%d') for x in self.dates] + self._crop_to_date_range_(start_date, end_date) # display if requested if display: @@ -1019,10 +1020,7 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, disp self.std_u) = fc[:, 3:9].astype(np.float32).T / 1000 # cut out the specified time range - self.__crop_to_date_range__(start_date, end_date) - - # formulate date list - self.date_list = [date.strftime('%Y%m%d') for date in self.dates] + self._crop_to_date_range_(start_date, end_date) # display if requested if display: @@ -1113,10 +1111,7 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, disp self.std_u) = data[:, 1:7].astype(np.float32).T # cut out the specified time range - self.__crop_to_date_range__(start_date, end_date) - - # formulate date list - self.date_list = [date.strftime('%Y%m%d') for date in self.dates] + self._crop_to_date_range_(start_date, end_date) # display if requested if display == True: @@ -1201,10 +1196,7 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, disp self.std_u) = fc[:, tuple(range(1,7))].astype(np.float32).T # cut out the specified time range - self.__crop_to_date_range__(start_date, end_date) - - # formulate date list - self.date_list = [date.strftime('%Y%m%d') for date in self.dates] + self._crop_to_date_range_(start_date, end_date) # display if requested if display: From a1a7fa5965f367770a36097b5aa340d4d2513e2a Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Mon, 22 Apr 2024 19:44:42 +0800 Subject: [PATCH 34/44] cli/view: add example usage --- src/mintpy/cli/view.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/mintpy/cli/view.py b/src/mintpy/cli/view.py index b1264821a..5e691f5c8 100755 --- a/src/mintpy/cli/view.py +++ b/src/mintpy/cli/view.py @@ -34,9 +34,10 @@ view.py ifgramStack.h5 20171010_20171115 #all data related with 20171010_20171115 view.py ifgramStack.h5 'coherence*20171010*' #all coherence related with 20171010 - # GPS (for one 
subplot in geo-coordinates only) + # GNSS (for one subplot in geo-coordinates only) view.py geo_velocity_msk.h5 velocity --show-gnss --gnss-label #show locations of available GPS view.py geo_velocity_msk.h5 velocity --show-gnss --gnss-comp enu2los --ref-gnss GV01 + view.py geo_velocity_msk.h5 velocity --show-gnss --gnss-comp enu2los --ref-gnss GV01 --gnss-source ESESES view.py geo_timeseries_ERA5_ramp_demErr.h5 20180619 --ref-date 20141213 --show-gnss --gnss-comp enu2los --ref-gnss GV01 # Faults From f5b76dfd261b717b3c0f1ee86abc3cd9513d78ec Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Mon, 22 Apr 2024 19:52:48 +0800 Subject: [PATCH 35/44] docs: update names in module_hierarchy --- docs/api/module_hierarchy.md | 4 +- src/mintpy/objects/insar_vs_gps.py | 66 +++++++++++++++--------------- 2 files changed, 36 insertions(+), 34 deletions(-) diff --git a/docs/api/module_hierarchy.md b/docs/api/module_hierarchy.md index 8333d9b2f..582600bce 100644 --- a/docs/api/module_hierarchy.md +++ b/docs/api/module_hierarchy.md @@ -51,7 +51,7 @@ Hierarchy of sub-modules within MintPy. Level _N_ modules depends on level _N-1_ network (objects/{stack, sensor}, utils/{readfile}) ------------------ level 4 -------------------- /objects - gps (objects/{stack, coord}, utils/{ptime, utils1, readfile}) + gnss (objects/{stack, coord}, utils/{ptime, utils1, readfile}) stackDict (objects/{stack}, utils/{ptime, utils0, readfile}) /simulation simulation (objects/{stack}, utils/{ptime, network}, simulation/{fractal, decorrelation, defo_model}) @@ -65,5 +65,5 @@ Hierarchy of sub-modules within MintPy. Level _N_ modules depends on level _N-1_ isce_utils (constants, utils/{ptime, readfile, writefile, attribute, utils1}) ------------------ level 6 -------------------- /objects - insar_vs_gps (objects/{stack, giant}, utils/{readfile, gps, plot, utils}) + insar_vs_gnss (objects/{stack, giant}, utils/{readfile, gnss, plot, utils}) ``` diff --git a/src/mintpy/objects/insar_vs_gps.py b/src/mintpy/objects/insar_vs_gps.py index 848e3e868..f61542911 100644 --- a/src/mintpy/objects/insar_vs_gps.py +++ b/src/mintpy/objects/insar_vs_gps.py @@ -23,45 +23,47 @@ ############################## beginning of insar_vs_gnss class ############################## class insar_vs_gnss: """ Comparing InSAR time-series with GNSS time-series in LOS direction - Parameters: ts_file : str, time-series HDF5 file - geom_file : str, geometry HDF5 file - temp_coh_file : str, temporal coherence HDF5 file - site_names : list of str, GNSS site names - gnss_source : str, program or institution that processed the GNSS data - gnss_dir : str, directory of the local GNSS data files - ref_site : str, common reference site in space for InSAR and GNSS - start/end_date : str, date in YYYYMMDD format for the start/end date - min_ref_date : str, date in YYYYMMDD format for the earliest common - reference date between InSAR and GNSS - Returns: ds : dict, each element has the following components: + Parameters: ts_file - str, time-series HDF5 file + geom_file - str, geometry HDF5 file + temp_coh_file - str, temporal coherence HDF5 file + site_names - list of str, GNSS site names + gnss_source - str, program or institution that processed the GNSS data + gnss_dir - str, directory of the local GNSS data files + ref_site - str, common reference site in space for InSAR and GNSS + start/end_date - str, date in YYYYMMDD format for the start/end date + min_ref_date - str, date in YYYYMMDD format for the earliest common + reference date between InSAR and GNSS + Returns: ds - 
dict, each element has the following components: 'GV03': { - 'name': 'GV03', - 'lat': -0.7977926892712729, - 'lon': -91.13294444114553, - 'gnss_datetime': array([datetime.datetime(2014, 11, 1, 0, 0), - datetime.datetime(2014, 11, 2, 0, 0), - ..., - datetime.datetime(2018, 6, 25, 0, 0)], dtype=object), - 'gnss_dis': array([-2.63673663e-02, ..., 6.43612206e-01], dtype=float32), - 'gnss_std': array([0.00496152, ..., 0.00477411], dtype=float32), + 'name' : 'GV03', + 'lat' : -0.7977926892712729, + 'lon' : -91.13294444114553, + 'gnss_datetime' : array([datetime.datetime(2014, 11, 1, 0, 0), + datetime.datetime(2014, 11, 2, 0, 0), + ..., + datetime.datetime(2018, 6, 25, 0, 0), + ], dtype=object), + 'gnss_dis' : array([-2.63673663e-02, ..., 6.43612206e-01], dtype=float32), + 'gnss_std' : array([0.00496152, ..., 0.00477411], dtype=float32), 'reference_site': 'GV01', 'insar_datetime': array([datetime.datetime(2014, 12, 13, 0, 0), - datetime.datetime(2014, 12, 25, 0, 0), - ..., - datetime.datetime(2018, 6, 19, 0, 0)], dtype=object), + datetime.datetime(2014, 12, 25, 0, 0), + ..., + datetime.datetime(2018, 6, 19, 0, 0), + ], dtype=object), 'insar_dis_linear': array([-0.01476493, ..., 0.62273948]), - 'temp_coh': 0.9961861392598478, - 'gnss_std_mean': 0.004515478, - 'comm_dis_gnss': array([-0.02635017, ..., 0.61315614], dtype=float32), - 'comm_dis_insar': array([-0.01476493, ..., 0.60640174], dtype=float32), - 'r_square': 0.9993494518609801, - 'dis_rmse': 0.008023425326946351 + 'temp_coh' : 0.9961861392598478, + 'gnss_std_mean' : 0.004515478, + 'comm_dis_gnss' : array([-0.02635017, ..., 0.61315614], dtype=float32), + 'comm_dis_insar' : array([-0.01476493, ..., 0.60640174], dtype=float32), + 'r_square' : 0.9993494518609801, + 'dis_rmse' : 0.008023425326946351, } """ - def __init__(self, ts_file, geom_file, temp_coh_file, - site_names, gnss_source='UNR', gnss_dir='./GNSS', ref_site='GV01', - start_date=None, end_date=None, min_ref_date=None): + def __init__(self, ts_file, geom_file, temp_coh_file, site_names, gnss_source='UNR', + gnss_dir='./GNSS', ref_site='GV01', start_date=None, end_date=None, + min_ref_date=None): self.insar_file = ts_file self.geom_file = geom_file self.temp_coh_file = temp_coh_file From 829408ed3cad13eb5b8862efa40b3598f62ebcf2 Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Mon, 22 Apr 2024 19:53:17 +0800 Subject: [PATCH 36/44] rename insar_vs_gps to insar_vs_gnss --- src/mintpy/objects/{insar_vs_gps.py => insar_vs_gnss.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/mintpy/objects/{insar_vs_gps.py => insar_vs_gnss.py} (100%) diff --git a/src/mintpy/objects/insar_vs_gps.py b/src/mintpy/objects/insar_vs_gnss.py similarity index 100% rename from src/mintpy/objects/insar_vs_gps.py rename to src/mintpy/objects/insar_vs_gnss.py From 320265bce8d0d4b97db42f578f0724d6865d2983 Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Mon, 22 Apr 2024 19:59:36 +0800 Subject: [PATCH 37/44] utils.plot.plot_insar_vs_gnss_scatter(): update default csv_file name --- src/mintpy/utils/plot.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/mintpy/utils/plot.py b/src/mintpy/utils/plot.py index effdead1b..816ec9211 100644 --- a/src/mintpy/utils/plot.py +++ b/src/mintpy/utils/plot.py @@ -1270,8 +1270,9 @@ def plot_gnss(ax, SNWE, inps, metadata=dict(), print_msg=True): return ax -def plot_insar_vs_gnss_scatter(vel_file, csv_file='gnss_enu2los.csv', msk_file=None, ref_gnss_site=None, cutoff=5, - fig_size=(4, 4), xname='InSAR', vlim=None, ex_gnss_sites=None, 
display=True): +def plot_insar_vs_gnss_scatter(vel_file, csv_file='gnss_enu2los_UNR.csv', msk_file=None, ref_gnss_site=None, + cutoff=5, fig_size=(4, 4), xname='InSAR', vlim=None, ex_gnss_sites=None, + display=True): """Scatter plot to compare the velocities between SAR/InSAR and GNSS. Parameters: vel_file - str, path of InSAR LOS velocity HDF5 file. @@ -1289,7 +1290,7 @@ def plot_insar_vs_gnss_scatter(vel_file, csv_file='gnss_enu2los.csv', msk_file=N gnss_obs - 1D np.ndarray in float32, GNSS velocity in cm/yr Example: from mintpy.utils import plot as pp - csv_file = os.path.join(work_dir, 'geo/gnss_enu2los.csv') + csv_file = os.path.join(work_dir, 'geo/gnss_enu2los_UNR.csv') vel_file = os.path.join(work_dir, 'geo/geo_velocity.h5') msk_file = os.path.join(work_dir, 'geo/geo_maskTempCoh.h5') pp.plot_insar_vs_gnss_scatter( From b7be5cb9dce7639b355d997cdfb26e6d66b67bd9 Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Mon, 22 Apr 2024 20:05:19 +0800 Subject: [PATCH 38/44] move the rarely used zipfile import to the inside of the func --- src/mintpy/objects/gnss.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index 47fddd4dc..e6fe7289b 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -12,7 +12,6 @@ import datetime as dt import glob import os -import zipfile from urllib.request import urlopen, urlretrieve import matplotlib.pyplot as plt @@ -933,6 +932,8 @@ def __init__(self, site: str, data_dir=None, version='IGS14', url_prefix=None): def dload_site(self, overwrite=False, total_tries=5, print_msg=True): """Download GNSS data file. """ + from zipfile import ZipFile + # get url if not self.url_prefix: self.url_prefix = get_ESESES_url_prefix() @@ -942,7 +943,7 @@ def dload_site(self, overwrite=False, total_tries=5, print_msg=True): super().dload_site(overwrite=overwrite, print_msg=print_msg) # uncompress the downloaded *.z file [for ESESES only] - with zipfile.ZipFile(self.file, 'r') as fz: + with ZipFile(self.file, 'r') as fz: fz.extractall(self.data_dir) self.file = self.file.strip('.Z') # update file name if print_msg: From 51f86f0a51c74d268466bb0985ea9e4573b01234 Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Mon, 22 Apr 2024 20:10:57 +0800 Subject: [PATCH 39/44] simplify and rm _format_data_dir_() --- src/mintpy/objects/gnss.py | 38 ++++++++++++-------------------------- 1 file changed, 12 insertions(+), 26 deletions(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index e6fe7289b..65fc1392d 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -480,9 +480,14 @@ def __init__(self, site: str, data_dir=None, version='IGS14', source='UNR', url_ self.url_prefix = url_prefix self.url = None - # local file info - self.data_dir = self._format_data_dir_(data_dir) + # local file/dir info self.file = None + self.data_dir = data_dir if data_dir else os.path.abspath(f'GNSS-{source.upper()}') + + # ensure local dir exists + if not os.path.exists(self.data_dir): + print('create directory:', self.data_dir) + os.mkdir(self.data_dir) # displacement data self.dates = None @@ -551,26 +556,7 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, disp raise NotImplementedError('read_displacement() is NOT implemented. Override with child class.') - def _format_data_dir_(self, data_dir) -> str: - """Check formatting of GNSS data directory and ensure that directory exists. 
- - Parameters: data_dir - None or str, data directory with GNSS position files - Returns: data_dir - str, full path to data directory - """ - # format data directory name based on processing source - if data_dir is None: - data_dir = f'GNSS-{self.source.upper():s}' - data_dir = os.path.abspath(data_dir) - - # ensure directory exists - if not os.path.exists(data_dir): - print('create directory:', data_dir) - os.mkdir(data_dir) - - return data_dir - - - def _crop_to_date_range_(self, start_date: str, end_date: str): + def _crop_to_date_range(self, start_date: str, end_date: str): """Crop the time-series given the start/end_date in format YYYYMMDD, and create date_list from dates. """ @@ -894,7 +880,7 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, disp self.std_u) = fc[:, (8,10,12,14,15,16)].astype(np.float32).T # cut out the specified time range - self._crop_to_date_range_(start_date, end_date) + self._crop_to_date_range(start_date, end_date) # display if requested if display: @@ -1021,7 +1007,7 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, disp self.std_u) = fc[:, 3:9].astype(np.float32).T / 1000 # cut out the specified time range - self._crop_to_date_range_(start_date, end_date) + self._crop_to_date_range(start_date, end_date) # display if requested if display: @@ -1112,7 +1098,7 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, disp self.std_u) = data[:, 1:7].astype(np.float32).T # cut out the specified time range - self._crop_to_date_range_(start_date, end_date) + self._crop_to_date_range(start_date, end_date) # display if requested if display == True: @@ -1197,7 +1183,7 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, disp self.std_u) = fc[:, tuple(range(1,7))].astype(np.float32).T # cut out the specified time range - self._crop_to_date_range_(start_date, end_date) + self._crop_to_date_range(start_date, end_date) # display if requested if display: From 9ed3d16b1274084875fa709320cd45606c58d016 Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Mon, 22 Apr 2024 20:34:33 +0800 Subject: [PATCH 40/44] gnss: add one line description --- src/mintpy/objects/gnss.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index 65fc1392d..eb92008fa 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -1,3 +1,4 @@ +"""Class / utilities for GNSS download / operations.""" ############################################################ # Program is part of MintPy # # Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi # From ac7242cb473e42be14f0aaedf78e4a2d3fffe1eb Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Mon, 22 Apr 2024 20:38:48 +0800 Subject: [PATCH 41/44] Update gnss.py --- src/mintpy/objects/gnss.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index eb92008fa..7e0357603 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -1022,7 +1022,7 @@ def read_displacement(self, start_date=None, end_date=None, print_msg=True, disp class GNSS_JPL_SIDESHOW(GNSS): """GNSS class for daily solutions processed by JPL-SIDESHOW. 
- Website: + Website: https://sideshow.jpl.nasa.gov/pub/ """ def __init__(self, site: str, data_dir=None, version='IGS14', url_prefix=None): super().__init__( From cb3b326afc7e2fde0e801e1235065eb4a34013a4 Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Mon, 22 Apr 2024 20:59:28 +0800 Subject: [PATCH 42/44] GNSS_UNR: add dload_site() to download the plot png file --- src/mintpy/objects/gnss.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index 7e0357603..b08d52989 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -832,6 +832,31 @@ def __init__(self, site: str, data_dir=None, version='IGS14', url_prefix=None): self.url = os.path.join(self.url_prefix, os.path.basename(self.file)) + def dload_site(self, overwrite=False, total_tries=5, print_msg=True): + """Download GNSS site data file. + + Parameters: overwrite - bool, overwrite existing data file + total_tries - int, number of tries to download if failed + print_msg - bool, verbose print out msg + Returns: self.file - str, path to the local data file + """ + # download data file via the parent class member function + super().dload_site(overwrite=overwrite, print_msg=print_msg) + + # download time-series plot file + # example link: http://geodesy.unr.edu/tsplots/IGS08/TimeSeries/CAMO.png + # http://geodesy.unr.edu/tsplots/IGS14/IGS14/TimeSeries/CASU.png + plot_file = os.path.join(self.data_dir, f'pic/{self.site}.png') + if self.version == 'IGS14': + url_prefix = 'http://geodesy.unr.edu/tsplots/IGS14/IGS14/TimeSeries' + elif self.version == 'IGS08': + url_prefix = 'http://geodesy.unr.edu/tsplots/IGS08/TimeSeries' + plot_file_url = os.path.join(url_prefix, f'{self.site}.png') + urlretrieve(plot_file_url, plot_file) + + return self.file + + def get_site_lat_lon(self, print_msg=False) -> (float, float): """Get station lat/lon from the displacement file. 
From 26dd11ac0bb2bf349805dc75b738a26a35db40c0 Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Mon, 22 Apr 2024 21:09:10 +0800 Subject: [PATCH 43/44] Update gnss.py --- src/mintpy/objects/gnss.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index b08d52989..d8967eee3 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -847,11 +847,19 @@ def dload_site(self, overwrite=False, total_tries=5, print_msg=True): # example link: http://geodesy.unr.edu/tsplots/IGS08/TimeSeries/CAMO.png # http://geodesy.unr.edu/tsplots/IGS14/IGS14/TimeSeries/CASU.png plot_file = os.path.join(self.data_dir, f'pic/{self.site}.png') - if self.version == 'IGS14': - url_prefix = 'http://geodesy.unr.edu/tsplots/IGS14/IGS14/TimeSeries' - elif self.version == 'IGS08': - url_prefix = 'http://geodesy.unr.edu/tsplots/IGS08/TimeSeries' + + # ensure local plot directory exists + if not os.path.exists(os.path.dirname(plot_file)): + os.makedirs(os.path.dirname(plot_file), exist_ok=True) + + # get plot file url + url_prefix = { + 'IGS08' : 'http://geodesy.unr.edu/tsplots/IGS08/TimeSeries', + 'IGS14' : 'http://geodesy.unr.edu/tsplots/IGS14/IGS14/TimeSeries', + }[self.version] plot_file_url = os.path.join(url_prefix, f'{self.site}.png') + + # download urlretrieve(plot_file_url, plot_file) return self.file From 63df14433fba4281b00e6cfa48acabb26a0a4d79 Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Tue, 23 Apr 2024 09:28:38 +0800 Subject: [PATCH 44/44] displacement_enu2los(): rm plotting code + displacement_enu2los(): remove the plotting code + move the matplotlib import into the sub-function, to speedup the warmup proc --- src/mintpy/objects/gnss.py | 25 +++++-------------------- 1 file changed, 5 insertions(+), 20 deletions(-) diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py index d8967eee3..6d26b28e8 100644 --- a/src/mintpy/objects/gnss.py +++ b/src/mintpy/objects/gnss.py @@ -15,7 +15,6 @@ import os from urllib.request import urlopen, urlretrieve -import matplotlib.pyplot as plt import numpy as np from mintpy.objects.coord import coordinate @@ -585,6 +584,8 @@ def _crop_to_date_range(self, start_date: str, end_date: str): def plot(self, marker_size=2, marker_color='k', plot_error_bar=True): """Plot the displacement time-series. """ + import matplotlib.pyplot as plt + if self.dis_e is None: self.open() @@ -616,12 +617,12 @@ def plot(self, marker_size=2, marker_color='k', plot_error_bar=True): def displacement_enu2los(self, inc_angle:float, az_angle:float, gnss_comp='enu2los', - horz_az_angle=-90., display=False, model=None): + horz_az_angle=-90.): """Convert displacement in ENU to LOS direction. 
Parameters: inc_angle - float, LOS incidence angle in degree - az_angle - float, LOS aziuth angle in degree - from the north, defined as positive in clock-wise direction + az_angle - float, LOS aziuth angle in degree from the north, + defined as positive in clock-wise direction gnss_comp - str, GNSS components used to convert to LOS direction horz_az_angle - float, fault azimuth angle used to convert horizontal to fault-parallel measured from the north with anti-clockwise as positive @@ -649,22 +650,6 @@ def displacement_enu2los(self, inc_angle:float, az_angle:float, gnss_comp='enu2l + (self.std_n * unit_vec[1])**2 + (self.std_u * unit_vec[2])**2 ) ** 0.5 - # display if requested - if display: - # instantiate figure and axes - _, ax = plt.subplots(sharex=True) - - # plot LOS displacement - ax.scatter(self.dates, self.dis_los, s=2**2, c='k', label='LOS') - - # plot fit if model specified - if model is not None: - # specific time_func model - A = time_func.get_design_matrix4time_func(self.date_list, model=model) - estm_dis = np.dot(np.linalg.pinv(A), self.dis_los) - ax.plot(self.dates, estm_dis, 'b', label='model') - ax.legend() - return self.dis_los, self.std_los
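
For reference, a minimal usage sketch of the refactored GNSS classes introduced across these patches. It is not part of the patch series itself: it assumes the module imports as mintpy.objects.gnss and that the method signatures land as shown in the hunks above (GNSS_UNR(site, data_dir, version, url_prefix), dload_site(), get_site_lat_lon(), read_displacement(), displacement_enu2los()); the site code 'CASU', the date range, and the incidence/azimuth angles are illustrative placeholders, not values taken from the patches.

# Sketch only, assuming the API shown in the hunks above.
# The other child classes (GNSS_ESESES, GNSS_JPL_SIDESHOW, GNSS_GENERIC)
# follow the same pattern, differing in data source and local file naming.
from mintpy.objects.gnss import GNSS_UNR   # assumed import path for src/mintpy/objects/gnss.py

gnss = GNSS_UNR(site='CASU', data_dir='GNSS-UNR', version='IGS14')  # 'CASU' is an example site code
gnss.dload_site(print_msg=True)       # fetch <SITE>.tenv3 (plus the UNR time-series png) if not cached
lat, lon = gnss.get_site_lat_lon()

# read the east/north/up displacement time series within an (optional) date range
dates, dis_e, dis_n, dis_u, std_e, std_n, std_u = gnss.read_displacement(
    start_date='20150101', end_date='20200101')

# project the ENU displacement into the radar line-of-sight direction;
# the incidence/azimuth angles below are placeholder values
dis_los, std_los = gnss.displacement_enu2los(
    inc_angle=34.0, az_angle=-102.0, gnss_comp='enu2los')

In MintPy workflows the same conversion is normally driven through the higher-level entry points updated earlier in this series (e.g. view.py --gnss-source and utils.plot.plot_gnss(), which now forward the gnss_source option) rather than called directly.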