Skip to content

Commit

Permalink
Merge pull request #60 from ASFHyP3/develop
Browse files Browse the repository at this point in the history
Release v0.4.1
  • Loading branch information
AndrewPlayer3 authored Dec 17, 2024
2 parents 93dcdd7 + 042a6d8 commit dfa124c
Show file tree
Hide file tree
Showing 16 changed files with 79 additions and 79 deletions.
6 changes: 2 additions & 4 deletions .github/workflows/static-analysis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,6 @@ jobs:
# Docs: https://github.com/ASFHyP3/actions
uses: ASFHyP3/actions/.github/workflows/[email protected]

call-flake8-workflow:
call-ruff-workflow:
# Docs: https://github.com/ASFHyP3/actions
uses: ASFHyP3/actions/.github/workflows/[email protected]
with:
local_package_names: opera_disp_tms
uses: ASFHyP3/actions/.github/workflows/[email protected]
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.4.1]
### Changed
* The [`static-analysis`](.github/workflows/static-analysis.yml) GitHub Actions workflow now uses `ruff` rather than `flake8` for linting.

## [0.4.0]
### Added
* Ability to update the reference date for OPERA DISP granule xarray objects
Expand Down
5 changes: 1 addition & 4 deletions environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,7 @@ dependencies:
- python>=3.10
- pip
# For packaging, and testing
- flake8
- flake8-import-order
- flake8-blind-except
- flake8-builtins
- ruff
- setuptools
- setuptools_scm
- wheel
Expand Down
31 changes: 27 additions & 4 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -38,10 +38,6 @@ dynamic = ["version", "readme"]

[project.optional-dependencies]
develop = [
"flake8",
"flake8-import-order",
"flake8-blind-except",
"flake8-builtins",
"pytest",
"pytest-cov",
"pytest-console-scripts",
Expand Down Expand Up @@ -72,3 +68,30 @@ readme = {file = ["README.md"], content-type = "text/markdown"}
where = ["src"]

[tool.setuptools_scm]

[tool.ruff]
line-length = 120
# The directories to consider when resolving first- vs. third-party imports.
# See: https://docs.astral.sh/ruff/settings/#src
src = ["src", "tests"]

[tool.ruff.format]
indent-style = "space"
quote-style = "single"

[tool.ruff.lint]
extend-select = [
"I", # isort: https://docs.astral.sh/ruff/rules/#isort-i
# TODO: Uncomment the following extensions and address their warnings:
# "UP", # pyupgrade: https://docs.astral.sh/ruff/rules/#pyupgrade-up
# "D", # pydocstyle: https://docs.astral.sh/ruff/rules/#pydocstyle-d
# "ANN", # annotations: https://docs.astral.sh/ruff/rules/#flake8-annotations-ann
# "PTH", # use-pathlib-pth: https://docs.astral.sh/ruff/rules/#flake8-use-pathlib-pth
]

[tool.ruff.lint.pydocstyle]
convention = "google"

[tool.ruff.lint.isort]
case-sensitive = true
lines-after-imports = 2
2 changes: 1 addition & 1 deletion scripts/make_cal_meta_tiles.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

def make_cal_meta_tiles(orbit_direction):
script_dir = Path(__file__).parent
with open(script_dir / 'cal_corners.txt', 'r') as f:
with open(script_dir / 'cal_corners.txt') as f:
corners = [[int(val) for val in corner.strip().split(' ')] for corner in f.readlines()]

for corner in corners:
Expand Down
11 changes: 6 additions & 5 deletions src/opera_disp_tms/__main__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,4 @@
"""
OPERA-DISP Tile Map Service Generator
"""
"""OPERA-DISP Tile Map Service Generator"""

import argparse
from datetime import datetime
Expand Down Expand Up @@ -76,8 +74,11 @@ def main():
'tile_type', type=str, choices=['displacement', 'secant_velocity'], help='Data value to visualize'
)
parser.add_argument(
'bbox', type=str.split, nargs='+', action=Bbox,
help='Integer bounds in EPSG:4326, formatted like [min lon, min lat, max lon, max lat]'
'bbox',
type=str.split,
nargs='+',
action=Bbox,
help='Integer bounds in EPSG:4326, formatted like [min lon, min lat, max lon, max lat]',
)
parser.add_argument('direction', type=str, choices=['ascending', 'descending'], help='Direction of the orbit pass')
parser.add_argument(
Expand Down
5 changes: 3 additions & 2 deletions src/opera_disp_tms/create_tile_map.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@

from osgeo import gdal, gdalconst, osr


gdal.UseExceptions()


Expand Down Expand Up @@ -79,8 +80,8 @@ def create_tile_map(output_folder: str, input_rasters: list[str], scale_range: l
def main():
parser = argparse.ArgumentParser(
description='Generate a directory with small .png tiles from a list of rasters in a common projection, '
'following the OSGeo Tile Map Service Specification, using gdal2tiles: '
'https://gdal.org/en/latest/programs/gdal2tiles.html'
'following the OSGeo Tile Map Service Specification, using gdal2tiles: '
'https://gdal.org/en/latest/programs/gdal2tiles.html'
)
parser.add_argument('output_folder', type=str, help='Path of the output directory to create')
parser.add_argument('input_rasters', type=str, nargs='+', help='List of gdal-compatible raster paths to mosaic')
Expand Down
2 changes: 1 addition & 1 deletion src/opera_disp_tms/frames.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
import json
import sqlite3
from collections.abc import Iterable
from dataclasses import dataclass
from pathlib import Path
from typing import Iterable

from shapely import from_wkt
from shapely.geometry import Polygon, box
Expand Down
2 changes: 1 addition & 1 deletion src/opera_disp_tms/generate_metadata_tile.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import argparse
import warnings
from collections.abc import Iterable
from pathlib import Path
from typing import Iterable

import numpy as np
import pyproj
Expand Down
2 changes: 1 addition & 1 deletion src/opera_disp_tms/generate_sw_disp_tile.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
import argparse
import warnings
from collections.abc import Iterable
from dataclasses import dataclass
from datetime import datetime, timedelta
from pathlib import Path
from typing import Iterable

import numpy as np
import xarray as xr
Expand Down
4 changes: 2 additions & 2 deletions src/opera_disp_tms/generate_sw_vel_tile.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
import argparse
from collections.abc import Iterable
from datetime import datetime
from pathlib import Path
from typing import Iterable, List
from typing import List

import numpy as np
import xarray as xr
Expand Down Expand Up @@ -89,7 +90,6 @@ def add_velocity_data_to_array(
Args:
granules: A list of granule objects
frame: The frame metadata
geotransform: The geotransform of the frame
frame_map_array: The frame map as a numpy array
out_array: The array to add the velocity data to
Expand Down
21 changes: 12 additions & 9 deletions src/opera_disp_tms/search.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,19 +156,22 @@ def get_cmr_metadata(
temporal_range: The temporal range to search for granules in.
cmr_endpoint: The endpoint to query for granules.
"""
page_size = 2000
cmr_parameters = {
'provider_short_name': 'ASF',
'short_name': 'OPERA_L3_DISP-S1_PROVISIONAL_V0',
'attribute[]': [f'int,FRAME_ID,{frame_id}', f'float,PRODUCT_VERSION,{version}'],
'temporal[]': ','.join([date.strftime(CMR_DATE_FORMAT) for date in temporal_range]),
'page_size': page_size,
'temporal': ','.join([date.strftime(CMR_DATE_FORMAT) for date in temporal_range]),
'page_size': 2000,
}
response = requests.post(cmr_endpoint, data=cmr_parameters)
response.raise_for_status()
items = response.json()['items']
if len(items) == page_size:
raise NotImplementedError(f'Got full page of {page_size} items, please implement pagination')
headers = {}
items = []

while True:
response = requests.post(cmr_endpoint, data=cmr_parameters, headers=headers)
response.raise_for_status()
items.extend(response.json()['items'])
if 'CMR-Search-After' not in response.headers:
break
headers['CMR-Search-After'] = response.headers['CMR-Search-After']
return items


Expand Down
5 changes: 3 additions & 2 deletions src/opera_disp_tms/utils.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
import os
from collections.abc import Iterable
from datetime import datetime, timedelta
from mimetypes import guess_type
from pathlib import Path
from typing import Iterable, Union
from typing import Union

import boto3
import requests
Expand Down Expand Up @@ -94,7 +95,7 @@ def transform_point(x: float, y: float, source_wkt: str, target_wkt: str) -> tup


def create_buffered_bbox(
geotransform: Iterable[int], shape: tuple[int, ...], buffer_size: int
geotransform: Iterable[int], shape: tuple[int, ...], buffer_size: int
) -> tuple[int, int, int, int]:
"""Create a buffered bounding box from a geotransform and shape
Expand Down
4 changes: 2 additions & 2 deletions tests/test_create_tile_map.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ def create_test_geotiff(output_file, geotransform, shape, epsg):
def test_get_tile_extent(tmp_path):
epsg = 3857
minx, miny, maxx, maxy = [-113, 33, -112, 34]
geotransform = [minx, 0.1, 0, maxy, 0, -0.01]
geotransform = [minx, 0.1, 0, maxy, 0, -0.01]
shape = (100, 10)
frame_tile = tmp_path / 'test_tile.tif'
create_test_geotiff(str(frame_tile), geotransform, shape, epsg)
Expand All @@ -30,4 +30,4 @@ def test_get_tile_extent(tmp_path):
with open(f'{tmp_path}/extent.json') as f:
extent_json = json.load(f)
print(extent_json)
assert extent_json == {"extent": [minx, miny, maxx, maxy], "EPSG": epsg}
assert extent_json == {'extent': [minx, miny, maxx, maxy], 'EPSG': epsg}
2 changes: 1 addition & 1 deletion tests/test_generate_frame_tile.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ def test_burn_frame(tmp_path):
ds = None

golden = np.zeros(data.shape)
golden[int(data.shape[0] / 2):, :] = 9999
golden[int(data.shape[0] / 2) :, :] = 9999
assert np.all(data == golden)

frame2 = Frame(10000, 1, 1, 'ASCENDING', 1, 1, box(1, 1, 1.5, 2))
Expand Down
52 changes: 12 additions & 40 deletions tests/test_search.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,36 +5,18 @@

def test_from_umm():
umm = {
"meta": {
"native-id": "mock-scene-name"
},
"umm": {
"TemporalExtent": {
"RangeDateTime": {
"BeginningDateTime": "2019-10-06T00:26:42Z",
"EndingDateTime": "2020-09-30T00:26:48Z"
}
'meta': {'native-id': 'mock-scene-name'},
'umm': {
'TemporalExtent': {
'RangeDateTime': {'BeginningDateTime': '2019-10-06T00:26:42Z', 'EndingDateTime': '2020-09-30T00:26:48Z'}
},
"AdditionalAttributes": [
{
"Name": "FRAME_ID",
"Values": ["8882"]
}
],
"RelatedUrls": [
{
"URL": "mock-url",
"Type": "GET DATA"
},
{
"URL": "mock-s3-uri",
"Type": "GET DATA VIA DIRECT ACCESS"
}
'AdditionalAttributes': [{'Name': 'FRAME_ID', 'Values': ['8882']}],
'RelatedUrls': [
{'URL': 'mock-url', 'Type': 'GET DATA'},
{'URL': 'mock-s3-uri', 'Type': 'GET DATA VIA DIRECT ACCESS'},
],
"DataGranule": {
"ProductionDateTime": "2024-10-29T21:36:46Z"
}
}
'DataGranule': {'ProductionDateTime': '2024-10-29T21:36:46Z'},
},
}
assert Granule.from_umm(umm) == Granule(
scene_name='mock-scene-name',
Expand All @@ -47,12 +29,7 @@ def test_from_umm():
creation_date=datetime(2024, 10, 29, 21, 36, 46),
)

umm['umm']['AdditionalAttributes'] = [
{
"Name": "FRAME_ID",
"Values": ["9154"]
}
]
umm['umm']['AdditionalAttributes'] = [{'Name': 'FRAME_ID', 'Values': ['9154']}]
assert Granule.from_umm(umm) == Granule(
scene_name='mock-scene-name',
frame_id=9154,
Expand All @@ -64,12 +41,7 @@ def test_from_umm():
creation_date=datetime(2024, 10, 29, 21, 36, 46),
)

umm['umm']['AdditionalAttributes'] = [
{
"Name": "FRAME_ID",
"Values": ["3325"]
}
]
umm['umm']['AdditionalAttributes'] = [{'Name': 'FRAME_ID', 'Values': ['3325']}]
assert Granule.from_umm(umm) == Granule(
scene_name='mock-scene-name',
frame_id=3325,
Expand Down

0 comments on commit dfa124c

Please sign in to comment.