diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8742864c59..ea685ddb7b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -2,7 +2,7 @@ name: CI # https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency # https://docs.github.com/en/developers/webhooks-and-events/events/github-event-types#pullrequestevent concurrency: - group: ${{ github.workflow }}-${{ github.event.number }}-${{ github.event.type }} + group: ${{ github.workflow }}-${{ github.event.number || github.event.ref }}-${{ github.event.type }} cancel-in-progress: true on: [push, pull_request] @@ -18,10 +18,10 @@ jobs: fail-fast: true matrix: os: ["windows-latest", "ubuntu-latest", "macos-latest"] - python-version: ["3.9", "3.10", "3.11"] + python-version: ["3.9", "3.11", "3.12"] experimental: [false] include: - - python-version: "3.11" + - python-version: "3.12" os: "ubuntu-latest" experimental: true @@ -51,7 +51,7 @@ jobs: CONDA_PREFIX=$(python -c "import sys; print(sys.prefix)") echo "CONDA_PREFIX=$CONDA_PREFIX" >> $GITHUB_ENV - - uses: actions/cache@v3 + - uses: actions/cache@v4 with: path: ${{ env.CONDA_PREFIX }} key: ${{ matrix.os }}-${{matrix.python-version}}-conda-${{ hashFiles('continuous_integration/environment.yaml') }}-${{ env.DATE }}-${{matrix.experimental}}-${{ env.CACHE_NUMBER }} @@ -67,8 +67,11 @@ jobs: # Install pykdtree with --no-build-isolation so it builds with numpy 2.0 # We must get LD_PRELOAD for stdlibc++ or else the manylinux wheels # may break the conda-forge libraries trying to use newer glibc versions + # NOTE: Many of the packages removed and then reinstalled below are to avoid + # compatibility issues with numpy 2. When conda-forge has numpy 2 available + # this shouldn't be needed. 
run: | - python -m pip install versioneer extension-helpers setuptools-scm configobj pkgconfig + python -m pip install versioneer extension-helpers setuptools-scm configobj pkgconfig hatchling hatch-vcs python -m pip install \ --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \ --trusted-host pypi.anaconda.org \ @@ -77,18 +80,19 @@ jobs: numpy \ pandas \ scipy - mamba remove --force-remove -y pykdtree pyresample trollimage pyhdf netcdf4 h5py + conda remove --force-remove -y pykdtree pyresample python-geotiepoints pyhdf netcdf4 h5py cftime astropy pyerfa python -m pip install --upgrade --no-deps --pre --no-build-isolation \ + pyerfa \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ git+https://github.com/pytroll/trollimage \ + git+https://github.com/pytroll/python-geotiepoints \ git+https://github.com/fhs/pyhdf \ git+https://github.com/h5py/h5py \ git+https://github.com/h5netcdf/h5netcdf \ git+https://github.com/Unidata/netcdf4-python \ git+https://github.com/dask/dask \ git+https://github.com/dask/distributed \ - git+https://github.com/zarr-developers/zarr \ git+https://github.com/Unidata/cftime \ git+https://github.com/rasterio/rasterio \ git+https://github.com/pydata/bottleneck \ @@ -110,7 +114,7 @@ jobs: pytest --cov=satpy satpy/tests --cov-report=xml --cov-report= - name: Upload unittest coverage to Codecov - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v4 with: flags: unittests file: ./coverage.xml @@ -131,7 +135,7 @@ jobs: coverage xml - name: Upload behaviour test coverage to Codecov - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v4 with: flags: behaviourtests file: ./coverage.xml diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 9fd1d86b5a..797eab716b 100644 --- a/.github/workflows/deploy-sdist.yaml 
+++ b/.github/workflows/deploy-sdist.yaml @@ -23,7 +23,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.8.11 + uses: pypa/gh-action-pypi-publish@v1.8.14 with: user: __token__ password: ${{ secrets.pypi_password }} diff --git a/.gitignore b/.gitignore index 8990fa1d46..15779c950f 100644 --- a/.gitignore +++ b/.gitignore @@ -75,3 +75,10 @@ doc/source/_build/* satpy/version.py doc/source/api/*.rst doc/source/reader_table.rst +doc/source/area_def_list.rst + +# lock files +*.lock + +# rye files +.python-version diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 37c458982a..7fdc0a6b44 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,23 +3,23 @@ fail_fast: false repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.1.9' + rev: 'v0.4.7' hooks: - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v4.6.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer - id: check-yaml args: [--unsafe] - repo: https://github.com/PyCQA/bandit - rev: '1.7.6' # Update me! + rev: '1.7.8' # Update me! 
hooks: - id: bandit args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.8.0' # Use the sha / tag you want to point at + rev: 'v1.10.0' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: diff --git a/AUTHORS.md b/AUTHORS.md index 4ed7b8a0d8..53e26af430 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -7,6 +7,7 @@ The following people have made contributions to this project: +- [Youva Aoun (YouvaEUMex)](https://github.com/YouvaEUMex) - [Trygve Aspenes (TAlonglong)](https://github.com/TAlonglong) - [Talfan Barnie (TalfanBarnie)](https://github.com/TalfanBarnie) - [Jonathan Beavers (jon4than)](https://github.com/jon4than) @@ -26,6 +27,7 @@ The following people have made contributions to this project: - [Adam Dybbroe (adybbroe)](https://github.com/adybbroe) - [Ulrik Egede (egede)](https://github.com/egede) - [Joleen Feltz (joleenf)](https://github.com/joleenf) +- [Florian Fichtner (fwfichtner)](https://github.com/fwfichtner) - [Stephan Finkensieper (sfinkens)](https://github.com/sfinkens) - Deutscher Wetterdienst - [Gionata Ghiggi (ghiggi)](https://github.com/ghiggi) - [Andrea Grillini (AppLEaDaY)](https://github.com/AppLEaDaY) @@ -37,6 +39,7 @@ The following people have made contributions to this project: - [David Hoese (djhoese)](https://github.com/djhoese) - [Marc Honnorat (honnorat)](https://github.com/honnorat) - [Lloyd Hughes (system123)](https://github.com/system123) +- [Sara Hörnquist (shornqui)](https://github.com/shornqui) - [Mikhail Itkin (mitkin)](https://github.com/mitkin) - [Tommy Jasmin (tommyjasmin)](https://github.com/tommyjasmin) - [Jactry Zeng](https://github.com/jactry) @@ -45,6 +48,7 @@ The following people have made contributions to this project: - [Janne Kotro (jkotro)](https://github.com/jkotro) - [Ralph Kuehn 
(ralphk11)](https://github.com/ralphk11) - [Panu Lahtinen (pnuu)](https://github.com/pnuu) +- [Clement Laplace (ClementLaplace)](https://github.com/ClementLaplace) - [Jussi Leinonen (jleinonen)](https://github.com/jleinonen) - meteoswiss - [Thomas Leppelt (m4sth0)](https://github.com/m4sth0) - Deutscher Wetterdienst - [Lu Liu (yukaribbba)](https://github.com/yukaribbba) @@ -53,6 +57,7 @@ The following people have made contributions to this project: - [Luca Merucci (lmeru)](https://github.com/lmeru) - [Lucas Meyer (LTMeyer)](https://github.com/LTMeyer) - [Zifeng Mo (Isotr0py)](https://github.com/Isotr0py) +- [David Navia (dnaviap)](https://github.com/dnaviap) - [Ondrej Nedelcev (nedelceo)](https://github.com/nedelceo) - [Oana Nicola](https://github.com/) - [Esben S. Nielsen (storpipfugl)](https://github.com/storpipfugl) @@ -74,12 +79,14 @@ The following people have made contributions to this project: - [Ronald Scheirer](https://github.com/) - [Hauke Schulz (observingClouds)](https://github.com/observingClouds) - [Jakub Seidl (seidlj)](https://github.com/seidlj) +- [Will Sharpe (wjsharpe)](https://github.com/wjsharpe) - [Eysteinn Sigurðsson (eysteinn)](https://github.com/eysteinn) - [Jean-Luc Shaw (jeanlucshaw)](https://github.com/jeanlucshaw) - [Dario Stelitano (bornagain1981)](https://github.com/bornagain1981) - [Johan Strandgren (strandgren)](https://github.com/strandgren) - [Matias Takala (elfsprite)](https://github.com/elfsprite) - [Taiga Tsukada (tsukada-cs)](https://github.com/tsukada-cs) +- [Antonio Valentino](https://github.com/avalentino) - [Christian Versloot (christianversloot)](https://github.com/christianversloot) - [Helga Weber (helgaweb)](https://github.com/helgaweb) - [hazbottles (hazbottles)](https://github.com/hazbottles) @@ -87,5 +94,3 
@@ The following people have made contributions to this project: - [praerien (praerien)](https://github.com/praerien) - [Xin Zhang (zxdawn)](https://github.com/zxdawn) - [Yufei Zhu (yufeizhu600)](https://github.com/yufeizhu600) -- [Youva Aoun (YouvaEUMex)](https://github.com/YouvaEUMex) -- [David Navia (dnaviap)](https://github.com/dnaviap) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8730209f99..a1484ff13b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,176 @@ +## Version 0.49.0 (2024/06/05) + +### Issues Closed + +* [Issue 2790](https://github.com/pytroll/satpy/issues/2790) - VIIRS L1B DNB_SENZ file_key ([PR 2791](https://github.com/pytroll/satpy/pull/2791) by [@wjsharpe](https://github.com/wjsharpe)) +* [Issue 2781](https://github.com/pytroll/satpy/issues/2781) - [Question] Sun Zenith Correction +* [Issue 2765](https://github.com/pytroll/satpy/issues/2765) - abi_l2_nc reader Key Error 'calibration' when trying to load Mask from fire Hot Spot ([PR 2794](https://github.com/pytroll/satpy/pull/2794) by [@djhoese](https://github.com/djhoese)) + +In this release 3 issues were closed. 
+ +### Pull Requests Merged + +#### Bugs fixed + +* [PR 2804](https://github.com/pytroll/satpy/pull/2804) - Fix LI L2 accumulated products `'with_area_definition': False` 1-d coordinates computation +* [PR 2794](https://github.com/pytroll/satpy/pull/2794) - Fix ABI L2 datasets when unitless and no calibration ([2765](https://github.com/pytroll/satpy/issues/2765)) +* [PR 2791](https://github.com/pytroll/satpy/pull/2791) - fixed DNB_SENZ file_key ([2790](https://github.com/pytroll/satpy/issues/2790)) + +#### Features added + +* [PR 2807](https://github.com/pytroll/satpy/pull/2807) - Update the vii_l1b-reader, for new testdata format of VII +* [PR 2801](https://github.com/pytroll/satpy/pull/2801) - Replace pytest-lazyfixture with pytest-lazy-fixtures +* [PR 2800](https://github.com/pytroll/satpy/pull/2800) - Add numpy rules to ruff +* [PR 2799](https://github.com/pytroll/satpy/pull/2799) - Add netcdf4 to goci2 optional dependency in `pyproject.toml` +* [PR 2795](https://github.com/pytroll/satpy/pull/2795) - Add support for MERSI-1 on FY-3A/B/C +* [PR 2789](https://github.com/pytroll/satpy/pull/2789) - Activate LI L2 accumulated products gridding by default +* [PR 2787](https://github.com/pytroll/satpy/pull/2787) - Fix datetime imports +* [PR 2778](https://github.com/pytroll/satpy/pull/2778) - Add the reader for the fci L1C Africa files +* [PR 2776](https://github.com/pytroll/satpy/pull/2776) - Add option to choose start time to MSI SAFE reader +* [PR 2727](https://github.com/pytroll/satpy/pull/2727) - Refactor Sentinel-1 SAR-C reader + +#### Documentation changes + +* [PR 2789](https://github.com/pytroll/satpy/pull/2789) - Activate LI L2 accumulated products gridding by default + +#### Backward incompatible changes + +* [PR 2789](https://github.com/pytroll/satpy/pull/2789) - Activate LI L2 accumulated products gridding by default + 
+#### Refactoring + +* [PR 2787](https://github.com/pytroll/satpy/pull/2787) - Fix datetime imports + +#### Clean ups + +* [PR 2797](https://github.com/pytroll/satpy/pull/2797) - Add missing coverage configuration section to pyproject.toml +* [PR 2784](https://github.com/pytroll/satpy/pull/2784) - Fix various issues in unstable CI + +In this release 18 pull requests were closed. + + +## Version 0.48.0 (2024/04/22) + +### Issues Closed + +* [Issue 2782](https://github.com/pytroll/satpy/issues/2782) - Documentation points to missing setup.py ([PR 2786](https://github.com/pytroll/satpy/pull/2786) by [@mraspaud](https://github.com/mraspaud)) +* [Issue 2771](https://github.com/pytroll/satpy/issues/2771) - Load data in another datatype rather than float64 +* [Issue 2759](https://github.com/pytroll/satpy/issues/2759) - 'defusedxml' missing in "msi_safe" extras ([PR 2761](https://github.com/pytroll/satpy/pull/2761) by [@fwfichtner](https://github.com/fwfichtner)) +* [Issue 2749](https://github.com/pytroll/satpy/issues/2749) - [Question] Resample of mesoscale data gives blank data +* [Issue 2747](https://github.com/pytroll/satpy/issues/2747) - Cannot load from MTG FCI L1C data +* [Issue 2729](https://github.com/pytroll/satpy/issues/2729) - Add Viirs L2 Reader + Enhancments ([PR 2740](https://github.com/pytroll/satpy/pull/2740) by [@wjsharpe](https://github.com/wjsharpe)) +* [Issue 2695](https://github.com/pytroll/satpy/issues/2695) - Improvements for BackgroundCompositor ([PR 2696](https://github.com/pytroll/satpy/pull/2696) by [@yukaribbba](https://github.com/yukaribbba)) + +In this release 7 issues were closed. 
+ +### Pull Requests Merged + +#### Bugs fixed + +* [PR 2786](https://github.com/pytroll/satpy/pull/2786) - Remove doc references to setup.py ([2782](https://github.com/pytroll/satpy/issues/2782)) +* [PR 2779](https://github.com/pytroll/satpy/pull/2779) - Convert Sentinel-2 MSI sensor name to lowercase in the reader YAML config file and add support for "counts" calibration +* [PR 2774](https://github.com/pytroll/satpy/pull/2774) - Fix the viirs EDR tests for newer xarray +* [PR 2761](https://github.com/pytroll/satpy/pull/2761) - Add missing defusedxml ([2759](https://github.com/pytroll/satpy/issues/2759)) +* [PR 2754](https://github.com/pytroll/satpy/pull/2754) - Bugfix vgac reader +* [PR 2701](https://github.com/pytroll/satpy/pull/2701) - Ici reader tiepoints bugfix +* [PR 2696](https://github.com/pytroll/satpy/pull/2696) - Add double alpha channel support and improve metadata behaviours for BackgroundCompositor ([2695](https://github.com/pytroll/satpy/issues/2695)) + +#### Features added + +* [PR 2780](https://github.com/pytroll/satpy/pull/2780) - Add new (Eumetrain) FCI RGB composites +* [PR 2767](https://github.com/pytroll/satpy/pull/2767) - Use flags from file when available in OLCI NC reader +* [PR 2763](https://github.com/pytroll/satpy/pull/2763) - Replace setup with pyproject.toml +* [PR 2762](https://github.com/pytroll/satpy/pull/2762) - Add support for EO-SIP AVHRR LAC data +* [PR 2753](https://github.com/pytroll/satpy/pull/2753) - Add fsspec support to `li_l2_nc` reader +* [PR 2740](https://github.com/pytroll/satpy/pull/2740) - Add VIIRS L2 Reader ([2729](https://github.com/pytroll/satpy/issues/2729)) +* [PR 2696](https://github.com/pytroll/satpy/pull/2696) - Add double alpha channel support and improve metadata behaviours for BackgroundCompositor ([2695](https://github.com/pytroll/satpy/issues/2695)) +* 
[PR 2595](https://github.com/pytroll/satpy/pull/2595) - VGAC decode the time variable + +#### Documentation changes + +* [PR 2786](https://github.com/pytroll/satpy/pull/2786) - Remove doc references to setup.py ([2782](https://github.com/pytroll/satpy/issues/2782)) +* [PR 2766](https://github.com/pytroll/satpy/pull/2766) - Add Data Store to EUMETSAT part +* [PR 2750](https://github.com/pytroll/satpy/pull/2750) - Add missing `h` docstring information to _geos_area.py + +In this release 18 pull requests were closed. + + +## Version 0.47.0 (2024/02/21) + +### Issues Closed + +* [Issue 2734](https://github.com/pytroll/satpy/issues/2734) - Using a static image alters time information ([PR 2737](https://github.com/pytroll/satpy/pull/2737) by [@pnuu](https://github.com/pnuu)) +* [Issue 2723](https://github.com/pytroll/satpy/issues/2723) - MODIS Satpy scene Don't know how to open the following files: {'MOD021KM.A2017131.1325.061.2017314123114.hdf'} +* [Issue 2719](https://github.com/pytroll/satpy/issues/2719) - Add lat lon to Seviri plots +* [Issue 2718](https://github.com/pytroll/satpy/issues/2718) - Set invert as a modifier when do composite +* [Issue 2712](https://github.com/pytroll/satpy/issues/2712) - mitiff writer add config option to add or not to add the size of a pixel in the proj string ([PR 2713](https://github.com/pytroll/satpy/pull/2713) by [@TAlonglong](https://github.com/TAlonglong)) +* [Issue 2710](https://github.com/pytroll/satpy/issues/2710) - scene.save_datasets() outputs different values for AHI_HSD reader with calibration="brightness_temperature" +* [Issue 2708](https://github.com/pytroll/satpy/issues/2708) - this is regarding slstr_l1b geometry +* [Issue 2703](https://github.com/pytroll/satpy/issues/2703) - read swath in loop +* [Issue 2680](https://github.com/pytroll/satpy/issues/2680) - satpy_cf_nc reader 
cannot read FCI file written with cf writer +* [Issue 2672](https://github.com/pytroll/satpy/issues/2672) - Changes in NWC SAF GEO v2021 data ([PR 2673](https://github.com/pytroll/satpy/pull/2673) by [@pnuu](https://github.com/pnuu)) +* [Issue 2630](https://github.com/pytroll/satpy/issues/2630) - wrong start_time with BackgroundCompositor ([PR 2737](https://github.com/pytroll/satpy/pull/2737) by [@pnuu](https://github.com/pnuu)) +* [Issue 2447](https://github.com/pytroll/satpy/issues/2447) - add more options to time handling in combine_metadata ([PR 2737](https://github.com/pytroll/satpy/pull/2737) by [@pnuu](https://github.com/pnuu)) +* [Issue 2446](https://github.com/pytroll/satpy/issues/2446) - combine metadata in `MultiFiller` ([PR 2737](https://github.com/pytroll/satpy/pull/2737) by [@pnuu](https://github.com/pnuu)) +* [Issue 2427](https://github.com/pytroll/satpy/issues/2427) - Wrong start_time, end_time attributes after MultiScene.blend(blend_function=timeseries) ([PR 2737](https://github.com/pytroll/satpy/pull/2737) by [@pnuu](https://github.com/pnuu)) +* [Issue 2319](https://github.com/pytroll/satpy/issues/2319) - slstr_l2.yaml points to deleted slstr_l2.py ([PR 2731](https://github.com/pytroll/satpy/pull/2731) by [@djhoese](https://github.com/djhoese)) +* [Issue 1921](https://github.com/pytroll/satpy/issues/1921) - Standardize dataset information for SEVIRI and FCI L2 products +* [Issue 1174](https://github.com/pytroll/satpy/issues/1174) - combine_metadata only supports the average of time attrs ([PR 2737](https://github.com/pytroll/satpy/pull/2737) by [@pnuu](https://github.com/pnuu)) + +In this release 17 issues were closed. 
+ +### Pull Requests Merged + +#### Bugs fixed + +* [PR 2743](https://github.com/pytroll/satpy/pull/2743) - Fix nominal time attributes in SEVIRI HRIT ([](https://github.com/`nominal_start/issues/)) +* [PR 2742](https://github.com/pytroll/satpy/pull/2742) - Fix nominal end time in AHI HSD +* [PR 2737](https://github.com/pytroll/satpy/pull/2737) - Change `start_time` and `end_time` handling in `combine_metadata` ([2734](https://github.com/pytroll/satpy/issues/2734), [2630](https://github.com/pytroll/satpy/issues/2630), [2447](https://github.com/pytroll/satpy/issues/2447), [2446](https://github.com/pytroll/satpy/issues/2446), [2427](https://github.com/pytroll/satpy/issues/2427), [1174](https://github.com/pytroll/satpy/issues/1174)) +* [PR 2731](https://github.com/pytroll/satpy/pull/2731) - Remove slstr_l2 reader in favor of ghrsst_l2 ([2319](https://github.com/pytroll/satpy/issues/2319)) +* [PR 2730](https://github.com/pytroll/satpy/pull/2730) - Pin pytest to fix CI +* [PR 2726](https://github.com/pytroll/satpy/pull/2726) - Fix AGRI L1 C07 having a valid LUT value for its fill value ([565](https://github.com/ssec/polar2grid/issues/565)) +* [PR 2713](https://github.com/pytroll/satpy/pull/2713) - Add kwargs config option to turn off mitiff corner correction ([2712](https://github.com/pytroll/satpy/issues/2712)) +* [PR 2711](https://github.com/pytroll/satpy/pull/2711) - Add support for NOAA-21 in MiRS limb correction +* [PR 2707](https://github.com/pytroll/satpy/pull/2707) - Fix 'viirs_edr' renaming two sets of dimensions to the same names +* [PR 2700](https://github.com/pytroll/satpy/pull/2700) - Fix eps_l1b reader Delayed usage causing docs failures + +#### Features added + +* [PR 2746](https://github.com/pytroll/satpy/pull/2746) - Fix concurrency group in ci +* [PR 
2745](https://github.com/pytroll/satpy/pull/2745) - Sort reader table by name + diverse fixes +* [PR 2744](https://github.com/pytroll/satpy/pull/2744) - Fix cutoffs for night_ir_alpha and bump up trollimage version +* [PR 2737](https://github.com/pytroll/satpy/pull/2737) - Change `start_time` and `end_time` handling in `combine_metadata` ([2734](https://github.com/pytroll/satpy/issues/2734), [2630](https://github.com/pytroll/satpy/issues/2630), [2447](https://github.com/pytroll/satpy/issues/2447), [2446](https://github.com/pytroll/satpy/issues/2446), [2427](https://github.com/pytroll/satpy/issues/2427), [1174](https://github.com/pytroll/satpy/issues/1174)) +* [PR 2728](https://github.com/pytroll/satpy/pull/2728) - Update asv dependencies +* [PR 2720](https://github.com/pytroll/satpy/pull/2720) - Add support for the MERSI-RM instrument on FY-3G +* [PR 2714](https://github.com/pytroll/satpy/pull/2714) - Add QC-based filtering to the VIIRS EDR AOD550 product +* [PR 2675](https://github.com/pytroll/satpy/pull/2675) - Make CF encoding of dataset attributes public +* [PR 2673](https://github.com/pytroll/satpy/pull/2673) - Add NWC SAF GEO v2021 ASIIF-TF and ASII-GW dataset names ([2672](https://github.com/pytroll/satpy/issues/2672)) +* [PR 2534](https://github.com/pytroll/satpy/pull/2534) - Add fsspec functionality to `viirs_sdr` reader +* [PR 2441](https://github.com/pytroll/satpy/pull/2441) - Add channel aliases to the CLAVRx reader to facilitate composites + +#### Documentation changes + +* [PR 2700](https://github.com/pytroll/satpy/pull/2700) - Fix eps_l1b reader Delayed usage causing docs failures + +#### Backward incompatible changes + +* [PR 2731](https://github.com/pytroll/satpy/pull/2731) - Remove slstr_l2 reader in favor of ghrsst_l2 ([2319](https://github.com/pytroll/satpy/issues/2319)) + +#### 
Refactoring + +* [PR 2699](https://github.com/pytroll/satpy/pull/2699) - Move Scene.to_hvplot internals to _scene_converters + +#### Clean ups + +* [PR 2711](https://github.com/pytroll/satpy/pull/2711) - Add support for NOAA-21 in MiRS limb correction +* [PR 2700](https://github.com/pytroll/satpy/pull/2700) - Fix eps_l1b reader Delayed usage causing docs failures +* [PR 2689](https://github.com/pytroll/satpy/pull/2689) - Fix/supress warnings in reader tests ([](https://github.com/and/issues/)) +* [PR 2665](https://github.com/pytroll/satpy/pull/2665) - FCI L2 CF harmonization +* [PR 2597](https://github.com/pytroll/satpy/pull/2597) - Update CI to test Python 3.12 + +In this release 29 pull requests were closed. + + ## Version 0.46.0 (2023/12/18) ### Issues Closed diff --git a/asv.conf.json b/asv.conf.json index dbecadf79a..0b53ffd65c 100644 --- a/asv.conf.json +++ b/asv.conf.json @@ -46,7 +46,7 @@ // determined by looking for tools on the PATH environment // variable. //"environment_type": "virtualenv", - "environment_type": "conda", + "environment_type": "mamba", // timeout in seconds for installing any dependencies in environment // defaults to 10 min @@ -58,7 +58,7 @@ // The Pythons you'd like to test against. If not provided, defaults // to the current version of Python used to run `asv`. // "pythons": ["2.7", "3.6"], - "pythons": ["3.9", "3.10"], + "pythons": ["3.11", "3.12"], // The list of conda channel names to be searched for benchmark // dependency packages in the specified order @@ -80,14 +80,14 @@ // "pip+emcee": [""], // emcee is only available for install with pip. 
// }, "matrix": { - "pyresample": ["1.22.3"], - "trollimage": ["1.17.0"], - "pyorbital": ["1.7.1"], - "pyspectral": ["0.10.6"], - "rasterio": ["1.2.10"], - "dask": ["2021.12.0"], - "xarray": ["0.20.2"], - "numpy": ["1.22.0"], + "pyresample": ["1.27.1"], + "trollimage": ["1.22.2"], + "pyorbital": ["1.8.1"], + "pyspectral": ["0.13.0"], + "rasterio": ["1.3.9"], + "dask": ["2024.1.1"], + "xarray": ["2024.1.1"], + "numpy": ["1.26.0"], "s3fs": [], "h5py": [], "netCDF4": [], diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index ecc0084ea7..6479ce6b53 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -7,7 +7,7 @@ dependencies: - distributed - dask-image - donfig - - appdirs + - platformdirs - toolz - Cython - numba @@ -43,10 +43,10 @@ dependencies: - python-eccodes # 2.19.1 seems to cause library linking issues - eccodes>=2.20 - - pytest + - pytest<8.0.0 - pytest-cov - - pytest-lazy-fixture - fsspec + - botocore>=1.33 - s3fs - python-geotiepoints - pooch @@ -58,7 +58,8 @@ dependencies: - ephem - bokeh - pip: + - pytest-lazy-fixtures - trollsift - - trollimage>=1.20 + - trollimage>=1.23 - pyspectral - pyorbital diff --git a/doc/rtd_environment.yml b/doc/rtd_environment.yml index 1e40cbb73a..abd8add616 100644 --- a/doc/rtd_environment.yml +++ b/doc/rtd_environment.yml @@ -4,7 +4,7 @@ channels: dependencies: - python=3.10 - pip - - appdirs + - platformdirs - dask - dask-image - defusedxml @@ -18,7 +18,6 @@ dependencies: - pooch - pyresample - pytest - - pytest-lazy-fixture - python-eccodes - python-geotiepoints - rasterio @@ -32,6 +31,8 @@ dependencies: - xarray - zarr - xarray-datatree + - cartopy - pip: - graphviz + - pytest-lazy-fixtures - .. 
# relative path to the satpy project diff --git a/doc/source/_static/main.js b/doc/source/_static/main.js index 188a335e71..f2da99d4e7 100644 --- a/doc/source/_static/main.js +++ b/doc/source/_static/main.js @@ -1,6 +1,22 @@ $(document).ready( function () { $('table.datatable').DataTable( { "paging": false, - "dom": 'lfitp' + "layout": { + 'topStart': 'info', + 'topEnd': 'search', + 'bottomStart': null + }, + "order": [[0, 'asc']] +} ); + + $('table.area-table').DataTable( { + "paging": true, + "pageLength": 15, + "layout": { + 'topStart': 'info', + 'topEnd': 'search', + 'bottomEnd': 'paging', + 'bottomStart': null + } } ); } ); diff --git a/doc/source/conf.py b/doc/source/conf.py index 49e47b2cc2..62156e9760 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -14,9 +14,9 @@ from __future__ import annotations +import datetime as dt import os import sys -from datetime import datetime # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. 
If the directory is relative to the @@ -24,9 +24,15 @@ sys.path.append(os.path.abspath("../../")) sys.path.append(os.path.abspath(os.path.dirname(__file__))) -from reader_table import generate_reader_table # noqa: E402 +from pyresample.area_config import ( # noqa: E402 + _create_area_def_from_dict, + _read_yaml_area_file_content, + generate_area_def_rst_list, +) +from reader_table import generate_reader_table, rst_table_header, rst_table_row # noqa: E402 import satpy # noqa: E402 +from satpy.resample import get_area_file # noqa: E402 # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -80,6 +86,26 @@ def __getattr__(cls, name): with open("reader_table.rst", mode="w") as f: f.write(generate_reader_table()) +# create table from area definition yaml file +area_file = get_area_file()[0] + +area_dict = _read_yaml_area_file_content(area_file) +area_table = [rst_table_header("Area Definitions", header=["Name", "Description", "Projection"], + widths="auto", class_name="area-table")] + +for aname, params in area_dict.items(): + area = _create_area_def_from_dict(aname, params) + if not hasattr(area, "_repr_html_"): + continue + + area_table.append(rst_table_row([f"`{aname}`_", area.description, + area.proj_dict.get("proj")])) + +with open("area_def_list.rst", mode="w") as f: + f.write("".join(area_table)) + f.write("\n\n") + f.write(generate_area_def_rst_list(area_file)) + # -- General configuration ----------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be extensions @@ -117,7 +143,7 @@ def __getattr__(cls, name): # General information about the project. 
project = u"Satpy" -copyright = u"2009-{}, The PyTroll Team".format(datetime.utcnow().strftime("%Y")) # noqa: A001 +copyright = u"2009-{}, The PyTroll Team".format(dt.datetime.utcnow().strftime("%Y")) # noqa: A001 # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -194,11 +220,13 @@ def __getattr__(cls, name): html_css_files = [ "theme_overrides.css", # override wide tables in RTD theme - "https://cdn.datatables.net/1.10.23/css/jquery.dataTables.min.css", + # "https://cdn.datatables.net/v/dt/dt-2.0.0/datatables.min.css", + "https://cdn.datatables.net/v/dt/dt-2.0.8/r-3.0.2/datatables.min.css" ] html_js_files = [ - "https://cdn.datatables.net/1.10.23/js/jquery.dataTables.min.js", + # "https://cdn.datatables.net/v/dt/dt-2.0.0/datatables.min.js", + "https://cdn.datatables.net/v/dt/dt-2.0.8/r-3.0.2/datatables.min.js", "main.js", ] diff --git a/doc/source/config.rst b/doc/source/config.rst index b1777c9751..2279e10fe5 100644 --- a/doc/source/config.rst +++ b/doc/source/config.rst @@ -35,7 +35,7 @@ locations: 3. ``~/.satpy/satpy.yaml`` 4. ``/satpy.yaml`` (see :ref:`config_path_setting` below) -The above ``user_config_dir`` is provided by the ``appdirs`` package and +The above ``user_config_dir`` is provided by the ``platformdirs`` package and differs by operating system. Typical user config directories are: * Mac OSX: ``~/Library/Preferences/satpy`` @@ -90,7 +90,7 @@ Directory where any files cached by Satpy will be stored. This directory is not necessarily cleared out by Satpy, but is rarely used without explicitly being enabled by the user. This defaults to a different path depending on your operating system following -the `appdirs `_ +the `platformdirs `_ "user cache dir". .. _config_cache_lonlats_setting: @@ -214,7 +214,7 @@ Data Directory Directory where any data Satpy needs to perform certain operations will be stored. This replaces the legacy ``SATPY_ANCPATH`` environment variable. 
This defaults to a different path depending on your operating system following the -`appdirs `_ +`platformdirs `_ "user data dir". .. _download_aux_setting: diff --git a/doc/source/data_download.rst b/doc/source/data_download.rst index b8742fac96..8cabacd58a 100644 --- a/doc/source/data_download.rst +++ b/doc/source/data_download.rst @@ -72,7 +72,8 @@ NASA VIIRS Atmosphere SIPS * `Resource Description `__ * Associated Readers: ``viirs_l1b`` -EUMETSAT Data Center --------------------- +EUMETSAT Data Store and Data Center +----------------------------------- -* `Data Ordering `__ +* EUMETSAT's primary source for data is the `Data Store `__ +* Some products remain available on the `Earth Observation Portal `__ diff --git a/doc/source/dev_guide/custom_reader.rst b/doc/source/dev_guide/custom_reader.rst index a77988760e..a5656795ca 100644 --- a/doc/source/dev_guide/custom_reader.rst +++ b/doc/source/dev_guide/custom_reader.rst @@ -117,7 +117,8 @@ The parameters to provide in this section are: file format. This can be multiline if formatted properly in YAML (see example below). status - The status of the reader (one of: Nominal, Beta, Alpha) + The status of the reader (one of: Nominal, Beta, Alpha, Defunct; see :ref:`Status Description ` + for more details). supports_fsspec If the reader supports reading data via fsspec (either true or false). sensors diff --git a/doc/source/dev_guide/index.rst b/doc/source/dev_guide/index.rst index e877fd1c63..bcd536f614 100644 --- a/doc/source/dev_guide/index.rst +++ b/doc/source/dev_guide/index.rst @@ -29,7 +29,7 @@ and all code should follow the practices `_. Satpy is now Python 3 only and it is no longer needed to support Python 2. -Check ``setup.py`` for the current Python versions any new code needs +Check ``pyproject.toml`` for the current Python versions any new code needs to support. .. _devinstall: @@ -63,7 +63,7 @@ clone your fork. 
The package can then be installed in development mode by doing: The first command will install all dependencies needed by the Satpy conda-forge package, but won't actually install Satpy. The second command should be run from the root of the cloned Satpy repository (where the -``setup.py`` is) and will install the actual package. +``pyproject.toml`` is) and will install the actual package. You can now edit the python files in your cloned repository and have them immediately reflected in your conda environment. diff --git a/doc/source/dev_guide/plugins.rst b/doc/source/dev_guide/plugins.rst index bce72dabae..35c772c2a6 100644 --- a/doc/source/dev_guide/plugins.rst +++ b/doc/source/dev_guide/plugins.rst @@ -156,19 +156,6 @@ have a ``etc/`` directory in the root of the package structure. Even so, for future compatibility, it is best to use the name of the package directory on the right-hand side of the ``=``. -.. warning:: - - Due to some limitations in setuptools you must also define a ``setup.py`` - file in addition to ``pyproject.toml`` if you'd like to use "editable" - installations (``pip install -e .``). Once - `this setuptools issue `_ - is resolved this won't be needed. For now this minimal ``setup.py`` will - work: - - .. code-block:: python - - from setuptools import setup - setup() **Alternative: setup.py** diff --git a/doc/source/index.rst b/doc/source/index.rst index 052a7e2d03..b229c904ee 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -84,6 +84,30 @@ Documentation .. include:: reader_table.rst +.. _Status Description: +.. note:: + + Status description: + + Defunct + Most likely the reader is not functional. If it is there is a good chance of + bugs and/or performance problems (e.g. not ported to dask/xarray yet). Future + development is unclear. Users are encouraged to contribute (see section + :doc:`dev_guide/CONTRIBUTING` and/or get help on Slack or by opening a Github issue). + + Alpha + This denotes early development status. 
Reader is functional and implements some + or all of the nominal features. There might be bugs. Exactness of results is + not guaranteed. Use at your own risk. + + Beta + This denotes final development status. Reader is functional and implements all + nominal features. Results should be dependable but there might be bugs. Users + are actively encouraged to test and report bugs. + + Nominal + This denotes a finished status. Reader is functional and most likely no new + features will be introduced. It has been tested and there are no known bugs. Indices and tables ================== diff --git a/doc/source/install.rst b/doc/source/install.rst index 3c3ba26a41..619903b34c 100644 --- a/doc/source/install.rst +++ b/doc/source/install.rst @@ -86,7 +86,7 @@ To install the `satpy` package and the minimum amount of python dependencies: Additional dependencies can be installed as "extras" and are grouped by reader, writer, or feature added. Extras available can be found in the -`setup.py `_ file. +`pyproject.toml `_ file. They can be installed individually: .. code-block:: bash diff --git a/doc/source/reader_table.py b/doc/source/reader_table.py index 618cb2b96b..0caf2c4c34 100644 --- a/doc/source/reader_table.py +++ b/doc/source/reader_table.py @@ -39,16 +39,18 @@ def rst_table_row(columns=None): return row -def rst_table_header(name=None, header=None, header_rows=1, widths="auto"): # noqa: D417 +def rst_table_header(name=None, header=None, header_rows=1, widths="auto", class_name="datatable"): """Create header for rst table. Args: name (str): Name of the table header (list[str]): Column names - header-rows (int): Number of header rows + header_rows (int): Number of header rows - width (optional[list[int]]): Width of each column as a list. If not specified + widths (optional[list[int]]): Width of each column as a list. If not specified defaults to auto and will therefore determined by the backend (see ) + class_name (str): The CSS class name for the table.
A corresponding js function should be in main.js + in the "static" directory. Returns: str """ @@ -61,7 +63,7 @@ def rst_table_header(name=None, header=None, header_rows=1, widths="auto"): # n table_header = (f".. list-table:: {name}\n" f" :header-rows: {header_rows}\n" f" :widths: {widths}\n" - f" :class: datatable\n\n" + f" :class: {class_name}\n\n" f"{header}") return table_header @@ -74,7 +76,7 @@ def generate_reader_table(): str """ table = [rst_table_header("Satpy Readers", header=["Description", "Reader name", "Status", "fsspec support"], - widths=[45, 25, 30, 30])] + widths="auto")] reader_configs = available_readers(as_dict=True, yaml_loader=BaseLoader) for rc in reader_configs: diff --git a/pyproject.toml b/pyproject.toml index 4de1e302f4..c2ae1a890d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,9 +1,123 @@ +[project] +name = "satpy" +dynamic = ["version"] +description = "Python package for earth-observing satellite data processing" +authors = [ + { name = "The Pytroll Team", email = "pytroll@googlegroups.com" } +] +dependencies = [ + "platformdirs", + "dask[array]>=0.17.1", + "donfig", + "numpy>=1.21", + "packaging", + "pillow", + "pooch", + "pykdtree", + "pyorbital", + "pyproj>=2.2", + "pyresample>=1.24.0", + "pyyaml>=5.1", + "trollimage>=1.23", + "trollsift", + "xarray>=0.14.1", + "zarr", +] +readme = "README.rst" +requires-python = ">=3.9" +license = { text = "GPLv3" } +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Topic :: Scientific/Engineering" +] + +[project.optional-dependencies] +avhrr_l1b_eps = ["defusedxml"] +avhrr_l1b_gaclac = ["pygac >= 1.3.0"] +modis_l1b = ["pyhdf", "python-geotiepoints >= 1.1.7"] +geocat = ["pyhdf"] +goci2 = ["netCDF4 >= 1.1.8"] +acspo = ["netCDF4 >= 1.1.8"] +clavrx = ["netCDF4 >= 1.1.8"]
+viirs_l1b = ["netCDF4 >= 1.1.8"] +viirs_sdr = ["h5py >= 2.7.0"] +viirs_compact = ["h5py >= 2.7.0"] +omps_edr = ["h5py >= 2.7.0"] +amsr2_l1b = ["h5py >= 2.7.0"] +hrpt = ["pyorbital >= 1.3.1", "pygac", "python-geotiepoints >= 1.1.7"] +hrit_msg = ["pytroll-schedule"] +msi_safe = ["rioxarray", "bottleneck", "python-geotiepoints", "defusedxml"] +nc_nwcsaf_msg = ["netCDF4 >= 1.1.8"] +sar_c = ["python-geotiepoints >= 1.1.7", "rasterio", "rioxarray", "defusedxml"] +abi_l1b = ["h5netcdf"] +seviri_l1b_hrit = ["pyorbital >= 1.3.1"] +seviri_l1b_native = ["pyorbital >= 1.3.1"] +seviri_l1b_nc = ["pyorbital >= 1.3.1", "netCDF4 >= 1.1.8"] +seviri_l2_bufr = ["eccodes"] +seviri_l2_grib = ["eccodes"] +hsaf_grib = ["pygrib"] +remote_reading = ["fsspec"] +insat_3d = ["xarray-datatree"] +gms5-vissr_l1b = ["numba"] +# Writers: +cf = ["h5netcdf >= 0.7.3"] +awips_tiled = ["netCDF4 >= 1.1.8"] +geotiff = ["rasterio", "trollimage[geotiff]"] +ninjo = ["pyninjotiff", "pint"] +units = ["pint-xarray"] +# Composites/Modifiers: +rayleigh = ["pyspectral >= 0.10.1"] +angles = ["pyorbital >= 1.3.1"] +filters = ["dask-image"] +# MultiScene: +animations = ["imageio"] +# Documentation: +doc = ["sphinx", "sphinx_rtd_theme", "sphinxcontrib-apidoc"] +# Other +geoviews = ["geoviews"] +holoviews = ["holoviews"] +hvplot = ["hvplot", "geoviews", "cartopy", "holoviews"] +overlays = ["pycoast", "pydecorate"] +satpos_from_tle = ["skyfield", "astropy"] +tests = ["behave", "h5py", "netCDF4", "pyhdf", "imageio", + "rasterio", "geoviews", "trollimage", "fsspec", "bottleneck", + "rioxarray", "pytest", "pytest-lazy-fixtures", "defusedxml", + "s3fs", "eccodes", "h5netcdf", "xarray-datatree", + "skyfield", "ephem", "pint-xarray", "astropy", "dask-image", "python-geotiepoints", "numba"] + +[project.scripts] +satpy_retrieve_all_aux_data = "satpy.aux_download:retrieve_all_cmd" + +[project.urls] +Homepage = "https://github.com/pytroll/satpy" +"Bug Tracker" = "https://github.com/pytroll/satpy/issues" 
+Documentation = "https://satpy.readthedocs.io/en/stable/" +"Source Code" = "https://github.com/pytroll/satpy" +Organization = "https://pytroll.github.io/" +Slack = "https://pytroll.slack.com/" +Twitter = "https://twitter.com/hashtag/satpy?src=hashtag_click" +"Release Notes" = "https://github.com/pytroll/satpy/blob/main/CHANGELOG.md" +Mastodon = "https://fosstodon.org/tags/satpy" + [build-system] -requires = ["setuptools>=60", "wheel", "setuptools_scm[toml]>=8.0"] -build-backend = "setuptools.build_meta" +requires = ["hatchling", "hatch-vcs"] +build-backend = "hatchling.build" + +[tool.hatch.metadata] +allow-direct-references = true + +[tool.hatch.build.targets.wheel] +packages = ["satpy"] -[tool.setuptools_scm] -write_to = "satpy/version.py" +[tool.hatch.version] +source = "vcs" + +[tool.hatch.build.hooks.vcs] +version-file = "satpy/version.py" [tool.isort] sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"] @@ -14,19 +128,25 @@ known_first_party = "satpy" line_length = 120 [tool.ruff] +line-length = 120 + +[tool.ruff.lint] # See https://docs.astral.sh/ruff/rules/ # In the future, add "B", "S", "N" -select = ["A", "D", "E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] -line-length = 120 +select = ["A", "D", "E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20", "NPY"] -[tool.ruff.per-file-ignores] +[tool.ruff.lint.per-file-ignores] "satpy/tests/*" = ["S101"] # assert allowed in tests "utils/coord2area_def.py" = ["T201"] # allow print "fetch_avhrr_calcoeffs.py" = ["T201"] # allow print -[tool.ruff.pydocstyle] +[tool.ruff.lint.pydocstyle] convention = "google" -[tool.ruff.mccabe] +[tool.ruff.lint.mccabe] # Unlike Flake8, default to a complexity level of 10. 
max-complexity = 10 + +[tool.coverage.run] +relative_files = true +omit = ["satpy/version.py"] diff --git a/satpy/_config.py b/satpy/_config.py index 6a14f994a8..fbfcb0c0d5 100644 --- a/satpy/_config.py +++ b/satpy/_config.py @@ -29,8 +29,8 @@ from importlib.resources import files as impr_files from typing import Iterable -import appdirs from donfig import Config +from platformdirs import AppDirs from satpy._compat import cache @@ -40,7 +40,7 @@ # FIXME: Use package_resources? PACKAGE_CONFIG_PATH = os.path.join(BASE_PATH, "etc") -_satpy_dirs = appdirs.AppDirs(appname="satpy", appauthor="pytroll") +_satpy_dirs = AppDirs(appname="satpy", appauthor="pytroll") _CONFIG_DEFAULTS = { "tmp_dir": tempfile.gettempdir(), "cache_dir": _satpy_dirs.user_cache_dir, diff --git a/satpy/cf/attrs.py b/satpy/cf/attrs.py index cdec8500d4..adcc2ba60a 100644 --- a/satpy/cf/attrs.py +++ b/satpy/cf/attrs.py @@ -128,8 +128,8 @@ def _encode_to_cf(obj): return _encode_python_objects(obj) -def _encode_nc_attrs(attrs): - """Encode dataset attributes in a netcdf compatible datatype. +def encode_attrs_to_cf(attrs): + """Encode dataset attributes as a netcdf compatible datatype. 
Args: attrs (dict): @@ -161,7 +161,7 @@ def preprocess_attrs( if flatten_attrs: data_arr.attrs = flatten_dict(data_arr.attrs) - data_arr.attrs = _encode_nc_attrs(data_arr.attrs) + data_arr.attrs = encode_attrs_to_cf(data_arr.attrs) return data_arr @@ -224,7 +224,7 @@ def preprocess_header_attrs(header_attrs, flatten_attrs=False): if header_attrs is not None: if flatten_attrs: header_attrs = flatten_dict(header_attrs) - header_attrs = _encode_nc_attrs(header_attrs) # OrderedDict + header_attrs = encode_attrs_to_cf(header_attrs) # OrderedDict else: header_attrs = {} header_attrs = _add_history(header_attrs) diff --git a/satpy/cf/decoding.py b/satpy/cf/decoding.py new file mode 100644 index 0000000000..2515f6bd38 --- /dev/null +++ b/satpy/cf/decoding.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . + +"""CF decoding.""" + +import copy +import datetime as dt +import json + + +def decode_attrs(attrs): + """Decode CF-encoded attributes to Python object. + + Converts timestamps to datetime and strings starting with "{" to + dictionary. 
+ + Args: + attrs (dict): Attributes to be decoded + + Returns (dict): Decoded attributes + """ + attrs = copy.deepcopy(attrs) + _decode_dict_type_attrs(attrs) + _decode_timestamps(attrs) + return attrs + + +def _decode_dict_type_attrs(attrs): + for key, val in attrs.items(): + attrs[key] = _str2dict(val) + + +def _str2dict(val): + """Convert string to dictionary.""" + if isinstance(val, str) and val.startswith("{"): + val = json.loads(val, object_hook=_datetime_parser_json) + return val + + +def _decode_timestamps(attrs): + for key, value in attrs.items(): + timestamp = _str2datetime(value) + if timestamp: + attrs[key] = timestamp + + +def _datetime_parser_json(json_dict): + """Traverse JSON dictionary and parse timestamps.""" + for key, value in json_dict.items(): + timestamp = _str2datetime(value) + if timestamp: + json_dict[key] = timestamp + return json_dict + + +def _str2datetime(string): + """Convert string to datetime object.""" + try: + return dt.datetime.fromisoformat(string) + except (TypeError, ValueError): + return None diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index a70bbea86f..9ab658954f 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1665,37 +1665,81 @@ def __call__(self, *args, **kwargs): img.attrs["mode"] = "".join(img.bands.data) img.attrs.pop("modifiers", None) img.attrs.pop("calibration", None) - # Add start time if not present in the filename - if "start_time" not in img.attrs or not img.attrs["start_time"]: - import datetime as dt - img.attrs["start_time"] = dt.datetime.utcnow() - if "end_time" not in img.attrs or not img.attrs["end_time"]: - import datetime as dt - img.attrs["end_time"] = dt.datetime.utcnow() return img class BackgroundCompositor(GenericCompositor): - """A compositor that overlays one composite on top of another.""" + """A compositor that overlays one composite on top of another. + + The output image mode will be determined by both foreground and background. 
Generally, when the background has + an alpha band, the output image will also have one. + + ============ ============ ======== + Foreground Background Result + ============ ============ ======== + L L L + ------------ ------------ -------- + L LA LA + ------------ ------------ -------- + L RGB RGB + ------------ ------------ -------- + L RGBA RGBA + ------------ ------------ -------- + LA L L + ------------ ------------ -------- + LA LA LA + ------------ ------------ -------- + LA RGB RGB + ------------ ------------ -------- + LA RGBA RGBA + ------------ ------------ -------- + RGB L RGB + ------------ ------------ -------- + RGB LA RGBA + ------------ ------------ -------- + RGB RGB RGB + ------------ ------------ -------- + RGB RGBA RGBA + ------------ ------------ -------- + RGBA L RGB + ------------ ------------ -------- + RGBA LA RGBA + ------------ ------------ -------- + RGBA RGB RGB + ------------ ------------ -------- + RGBA RGBA RGBA + ============ ============ ======== + + """ def __call__(self, projectables, *args, **kwargs): """Call the compositor.""" projectables = self.match_data_arrays(projectables) + # Get enhanced datasets foreground = enhance2dataset(projectables[0], convert_p=True) background = enhance2dataset(projectables[1], convert_p=True) - # Adjust bands so that they match - # L/RGB -> RGB/RGB - # LA/RGB -> RGBA/RGBA - # RGB/RGBA -> RGBA/RGBA + before_bg_mode = background.attrs["mode"] + + # Adjust bands so that they have the same mode foreground = add_bands(foreground, background["bands"]) background = add_bands(background, foreground["bands"]) + # It's important whether the alpha band of background is initially generated, e.g. 
by CloudCompositor + # The result will be used to determine the output image mode + initial_bg_alpha = "A" in before_bg_mode + attrs = self._combine_metadata_with_mode_and_sensor(foreground, background) - data = self._get_merged_image_data(foreground, background) + if "A" not in foreground.attrs["mode"] and "A" not in background.attrs["mode"]: + data = self._simple_overlay(foreground, background) + else: + data = self._get_merged_image_data(foreground, background, initial_bg_alpha=initial_bg_alpha) + for data_arr in data: + data_arr.attrs = attrs res = super(BackgroundCompositor, self).__call__(data, **kwargs) - res.attrs.update(attrs) + attrs.update(res.attrs) + res.attrs = attrs return res def _combine_metadata_with_mode_and_sensor(self, @@ -1714,27 +1758,61 @@ def _combine_metadata_with_mode_and_sensor(self, @staticmethod def _get_merged_image_data(foreground: xr.DataArray, - background: xr.DataArray + background: xr.DataArray, + initial_bg_alpha: bool, ) -> list[xr.DataArray]: - if "A" in foreground.attrs["mode"]: - # Use alpha channel as weight and blend the two composites - alpha = foreground.sel(bands="A") - data = [] - # NOTE: there's no alpha band in the output image, it will - # be added by the data writer - for band in foreground.mode[:-1]: - fg_band = foreground.sel(bands=band) - bg_band = background.sel(bands=band) - chan = (fg_band * alpha + bg_band * (1 - alpha)) - chan = xr.where(chan.isnull(), bg_band, chan) - data.append(chan) - else: - data_arr = xr.where(foreground.isnull(), background, foreground) - # Split to separate bands so the mode is correct - data = [data_arr.sel(bands=b) for b in data_arr["bands"]] + # For more info about alpha compositing please review https://en.wikipedia.org/wiki/Alpha_compositing + alpha_fore = _get_alpha(foreground) + alpha_back = _get_alpha(background) + new_alpha = alpha_fore + alpha_back * (1 - alpha_fore) + + data = [] + + # Pass the image data (alpha band will be dropped temporarily) to the writer + output_mode
= background.attrs["mode"].replace("A", "") + + for band in output_mode: + fg_band = foreground.sel(bands=band) + bg_band = background.sel(bands=band) + # Do the alpha compositing + chan = (fg_band * alpha_fore + bg_band * alpha_back * (1 - alpha_fore)) / new_alpha + # Fill the NaN area with background + chan = xr.where(chan.isnull(), bg_band * alpha_back, chan) + chan["bands"] = band + data.append(chan) + + # If background has an initial alpha band, it will also be passed to the writer + if initial_bg_alpha: + new_alpha["bands"] = "A" + data.append(new_alpha) return data + @staticmethod + def _simple_overlay(foreground: xr.DataArray, + background: xr.DataArray,) -> list[xr.DataArray]: + # This is for the case when no alpha bands are involved + # Just simply lay the foreground upon background + data_arr = xr.where(foreground.isnull(), background, foreground) + # Split to separate bands so the mode is correct + data = [data_arr.sel(bands=b) for b in data_arr["bands"]] + + return data + + +def _get_alpha(dataset: xr.DataArray): + # 1. This function is only used by _get_merged_image_data + # 2. Both foreground and background have been through add_bands, so they have the same mode + # 3. If none of them has alpha band, they will be passed to _simple_overlay not _get_merged_image_data + # So any dataset(whether foreground or background) passed to this function has an alpha band for certain + # We will use it directly + alpha = dataset.sel(bands="A") + # There could be NaNs in the alpha + # Replace them with 0 to prevent cases like 1 + nan = nan, so they won't affect new_alpha + alpha = xr.where(alpha.isnull(), 0, alpha) + + return alpha + class MaskingCompositor(GenericCompositor): """A compositor that masks e.g. 
IR 10.8 channel data using cloud products from NWC SAF.""" diff --git a/satpy/composites/viirs.py b/satpy/composites/viirs.py index 5df2d482af..a9a047fd21 100644 --- a/satpy/composites/viirs.py +++ b/satpy/composites/viirs.py @@ -15,12 +15,14 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Composite classes for the VIIRS instrument.""" + from __future__ import annotations +import datetime as dt import logging import math -from datetime import datetime import dask import dask.array as da @@ -842,7 +844,7 @@ def _linear_normalization_from_0to1( data[mask] = data[mask] / theoretical_max -def _check_moon_phase(moon_datasets: list[xr.DataArray], start_time: datetime) -> float: +def _check_moon_phase(moon_datasets: list[xr.DataArray], start_time: dt.datetime) -> float: """Check if we have Moon phase as an input dataset and, if not, calculate it.""" if moon_datasets: # convert to decimal instead of % diff --git a/satpy/dataset/metadata.py b/satpy/dataset/metadata.py index 46f6f622b8..03208ebc50 100644 --- a/satpy/dataset/metadata.py +++ b/satpy/dataset/metadata.py @@ -15,10 +15,12 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Utilities for merging metadata from various sources.""" +import datetime as dt +import warnings from collections.abc import Collection -from datetime import datetime from functools import partial, reduce from operator import eq, is_ @@ -27,33 +29,50 @@ from satpy.writers.utils import flatten_dict -def combine_metadata(*metadata_objects, average_times=True): +def combine_metadata(*metadata_objects, average_times=None): """Combine the metadata of two or more Datasets. If the values corresponding to any keys are not equal or do not exist in all provided dictionaries then they are not included in - the returned dictionary. By default any keys with the word 'time' - in them and consisting of datetime objects will be averaged. 
This - is to handle cases where data were observed at almost the same time - but not exactly. In the interest of time, lazy arrays are compared by - object identity rather than by their contents. + the returned dictionary. + + All values of the keys containing the substring 'start_time' will be set + to the earliest value and similarly for 'end_time' to latest time. All + other keys containing the word 'time' are averaged. Before these adjustments, + `None` values resulting from data that don't have times associated to them + are removed. These rules are applied also to values in the 'time_parameters' + dictionary. + + .. versionchanged:: 0.47 + + Before Satpy 0.47, all times, including `start_time` and `end_time`, were averaged. + + In the interest of processing time, lazy arrays are compared by object + identity rather than by their contents. Args: *metadata_objects: MetadataObject or dict objects to combine - average_times (bool): Average any keys with 'time' in the name + + Kwargs: + average_times (bool): Removed option to average all time attributes. 
Returns: dict: the combined metadata """ - info_dicts = _get_valid_dicts(metadata_objects) + if average_times is not None: + warnings.warn( + "'average_times' option has been removed and start/end times are handled with min/max instead.", + UserWarning + ) + info_dicts = _get_valid_dicts(metadata_objects) if len(info_dicts) == 1: return info_dicts[0].copy() shared_keys = _shared_keys(info_dicts) - return _combine_shared_info(shared_keys, info_dicts, average_times) + return _combine_shared_info(shared_keys, info_dicts) def _get_valid_dicts(metadata_objects): @@ -75,17 +94,51 @@ def _shared_keys(info_dicts): return reduce(set.intersection, key_sets) -def _combine_shared_info(shared_keys, info_dicts, average_times): +def _combine_shared_info(shared_keys, info_dicts): shared_info = {} for key in shared_keys: values = [info[key] for info in info_dicts] - if "time" in key and isinstance(values[0], datetime) and average_times: - shared_info[key] = average_datetimes(values) - elif _are_values_combinable(values): - shared_info[key] = values[0] + _combine_values(key, values, shared_info) return shared_info +def _combine_values(key, values, shared_info): + if "time" in key: + times = _combine_times(key, values) + if times is not None: + shared_info[key] = times + elif _are_values_combinable(values): + shared_info[key] = values[0] + + +def _combine_times(key, values): + if key == "time_parameters": + return _combine_time_parameters(values) + filtered_values = _filter_time_values(values) + if not filtered_values: + return None + if "end_time" in key: + return max(filtered_values) + elif "start_time" in key: + return min(filtered_values) + return average_datetimes(filtered_values) + + +def _combine_time_parameters(values): + # Assume the first item has all the keys + keys = values[0].keys() + res = {} + for key in keys: + sub_values = [itm[key] for itm in values] + res[key] = _combine_times(key, sub_values) + return res + + +def _filter_time_values(values): + """Remove values
that are not datetime objects.""" + return [v for v in values if isinstance(v, dt.datetime)] + + def average_datetimes(datetime_list): """Average a series of datetime objects. @@ -100,8 +153,8 @@ def average_datetimes(datetime_list): Returns: Average datetime as a datetime object """ - total = [datetime.timestamp(dt) for dt in datetime_list] - return datetime.fromtimestamp(sum(total) / len(total)) + total = [dt.datetime.timestamp(d) for d in datetime_list] + return dt.datetime.fromtimestamp(sum(total) / len(total)) def _are_values_combinable(values): diff --git a/satpy/etc/areas.yaml b/satpy/etc/areas.yaml index 28805855d9..55e7878e00 100644 --- a/satpy/etc/areas.yaml +++ b/satpy/etc/areas.yaml @@ -349,7 +349,7 @@ EastEurope: lower_left_xy: [654112.8864287604, 2989901.7547366405] upper_right_xy: [4553111.804127298, 5390224.287390241] -AfHorn: +AfHorn_geos: description: Eastern disk MSG image 0 degrees projection: proj: geos @@ -364,7 +364,7 @@ AfHorn: lower_left_xy: [2263804.1886089267, -1327678.4008740226] upper_right_xy: [5564247.671007627, 3472966.6644331776] -SouthAmerica: +SouthAmerica_geos: description: Lower West part of Southern disk MSG image 0 degrees projection: proj: geos @@ -764,6 +764,7 @@ australia: area_extent: lower_left_xy: [-2504688.5428486555, -5591295.9185533915] upper_right_xy: [2504688.5428486555, -1111475.102852225] + mali: description: mali projection: @@ -777,6 +778,7 @@ mali: area_extent: lower_left_xy: [-1224514.3987260093, 1111475.1028522244] upper_right_xy: [1224514.3987260093, 3228918.5790461157] + mali_eqc: description: mali projection: @@ -790,6 +792,7 @@ mali_eqc: area_extent: lower_left_xy: [-1224514.3987260093, -1001875.4171394627] upper_right_xy: [1224514.3987260093, 1001875.4171394617] + sve: description: Sweden and baltic sea @@ -801,6 +804,7 @@ sve: area_extent: lower_left_xy: [-342379.698, 6032580.06] upper_right_xy: [1423701.52, 8029648.75] + brazil2: description: brazil, platecarree projection: @@ -813,6 +817,7 @@ brazil2: 
lower_left_xy: [-7792364.355529149, -4452779.631730943] upper_right_xy: [-2226389.8158654715, 1669792.3618991035] units: m + sudeste: description: sudeste, platecarree projection: @@ -825,6 +830,7 @@ sudeste: lower_left_xy: [-6122571.993630046, -3005626.251418386] upper_right_xy: [-4230140.650144396, -1447153.3803125564] units: m + SouthAmerica_flat: description: South America flat projection: @@ -838,6 +844,7 @@ SouthAmerica_flat: lower_left_xy: [-8326322.82790897, -4609377.085697311] upper_right_xy: [-556597.4539663679, 1535833.8895192828] units: m + south_america: description: south_america, platecarree projection: @@ -850,6 +857,7 @@ south_america: lower_left_xy: [-8126322.82790897, -5009377.085697311] upper_right_xy: [-556597.4539663679, 1335833.8895192828] units: m + brazil: description: brazil, platecarree projection: @@ -862,6 +870,7 @@ brazil: lower_left_xy: [-8348961.809495518, -3896182.1777645745] upper_right_xy: [-3784862.6869713017, 1001875.4171394621] units: m + worldeqc3km70: description: World in 3km, platecarree projection: @@ -874,6 +883,7 @@ worldeqc3km70: lower_left_xy: [-20037508.3428, -7792364.355533333] upper_right_xy: [20037508.3428, 7792364.355533333] units: m + worldeqc30km70: description: World in 3km, platecarree projection: @@ -886,6 +896,7 @@ worldeqc30km70: lower_left_xy: [-20037508.3428, -7792364.355533333] upper_right_xy: [20037508.3428, 7792364.355533333] units: m + worldeqc3km73: description: World in 3km, platecarree projection: @@ -898,6 +909,7 @@ worldeqc3km73: lower_left_xy: [-20037508.3428, -8181982.573309999] upper_right_xy: [20037508.3428, 8181982.573309999] units: m + worldeqc3km: description: World in 3km, platecarree projection: @@ -910,6 +922,7 @@ worldeqc3km: lower_left_xy: [-20037508.3428, -10018754.1714] upper_right_xy: [20037508.3428, 10018754.1714] units: m + worldeqc30km: description: World in 3km, platecarree projection: @@ -922,6 +935,7 @@ worldeqc30km: lower_left_xy: [-20037508.3428, -10018754.1714] 
upper_right_xy: [20037508.3428, 10018754.1714] units: m + libya: description: libya area projection: @@ -936,6 +950,7 @@ libya: lower_left_xy: [-1921632.0902750609, 1725320.2028891125] upper_right_xy: [1918367.9097249391, 4797320.202889113] units: m + phil: description: kuwait area projection: @@ -951,6 +966,7 @@ phil: lower_left_xy: [-2200000.0, 0.0] upper_right_xy: [2200000.0, 2200000.0] units: m + phil_small: description: kuwait area projection: @@ -966,6 +982,7 @@ phil_small: lower_left_xy: [-600000.0, 0.0] upper_right_xy: [1600000.0, 2200000.0] units: m + kuwait: description: kuwait area projection: @@ -980,6 +997,7 @@ kuwait: lower_left_xy: [-1280000.0, 1820000.0] upper_right_xy: [1280000.0, 4380000.0] units: m + afghanistan: description: Afghanistan projection: @@ -1008,7 +1026,8 @@ maspalomas: area_extent: lower_left_xy: [-1200000.0, 2900000.0] upper_right_xy: [900000.0, 4000000.0] -afhorn: + +afhorn_merc: description: Africa horn 3km resolution projection: proj: merc @@ -1020,6 +1039,7 @@ afhorn: area_extent: lower_left_xy: [-2432000.0, -1130348.139543] upper_right_xy: [2432000.0, 3733651.860457] + spain: description: Spain projection: @@ -1036,6 +1056,7 @@ spain: area_extent: lower_left_xy: [-500000.0, -500000.0] upper_right_xy: [500000.0, 500000.0] + germ: description: Germany projection: @@ -1052,6 +1073,7 @@ germ: area_extent: lower_left_xy: [-155100.436345, -4441495.37946] upper_right_xy: [868899.563655, -3417495.37946] + germ2: description: Germany projection: @@ -1068,6 +1090,7 @@ germ2: area_extent: lower_left_xy: [-165100.436345, -4441495.37946] upper_right_xy: [878899.563655, -3417495.37946] + euro4: description: Euro 4km area - Europe projection: @@ -1082,6 +1105,7 @@ euro4: area_extent: lower_left_xy: [-2717181.7304994687, -5571048.14031214] upper_right_xy: [1378818.2695005313, -1475048.1403121399] + euro1: description: Euro 4km area - Europe projection: @@ -1096,6 +1120,7 @@ euro1: area_extent: lower_left_xy: [-2717181.7304994687, 
-5571048.14031214] upper_right_xy: [1378818.2695005313, -1475048.1403121399] + scan: description: Scandinavia projection: @@ -1110,6 +1135,7 @@ scan: area_extent: lower_left_xy: [-1268854.126638295, -4150234.8425892727] upper_right_xy: [779145.8733617051, -2102234.8425892727] + scan2: description: Scandinavia - 2km area projection: @@ -1124,6 +1150,7 @@ scan2: area_extent: lower_left_xy: [-1268854.126638295, -4150234.8425892727] upper_right_xy: [779145.8733617051, -2102234.8425892727] + scan1: description: Scandinavia - 1km area projection: @@ -1138,6 +1165,7 @@ scan1: area_extent: lower_left_xy: [-1268854.126638295, -4150234.8425892727] upper_right_xy: [779145.8733617051, -2062234.8425892727] + scan500m: description: Scandinavia - 500m area projection: @@ -1152,6 +1180,7 @@ scan500m: area_extent: lower_left_xy: [-1268854.126638295, -4150234.8425892727] upper_right_xy: [779145.8733617051, -2062234.8425892727] + mesanX: description: Mesan-X rotated lon/lat 1.8km projection: @@ -1169,6 +1198,7 @@ mesanX: area_extent: lower_left_xy: [1067435.7598983962, -1278764.890341909] upper_right_xy: [3791765.9965939857, 1690140.6680267097] + mesanE: description: Europe Mesan rotated lon/lat 1.8km projection: @@ -1186,6 +1216,7 @@ mesanE: area_extent: lower_left_xy: [289083.0005619671, -2957836.6467769896] upper_right_xy: [5381881.121371055, 3335826.68502126] + baws: description: BAWS projection: @@ -1200,6 +1231,7 @@ baws: area_extent: lower_left_xy: [-475617.0, 5324430.0] upper_right_xy: [924383.0, 6724430.0] + eurotv: description: Europe TV - 6.2x5.0km projection: @@ -1216,6 +1248,7 @@ eurotv: area_extent: lower_left_xy: [-3503748.8201907813, -6589593.134058789] upper_right_xy: [2842567.6359087573, -1499856.5846593212] + eurotv4n: description: Europe TV4 - 4.1x4.1km projection: @@ -1232,6 +1265,7 @@ eurotv4n: area_extent: lower_left_xy: [-5103428.678666952, -6772478.60053407] upper_right_xy: [3293371.321333048, -2049278.6005340703] + eurol: description: Euro 3.0km area - 
Europe projection: @@ -1246,6 +1280,7 @@ eurol: area_extent: lower_left_xy: [-3780000.0, -7644000.0] upper_right_xy: [3900000.0, -1500000.0] + eurol1: description: Euro 3.0km area - Europe projection: @@ -1260,6 +1295,7 @@ eurol1: area_extent: lower_left_xy: [-3780000.0, -7644000.0] upper_right_xy: [3900000.0, -1500000.0] + scanl: description: Scandinavia - Large projection: @@ -1274,6 +1310,7 @@ scanl: area_extent: lower_left_xy: [-900000.0, -4500000.0] upper_right_xy: [2000000.0, -1600000.0] + euron1: description: Northern Europe - 1km projection: @@ -1288,6 +1325,7 @@ euron1: area_extent: lower_left_xy: [-1000000.0, -4500000.0] upper_right_xy: [2072000.0, -1428000.0] + euron0250: description: Northern Europe - 1km projection: @@ -1302,6 +1340,7 @@ euron0250: area_extent: lower_left_xy: [-1000000.0, -4500000.0] upper_right_xy: [2072000.0, -1428000.0] + nsea: description: North Baltic Sea projection: @@ -1315,6 +1354,7 @@ nsea: area_extent: lower_left_xy: [-322789.07638000086, 7784901.986829306] upper_right_xy: [1725210.923619999, 9832901.986829307] + ssea: description: South Baltic Sea projection: @@ -1328,6 +1368,7 @@ ssea: area_extent: lower_left_xy: [-801407.3620468981, 7003690.663643802] upper_right_xy: [1246592.637953102, 9051690.663643802] + nsea250: description: North Baltic Sea projection: @@ -1341,6 +1382,7 @@ nsea250: area_extent: lower_left_xy: [-322789.07638000086, 7784901.986829306] upper_right_xy: [1725210.923619999, 9832901.986829307] + ssea250: description: South Baltic Sea projection: @@ -1354,6 +1396,7 @@ ssea250: area_extent: lower_left_xy: [-801407.3620468981, 7003690.663643802] upper_right_xy: [1246592.637953102, 9051690.663643802] + bsea250: description: South Baltic Sea projection: @@ -1367,6 +1410,7 @@ bsea250: area_extent: lower_left_xy: [512000.0, 3525000.0] upper_right_xy: [1700000.0, 4933000.0] + test250: description: South Baltic Sea projection: @@ -1380,6 +1424,7 @@ test250: area_extent: lower_left_xy: [512000.0, 3525000.0] 
upper_right_xy: [1700000.0, 4933000.0] + bsea1000: description: South Baltic Sea projection: @@ -1393,6 +1438,7 @@ bsea1000: area_extent: lower_left_xy: [512000.0, 3525000.0] upper_right_xy: [1700000.0, 4933000.0] + euro: description: Euro area - Europe projection: @@ -1407,6 +1453,7 @@ euro: area_extent: lower_left_xy: [-2717181.7304994687, -5571048.14031214] upper_right_xy: [1378818.2695005313, -1475048.1403121399] + baltrad_lambert: description: Baltrad Lambert projection: @@ -1421,6 +1468,7 @@ baltrad_lambert: area_extent: lower_left_xy: [-994211.85388, -1291605.15396] upper_right_xy: [635788.14612, 1098394.84604] + eport: description: eport projection: @@ -1435,6 +1483,7 @@ eport: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m + eport1: description: eport projection: @@ -1449,6 +1498,7 @@ eport1: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m + eport10: description: eport reduced resolution projection: @@ -1463,6 +1513,7 @@ eport10: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m + eport4: description: eport reduced resolution projection: @@ -1477,6 +1528,7 @@ eport4: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m + eport2: description: eport reduced resolution projection: @@ -1491,6 +1543,7 @@ eport2: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m + npp_sample_m: description: North America - NPP sample data - M-bands projection: @@ -1505,6 +1558,7 @@ npp_sample_m: area_extent: lower_left_xy: [-1700000.0, -1400000.0] upper_right_xy: [1100000.0, 1400000.0] + arctic_europe_1km: description: Arctic and Europe projection: @@ -1519,6 +1573,7 @@ arctic_europe_1km: area_extent: lower_left_xy: [-3100000.0, 
-7100000.0] upper_right_xy: [6000000.0, 2000000.0] + arctic_europe_9km: description: Arctic and Europe projection: @@ -1533,6 +1588,7 @@ arctic_europe_9km: area_extent: lower_left_xy: [-3100000.0, -7100000.0] upper_right_xy: [6000000.0, 2000000.0] + sswe: description: Southern Sweden projection: @@ -1549,6 +1605,7 @@ sswe: area_extent: lower_left_xy: [-400884.23045, -3946631.71387] upper_right_xy: [623115.76955, -2922631.71387] + nswe: description: Northern Sweden projection: @@ -1565,6 +1622,7 @@ nswe: area_extent: lower_left_xy: [-392288.010506, -3105279.35252] upper_right_xy: [631711.989494, -2081279.35252] + sval: description: Svalbard projection: @@ -1579,6 +1637,7 @@ sval: area_extent: lower_left_xy: [-287554.9489620461, -1630805.15418955] upper_right_xy: [736445.0510379539, -606805.1541895501] + ease_sh: description: Antarctic EASE grid projection: @@ -1594,6 +1653,7 @@ ease_sh: lower_left_xy: [-5326849.0625, -5326849.0625] upper_right_xy: [5326849.0625, 5326849.0625] units: m + ease_nh: description: Arctic EASE grid projection: @@ -1609,6 +1669,7 @@ ease_nh: lower_left_xy: [-5326849.0625, -5326849.0625] upper_right_xy: [5326849.0625, 5326849.0625] units: m + barents_sea: description: Barents and Greenland seas projection: @@ -1623,6 +1684,7 @@ barents_sea: area_extent: lower_left_xy: [-1600000.0, -2000000.0] upper_right_xy: [1400000.0, -300000.0] + antarctica: description: Antarctica - 1km projection: @@ -1637,6 +1699,7 @@ antarctica: area_extent: lower_left_xy: [-2858899.2042342643, -2858899.204234264] upper_right_xy: [2858899.204234264, 2858899.2042342643] + arctica: description: arctica - 1km projection: @@ -1651,6 +1714,7 @@ arctica: area_extent: lower_left_xy: [-1458899.2042342643, -1458899.2042342639] upper_right_xy: [1458899.2042342639, 1458899.2042342643] + euroasia: description: Euroasia - Global 1km USGS Landuse database projection: @@ -1665,6 +1729,7 @@ euroasia: area_extent: lower_left_xy: [-3000000.0, -4999000.0] upper_right_xy: [9999000.0, 
8000000.0] + euroasia_10km: description: Euroasia - Global 1km USGS Landuse database projection: @@ -1679,6 +1744,7 @@ euroasia_10km: area_extent: lower_left_xy: [-3000000.0, -4999000.0] upper_right_xy: [9999000.0, 8000000.0] + euroasia_asia: description: Euroasia - optimised for Asia - @@ -1695,6 +1761,7 @@ euroasia_asia: area_extent: lower_left_xy: [-8000000.0, -5499000.0] upper_right_xy: [4999000.0, 6500000.0] + euroasia_asia_10km: description: Euroasia - optimised for Asia - Global 1km USGS Landuse database projection: @@ -1709,6 +1776,7 @@ euroasia_asia_10km: area_extent: lower_left_xy: [-8000000.0, -5499000.0] upper_right_xy: [4999000.0, 6500000.0] + australia_pacific: description: Austalia/Pacific - Global 1km USGS Landuse database projection: @@ -1723,6 +1791,7 @@ australia_pacific: area_extent: lower_left_xy: [-5000000.0, -3944890.0] upper_right_xy: [4299000.0, 4054110.0] + australia_pacific_10km: description: Austalia/Pacific - Global 1km USGS Landuse database projection: @@ -1737,6 +1806,7 @@ australia_pacific_10km: area_extent: lower_left_xy: [-5000000.0, -3944890.0] upper_right_xy: [4299000.0, 4054110.0] + africa: description: Africa - Global 1km USGS Landuse database projection: @@ -1751,6 +1821,7 @@ africa: area_extent: lower_left_xy: [-4458000.0, -4795000.0] upper_right_xy: [3891000.0, 4480000.0] + africa_10km: description: Africa - Global 1km USGS Landuse database projection: @@ -1765,7 +1836,8 @@ africa_10km: area_extent: lower_left_xy: [-4458000.0, -4795000.0] upper_right_xy: [3891000.0, 4480000.0] -southamerica: + +southamerica_laea: description: South America - Global 1km USGS Landuse database projection: proj: laea @@ -1779,6 +1851,7 @@ southamerica: area_extent: lower_left_xy: [-3000000.0, -4899000.0] upper_right_xy: [2999000.0, 3100000.0] + southamerica_10km: description: South America - Global 1km USGS Landuse database projection: @@ -1793,6 +1866,7 @@ southamerica_10km: area_extent: lower_left_xy: [-3000000.0, -4899000.0] upper_right_xy: 
[2999000.0, 3100000.0] + northamerica: description: North America - Global 1km USGS Landuse database projection: @@ -1807,6 +1881,7 @@ northamerica: area_extent: lower_left_xy: [-4487000.0, -4515000.0] upper_right_xy: [4735000.0, 4480000.0] + northamerica_10km: description: North America - Global 1km USGS Landuse database projection: @@ -1821,6 +1896,7 @@ northamerica_10km: area_extent: lower_left_xy: [-4487000.0, -4515000.0] upper_right_xy: [4735000.0, 4480000.0] + romania: description: Romania - 3km projection: @@ -1835,6 +1911,7 @@ romania: area_extent: lower_left_xy: [-2226837.662574135, -1684219.2829063328] upper_right_xy: [2299196.337425865, 881436.7170936672] + stere_asia_test: description: stere projection: @@ -1847,6 +1924,7 @@ stere_asia_test: area_extent: lower_left_xy: [-3589072.840299738, -3568228.07278016] upper_right_xy: [3611014.256314698, 3594111.7022882444] + bocheng_test: description: stere projection: @@ -1859,6 +1937,7 @@ bocheng_test: area_extent: lower_left_xy: [-3589072.840299738, -3568228.07278016] upper_right_xy: [3611014.256314698, 3594111.7022882444] + nsper_swe: description: nsper_swe projection: @@ -1873,6 +1952,7 @@ nsper_swe: area_extent: lower_left_xy: [-5000000.0, -5000000.0] upper_right_xy: [5000000.0, 5000000.0] + new_bsea250: description: new_bsea250 projection: @@ -1886,6 +1966,7 @@ new_bsea250: area_extent: lower_left_xy: [-638072.2772287376, -680339.8397175331] upper_right_xy: [638072.277228737, 757253.9342263378] + scanice: description: Scandinavia and Iceland projection: @@ -1900,6 +1981,7 @@ scanice: area_extent: lower_left_xy: [-1920000.0, -1536000.0] upper_right_xy: [1920000.0, 1536000.0] + baws250: description: BAWS, 250m resolution projection: @@ -1914,6 +1996,7 @@ baws250: area_extent: lower_left_xy: [-475617.0, 5324430.0] upper_right_xy: [924383.0, 6724430.0] + moll: description: moll projection: @@ -1927,6 +2010,7 @@ moll: area_extent: lower_left_xy: [-18040095.696147293, -9020047.848073646] upper_right_xy: 
[18040095.696147293, 9020047.848073646] + robinson: description: robinson projection: @@ -1960,6 +2044,7 @@ met07globe: # obsolete platform number area_extent: lower_left_xy: [-5621225.237846375, -5621225.237846375] upper_right_xy: [5621225.237846375, 5621225.237846375] + met09globe: # obsolete platform number description: Cropped disk MSG image 0 degrees projection: @@ -1974,6 +2059,7 @@ met09globe: # obsolete platform number area_extent: lower_left_xy: [-5432229.931711678, -5429229.528545862] upper_right_xy: [5429229.528545862, 5432229.931711678] + met09globeFull: # superseded by msg_seviri_fes_3km description: Full disk MSG image 0 degrees projection: @@ -1988,6 +2074,7 @@ met09globeFull: # superseded by msg_seviri_fes_3km area_extent: lower_left_xy: [-5570248.477339261, -5567248.074173444] upper_right_xy: [5567248.074173444, 5570248.477339261] + seviri_0deg: # superseded by msg_seviri_fes_3km description: Full disk MSG image 0 degrees projection: @@ -2002,6 +2089,7 @@ seviri_0deg: # superseded by msg_seviri_fes_3km area_extent: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] + seviri_iodc: # superseded by msg_seviri_iodc_3km description: Full disk MSG image 41.5 degrees projection: @@ -2016,6 +2104,7 @@ seviri_iodc: # superseded by msg_seviri_iodc_3km area_extent: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] + msg_resample_area: description: Full disk MSG image 20.75 degrees projection: diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index 0f0e98f4e0..963a5a198f 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -1,7 +1,47 @@ sensor_name: visir/fci - composites: +### L2 + binary_cloud_mask: + # This will set all clear pixels to '0', all pixels with cloudy features (meteorological/dust/ash clouds) to '1' and + # missing/undefined pixels to 'nan'. 
This can be used for the official EUMETSAT cloud mask product (CLM). + compositor: !!python/name:satpy.composites.CategoricalDataCompositor + prerequisites: + - name: 'cloud_state' + lut: [ .nan, 0, 1, 1, 1, 1, 1, 1, 0, .nan ] + standard_name: binary_cloud_mask + +### Night Layers + night_ir105: + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - name: ir_105 + standard_name: night_ir105 + + night_ir_alpha: + compositor: !!python/name:satpy.composites.GenericCompositor + standard_name: night_ir_alpha + prerequisites: + - name: ir_38 + - name: ir_105 + - name: ir_123 + - name: ir_105 + + night_ir_with_background: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - night_ir_alpha + - _night_background + + night_ir_with_background_hires: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - night_ir_alpha + - _night_background_hires + +### Green Corrections ndvi_hybrid_green: description: > The FCI green band at 0.51 µm deliberately misses the chlorophyll band, such that @@ -47,15 +87,7 @@ composites: modifiers: [ sunz_corrected ] standard_name: toa_bidirectional_reflectance - binary_cloud_mask: - # This will set all clear pixels to '0', all pixles with cloudy features (meteorological/dust/ash clouds) to '1' and - # missing/undefined pixels to 'nan'. This can be used for the the official EUMETSAT cloud mask product (CLM). 
- compositor: !!python/name:satpy.composites.CategoricalDataCompositor - prerequisites: - - name: 'cloud_state' - lut: [.nan, 0, 1, 1, 1, 1, 1, 1, 0, .nan] - standard_name: binary_cloud_mask - +### True Color true_color: compositor: !!python/name:satpy.composites.SelfSharpenedRGB description: > @@ -94,13 +126,46 @@ composites: - name: vis_04 standard_name: true_color_raw + true_color_with_night_ir105: + description: > + True Color during daytime, and a simple IR105 layer during nighttime. + compositor: !!python/name:satpy.composites.DayNightCompositor + standard_name: fci_day_night_blend + lim_low: 78 + lim_high: 88 + prerequisites: + - true_color + - night_ir105 + + true_color_with_night_ir: + description: > + True Color during daytime, and a simple IR105 layer during nighttime. + compositor: !!python/name:satpy.composites.DayNightCompositor + standard_name: fci_day_night_blend + lim_low: 78 + lim_high: 88 + prerequisites: + - true_color + - night_ir_with_background + + true_color_with_night_ir_hires: + description: > + True Color during daytime, and a simple IR105 layer during nighttime. + compositor: !!python/name:satpy.composites.DayNightCompositor + standard_name: fci_day_night_blend + lim_low: 78 + lim_high: 88 + prerequisites: + - true_color + - night_ir_with_background_hires + true_color_reproduction: # JMA True Color Reproduction complete composite with corrected and uncorrected blend. # http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: true_color_reproduction - lim_low: 73. - lim_high: 85. 
+ lim_low: 73 + lim_high: 85 prerequisites: - true_color_reproduction_corr - true_color_reproduction_uncorr @@ -125,7 +190,7 @@ composites: - name: vis_04 standard_name: true_color_reproduction_color_stretch - # GeoColor +### GeoColor geo_color: compositor: !!python/name:satpy.composites.DayNightCompositor description: > @@ -142,7 +207,6 @@ composites: - true_color - geo_color_night - # GeoColor Night-time geo_color_high_clouds: standard_name: geo_color_high_clouds compositor: !!python/name:satpy.composites.HighCloudCompositor @@ -180,3 +244,145 @@ composites: prerequisites: - geo_color_high_clouds - geo_color_background_with_low_clouds + +### IR-Sandwich + ir_sandwich: + compositor: !!python/name:satpy.composites.SandwichCompositor + standard_name: ir_sandwich + prerequisites: + - name: 'vis_06' + modifiers: [ sunz_corrected ] + - name: colorized_ir_clouds + + colorized_ir_clouds: + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - name: 'ir_105' + standard_name: colorized_ir_clouds + + ir_sandwich_with_night_colorized_ir_clouds: + compositor: !!python/name:satpy.composites.DayNightCompositor + standard_name: fci_day_night_blend + lim_low: 78 + lim_high: 88 + prerequisites: + - ir_sandwich + - colorized_ir_clouds + +### other RGBs + cloud_type: + description: > + Equal to cimss_cloud_type recipe, but with additional sunz_reducer modifier to avoid saturation at the terminator. 
+ references: + EUMETRAIN Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/CloudTypeRGB.pdf + Recipe: https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: nir_13 + modifiers: [ sunz_corrected, sunz_reduced ] + - name: vis_06 + modifiers: [ sunz_corrected, sunz_reduced ] + - name: nir_16 + modifiers: [ sunz_corrected, sunz_reduced ] + standard_name: cimss_cloud_type + + cloud_type_with_night_ir105: + description: > + Combines the cloud_type during daytime with the simple 10.5µm night_ir105 layer during nighttime + compositor: !!python/name:satpy.composites.DayNightCompositor + standard_name: fci_day_night_blend + lim_low: 78 + lim_high: 88 + prerequisites: + - cloud_type + - night_ir105 + + cloud_phase: + description: > + Equal to cloud_phase recipe, but with additional sunz_reducer modifier to avoid saturation at the terminator. + references: + EUMETRAIN Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/CloudPhaseRGB.pdf + Recipe: https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: nir_16 + modifiers: [sunz_corrected, sunz_reduced] + - name: nir_22 + modifiers: [sunz_corrected, sunz_reduced] + - name: vis_06 + modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced] + standard_name: cloud_phase + + cloud_phase_with_night_ir105: + description: > + Combines the cloud_phase during daytime with the simple 10.5µm night_ir105 layer during nighttime + compositor: !!python/name:satpy.composites.DayNightCompositor + standard_name: fci_day_night_blend + lim_low: 78 + lim_high: 88 + prerequisites: + - cloud_phase + - night_ir105 + + fire_temperature: + standard_name: fire_temperature_fci + compositor: !!python/name:satpy.composites.GenericCompositor + description: > + The fire temperature RGB highlights intense fires 
and differentiate these + from low temperature fires. Small low temperature fires will only show up at 3.9 μm and + appear red. With the increasing intensity and temperature the fires will also be detected + by the 2.2 μm and 1.6 μm bands resulting very intense fires in white. + Note: the EUM, CIRA and AWIPS recipes are identical (apart from neglectable 0.15K difference due to + unprecise C->K conversion) + references: + Recipe: https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf + Cira Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Fire_Temperature_RGB.pdf + Eumetrain Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/FireTemperatureRGB.pdf + prerequisites: + - name: ir_38 + - name: nir_22 + - name: nir_16 + + fire_temperature_38refl: + standard_name: fire_temperature_fci_38refl + compositor: !!python/name:satpy.composites.GenericCompositor + description: > + Same as fire_temperature, but uses only reflective part of 3.8 + references: + discussion: See https://github.com/pytroll/satpy/pull/728 + prerequisites: + - name: ir_38 + modifiers: [nir_reflectance] + - name: nir_22 + modifiers: [sunz_corrected] + - name: nir_16 + modifiers: [sunz_corrected] + + fire_temperature_rad: + standard_name: fire_temperature_fci_rad + compositor: !!python/name:satpy.composites.GenericCompositor + description: > + Same as fire_temperature, but uses the channels in radiance units. This is the original VIIRS recipe. 
+ references: + discussion: See https://github.com/pytroll/satpy/pull/728 + prerequisites: + - name: ir_38 + calibration: radiance + - name: nir_22 + calibration: radiance + - name: nir_16 + calibration: radiance + + snow: + references: + EUMETRAIN Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/SnowRGB.pdf + Recipe: https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: vis_08 + modifiers: [sunz_corrected] + - name: nir_16 + modifiers: [sunz_corrected] + - name: ir_38 + modifiers: [nir_reflectance] + standard_name: snow diff --git a/satpy/etc/composites/mersi-1.yaml b/satpy/etc/composites/mersi-1.yaml new file mode 100644 index 0000000000..1d7a2df79d --- /dev/null +++ b/satpy/etc/composites/mersi-1.yaml @@ -0,0 +1,83 @@ +sensor_name: visir/mersi-1 + +modifiers: + rayleigh_corrected: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: rayleigh_only + prerequisites: + - name: '3' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + sunz_corrected: + modifier: !!python/name:satpy.modifiers.SunZenithCorrector + prerequisites: + - name: solar_zenith_angle + + nir_reflectance: + modifier: !!python/name:satpy.modifiers.NIRReflectance + prerequisites: + - name: '24' + optional_prerequisites: + - solar_zenith_angle + + +composites: + colorized_ir: + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - name: '5' + standard_name: colorized_ir_clouds + + true_color: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: '3' + modifiers: [sunz_corrected, rayleigh_corrected] + - name: '2' + modifiers: [sunz_corrected, rayleigh_corrected] + - name: '1' + modifiers: [sunz_corrected, 
rayleigh_corrected] + standard_name: true_color + + true_color_uncorr: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: '3' + modifiers: [sunz_corrected] + - name: '2' + modifiers: [sunz_corrected] + - name: '1' + modifiers: [sunz_corrected] + standard_name: true_color + + natural_color: + compositor: !!python/name:satpy.composites.RatioSharpenedRGB + prerequisites: + - name: '6' + modifiers: [sunz_corrected] + - name: '16' + modifiers: [sunz_corrected] + - name: '3' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: '4' + modifiers: [sunz_corrected] + high_resolution_band: green + neutral_resolution_band: blue + standard_name: natural_color + + overview: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: '3' + modifiers: [sunz_corrected] + - name: '4' + modifiers: [sunz_corrected] + - name: '5' + standard_name: overview diff --git a/satpy/etc/composites/mersi-rm.yaml b/satpy/etc/composites/mersi-rm.yaml new file mode 100644 index 0000000000..ab0317b62f --- /dev/null +++ b/satpy/etc/composites/mersi-rm.yaml @@ -0,0 +1,89 @@ +sensor_name: visir/mersi-rm + +modifiers: + rayleigh_corrected: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: rayleigh_only + prerequisites: + - name: '1' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + sunz_corrected: + modifier: !!python/name:satpy.modifiers.SunZenithCorrector + prerequisites: + - solar_zenith_angle + + nir_reflectance: + modifier: !!python/name:satpy.modifiers.NIRReflectance + prerequisites: + - name: '7' + optional_prerequisites: + - solar_zenith_angle + + +composites: + natural_color: + compositor: !!python/name:satpy.composites.RatioSharpenedRGB + prerequisites: + - name: '5' + modifiers: [sunz_corrected] + - name: '3' + modifiers: 
[sunz_corrected] + - name: '1' + modifiers: [sunz_corrected] + standard_name: natural_color + + overview_raw: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: '1' + - name: '2' + - name: '7' + standard_name: overview + + overview: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: '1' + modifiers: [sunz_corrected] + - name: '2' + modifiers: [sunz_corrected] + - name: '7' + standard_name: overview + + cloudtop: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: '7' + - name: '8' + - name: '9' + standard_name: cloudtop + + day_microphysics: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: '2' + modifiers: [sunz_corrected] + - name: '7' + modifiers: [nir_reflectance] + - name: '8' + standard_name: day_microphysics + + night_fog: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: '8' + - name: '7' + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: '7' + - name: '6' + - name: '7' + standard_name: night_fog diff --git a/satpy/etc/composites/msi.yaml b/satpy/etc/composites/msi.yaml index 010bd240b0..602a0cd6b1 100644 --- a/satpy/etc/composites/msi.yaml +++ b/satpy/etc/composites/msi.yaml @@ -1,85 +1,151 @@ sensor_name: visir/msi - modifiers: - rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - - name: 'B04' - modifiers: [sunz_corrected] + - name: 'B04' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_antarctic: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: 
us-standard + aerosol_type: antarctic_aerosol + prerequisites: + - name: 'B04' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_continental_average: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: continental_average_aerosol + prerequisites: + - name: 'B04' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_continental_clean: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: continental_clean_aerosol + prerequisites: + - name: 'B04' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_continental_polluted: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: continental_polluted_aerosol + prerequisites: + - name: 'B04' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_desert: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: desert_aerosol + prerequisites: + - name: 'B04' + modifiers: [sunz_corrected] optional_prerequisites: - - satellite_azimuth_angle - - satellite_zenith_angle - - solar_azimuth_angle - - solar_zenith_angle + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle rayleigh_corrected_marine_clean: modifier: 
!!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_clean_aerosol prerequisites: - - name: 'B04' - modifiers: [sunz_corrected] + - name: 'B04' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_marine_polluted: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: marine_polluted_aerosol + prerequisites: + - name: 'B04' + modifiers: [sunz_corrected] optional_prerequisites: - - satellite_azimuth_angle - - satellite_zenith_angle - - solar_azimuth_angle - - solar_zenith_angle + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle rayleigh_corrected_marine_tropical: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance - atmosphere: tropical + atmosphere: us-standard aerosol_type: marine_tropical_aerosol prerequisites: - - name: 'B04' - modifiers: [sunz_corrected] + - name: 'B04' + modifiers: [sunz_corrected] optional_prerequisites: - - satellite_azimuth_angle - - satellite_zenith_angle - - solar_azimuth_angle - - solar_zenith_angle + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle - rayleigh_corrected_desert: + rayleigh_corrected_rural: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance - atmosphere: tropical - aerosol_type: desert_aerosol + atmosphere: us-standard + aerosol_type: rural_aerosol prerequisites: - - name: 'B04' - modifiers: [sunz_corrected] + - name: 'B04' + modifiers: [sunz_corrected] optional_prerequisites: - - satellite_azimuth_angle - - satellite_zenith_angle - - solar_azimuth_angle - - solar_zenith_angle + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle - 
rayleigh_corrected_land: + rayleigh_corrected_urban: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard - aerosol_type: continental_average_aerosol + aerosol_type: urban_aerosol prerequisites: - - name: 'B04' - modifiers: [sunz_corrected] + - name: 'B04' + modifiers: [sunz_corrected] optional_prerequisites: - - satellite_azimuth_angle - - satellite_zenith_angle - - solar_azimuth_angle - - solar_zenith_angle + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle composites: - natural_color: - compositor: !!python/name:satpy.composites.GenericCompositor - prerequisites: - - name: 'B11' - - name: 'B08' - modifiers: [effective_solar_pathlength_corrected] - - name: 'B04' - modifiers: [effective_solar_pathlength_corrected] - standard_name: natural_color - true_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: @@ -91,57 +157,429 @@ composites: modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: true_color - true_color_land: + true_color_antarctic: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'B04' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] - - name: 'B03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] - - name: 'B02' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_antarctic] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_antarctic] + - name: 'B02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_antarctic] + standard_name: true_color + + true_color_continental_average: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, 
rayleigh_corrected_continental_average] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_average] + - name: 'B02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_average] + standard_name: true_color + + true_color_continental_clean: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_clean] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_clean] + - name: 'B02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_clean] + standard_name: true_color + + true_color_continental_polluted: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_polluted] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_polluted] + - name: 'B02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_polluted] standard_name: true_color true_color_desert: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'B04' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - - name: 'B03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - - name: 'B02' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] + - name: 'B02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] standard_name: true_color true_color_marine_clean: compositor: !!python/name:satpy.composites.GenericCompositor 
prerequisites: - - name: 'B04' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - - name: 'B03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - - name: 'B02' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] + - name: 'B02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] + standard_name: true_color + + true_color_marine_polluted: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_polluted] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_polluted] + - name: 'B02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_polluted] standard_name: true_color true_color_marine_tropical: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'B04' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - - name: 'B03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - - name: 'B02' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] + - name: 'B02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] + standard_name: true_color + + true_color_rural: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B04' + modifiers: 
[effective_solar_pathlength_corrected, rayleigh_corrected_rural] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_rural] + - name: 'B02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_rural] + standard_name: true_color + + true_color_urban: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_urban] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_urban] + - name: 'B02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_urban] + standard_name: true_color + + true_color_uncorr: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B02' + modifiers: [effective_solar_pathlength_corrected] standard_name: true_color true_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: + - name: 'B04' + # modifiers: [effective_solar_pathlength_corrected] + - name: 'B03' + # modifiers: [effective_solar_pathlength_corrected] + - name: 'B02' + # modifiers: [effective_solar_pathlength_corrected] + standard_name: true_color + + natural_color: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B11' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B08' + modifiers: [effective_solar_pathlength_corrected] - name: 'B04' - #modifiers: [effective_solar_pathlength_corrected] - - name: 'B03' - #modifiers: [effective_solar_pathlength_corrected] - - name: 'B02' - #modifiers: [effective_solar_pathlength_corrected] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + standard_name: natural_color + + urban_color: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - 
name: 'B12' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B11' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + standard_name: natural_color + + false_color: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B08' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + standard_name: natural_color + + ndvi: + # Normalized Difference Vegetation Index + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndvi/ + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.RatioCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: B08 + modifiers: [effective_solar_pathlength_corrected] + - name: B04 + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - compositor: !!python/name:satpy.composites.SumCompositor + prerequisites: + - name: B08 + modifiers: [effective_solar_pathlength_corrected] + - name: B04 + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + standard_name: ndvi_msi + + ndmi: + # Normalized Difference Moisture Index + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndmi/ + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.RatioCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: B08 + modifiers: [effective_solar_pathlength_corrected] + - name: B11 + modifiers: [effective_solar_pathlength_corrected] + - compositor: 
!!python/name:satpy.composites.SumCompositor + prerequisites: + - name: B08 + modifiers: [effective_solar_pathlength_corrected] + - name: B11 + modifiers: [effective_solar_pathlength_corrected] + standard_name: ndmi_msi + + ndwi: + # Normalized Difference Water Index + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndwi/ + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.RatioCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: B03 + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: B08 + modifiers: [effective_solar_pathlength_corrected] + - compositor: !!python/name:satpy.composites.SumCompositor + prerequisites: + - name: B03 + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: B08 + modifiers: [effective_solar_pathlength_corrected] + standard_name: ndwi_msi + + ndsi: + # Normalized Difference Snow Index + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndsi/ + compositor: !!python/name:satpy.composites.MaskingCompositor + prerequisites: + - name: B11 + modifiers: [effective_solar_pathlength_corrected] + - compositor: !!python/name:satpy.composites.RatioCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: B03 + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: B11 + modifiers: [effective_solar_pathlength_corrected] + - compositor: !!python/name:satpy.composites.SumCompositor + prerequisites: + - name: B03 + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: B11 + modifiers: [effective_solar_pathlength_corrected] + conditions: + - method: less_equal + value: 0.42 + transparency: 100 + - method: isnan + transparency: 100 + standard_name: ndsi_msi + + 
ndsi_with_true_color: + compositor: !!python/name:satpy.composites.BackgroundCompositor + prerequisites: + - name: ndsi + - name: true_color + standard_name: no_enhancement + + true_color_l2a: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B04' + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + - name: 'B03' + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + - name: 'B02' + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] standard_name: true_color + + natural_color_l2a: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B11' + modifiers: [esa_sunz_corrected] + - name: 'B08' + modifiers: [esa_sunz_corrected] + - name: 'B04' + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + standard_name: natural_color + + urban_color_l2a: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B12' + modifiers: [esa_sunz_corrected] + - name: 'B11' + modifiers: [esa_sunz_corrected] + - name: 'B04' + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + standard_name: natural_color + + false_color_l2a: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B08' + modifiers: [esa_sunz_corrected] + - name: 'B04' + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + - name: 'B03' + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + standard_name: natural_color + + aerosol_optical_thickness: + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - name: AOT + calibration: aerosol_thickness + standard_name: aot_msi + + water_vapor_map: + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - name: WVP + calibration: water_vapor + standard_name: wvp_msi + + scene_class: + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - name: SCL + standard_name: scl_msi + + ndvi_l2a: + # Normalized Difference 
Vegetation Index + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndvi/ + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.RatioCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: B08 + modifiers: [esa_sunz_corrected] + - name: B04 + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + - compositor: !!python/name:satpy.composites.SumCompositor + prerequisites: + - name: B08 + modifiers: [esa_sunz_corrected] + - name: B04 + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + standard_name: ndvi_msi + + ndmi_l2a: + # Normalized Difference Moisture Index + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndvi/ + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.RatioCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: B8A + modifiers: [esa_sunz_corrected] + - name: B11 + modifiers: [esa_sunz_corrected] + - compositor: !!python/name:satpy.composites.SumCompositor + prerequisites: + - name: B8A + modifiers: [esa_sunz_corrected] + - name: B11 + modifiers: [esa_sunz_corrected] + standard_name: ndmi_msi + + ndwi_l2a: + # Normalized Difference Water Index + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndwi/ + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.RatioCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: B03 + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + - name: B08 + modifiers: [esa_sunz_corrected] + - compositor: !!python/name:satpy.composites.SumCompositor + prerequisites: + - 
name: B03 + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + - name: B08 + modifiers: [esa_sunz_corrected] + standard_name: ndwi_msi + + ndsi_l2a: + # Normalized Difference Snow Index + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndsi/ + compositor: !!python/name:satpy.composites.MaskingCompositor + prerequisites: + - name: B11 + modifiers: [esa_sunz_corrected] + - compositor: !!python/name:satpy.composites.RatioCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: B03 + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + - name: B11 + modifiers: [esa_sunz_corrected] + - compositor: !!python/name:satpy.composites.SumCompositor + prerequisites: + - name: B03 + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + - name: B11 + modifiers: [esa_sunz_corrected] + conditions: + - method: less_equal + value: 0.42 + transparency: 100 + - method: isnan + transparency: 100 + standard_name: ndsi_msi + + ndsi_l2a_with_true_color_l2a: + compositor: !!python/name:satpy.composites.BackgroundCompositor + prerequisites: + - name: ndsi_l2a + - name: true_color_l2a + standard_name: no_enhancement diff --git a/satpy/etc/composites/seviri.yaml b/satpy/etc/composites/seviri.yaml index f30330bb18..e53609d8e0 100644 --- a/satpy/etc/composites/seviri.yaml +++ b/satpy/etc/composites/seviri.yaml @@ -421,6 +421,7 @@ composites: - name: HRV modifiers: [sunz_corrected] - IR_108 + hrv_fog: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: hrv_fog diff --git a/satpy/etc/composites/visir.yaml b/satpy/etc/composites/visir.yaml index d9798057a2..ffe3be4183 100644 --- a/satpy/etc/composites/visir.yaml +++ b/satpy/etc/composites/visir.yaml @@ -462,6 +462,23 @@ composites: - 10.8 standard_name: night_microphysics + 24h_microphysics: + references: + EUMETRAIN Quick Guide: https://eumetrain.org/sites/default/files/2021-05/24MicroRGB.pdf + Recipe: 
https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - 12.0 + - 10.8 + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - 10.8 + - 8.7 + - 10.8 + standard_name: 24h_microphysics + ir108_3d: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: ir108_3d diff --git a/satpy/etc/enhancements/fci.yaml b/satpy/etc/enhancements/fci.yaml new file mode 100644 index 0000000000..05ce0f9e53 --- /dev/null +++ b/satpy/etc/enhancements/fci.yaml @@ -0,0 +1,62 @@ +enhancements: + fci_day_night_blend: + standard_name: fci_day_night_blend + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: [ 0,0,0 ] + max_stretch: [ 1,1,1 ] + + night_ir105: + standard_name: night_ir105 + operations: + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - { colors: greys, min_value: 190, max_value: 295 } + + fire_temperature_fci: + standard_name: fire_temperature_fci + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: [273.15, 0.0, 0.0] + max_stretch: [333.15, 100.0, 75.0] + - name: gamma + method: !!python/name:satpy.enhancements.gamma + kwargs: + gamma: [0.4, 1, 1] + + fire_temperature_fci_38refl: + standard_name: fire_temperature_fci_38refl + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: [0, 0.0, 0.0] + max_stretch: [50, 100.0, 75.0] + - name: gamma + method: !!python/name:satpy.enhancements.gamma + kwargs: + gamma: [1, 1, 1] + + fire_temperature_fci_rad: + standard_name: fire_temperature_fci_rad + operations: + # note: the stretch parameters have been converted to wavelength units + # compared to e.g. 
the VIIRS recipe + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: [0, 0, 0] + max_stretch: [5.1, 17.7, 22.0] + - name: gamma + method: !!python/name:satpy.enhancements.gamma + kwargs: {gamma: [1.0, 1.0, 1.0]} diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 25680d6db9..cea87de760 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -271,6 +271,29 @@ enhancements: stretch: linear cutoffs: [0.005, 0.005] + four_level_cloud_mask: + standard_name: cloud_mask + reader: clavrx + operations: + - name: palettize + method: !!python/name:satpy.enhancements.palettize + kwargs: + palettes: + - {'values': [-127,# Fill Value + 0, # Clear + 1, # Probably Clear + 2, # Probably Cloudy + 3, # Cloudy + ], + 'colors': [[0, 0, 0], # black,-127 = Fill Value + [94, 79, 162], # blue, 0 = Clear + [73, 228, 242], # cyan, 1 = Probably Clear + [158, 1, 66], # red, 2 = Probably Cloudy + [255, 255, 255], # white, 3 = Cloudy + ], + 'color_scale': 255, + } + sar-ice: standard_name: sar-ice operations: @@ -906,6 +929,7 @@ enhancements: [252, 254, 254], [253, 254, 254], [253, 254, 254], [ 253, 254, 254], [253, 254, 254], [254, 254, 254], [254, 254, 254], [254, 254, 254], [254, 254, 254], [255, 255, 255]] + night_microphysics_default: standard_name: night_microphysics operations: @@ -915,6 +939,21 @@ enhancements: stretch: crude min_stretch: [-4, 0, 243] max_stretch: [2, 10, 293] + + 24h_microphysics_default: + standard_name: 24h_microphysics + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: [-4, 0, 248] + max_stretch: [2, 6, 303] + - name: gamma + method: !!python/name:satpy.enhancements.gamma + kwargs: + gamma: [ 1, 1.2, 1 ] + ir_overview_default: standard_name: ir_overview operations: @@ -1104,7 +1143,7 @@ enhancements: operations: - name: stretch method: 
!!python/name:satpy.enhancements.stretch - kwargs: {stretch: linear, cutoffs: [0.02, 0.02]} + kwargs: {stretch: linear, cutoffs: [[0.02, 0.02], [0.02, 0.02], [0.02, 0.02], [0.02, 0.02]]} - name: inverse method: !!python/name:satpy.enhancements.invert args: diff --git a/satpy/etc/enhancements/msi.yaml b/satpy/etc/enhancements/msi.yaml new file mode 100644 index 0000000000..d221008c99 --- /dev/null +++ b/satpy/etc/enhancements/msi.yaml @@ -0,0 +1,210 @@ +enhancements: + ndvi_msi: + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndvi/ + standard_name: ndvi_msi + operations: + - name: palettize + method: !!python/name:satpy.enhancements.palettize + kwargs: + palettes: + - min_value: -1 + max_value: 1 + values: [ + -1, + -0.5, + -0.2, + -0.1, + 0, + 0.025, + 0.05, + 0.075, + 0.1, + 0.125, + 0.15, + 0.175, + 0.2, + 0.25, + 0.3, + 0.35, + 0.4, + 0.45, + 0.5, + 0.55, + 0.6, + 1, + ] + colors: [ + [12, 12, 12], + [191, 191, 191], + [219, 219, 219], + [234, 234, 234], + [255, 249, 204], + [237, 232, 181], + [221, 216, 155], + [204, 198, 130], + [188, 183, 107], + [175, 193, 96], + [163, 204, 89], + [145, 191, 81], + [127, 178, 71], + [112, 163, 63], + [96, 150, 53], + [79, 137, 45], + [63, 124, 35], + [48, 109, 28], + [33, 96, 17], + [15, 84, 10], + [0, 68, 0], + [0, 68, 0], + ] + + ndmi_msi: + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndmi/ + standard_name: ndmi_msi + operations: + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - min_value: -0.8 + max_value: 0.8 + values: [ + -0.8, + -0.24, + -0.032, + 0.032, + 0.24, + 0.8, + ] + colors: [ + [128, 0, 0], + [255, 0, 0], + [255, 255, 0], + [0, 255, 255], + [0, 0, 255], + [0, 0, 128], + ] + + ndwi_msi: + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndwi/ + standard_name: ndwi_msi + operations: + - name: colorize + method: 
!!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - min_value: -0.8 + max_value: 0.8 + values: [ + -0.8, + -0.7, + -0.6, + -0.5, + -0.4, + -0.3, + -0.2, + -0.1, + 0, + 0.1, + 0.2, + 0.3, + 0.4, + 0.5, + 0.6, + 0.7, + 0.8 + ] + colors: [ + [0, 128, 0], + [32, 144, 32], + [64, 160, 64], + [96, 176, 96], + [128, 192, 128], + [160, 208, 160], + [192, 223, 192], + [224, 239, 224], + [255, 255, 255], + [224, 224, 249], + [192, 192, 242], + [160, 160, 236], + [128, 128, 230], + [96, 96, 223], + [64, 64, 217], + [32, 32, 210], + [0, 0, 204], + ] + + ndsi_msi: + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndwi/ + standard_name: ndsi_msi + operations: + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - values: [0] + colors: [[0, 240, 240]] + + aot_msi: + standard_name: aot_msi + operations: + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - colors: rdylgn + min_value: 0 + max_value: 1 + reverse: True + + wvp_msi: + standard_name: wvp_msi + operations: + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - colors: rdylbu + min_value: 0 + max_value: 5 + reverse: True + + scl_msi: + # The palette is defined by Sentinel-2 Products Specification Document V14.9, page 319 + # Please review https://sentinels.copernicus.eu/documents/247904/685211/S2-PDGS-TAS-DI-PSD-V14.9.pdf/3d3b6c9c-4334-dcc4-3aa7-f7c0deffbaf7?t=1643013091529 + standard_name: scl_msi + operations: + - name: palettize + method: !!python/name:satpy.enhancements.palettize + kwargs: + palettes: + - values: [ + 0, #Nodata + 1, #Saturated_defective + 2, #Topograhic_shadow + 3, #Cloud_shadow + 4, #Vegetation + 5, #Not_vegetated + 6, #Water + 7, #Unclassified + 8, #Cloud_medium_probability + 9, #Cloud_high_probability + 10, #Thin_cirrus + 11, #Snow/ice + ] + colors: [[0, 0, 0], + [255, 0, 0], + [89, 89, 89], + [148, 54, 52], + [0, 
176, 80], + [255, 255, 0], + [0, 112, 192], + [128, 128, 128], + [191, 191, 191], + [255, 255, 255], + [146, 205, 220], + [112, 48, 160]] + + no_enhancement: + standard_name: no_enhancement + operations: [] diff --git a/satpy/etc/readers/agri_fy4a_l1.yaml b/satpy/etc/readers/agri_fy4a_l1.yaml index 5e3dfead35..cd7c7a8fe0 100644 --- a/satpy/etc/readers/agri_fy4a_l1.yaml +++ b/satpy/etc/readers/agri_fy4a_l1.yaml @@ -5,7 +5,7 @@ reader: name: agri_fy4a_l1 short_name: AGRI FY4A L1 - long_name: FY-4A AGRI L1 data in HDF5 format + long_name: FY-4A AGRI Level 1 HDF5 format description: FY-4A AGRI instrument HDF5 reader status: Beta supports_fsspec: false diff --git a/satpy/etc/readers/agri_fy4b_l1.yaml b/satpy/etc/readers/agri_fy4b_l1.yaml index b1ff44189d..77c616b2e4 100644 --- a/satpy/etc/readers/agri_fy4b_l1.yaml +++ b/satpy/etc/readers/agri_fy4b_l1.yaml @@ -4,7 +4,11 @@ reader: name: agri_fy4b_l1 + short_name: AGRI FY4B L1 + long_name: FY-4B AGRI Level 1 data HDF5 format description: FY-4B AGRI instrument HDF5 reader + status: Nominal + supports_fsspec: true sensors: [agri] default_channels: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader diff --git a/satpy/etc/readers/avhrr_l1b_gaclac.yaml b/satpy/etc/readers/avhrr_l1b_gaclac.yaml index 484bed6797..a547815072 100644 --- a/satpy/etc/readers/avhrr_l1b_gaclac.yaml +++ b/satpy/etc/readers/avhrr_l1b_gaclac.yaml @@ -185,3 +185,4 @@ file_types: file_patterns: - '{creation_site:3s}.{transfer_mode:4s}.{platform_id:2s}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{orbit_number:05d}{end_orbit_last_digits:02d}.{station:2s}' - '{subscription_prefix:10d}.{creation_site:3s}.{transfer_mode:4s}.{platform_id:2s}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{orbit_number:05d}{end_orbit_last_digits:02d}.{station:2s}' + - '{platform_id:3s}_RPRO_AVH_L1B_1P_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit_number:06d}/image.l1b' diff --git a/satpy/etc/readers/fci_l1c_nc.yaml b/satpy/etc/readers/fci_l1c_nc.yaml index 
d241b3fa9e..7b4ead398e 100644 --- a/satpy/etc/readers/fci_l1c_nc.yaml +++ b/satpy/etc/readers/fci_l1c_nc.yaml @@ -6,109 +6,252 @@ reader: Reader for FCI L1c data in NetCDF4 format. Used to read Meteosat Third Generation (MTG) Flexible Combined Imager (FCI) L1c data. - status: Beta for full-disc FDHSI and HRFI, RSS not supported yet + status: Beta for full-disc FDHSI, HRFI, and African dissemination format. RSS not supported yet supports_fsspec: true reader: !!python/name:satpy.readers.yaml_reader.GEOVariableSegmentYAMLReader - sensors: [ fci ] + sensors: [fci] # Source: MTG FCI L1 Product User Guide [FCIL1PUG] # https://www.eumetsat.int/media/45923 + file_types: fci_l1c_fdhsi: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler - file_patterns: [ '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-FDHSI-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' ] + file_patterns: + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-FDHSI-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" expected_segments: 40 - required_netcdf_variables: - - attr/platform - - data/{channel_name}/measured/start_position_row - - data/{channel_name}/measured/end_position_row - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a - - 
data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 - - data/{channel_name}/measured/radiance_unit_conversion_coefficient - - data/{channel_name}/measured/channel_effective_solar_irradiance - - data/{channel_name}/measured/effective_radiance - - data/{channel_name}/measured/x - - data/{channel_name}/measured/y - - data/{channel_name}/measured/pixel_quality - - data/{channel_name}/measured/index_map - - data/mtg_geos_projection - - data/swath_direction - - data/swath_number - - index - - state/celestial/earth_sun_distance - - state/celestial/subsolar_latitude - - state/celestial/subsolar_longitude - - state/celestial/sun_satellite_distance - - state/platform/platform_altitude - - state/platform/subsatellite_latitude - - state/platform/subsatellite_longitude - - time + required_netcdf_variables: &required-variables + - attr/platform + - data/{channel_name}/measured/start_position_row + - data/{channel_name}/measured/end_position_row + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b + - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 + - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 + - data/{channel_name}/measured/radiance_unit_conversion_coefficient + - data/{channel_name}/measured/channel_effective_solar_irradiance + - data/{channel_name}/measured/effective_radiance + - data/{channel_name}/measured/x + - data/{channel_name}/measured/y + - data/{channel_name}/measured/pixel_quality + - data/{channel_name}/measured/index_map + - data/mtg_geos_projection + - data/swath_direction + - data/swath_number + - index + - state/celestial/earth_sun_distance + - state/celestial/subsolar_latitude + - 
state/celestial/subsolar_longitude + - state/celestial/sun_satellite_distance + - state/platform/platform_altitude + - state/platform/subsatellite_latitude + - state/platform/subsatellite_longitude + - time variable_name_replacements: channel_name: - - vis_04 - - vis_05 - - vis_06 - - vis_08 - - vis_09 - - nir_13 - - nir_16 - - nir_22 - - ir_38 - - wv_63 - - wv_73 - - ir_87 - - ir_97 - - ir_105 - - ir_123 - - ir_133 + - vis_04 + - vis_05 + - vis_06 + - vis_08 + - vis_09 + - nir_13 + - nir_16 + - nir_22 + - ir_38 + - wv_63 + - wv_73 + - ir_87 + - ir_97 + - ir_105 + - ir_123 + - ir_133 fci_l1c_hrfi: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler - file_patterns: [ '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-HRFI-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' ] + file_patterns: + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-HRFI-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" expected_segments: 40 - required_netcdf_variables: - - attr/platform - - data/{channel_name}/measured/start_position_row - - data/{channel_name}/measured/end_position_row - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b - - 
data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 - - data/{channel_name}/measured/radiance_unit_conversion_coefficient - - data/{channel_name}/measured/channel_effective_solar_irradiance - - data/{channel_name}/measured/effective_radiance - - data/{channel_name}/measured/x - - data/{channel_name}/measured/y - - data/{channel_name}/measured/pixel_quality - - data/{channel_name}/measured/index_map - - data/mtg_geos_projection - - data/swath_direction - - data/swath_number - - index - - state/celestial/earth_sun_distance - - state/celestial/subsolar_latitude - - state/celestial/subsolar_longitude - - state/celestial/sun_satellite_distance - - state/platform/platform_altitude - - state/platform/subsatellite_latitude - - state/platform/subsatellite_longitude - - time + required_netcdf_variables: *required-variables + variable_name_replacements: + channel_name: + - vis_06_hr + - nir_22_hr + - ir_38_hr + - ir_105_hr + # Note: In The current file the 'MTI1-FCI-1C' which is a part of the file will be replaced by MTI1+FCI-1C, patterns have been added + # to maanage this + fci_l1c_af_vis_06: + file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler + file_patterns: + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-1KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + + expected_segments: 1 + required_netcdf_variables: *required-variables + variable_name_replacements: + channel_name: + - vis_06 + fci_l1c_af_vis_04: + file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler + file_patterns: + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS04-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS04-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + expected_segments: 1 + required_netcdf_variables: *required-variables + variable_name_replacements: + channel_name: + - vis_04 + fci_l1c_af_vis_05: + file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler + file_patterns: + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS05-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS05-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + expected_segments: 1 + required_netcdf_variables: *required-variables + variable_name_replacements: + channel_name: + - vis_05 + fci_l1c_af_vis_08: + file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler + file_patterns: + - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS08-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS08-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + expected_segments: 1 + required_netcdf_variables: *required-variables + variable_name_replacements: + channel_name: + - vis_08 + fci_l1c_af_vis_09: + file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler + file_patterns: + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS09-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS09-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + expected_segments: 1 + 
required_netcdf_variables: *required-variables + variable_name_replacements: + channel_name: + - vis_09 + fci_l1c_af_nir_13: + file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler + file_patterns: + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR13-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-NIR13-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + expected_segments: 1 + required_netcdf_variables: *required-variables + variable_name_replacements: + channel_name: + - nir_13 + fci_l1c_af_nir_16: + file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler + file_patterns: + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR16-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-NIR16-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + expected_segments: 1 + required_netcdf_variables: *required-variables + variable_name_replacements: + channel_name: + - nir_16 + fci_l1c_af_nir_22: + file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler + file_patterns: + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR22-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-NIR22-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + expected_segments: 1 + required_netcdf_variables: *required-variables + variable_name_replacements: + channel_name: + - nir_22 + fci_l1c_af_ir_38: + file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler + file_patterns: + - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR38-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR38-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + expected_segments: 1 + required_netcdf_variables: *required-variables + variable_name_replacements: + channel_name: + - ir_38 + fci_l1c_af_wv_63: + file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler + file_patterns: + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-WV63-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-WV63-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + expected_segments: 1 + required_netcdf_variables: 
*required-variables + variable_name_replacements: + channel_name: + - wv_63 + fci_l1c_af_wv_73: + file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler + file_patterns: + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-WV73-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-WV73-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + expected_segments: 1 + required_netcdf_variables: *required-variables + variable_name_replacements: + channel_name: + - wv_73 + fci_l1c_af_ir_87: + file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler + file_patterns: + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR87-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR87-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + expected_segments: 1 + required_netcdf_variables: *required-variables + variable_name_replacements: + channel_name: + - ir_87 + fci_l1c_af_ir_97: + file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler + file_patterns: + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR97-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR97-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + expected_segments: 1 + required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - - vis_06_hr - - nir_22_hr - - ir_38_hr - - ir_105_hr + - ir_97 + fci_l1c_af_ir_105: + file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler + file_patterns: + - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR105-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR105-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + expected_segments: 1 + required_netcdf_variables: *required-variables + variable_name_replacements: + channel_name: + - ir_105 + fci_l1c_af_ir_123: + file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler + file_patterns: + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR123-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR123-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + expected_segments: 1 + 
required_netcdf_variables: *required-variables + variable_name_replacements: + channel_name: + - ir_123 + fci_l1c_af_ir_133: + file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler + file_patterns: + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR133-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR133-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + expected_segments: 1 + required_netcdf_variables: *required-variables + variable_name_replacements: + channel_name: + - ir_133 datasets: vis_04: name: vis_04 sensor: fci - wavelength: [ 0.384, 0.444, 0.504 ] - resolution: 1000 + wavelength: [0.384, 0.444, 0.504] + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_04 } calibration: counts: standard_name: counts @@ -119,13 +262,14 @@ datasets: reflectance: standard_name: toa_bidirectional_reflectance units: "%" - file_type: fci_l1c_fdhsi vis_05: name: vis_05 sensor: fci wavelength: [0.470, 0.510, 0.550] - resolution: 1000 + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_05 } calibration: counts: standard_name: counts @@ -136,15 +280,15 @@ datasets: reflectance: standard_name: toa_bidirectional_reflectance units: "%" - file_type: fci_l1c_fdhsi vis_06: name: vis_06 sensor: fci wavelength: 
[0.590, 0.640, 0.690] resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06] } + 3000: { file_type: fci_l1c_af_vis_06 } calibration: counts: standard_name: counts @@ -160,7 +304,9 @@ datasets: name: vis_08 sensor: fci wavelength: [0.815, 0.865, 0.915] - resolution: 1000 + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_08 } calibration: counts: standard_name: counts @@ -171,13 +317,14 @@ datasets: reflectance: standard_name: toa_bidirectional_reflectance units: "%" - file_type: fci_l1c_fdhsi vis_09: name: vis_09 sensor: fci wavelength: [0.894, 0.914, 0.934] - resolution: 1000 + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_09 } calibration: counts: standard_name: counts @@ -188,13 +335,14 @@ datasets: reflectance: standard_name: toa_bidirectional_reflectance units: "%" - file_type: fci_l1c_fdhsi nir_13: name: nir_13 sensor: fci wavelength: [1.350, 1.380, 1.410] - resolution: 1000 + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_13 } calibration: counts: standard_name: counts @@ -205,13 +353,14 @@ datasets: reflectance: standard_name: toa_bidirectional_reflectance units: "%" - file_type: fci_l1c_fdhsi nir_16: name: nir_16 sensor: fci wavelength: [1.560, 1.610, 1.660] - resolution: 1000 + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_16 } calibration: counts: standard_name: counts @@ -222,15 +371,15 @@ datasets: reflectance: standard_name: toa_bidirectional_reflectance units: "%" - file_type: fci_l1c_fdhsi nir_22: name: nir_22 sensor: fci wavelength: [2.200, 2.250, 2.300] resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_22 } calibration: counts: standard_name: counts 
@@ -247,8 +396,9 @@ datasets: sensor: fci wavelength: [3.400, 3.800, 4.200] resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } calibration: counts: standard_name: counts @@ -264,7 +414,9 @@ datasets: name: wv_63 sensor: fci wavelength: [5.300, 6.300, 7.300] - resolution: 2000 + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } calibration: counts: standard_name: counts @@ -275,13 +427,14 @@ datasets: brightness_temperature: standard_name: toa_brightness_temperature units: "K" - file_type: fci_l1c_fdhsi wv_73: name: wv_73 sensor: fci wavelength: [6.850, 7.350, 7.850] - resolution: 2000 + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } calibration: counts: standard_name: counts @@ -292,13 +445,14 @@ datasets: brightness_temperature: standard_name: toa_brightness_temperature units: "K" - file_type: fci_l1c_fdhsi ir_87: name: ir_87 sensor: fci wavelength: [8.300, 8.700, 9.100] - resolution: 2000 + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } calibration: counts: standard_name: counts @@ -309,13 +463,14 @@ datasets: brightness_temperature: standard_name: toa_brightness_temperature units: "K" - file_type: fci_l1c_fdhsi ir_97: name: ir_97 sensor: fci wavelength: [9.360, 9.660, 9.960] - resolution: 2000 + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } calibration: counts: standard_name: counts @@ -326,15 +481,15 @@ datasets: brightness_temperature: standard_name: toa_brightness_temperature units: "K" - file_type: fci_l1c_fdhsi ir_105: name: ir_105 sensor: fci wavelength: [9.800, 10.500, 11.200] resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: 
fci_l1c_af_ir_105 } calibration: counts: standard_name: counts @@ -350,7 +505,9 @@ datasets: name: ir_123 sensor: fci wavelength: [11.800, 12.300, 12.800] - resolution: 2000 + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } calibration: counts: standard_name: counts @@ -361,13 +518,14 @@ datasets: brightness_temperature: standard_name: toa_brightness_temperature units: "K" - file_type: fci_l1c_fdhsi ir_133: name: ir_133 sensor: fci wavelength: [12.700, 13.300, 13.900] - resolution: 2000 + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } calibration: counts: standard_name: counts @@ -378,1333 +536,1523 @@ datasets: brightness_temperature: standard_name: toa_brightness_temperature units: "K" - file_type: fci_l1c_fdhsi vis_04_pixel_quality: name: vis_04_pixel_quality sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_pixel_quality: name: vis_05_pixel_quality sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_pixel_quality: name: vis_06_pixel_quality sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_pixel_quality: name: vis_08_pixel_quality sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_pixel_quality: name: vis_09_pixel_quality sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_pixel_quality: name: nir_13_pixel_quality sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + 
resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_pixel_quality: name: nir_16_pixel_quality sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_pixel_quality: name: nir_22_pixel_quality sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_pixel_quality: name: ir_38_pixel_quality sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_pixel_quality: name: wv_63_pixel_quality sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_pixel_quality: name: wv_73_pixel_quality sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } ir_87_pixel_quality: name: ir_87_pixel_quality sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } ir_97_pixel_quality: name: ir_97_pixel_quality sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_pixel_quality: name: ir_105_pixel_quality sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } ir_123_pixel_quality: name: ir_123_pixel_quality sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 
3000: { file_type: fci_l1c_af_ir_123 } ir_133_pixel_quality: name: ir_133_pixel_quality sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } vis_04_index_map: name: vis_04_index_map sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_index_map: name: vis_05_index_map sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_index_map: name: vis_06_index_map sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_index_map: name: vis_08_index_map sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_index_map: name: vis_09_index_map sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_index_map: name: nir_13_index_map sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_index_map: name: nir_16_index_map sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_index_map: name: nir_22_index_map sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_index_map: name: ir_38_index_map sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 
2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_index_map: name: wv_63_index_map sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_index_map: name: wv_73_index_map sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } ir_87_index_map: name: ir_87_index_map sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } ir_97_index_map: name: ir_97_index_map sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_index_map: name: ir_105_index_map sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } ir_123_index_map: name: ir_123_index_map sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } ir_133_index_map: name: ir_133_index_map sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } vis_04_time: name: vis_04_time units: s sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_time: name: vis_05_time units: s sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_time: name: vis_06_time units: s sensor: fci resolution: - 500: {file_type: 
fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_time: name: vis_08_time units: s sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_time: name: vis_09_time units: s sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_time: name: nir_13_time units: s sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_time: name: nir_16_time units: s sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_time: name: nir_22_time units: s sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_time: name: ir_38_time units: s sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_time: name: wv_63_time units: s sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_time: name: wv_73_time units: s sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } ir_87_time: name: ir_87_time units: s sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } 
ir_97_time: name: ir_97_time units: s sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_time: name: ir_105_time units: s sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } ir_123_time: name: ir_123_time units: s sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } ir_133_time: name: ir_133_time units: s sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } vis_04_swath_direction: name: vis_04_swath_direction sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_swath_direction: name: vis_05_swath_direction sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_swath_direction: name: vis_06_swath_direction sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_swath_direction: name: vis_08_swath_direction sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_swath_direction: name: vis_09_swath_direction sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_swath_direction: name: nir_13_swath_direction sensor: fci - resolution: 1000 - 
file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_swath_direction: name: nir_16_swath_direction sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_swath_direction: name: nir_22_swath_direction sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_swath_direction: name: ir_38_swath_direction sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_swath_direction: name: wv_63_swath_direction sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_swath_direction: name: wv_73_swath_direction sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } ir_87_swath_direction: name: ir_87_swath_direction sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } ir_97_swath_direction: name: ir_97_swath_direction sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_swath_direction: name: ir_105_swath_direction sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } ir_123_swath_direction: name: ir_123_swath_direction sensor: fci - resolution: 2000 - file_type: 
fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } ir_133_swath_direction: name: ir_133_swath_direction sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } vis_04_swath_number: name: vis_04_swath_number sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_swath_number: name: vis_05_swath_number sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_swath_number: name: vis_06_swath_number sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_swath_number: name: vis_08_swath_number sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_swath_number: name: vis_09_swath_number sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_swath_number: name: nir_13_swath_number sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_swath_number: name: nir_16_swath_number sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_swath_number: name: nir_22_swath_number sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: 
fci_l1c_af_nir_22 } ir_38_swath_number: name: ir_38_swath_number sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_swath_number: name: wv_63_swath_number sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_swath_number: name: wv_73_swath_number sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } ir_87_swath_number: name: ir_87_swath_number sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } ir_97_swath_number: name: ir_97_swath_number sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_swath_number: name: ir_105_swath_number sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } ir_123_swath_number: name: ir_123_swath_number sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } ir_133_swath_number: name: ir_133_swath_number sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } vis_04_subsatellite_latitude: name: vis_04_subsatellite_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_subsatellite_latitude: name: vis_05_subsatellite_latitude units: deg 
sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_subsatellite_latitude: name: vis_06_subsatellite_latitude units: deg sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_subsatellite_latitude: name: vis_08_subsatellite_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_subsatellite_latitude: name: vis_09_subsatellite_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_subsatellite_latitude: name: nir_13_subsatellite_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_subsatellite_latitude: name: nir_16_subsatellite_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_subsatellite_latitude: name: nir_22_subsatellite_latitude units: deg sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_subsatellite_latitude: name: ir_38_subsatellite_latitude units: deg sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_subsatellite_latitude: name: wv_63_subsatellite_latitude units: deg sensor: 
fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_subsatellite_latitude: name: wv_73_subsatellite_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } ir_87_subsatellite_latitude: name: ir_87_subsatellite_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } ir_97_subsatellite_latitude: name: ir_97_subsatellite_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_subsatellite_latitude: name: ir_105_subsatellite_latitude units: deg sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } ir_123_subsatellite_latitude: name: ir_123_subsatellite_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } ir_133_subsatellite_latitude: name: ir_133_subsatellite_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } vis_04_subsatellite_longitude: name: vis_04_subsatellite_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_subsatellite_longitude: name: vis_05_subsatellite_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_05 
} vis_06_subsatellite_longitude: name: vis_06_subsatellite_longitude units: deg sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_subsatellite_longitude: name: vis_08_subsatellite_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_subsatellite_longitude: name: vis_09_subsatellite_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_subsatellite_longitude: name: nir_13_subsatellite_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_subsatellite_longitude: name: nir_16_subsatellite_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_subsatellite_longitude: name: nir_22_subsatellite_longitude units: deg sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_subsatellite_longitude: name: ir_38_subsatellite_longitude units: deg sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_subsatellite_longitude: name: wv_63_subsatellite_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: 
fci_l1c_af_wv_63 } wv_73_subsatellite_longitude: name: wv_73_subsatellite_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } ir_87_subsatellite_longitude: name: ir_87_subsatellite_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } ir_97_subsatellite_longitude: name: ir_97_subsatellite_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_subsatellite_longitude: name: ir_105_subsatellite_longitude units: deg sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } ir_123_subsatellite_longitude: name: ir_123_subsatellite_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } ir_133_subsatellite_longitude: name: ir_133_subsatellite_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } vis_04_subsolar_latitude: name: vis_04_subsolar_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_subsolar_latitude: name: vis_05_subsolar_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_subsolar_latitude: name: vis_06_subsolar_latitude units: deg sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 
1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_subsolar_latitude: name: vis_08_subsolar_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_subsolar_latitude: name: vis_09_subsolar_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_subsolar_latitude: name: nir_13_subsolar_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_subsolar_latitude: name: nir_16_subsolar_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_subsolar_latitude: name: nir_22_subsolar_latitude units: deg sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_subsolar_latitude: name: ir_38_subsolar_latitude units: deg sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_subsolar_latitude: name: wv_63_subsolar_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_subsolar_latitude: name: wv_73_subsolar_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: 
fci_l1c_af_wv_73 } ir_87_subsolar_latitude: name: ir_87_subsolar_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } ir_97_subsolar_latitude: name: ir_97_subsolar_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_subsolar_latitude: name: ir_105_subsolar_latitude units: deg sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } ir_123_subsolar_latitude: name: ir_123_subsolar_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } ir_133_subsolar_latitude: name: ir_133_subsolar_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } vis_04_subsolar_longitude: name: vis_04_subsolar_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_subsolar_longitude: name: vis_05_subsolar_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_subsolar_longitude: name: vis_06_subsolar_longitude units: deg sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_subsolar_longitude: name: vis_08_subsolar_longitude units: deg sensor: fci - resolution: 1000 - 
file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_subsolar_longitude: name: vis_09_subsolar_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_subsolar_longitude: name: nir_13_subsolar_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_subsolar_longitude: name: nir_16_subsolar_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_subsolar_longitude: name: nir_22_subsolar_longitude units: deg sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_subsolar_longitude: name: ir_38_subsolar_longitude units: deg sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_subsolar_longitude: name: wv_63_subsolar_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_subsolar_longitude: name: wv_73_subsolar_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } ir_87_subsolar_longitude: name: ir_87_subsolar_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } 
ir_97_subsolar_longitude: name: ir_97_subsolar_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_subsolar_longitude: name: ir_105_subsolar_longitude units: deg sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af } ir_123_subsolar_longitude: name: ir_123_subsolar_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } ir_133_subsolar_longitude: name: ir_133_subsolar_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi - + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } vis_04_platform_altitude: name: vis_04_platform_altitude units: m sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_platform_altitude: name: vis_05_platform_altitude units: m sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_platform_altitude: name: vis_06_platform_altitude units: m sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_platform_altitude: name: vis_08_platform_altitude units: m sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_platform_altitude: name: vis_09_platform_altitude units: m sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + 
resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_platform_altitude: name: nir_13_platform_altitude units: m sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_platform_altitude: name: nir_16_platform_altitude units: m sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_platform_altitude: name: nir_22_platform_altitude units: m sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_platform_altitude: name: ir_38_platform_altitude units: m sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_platform_altitude: name: wv_63_platform_altitude units: m sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_platform_altitude: name: wv_73_platform_altitude units: m sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } ir_87_platform_altitude: name: ir_87_platform_altitude units: m sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } ir_97_platform_altitude: name: ir_97_platform_altitude units: m sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_platform_altitude: name: ir_105_platform_altitude units: m sensor: fci 
resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af } ir_123_platform_altitude: name: ir_123_platform_altitude units: m sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } ir_133_platform_altitude: name: ir_133_platform_altitude units: m sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } vis_04_earth_sun_distance: name: vis_04_earth_sun_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_earth_sun_distance: name: vis_05_earth_sun_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_earth_sun_distance: name: vis_06_earth_sun_distance units: km sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_earth_sun_distance: name: vis_08_earth_sun_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_earth_sun_distance: name: vis_09_earth_sun_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_earth_sun_distance: name: nir_13_earth_sun_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { 
file_type: fci_l1c_af_nir_13 } nir_16_earth_sun_distance: name: nir_16_earth_sun_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_earth_sun_distance: name: nir_22_earth_sun_distance units: km sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_earth_sun_distance: name: ir_38_earth_sun_distance units: km sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_earth_sun_distance: name: wv_63_earth_sun_distance units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_earth_sun_distance: name: wv_73_earth_sun_distance units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } ir_87_earth_sun_distance: name: ir_87_earth_sun_distance units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } ir_97_earth_sun_distance: name: ir_97_earth_sun_distance units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_earth_sun_distance: name: ir_105_earth_sun_distance units: km sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } ir_123_earth_sun_distance: name: ir_123_earth_sun_distance 
units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } ir_133_earth_sun_distance: name: ir_133_earth_sun_distance units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } vis_04_sun_satellite_distance: name: vis_04_sun_satellite_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_sun_satellite_distance: name: vis_05_sun_satellite_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_sun_satellite_distance: name: vis_06_sun_satellite_distance units: km sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_sun_satellite_distance: name: vis_08_sun_satellite_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_sun_satellite_distance: name: vis_09_sun_satellite_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_sun_satellite_distance: name: nir_13_sun_satellite_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_sun_satellite_distance: name: nir_16_sun_satellite_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: 
fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_sun_satellite_distance: name: nir_22_sun_satellite_distance units: km sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_sun_satellite_distance: name: ir_38_sun_satellite_distance units: km sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_sun_satellite_distance: name: wv_63_sun_satellite_distance units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_sun_satellite_distance: name: wv_73_sun_satellite_distance units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } ir_87_sun_satellite_distance: name: ir_87_sun_satellite_distance units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } ir_97_sun_satellite_distance: name: ir_97_sun_satellite_distance units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_sun_satellite_distance: name: ir_105_sun_satellite_distance units: km sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: fci_l1c_hrfi } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } ir_123_sun_satellite_distance: name: ir_123_sun_satellite_distance units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { 
file_type: fci_l1c_af_ir_123 } ir_133_sun_satellite_distance: name: ir_133_sun_satellite_distance units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index 1ad5d576a0..ab91e592b5 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -74,2930 +74,3070 @@ file_types: file_patterns: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-AMV-{channel}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' + datasets: -# CLM - cloud_state: - name: cloud_state - resolution: 2000 - file_type: nc_fci_clm - file_key: cloud_state - long_name: cloud_mask_classification +# COMMON + product_quality: + name: product_quality + standard_name: product_quality + file_type: [nc_fci_amv, nc_fci_clm, nc_fci_ct, nc_fci_ctth, nc_fci_fir, nc_fci_gii, nc_fci_oca, nc_fci_olr, nc_fci_crm, nc_fci_asr] + nc_key: product_quality + + product_completeness: + name: product_completeness + standard_name: product_completeness + file_type: [nc_fci_amv, nc_fci_clm, nc_fci_ct, nc_fci_ctth, nc_fci_fir, nc_fci_gii, nc_fci_oca, nc_fci_olr, nc_fci_crm, nc_fci_asr] + nc_key: product_completeness - quality_illumination_clm: - name: quality_illumination_clm + product_timeliness: + name: product_timeliness + standard_name: product_timeliness + file_type: [nc_fci_amv, nc_fci_clm, nc_fci_ct, nc_fci_ctth, nc_fci_fir, nc_fci_gii, nc_fci_oca, nc_fci_olr, nc_fci_crm, nc_fci_asr] + nc_key: product_timeliness + + quality_illumination: + name: quality_illumination + standard_name: status_flag 
resolution: 2000 - file_type: nc_fci_clm - file_key: quality_illumination - long_name: illumination_classification + file_type: [nc_fci_clm, nc_fci_ct] + nc_key: quality_illumination + fill_value: -127 + import_enum_information: True - quality_nwp_parameters_clm: - name: quality_nwp_parameters_clm + quality_nwp_parameters: + name: quality_nwp_parameters + standard_name: status_flag resolution: 2000 - file_type: nc_fci_clm - file_key: quality_nwp_parameters - long_name: quality_index + file_type: [nc_fci_clm, nc_fci_ct, nc_fci_ctth] + nc_key: quality_nwp_parameters + fill_value: -127 + import_enum_information: True - quality_MTG_parameters_clm: - name: quality_MTG_parameters_clm + quality_mtg_parameters: + name: quality_mtg_parameters + standard_name: status_flag resolution: 2000 - file_type: nc_fci_clm - file_key: quality_MTG_parameters - long_name: quality_index + file_type: [nc_fci_clm, nc_fci_ct, nc_fci_ctth] + nc_key: quality_MTG_parameters fill_value: -127 + import_enum_information: True - quality_overall_processing_clm: - name: quality_overall_processing_clm + quality_overall_processing: + name: quality_overall_processing + standard_name: quality_flag resolution: 2000 - file_type: nc_fci_clm - file_key: quality_overall_processing - long_name: quality_index + file_type: [nc_fci_clm, nc_fci_ct, nc_fci_ctth, nc_fci_olr] + nc_key: quality_overall_processing + fill_value: -127 + import_enum_information: True - product_quality_clm: - name: product_quality_clm - file_type: nc_fci_clm - file_key: product_quality - long_name: product_quality_index +# AMV Intermediate - Atmospheric Motion Vectors Intermediate + intm_latitude: + name: intm_latitude + standard_name: latitude + file_type: nc_fci_amvi + nc_key: intm_latitude - product_completeness_clm: - name: product_completeness_clm - file_type: nc_fci_clm - file_key: product_completeness - long_name: product_completeness_index + intm_longitude: + name: intm_longitude + standard_name: longitude + file_type: nc_fci_amvi + 
nc_key: intm_longitude - product_timeliness_clm: - name: product_timeliness_clm - file_type: nc_fci_clm - file_key: product_timeliness - long_name: product_timeliness_index + intm_speed: + name: intm_speed + standard_name: wind_speed + file_type: nc_fci_amvi + nc_key: intm_speed + coordinates: + - intm_longitude + - intm_latitude -# FCI CT L2 - cloud_phase: - name: cloud_phase - resolution: 2000 - file_type: nc_fci_ct - file_key: cloud_phase - long_name: cloud_phase + intm_u_component: + name: intm_u_component + standard_name: wind_speed_horizontal_component + file_type: nc_fci_amvi + nc_key: intm_u_component + coordinates: + - intm_longitude + - intm_latitude - cloud_type: - name: cloud_type - resolution: 2000 - file_type: nc_fci_ct - file_key: cloud_type - long_name: cloud_type + intm_v_component: + name: intm_v_component + standard_name: wind_speed_vertical_component + file_type: nc_fci_amvi + nc_key: intm_v_component + coordinates: + - intm_longitude + - intm_latitude - quality_illumination_ct: - name: quality_illumination_ct - resolution: 2000 - file_type: nc_fci_ct - file_key: quality_illumination - long_name: illumination_classification + intm_direction: + name: intm_direction + standard_name: wind_to_direction + file_type: nc_fci_amvi + nc_key: intm_direction + coordinates: + - intm_longitude + - intm_latitude - quality_nwp_parameters_ct: - name: quality_nwp_parameters_ct - resolution: 2000 - file_type: nc_fci_ct - file_key: quality_nwp_parameters - long_name: quality_index + intm_pressure: + name: intm_pressure + standard_name: wind_pressure + file_type: nc_fci_amvi + nc_key: intm_pressure + coordinates: + - intm_longitude + - intm_latitude - quality_MTG_parameters_ct: - name: quality_MTG_parameters_ct - resolution: 2000 - file_type: nc_fci_ct - file_key: quality_MTG_parameters - long_name: quality_index + intm_temperature: + name: intm_temperature + standard_name: wind_temperature + file_type: nc_fci_amvi + nc_key: intm_temperature + coordinates: + - 
intm_longitude + - intm_latitude + + intm_target_type: + name: intm_target_type + standard_name: wind_target_type + file_type: nc_fci_amvi + nc_key: target_type + coordinates: + - intm_longitude + - intm_latitude + + intm_wind_method: + name: intm_wind_method + standard_name: wind_wind_method + file_type: nc_fci_amvi + nc_key: wind_method + coordinates: + - intm_longitude + - intm_latitude + + +# AMV Final - Atmospheric Motion Vectors Final + channel_id: + name: channel_id + standard_name: channel_id + file_type: nc_fci_amv + nc_key: channel_id + + amv_latitude: + name: latitude + standard_name: latitude + file_type: nc_fci_amv + nc_key: latitude + + amv_longitude: + name: longitude + standard_name: longitude + file_type: nc_fci_amv + nc_key: longitude + + speed: + name: speed + standard_name: wind_speed + file_type: nc_fci_amv + nc_key: speed + coordinates: + - longitude + - latitude + + speed_u_component: + name: speed_u_component + standard_name: wind_speed_horizontal_component + file_type: nc_fci_amv + nc_key: speed_u_component + coordinates: + - longitude + - latitude - quality_overall_processing_ct: - name: quality_overall_processing_ct + speed_v_component: + name: speed_v_component + standard_name: wind_speed_vertical_component + file_type: nc_fci_amv + nc_key: speed_v_component + coordinates: + - longitude + - latitude + + direction: + name: direction + standard_name: wind_to_direction + file_type: nc_fci_amv + nc_key: direction + coordinates: + - longitude + - latitude + + pressure: + name: pressure + standard_name: wind_pressure + file_type: nc_fci_amv + nc_key: pressure + coordinates: + - longitude + - latitude + + temperature: + name: temperature + standard_name: wind_temperature + file_type: nc_fci_amv + nc_key: temperature + coordinates: + - longitude + - latitude + + target_type: + name: target_type + standard_name: wind_target_type + file_type: nc_fci_amv + nc_key: target_type + coordinates: + - longitude + - latitude + + wind_method: + name: 
wind_method + standard_name: wind_wind_method + file_type: nc_fci_amv + nc_key: wind_method + coordinates: + - longitude + - latitude + + fcst_u: + name: fcst_u + standard_name: wind_forecast_u_component + file_type: nc_fci_amv + nc_key: forecast_u_component + coordinates: + - longitude + - latitude + + fcst_v: + name: fcst_v + standard_name: wind_forecast_v_component + file_type: nc_fci_amv + nc_key: forecast_v_component + coordinates: + - longitude + - latitude + + best_fit_pres: + name: best_fit_pres + standard_name: wind_best_fit_pressure + file_type: nc_fci_amv + nc_key: best_fit_pressure + coordinates: + - longitude + - latitude + + best_fit_u: + name: best_fit_u + standard_name: wind_best_fit_u_component + file_type: nc_fci_amv + nc_key: best_fit_u_component + coordinates: + - longitude + - latitude + + best_fit_v: + name: best_fit_v + standard_name: wind_best_fit_v_component + file_type: nc_fci_amv + nc_key: best_fit_v_component + coordinates: + - longitude + - latitude + + qi: + name: qi + standard_name: wind_overall_reliability + file_type: nc_fci_amv + nc_key: overall_reliability + coordinates: + - longitude + - latitude + + qi_excl_fcst: + name: qi_excl_fcst + standard_name: wind_overall_reliability_exc_forecast + file_type: nc_fci_amv + nc_key: overall_reliability_exc_forecast + coordinates: + - longitude + - latitude + + +# CLM - Cloud Mask + cloud_state: + name: cloud_state + standard_name: cloud_mask_classification resolution: 2000 - file_type: nc_fci_ct - file_key: quality_overall_processing - long_name: quality_index + file_type: nc_fci_clm + nc_key: cloud_state + fill_value: -127 + import_enum_information: True - product_quality_ct: - name: product_quality_ct - file_type: nc_fci_ct - file_key: product_quality - long_name: product_quality_index - product_completeness_ct: - name: product_completeness_ct +# CT - Cloud Type + cloud_phase: + name: cloud_phase + standard_name: cloud_phase_classification + resolution: 2000 file_type: nc_fci_ct - 
file_key: product_completeness - long_name: product_completeness_index + nc_key: cloud_phase + fill_value: -127 + import_enum_information: True - product_timeliness_ct: - name: product_timeliness_ct + cloud_type: + name: cloud_type + standard_name: cloud_type_classification + resolution: 2000 file_type: nc_fci_ct - file_key: product_timeliness - long_name: product_timeliness_index + nc_key: cloud_type + fill_value: -127 + import_enum_information: True + - # FCI CTTH Product + # CTTH - Cloud Top Temperature and Height cloud_top_aviation_height: name: cloud_top_aviation_height + standard_name: height_at_cloud_top_for_aviation resolution: 2000 file_type: nc_fci_ctth - file_key: cloud_top_aviation_height + nc_key: cloud_top_aviation_height cloud_top_height: name: cloud_top_height + standard_name: height_at_cloud_top resolution: 2000 file_type: nc_fci_ctth - file_key: cloud_top_height - fill_value: 32769 + nc_key: cloud_top_height cloud_top_pressure: name: cloud_top_pressure + standard_name: air_pressure_at_cloud_top resolution: 2000 file_type: nc_fci_ctth - file_key: cloud_top_pressure - fill_value: 3276.9001 + nc_key: cloud_top_pressure cloud_top_temperature: name: cloud_top_temperature + standard_name: air_temperature_at_cloud_top resolution: 2000 file_type: nc_fci_ctth - file_key: cloud_top_temperature - fill_value: 327.69 + nc_key: cloud_top_temperature effective_cloudiness: name: effective_cloudiness + standard_name: effective_cloud_cover resolution: 2000 file_type: nc_fci_ctth - file_key: effective_cloudiness + nc_key: effective_cloudiness - quality_status_ctth: - name: quality_status_ctth + quality_status: + name: quality_status + standard_name: status_flag resolution: 2000 file_type: nc_fci_ctth - file_key: quality_status + nc_key: quality_status + fill_value: -127 + import_enum_information: True - quality_rtm_ctth: - name: quality_rtm_ctth + quality_rtm: + name: quality_rtm + standard_name: status_flag resolution: 2000 file_type: nc_fci_ctth - file_key: 
quality_rtm + nc_key: quality_rtm + fill_value: -127 + import_enum_information: True - quality_method_ctth: - name: quality_method_ctth + quality_method: + name: quality_method + standard_name: status_flag resolution: 2000 file_type: nc_fci_ctth - file_key: quality_method + nc_key: quality_method + fill_value: -127 + import_enum_information: True - quality_nwp_parameters_ctth: - name: quality_nwp_parameters_ctth + quality_overall_processing_aviation: + name: quality_overall_processing_aviation + standard_name: quality_flag resolution: 2000 file_type: nc_fci_ctth - file_key: quality_nwp_parameters + nc_key: quality_overall_processing_aviation + fill_value: -127 + import_enum_information: True + - quality_MTG_parameters_ctth: - name: quality_MTG_parameters_ctth + # FIR - Active Fire Monitoring + fire_probability: + name: fire_probability + standard_name: fire_probability resolution: 2000 - file_type: nc_fci_ctth - file_key: quality_MTG_parameters - fill_value: -127 + file_type: nc_fci_fir + nc_key: fire_probability - quality_overall_processing_ctth: - name: quality_overall_processing_ctth + fire_result: + name: fire_result + standard_name: active_fire_classification resolution: 2000 - file_type: nc_fci_ctth - file_key: quality_overall_processing + file_type: nc_fci_fir + nc_key: fire_result + fill_value: -127 + import_enum_information: True + + + # GII - Global Instability Index + k_index: + name: k_index + standard_name: atmosphere_stability_k_index + resolution: 6000 + file_type: nc_fci_gii + nc_key: k_index + coordinates: + - longitude + - latitude + + lifted_index: + name: lifted_index + standard_name: atmosphere_stability_lifted_index + resolution: 6000 + file_type: nc_fci_gii + nc_key: lifted_index + coordinates: + - longitude + - latitude + + prec_water_high: + name: prec_water_high + standard_name: atmosphere_mass_content_of_water_vapor + resolution: 6000 + file_type: nc_fci_gii + nc_key: prec_water_high + coordinates: + - longitude + - latitude + + 
prec_water_low: + name: prec_water_low + standard_name: atmosphere_mass_content_of_water_vapor + resolution: 6000 + file_type: nc_fci_gii + nc_key: prec_water_low + coordinates: + - longitude + - latitude + + prec_water_mid: + name: prec_water_mid + standard_name: atmosphere_mass_content_of_water_vapor + resolution: 6000 + file_type: nc_fci_gii + nc_key: prec_water_mid + coordinates: + - longitude + - latitude - quality_overall_processing_aviation_ctth: - name: quality_overall_processing_aviation_ctth - resolution: 2000 - file_type: nc_fci_ctth - file_key: quality_overall_processing_aviation + prec_water_total: + name: prec_water_total + standard_name: atmosphere_mass_content_of_water_vapor + resolution: 6000 + file_type: nc_fci_gii + nc_key: prec_water_total + coordinates: + - longitude + - latitude - product_quality_ctth: - name: product_quality_ctth - file_type: nc_fci_ctth - file_key: product_quality - long_name: product_quality_index + percent_cloud_free: + name: percent_cloud_free + long_name: Percentage of Cloud Free Pixels Processed in FoR + standard_name: cloud_free_area_fraction + resolution: 6000 + file_type: nc_fci_gii + nc_key: percent_cloud_free + units: '%' + coordinates: + - longitude + - latitude - product_completeness_ctth: - name: product_completeness_ctth - file_type: nc_fci_ctth - file_key: product_completeness - long_name: product_completeness_index + number_of_iterations: + name: number_of_iterations + standard_name: number_of_iterations + resolution: 6000 + file_type: nc_fci_gii + nc_key: number_of_iterations + coordinates: + - longitude + - latitude - product_timeliness_ctth: - name: product_timeliness_ctth - file_type: nc_fci_ctth - file_key: product_timeliness - long_name: product_timeliness_index - # OCA + # OCA - Optimal Cloud Analysis retrieved_cloud_phase: name: retrieved_cloud_phase + standard_name: thermodynamic_phase_of_cloud_particles_classification resolution: 2000 file_type: nc_fci_oca - file_key: retrieved_cloud_phase - 
standard_name: thermodynamic_phase_of_cloud_water_particles_at_cloud_top + nc_key: retrieved_cloud_phase + fill_value: -127 + import_enum_information: True retrieved_cloud_optical_thickness: name: retrieved_cloud_optical_thickness + standard_name: atmosphere_optical_thickness_due_to_cloud resolution: 2000 file_type: nc_fci_oca - file_key: retrieved_cloud_optical_thickness - long_name: cloud_optical_depth + nc_key: retrieved_cloud_optical_thickness retrieved_cloud_optical_thickness_upper_layer: name: retrieved_cloud_optical_thickness_upper_layer + long_name: Cloud Optical Thickness (referenced to 0.55 µm and in log10(COT)) for Upper Layer + standard_name: atmosphere_optical_thickness_due_to_cloud resolution: 2000 file_type: nc_fci_oca - file_key: retrieved_cloud_optical_thickness + nc_key: retrieved_cloud_optical_thickness + layer: 0 + + retrieval_error_cloud_optical_thickness_upper_layer: + name: retrieval_error_cloud_optical_thickness_upper_layer + long_name: Cloud Optical Thickness Error (error in log10(COT)) for Upper Layer + standard_name: atmosphere_optical_thickness_due_to_cloud standard_error + resolution: 2000 + file_type: nc_fci_oca + nc_key: retrieval_error_cloud_optical_thickness layer: 0 - long_name: cloud_optical_depth retrieved_cloud_optical_thickness_lower_layer: name: retrieved_cloud_optical_thickness_lower_layer + long_name: Cloud Optical Thickness (referenced to 0.55 µm and in log10(COT)) for Lower Layer + standard_name: atmosphere_optical_thickness_due_to_cloud + resolution: 2000 + file_type: nc_fci_oca + nc_key: retrieved_cloud_optical_thickness + layer: 1 + + retrieval_error_cloud_optical_thickness_lower_layer: + name: retrieval_error_cloud_optical_thickness_lower_layer + long_name: Cloud Optical Thickness Error (error in log10(COT)) for Lower Layer + standard_name: atmosphere_optical_thickness_due_to_cloud standard_error resolution: 2000 file_type: nc_fci_oca - file_key: retrieved_cloud_optical_thickness + nc_key: 
retrieval_error_cloud_optical_thickness layer: 1 - long_name: cloud_optical_depth retrieved_cloud_particle_effective_radius: name: retrieved_cloud_particle_effective_radius + standard_name: effective_radius_of_cloud_particles_at_cloud_top resolution: 2000 file_type: nc_fci_oca - file_key: retrieved_cloud_particle_effective_radius - standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top + nc_key: retrieved_cloud_particle_effective_radius - retrieved_cloud_top_temperature: - name: retrieved_cloud_top_temperature + retrieval_error_cloud_particle_effective_radius: + name: retrieval_error_cloud_particle_effective_radius + standard_name: effective_radius_of_cloud_particles_at_cloud_top standard_error resolution: 2000 file_type: nc_fci_oca - file_key: retrieved_cloud_top_temperature - standard_name: air_temperature_at_cloud_top + nc_key: retrieval_error_cloud_particle_effective_radius retrieved_cloud_top_pressure_upper_layer: name: retrieved_cloud_top_pressure_upper_layer - resolution: 2000 - file_type: nc_fci_oca - file_key: retrieved_cloud_top_pressure - layer: 0 - standard_name: air_pressure_at_cloud_top - - retrieved_cloud_top_pressure_lower_layer: - name: retrieved_cloud_top_pressure_lower_layer - resolution: 2000 - file_type: nc_fci_oca - file_key: retrieved_cloud_top_pressure - layer: 1 + long_name: Cloud Top Pressure for Upper Layer standard_name: air_pressure_at_cloud_top - - retrieved_cloud_top_height: - name: retrieved_cloud_top_height resolution: 2000 file_type: nc_fci_oca - file_key: retrieved_cloud_top_height - standard_name: height_at_cloud_top + nc_key: retrieved_cloud_top_pressure + layer: 0 - retrieval_error_cloud_optical_thickness_upper_layer: - name: retrieval_error_cloud_optical_thickness_upper_layer + retrieval_error_cloud_top_pressure_upper_layer: + name: retrieval_error_cloud_top_pressure_upper_layer + long_name: Cloud Top Pressure Error for Upper Layer + standard_name: air_pressure_at_cloud_top standard_error resolution: 
2000 file_type: nc_fci_oca - file_key: retrieval_error_cloud_optical_thickness + nc_key: retrieval_error_cloud_top_pressure layer: 0 - long_name: cloud_optical_depth - retrieval_error_cloud_optical_thickness_lower_layer: - name: retrieval_error_cloud_optical_thickness_lower_layer + retrieved_cloud_top_pressure_lower_layer: + name: retrieved_cloud_top_pressure_lower_layer + long_name: Cloud Top Pressure for Lower Layer + standard_name: air_pressure_at_cloud_top resolution: 2000 file_type: nc_fci_oca - file_key: retrieval_error_cloud_optical_thickness + nc_key: retrieved_cloud_top_pressure layer: 1 - long_name: cloud_optical_depth - retrieval_error_cloud_particle_effective_radius: - name: retrieval_error_cloud_particle_effective_radius + retrieval_error_cloud_top_pressure_lower_layer: + name: retrieval_error_cloud_top_pressure_lower_layer + long_name: Cloud Top Pressure Error for Lower Layer + standard_name: air_pressure_at_cloud_top standard_error resolution: 2000 file_type: nc_fci_oca - file_key: retrieval_error_cloud_particle_effective_radius - standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top_standard_error + nc_key: retrieval_error_cloud_top_pressure + layer: 1 - retrieval_error_cloud_top_pressure_upper_layer: - name: retrieval_error_cloud_top_pressure_upper_layer + retrieved_cloud_top_temperature: + name: retrieved_cloud_top_temperature + standard_name: air_temperature_at_cloud_top resolution: 2000 file_type: nc_fci_oca - file_key: retrieval_error_cloud_top_pressure - layer: 0 - standard_name: air_pressure_at_cloud_top_standard_error + nc_key: retrieved_cloud_top_temperature - retrieval_error_cloud_top_pressure_lower_layer: - name: retrieval_error_cloud_top_pressure_lower_layer + retrieved_cloud_top_height: + name: retrieved_cloud_top_height + standard_name: height_at_cloud_top resolution: 2000 file_type: nc_fci_oca - file_key: retrieval_error_cloud_top_pressure - layer: 1 - standard_name: air_pressure_at_cloud_top_standard_error + 
nc_key: retrieved_cloud_top_height quality_jmeas: name: quality_jmeas + standard_name: cost_function_part_due_to_measurements resolution: 2000 file_type: nc_fci_oca - file_key: quality_jmeas - long_name: cost_function - - product_quality_oca: - name: product_quality_oca - file_type: nc_fci_oca - file_key: product_quality - long_name: product_quality_index - - product_completeness_oca: - name: product_completeness_oca - file_type: nc_fci_oca - file_key: product_completeness - long_name: product_completeness_index - - product_timeliness_oca: - name: product_timeliness_oca - file_type: nc_fci_oca - file_key: product_timeliness - long_name: product_timeliness_index - - # FIR - fire_probability: - name: fire_probability - resolution: 2000 - file_type: nc_fci_fir - file_key: fire_probability - - fire_result: - name: fire_result - resolution: 2000 - file_type: nc_fci_fir - file_key: fire_result - - product_quality_fir: - name: product_quality_fir - file_type: nc_fci_fir - file_key: product_quality - long_name: product_quality_index + nc_key: quality_jmeas - product_completeness_fir: - name: product_completeness_fir - file_type: nc_fci_fir - file_key: product_completeness - long_name: product_completeness_index - - product_timeliness_fir: - name: product_timeliness_fir - file_type: nc_fci_fir - file_key: product_timeliness - long_name: product_timeliness_index - # OLR + # OLR - Outgoing Longwave Radiation olr: name: olr + standard_name: outgoing_longwave_radiation resolution: 2000 file_type: nc_fci_olr - file_key: olr_value - long_name: outgoing_longwave_radiation - - cloud_type_olr: - name: cloud_type_olr - resolution: 2000 - file_type: nc_fci_olr - file_key: cloud_type - long_name: cloud_type_olr + nc_key: olr_value - quality_overall_processing_olr: - name: quality_overall_processing_olr + olr_cloud_type: + name: olr_cloud_type + standard_name: cloud_type_classification resolution: 2000 file_type: nc_fci_olr - file_key: quality_overall_processing - long_name: 
quality_index - - product_quality_olr: - name: product_quality_olr - file_type: nc_fci_olr - file_key: product_quality - long_name: product_quality_index - - product_completeness_olr: - name: product_completeness_olr - file_type: nc_fci_olr - file_key: product_completeness - long_name: product_completeness_index + nc_key: cloud_type + fill_value: -127 + import_enum_information: True - product_timeliness_olr: - name: product_timeliness_olr - file_type: nc_fci_olr - file_key: product_timeliness - long_name: product_timeliness_index - # CRM + # CRM - Clear-Sky Reflectance Maps crm: name: crm + long_name: TOA Bidirectional Reflectance (temporal average) + standard_name: toa_bidirectional_reflectance resolution: 1000 file_type: nc_fci_crm - file_key: mean_clear_sky_reflectance - long_name: mean_clear_sky_reflectance + nc_key: mean_clear_sky_reflectance + units: '%' crm_vis04: name: crm_vis04 + long_name: TOA Bidirectional Reflectance at 0.44um (temporal average) + standard_name: toa_bidirectional_reflectance resolution: 1000 wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_crm - file_key: mean_clear_sky_reflectance - long_name: mean_clear_sky_reflectance_vis04 + nc_key: mean_clear_sky_reflectance + units: '%' vis_channel_id: 0 crm_vis05: name: crm_vis05 + long_name: TOA Bidirectional Reflectance at 0.51um (temporal average) + standard_name: toa_bidirectional_reflectance resolution: 1000 wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_crm - file_key: mean_clear_sky_reflectance - long_name: mean_clear_sky_reflectance_vis05 + nc_key: mean_clear_sky_reflectance + units: '%' vis_channel_id: 1 - crm_vis06: - name: crm_vis06 - resolution: 1000 - wavelength: [0.59, 0.64, 0.69] - file_type: nc_fci_crm - file_key: mean_clear_sky_reflectance - long_name: mean_clear_sky_reflectance_vis06 - vis_channel_id: 2 - - crm_vis08: - name: crm_vis08 - resolution: 1000 - wavelength: [0.815, 0.865, 0.915] - file_type: nc_fci_crm - file_key: mean_clear_sky_reflectance - long_name: 
mean_clear_sky_reflectance_vis08 - vis_channel_id: 3 - - crm_vis09: - name: crm_vis09 - resolution: 1000 - wavelength: [0.894, 0.914, 0.934] - file_type: nc_fci_crm - file_key: mean_clear_sky_reflectance - long_name: mean_clear_sky_reflectance_vis09 - vis_channel_id: 4 - - crm_nir13: - name: crm_nir13 - resolution: 1000 - wavelength: [1.35, 1.38, 1.41] - file_type: nc_fci_crm - file_key: mean_clear_sky_reflectance - long_name: mean_clear_sky_reflectance_nir13 - vis_channel_id: 5 - - crm_nir16: - name: crm_nir16 - resolution: 1000 - wavelength: [1.56, 1.61, 1.66] - file_type: nc_fci_crm - file_key: mean_clear_sky_reflectance - long_name: mean_clear_sky_reflectance_nir16 - vis_channel_id: 6 - - crm_nir22: - name: crm_nir22 - resolution: 1000 - wavelength: [2.2, 2.25, 2.3] - file_type: nc_fci_crm - file_key: mean_clear_sky_reflectance - long_name: mean_clear_sky_reflectance_nir22 - vis_channel_id: 7 - - mean_sza: - name: mean_sza - resolution: 1000 - file_type: nc_fci_crm - file_key: mean_solar_zenith - long_name: mean_solar_zenith_angle - - mean_rel_azi: - name: mean_rel_azi - resolution: 1000 - file_type: nc_fci_crm - file_key: mean_rel_solar_sat_azimuth - long_name: mean_relative_solar_satellite_azimuth_angle - - n_acc: - name: n_acc - resolution: 1000 - file_type: nc_fci_crm - file_key: number_of_accumulations - long_name: number_of_accumulations - - historical_data: - name: historical_data - resolution: 1000 - file_type: nc_fci_crm - file_key: historical_data - long_name: historical_data - - product_quality_crm: - name: product_quality_crm - file_type: nc_fci_crm - file_key: product_quality - long_name: product_quality_index - - product_completeness_crm: - name: product_completeness_crm - file_type: nc_fci_crm - file_key: product_completeness - long_name: product_completeness_index - - product_timeliness_crm: - name: product_timeliness_crm - file_type: nc_fci_crm - file_key: product_timeliness - long_name: product_timeliness_index - - - # LAT/LON FOR SEGMENTED 
PRODUCTS - latitude: - name: latitude - file_key: latitude - resolution: [6000, 6000, 32000] - file_type: [ nc_fci_gii, nc_fci_toz, nc_fci_asr ] - standard_name: latitude - units: degree_north - - longitude: - name: longitude - file_key: longitude - resolution: [6000, 6000, 32000] - file_type: [ nc_fci_gii, nc_fci_toz, nc_fci_asr ] - standard_name: longitude - units: degree_east - - - # GII - k_index: - name: k_index - resolution: 6000 - file_type: nc_fci_gii - file_key: k_index - long_name: k_index - coordinates: - - longitude - - latitude - - lifted_index: - name: lifted_index - resolution: 6000 - file_type: nc_fci_gii - file_key: lifted_index - long_name: lifted_index - coordinates: - - longitude - - latitude - - prec_water_high: - name: prec_water_high - resolution: 6000 - file_type: nc_fci_gii - file_key: prec_water_high - long_name: prec_water_high - coordinates: - - longitude - - latitude - - prec_water_low: - name: prec_water_low - resolution: 6000 - file_type: nc_fci_gii - file_key: prec_water_low - long_name: prec_water_low - coordinates: - - longitude - - latitude - - prec_water_mid: - name: prec_water_mid - resolution: 6000 - file_type: nc_fci_gii - file_key: prec_water_mid - long_name: prec_water_mid - coordinates: - - longitude - - latitude - - prec_water_total: - name: prec_water_total - resolution: 6000 - file_type: nc_fci_gii - file_key: prec_water_total - long_name: prec_water_total - coordinates: - - longitude - - latitude - - percent_cloud_free_gii: - name: percent_cloud_free_gii - resolution: 6000 - file_type: nc_fci_gii - file_key: percent_cloud_free - long_name: percent_cloud_free - coordinates: - - longitude - - latitude - - number_of_iterations_gii: - name: number_of_iterations_gii - resolution: 6000 - file_type: nc_fci_gii - file_key: number_of_iterations - long_name: number_of_iterations - coordinates: - - longitude - - latitude + crm_vis06: + name: crm_vis06 + long_name: TOA Bidirectional Reflectance at 0.64um (temporal average) + 
standard_name: toa_bidirectional_reflectance + resolution: 1000 + wavelength: [0.59, 0.64, 0.69] + file_type: nc_fci_crm + nc_key: mean_clear_sky_reflectance + units: '%' + vis_channel_id: 2 - product_quality_gii: - name: product_quality_gii - file_type: nc_fci_gii - file_key: product_quality - long_name: product_quality_index + crm_vis08: + name: crm_vis08 + long_name: TOA Bidirectional Reflectance at 0.86um (temporal average) + standard_name: toa_bidirectional_reflectance + resolution: 1000 + wavelength: [0.815, 0.865, 0.915] + file_type: nc_fci_crm + nc_key: mean_clear_sky_reflectance + units: '%' + vis_channel_id: 3 - product_completeness_gii: - name: product_completeness_gii - file_type: nc_fci_gii - file_key: product_completeness - long_name: product_completeness_index + crm_vis09: + name: crm_vis09 + long_name: TOA Bidirectional Reflectance at 0.91um (temporal average) + standard_name: toa_bidirectional_reflectance + resolution: 1000 + wavelength: [0.894, 0.914, 0.934] + file_type: nc_fci_crm + nc_key: mean_clear_sky_reflectance + units: '%' + vis_channel_id: 4 - product_timeliness_gii: - name: product_timeliness_gii - file_type: nc_fci_gii - file_key: product_timeliness - long_name: product_timeliness_index + crm_nir13: + name: crm_nir13 + long_name: TOA Bidirectional Reflectance at 1.38um (temporal average) + standard_name: toa_bidirectional_reflectance + resolution: 1000 + wavelength: [1.35, 1.38, 1.41] + file_type: nc_fci_crm + nc_key: mean_clear_sky_reflectance + units: '%' + vis_channel_id: 5 + crm_nir16: + name: crm_nir16 + long_name: TOA Bidirectional Reflectance at 1.61um (temporal average) + standard_name: toa_bidirectional_reflectance + resolution: 1000 + wavelength: [1.56, 1.61, 1.66] + file_type: nc_fci_crm + nc_key: mean_clear_sky_reflectance + units: '%' + vis_channel_id: 6 -# TOZ - total_ozone: - name: total_ozone - resolution: 6000 - file_type: nc_fci_toz - file_key: total_ozone - long_name: total_ozone - coordinates: - - longitude - - 
latitude + crm_nir22: + name: crm_nir22 + long_name: TOA Bidirectional Reflectance at 2.25um (temporal average) + standard_name: toa_bidirectional_reflectance + resolution: 1000 + wavelength: [2.2, 2.25, 2.3] + file_type: nc_fci_crm + nc_key: mean_clear_sky_reflectance + units: '%' + vis_channel_id: 7 - percent_pixels_toz: - name: percent_pixels_toz - resolution: 6000 - file_type: nc_fci_toz - file_key: percent_pixels - long_name: percent_pixels - coordinates: - - longitude - - latitude + mean_sza: + name: mean_sza + long_name: Solar Zenith Angle (temporal average) + standard_name: solar_zenith_angle + resolution: 1000 + file_type: nc_fci_crm + nc_key: mean_solar_zenith - number_of_iterations_toz: - name: number_of_iterations_toz - resolution: 6000 - file_type: nc_fci_toz - file_key: number_of_iterations - long_name: number_of_iterations - coordinates: - - longitude - - latitude + mean_rel_azi: + name: mean_rel_azi + long_name: Relative Solar Satellite Azimuth Angle (temporal average) + standard_name: relative_sun_sensor_azimuth_angle + resolution: 1000 + file_type: nc_fci_crm + nc_key: mean_rel_solar_sat_azimuth - retrieval_type_toz: - name: retrieval_type_toz - resolution: 6000 - file_type: nc_fci_toz - file_key: retrieval_type - long_name: retrieval_type - coordinates: - - longitude - - latitude + n_acc: + name: n_acc + standard_name: number_of_accumulations + resolution: 1000 + file_type: nc_fci_crm + nc_key: number_of_accumulations - product_quality_toz: - name: product_quality_toz - file_type: nc_fci_toz - file_key: product_quality - long_name: product_quality_index + historical_data: + name: historical_data + standard_name: status_flag + resolution: 1000 + file_type: nc_fci_crm + nc_key: historical_data + import_enum_information: True - product_completeness_toz: - name: product_completeness_toz - file_type: nc_fci_toz - file_key: product_completeness - long_name: product_completeness_index - product_timeliness_toz: - name: product_timeliness_toz - file_type: 
nc_fci_toz - file_key: product_timeliness - long_name: product_timeliness_index + # LAT/LON FOR SEGMENTED PRODUCTS + latitude: + name: latitude + standard_name: latitude + nc_key: latitude + resolution: [6000, 6000, 32000] + file_type: [ nc_fci_gii, nc_fci_toz, nc_fci_asr ] + units: degree_north + longitude: + name: longitude + standard_name: longitude + nc_key: longitude + resolution: [6000, 6000, 32000] + file_type: [ nc_fci_gii, nc_fci_toz, nc_fci_asr ] + units: degree_east - # CLM Test + # CLM Test - Cloud Mask Test cloud_test_sit1_flag: name: cloud_test_sit1_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_sit1_flag + nc_key: cloud_mask_test_flag extract_byte: 0 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt1_flag: name: cloud_test_cmt1_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt1_flag + nc_key: cloud_mask_test_flag extract_byte: 1 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt2_flag: name: cloud_test_cmt2_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt2_flag + nc_key: cloud_mask_test_flag extract_byte: 2 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt3_flag: name: cloud_test_cmt3_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt3_flag + nc_key: cloud_mask_test_flag extract_byte: 3 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt4_flag: name: cloud_test_cmt4_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: 
cloud_mask_test_cmt4_flag + nc_key: cloud_mask_test_flag extract_byte: 4 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt5_flag: name: cloud_test_cmt5_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt5_flag + nc_key: cloud_mask_test_flag extract_byte: 5 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt6_flag: name: cloud_test_cmt6_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt6_flag + nc_key: cloud_mask_test_flag extract_byte: 6 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt7_flag: name: cloud_test_cmt7_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt7_flag + nc_key: cloud_mask_test_flag extract_byte: 7 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt8_flag: name: cloud_test_cmt8_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt8_flag + nc_key: cloud_mask_test_flag extract_byte: 8 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt9_flag: name: cloud_test_cmt9_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt9_flag + nc_key: cloud_mask_test_flag extract_byte: 9 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt10_flag: name: cloud_test_cmt10_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt0_flag + nc_key: cloud_mask_test_flag 
extract_byte: 10 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt11_flag: name: cloud_test_cmt11_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt11_flag + nc_key: cloud_mask_test_flag extract_byte: 11 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt12_flag: name: cloud_test_cmt12_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt12_flag + nc_key: cloud_mask_test_flag extract_byte: 12 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt13_flag: name: cloud_test_cmt13_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt13_flag + nc_key: cloud_mask_test_flag extract_byte: 13 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt14_flag: name: cloud_test_cmt14_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt14_flag + nc_key: cloud_mask_test_flag extract_byte: 14 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_opqt_flag: name: cloud_test_opqt_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_opqt_flag + nc_key: cloud_mask_test_flag extract_byte: 15 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmrt1_flag: name: cloud_test_cmrt1_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmrt1_flag + nc_key: cloud_mask_test_flag extract_byte: 16 + flag_values: [0,1] + 
flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmrt2_flag: name: cloud_test_cmrt2_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmrt2_flag + nc_key: cloud_mask_test_flag extract_byte: 17 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmrt3_flag: name: cloud_test_cmrt3_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmrt3_flag + nc_key: cloud_mask_test_flag extract_byte: 18 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmrt4_flag: name: cloud_test_cmrt4_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmrt4_flag + nc_key: cloud_mask_test_flag extract_byte: 19 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmrt5_flag: name: cloud_test_cmrt5_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmrt5_flag + nc_key: cloud_mask_test_flag extract_byte: 20 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmrt6_flag: name: cloud_test_cmrt6_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmrt6_flag + nc_key: cloud_mask_test_flag extract_byte: 21 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_dust_flag: name: cloud_test_dust_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_dust_flag + nc_key: cloud_mask_test_flag extract_byte: 22 + flag_values: [0,1] + flag_meanings: ['Test not carried 
out','Test carried out'] cloud_test_ash_flag: name: cloud_test_ash_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_ash_flag + nc_key: cloud_mask_test_flag extract_byte: 23 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_dust_ash_flag: name: cloud_test_dust_ash_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag - long_name: cloud_mask_test_dust_ash_flag + nc_key: cloud_mask_test_flag extract_byte: 24 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] cloud_test_sit1: name: cloud_test_sit1 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_sit1 + nc_key: cloud_mask_test_result extract_byte: 0 + flag_values: [0,1] + flag_meanings: ['No snow/ice detected',' Snow/ice detected'] cloud_test_cmt1: name: cloud_test_cmt1 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt1 + nc_key: cloud_mask_test_result extract_byte: 1 + flag_values: [0,1] + flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt2: name: cloud_test_cmt2 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt2 + nc_key: cloud_mask_test_result extract_byte: 2 + flag_values: [0,1] + flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt3: name: cloud_test_cmt3 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt3 + nc_key: cloud_mask_test_result extract_byte: 3 + flag_values: [0,1] + flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt4: name: cloud_test_cmt4 + standard_name: status_flag resolution: 
2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt4 + nc_key: cloud_mask_test_result extract_byte: 4 + flag_values: [0,1] + flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt5: name: cloud_test_cmt5 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt5 + nc_key: cloud_mask_test_result extract_byte: 5 + flag_values: [0,1] + flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt6: name: cloud_test_cmt6 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt6 + nc_key: cloud_mask_test_result extract_byte: 6 + flag_values: [0,1] + flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt7: name: cloud_test_cmt7 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt7 + nc_key: cloud_mask_test_result extract_byte: 7 + flag_values: [0,1] + flag_meanings: ['No cloud detected','Cloud detected'] + cloud_test_cmt8: name: cloud_test_cmt8 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt8 + nc_key: cloud_mask_test_result extract_byte: 8 + flag_values: [0,1] + flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt9: name: cloud_test_cmt9 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt9 + nc_key: cloud_mask_test_result extract_byte: 9 + flag_values: [0,1] + flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt10: name: cloud_test_cmt10 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt10 + nc_key: cloud_mask_test_result extract_byte: 10 + 
flag_values: [0,1] + flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt11: name: cloud_test_cmt11 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt11 + nc_key: cloud_mask_test_result extract_byte: 11 + flag_values: [0,1] + flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt12: name: cloud_test_cmt12 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt12 + nc_key: cloud_mask_test_result extract_byte: 12 + flag_values: [0,1] + flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt13: name: cloud_test_cmt13 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt13 + nc_key: cloud_mask_test_result extract_byte: 13 + flag_values: [0,1] + flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt14: name: cloud_test_cmt14 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt14 + nc_key: cloud_mask_test_result extract_byte: 14 + flag_values: [0,1] + flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_opqt: name: cloud_test_opqt + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_opqt + nc_key: cloud_mask_test_result extract_byte: 15 + flag_values: [0,1] + flag_meanings: ['No opaqueness detected', 'Opaqueness detected'] cloud_test_cmrt1: name: cloud_test_cmrt1 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmrt1 + nc_key: cloud_mask_test_result extract_byte: 16 + flag_values: [0,1] + flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmrt2: name: cloud_test_cmrt2 + 
standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmrt2 + nc_key: cloud_mask_test_result extract_byte: 17 + flag_values: [0,1] + flag_meanings: ['Clear unchanged', 'Cloud detected (restored from clear sky)'] cloud_test_cmrt3: name: cloud_test_cmrt3 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmrt3 + nc_key: cloud_mask_test_result extract_byte: 18 + flag_values: [0,1] + flag_meanings: ['Clear unchanged', 'Cloud detected (restored from clear sky)'] cloud_test_cmrt4: name: cloud_test_cmrt4 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmrt4 + nc_key: cloud_mask_test_result extract_byte: 19 + flag_values: [0,1] + flag_meanings: ['Clear unchanged', 'Cloud detected (restored from clear sky)'] cloud_test_cmrt5: name: cloud_test_cmrt5 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmrt5 + nc_key: cloud_mask_test_result extract_byte: 20 + flag_values: [0,1] + flag_meanings: ['Clear sky restored', 'Cloud unchanged'] cloud_test_dust: name: cloud_test_dust + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_dust + nc_key: cloud_mask_test_result extract_byte: 21 + flag_values: [0,1] + flag_meanings: ['No dust detected','Dust detected'] cloud_test_ash: name: cloud_test_ash + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_ash + nc_key: cloud_mask_test_result extract_byte: 22 + flag_values: [0,1] + flag_meanings: ['No ash detected','Ash detected'] cloud_test_dust_ash: name: cloud_test_dust_ash + standard_name: status_flag resolution: 2000 file_type: 
nc_fci_test_clm - file_key: cloud_mask_test_result - long_name: cloud_mask_test_dust_ash + nc_key: cloud_mask_test_result extract_byte: 23 + flag_values: [0,1] + flag_meanings: ['Dust detected','Ash detected'] cloud_test_cmrt6: name: cloud_test_cmrt6 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_cmrt6_test_result - long_name: cloud_mask_cmrt6_result - - product_quality_clmtest: - name: product_quality_clmtest - file_type: nc_fci_test_clm - file_key: product_quality - long_name: product_quality_index - - product_completeness_clmtest: - name: product_completeness_clmtest - file_type: nc_fci_test_clm - file_key: product_completeness - long_name: product_completeness_index - - product_timeliness_clmtest: - name: product_timeliness_clmtest - file_type: nc_fci_test_clm - file_key: product_timeliness - long_name: product_timeliness_index - + nc_key: cloud_mask_cmrt6_test_result + fill_value: -127 + import_enum_information: True - # ASR + # ASR - All-Sky Radiances bt_max: name: bt_max + long_name: TOA Brightness Temperature Segment max + standard_name: toa_brightness_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_max - long_name: maximum_brightness_temperature_in_segment + nc_key: bt_max + cell_method: area:maximum coordinates: - longitude - latitude bt_mean: name: bt_mean + long_name: TOA Brightness Temperature Segment mean + standard_name: toa_brightness_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean - long_name: mean_brightness_temperature_in_segment + nc_key: bt_mean + cell_method: area:mean coordinates: - longitude - latitude bt_min: name: bt_min + long_name: TOA Brightness Temperature Segment min + standard_name: toa_brightness_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_min - long_name: minimum_brightness_temperature_in_segment + nc_key: bt_min + cell_method: area:minimum coordinates: - longitude - latitude bt_std: name: bt_std + long_name: TOA Brightness 
Temperature Segment Standard Deviation + standard_name: toa_brightness_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_std - long_name: brightness_temperature_standard_deviation_in_segment + nc_key: bt_std + cell_method: area:standard_deviation coordinates: - longitude - latitude radiance_max: name: radiance_max + long_name: TOA Radiance Segment max + standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_max - long_name: maximum_radiance_in_segment + nc_key: radiance_max + cell_method: area:maximum coordinates: - longitude - latitude radiance_mean: name: radiance_mean + long_name: TOA Radiance Segment mean + standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean - long_name: mean_radiance_in_segment + nc_key: radiance_mean + cell_method: area:mean coordinates: - longitude - latitude radiance_min: name: radiance_min + long_name: TOA Radiance Segment min + standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_min - long_name: minimum_radiance_in_segment + nc_key: radiance_min + cell_method: area:minimum coordinates: - longitude - latitude radiance_std: name: radiance_std + long_name: TOA Radiance Segment Standard Deviation + standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_std - long_name: radiance_standard_deviation_in_segment + nc_key: radiance_std + cell_method: area:standard_deviation coordinates: - longitude - latitude reflectance_max: name: reflectance_max + long_name: TOA Bidirectional Reflectance Segment max + standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_max - long_name: maximum_reflectance_in_segment + nc_key: reflectance_max + cell_method: area:maximum + units: '%' coordinates: - longitude - latitude reflectance_mean: name: reflectance_mean + long_name: TOA Bidirectional Reflectance Segment mean 
+ standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean - long_name: mean_reflectance_in_segment + nc_key: reflectance_mean + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_min: name: reflectance_min + long_name: TOA Bidirectional Reflectance Segment min + standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_min - long_name: minimum_reflectance_in_segment + nc_key: reflectance_min + cell_method: area:minimum + units: '%' coordinates: - longitude - latitude reflectance_std: name: reflectance_std + long_name: TOA Bidirectional Reflectance Segment Standard Deviation + standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_std - long_name: reflectance_standard_deviation_in_segment + nc_key: reflectance_std + cell_method: area:standard_deviation + units: '%' coordinates: - longitude - latitude quality_bt: name: quality_bt + long_name: TOA Brightness Temperature % Confidence + standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt - long_name: brightness_temperature_quality + nc_key: quality_bt fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_reflectance: name: quality_reflectance + long_name: TOA Bidirectional Reflectance % Confidence + standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance - long_name: reflectance_quality + nc_key: quality_reflectance fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_radiance: name: quality_radiance + long_name: TOA Radiance % Confidence + standard_name: radiance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_radiance - long_name: radiance_quality + nc_key: quality_radiance fill_value: -1 + units: '%' coordinates: - longitude - latitude land_pixel_percent: name: 
land_pixel_percent + standard_name: land_area_fraction resolution: 32000 file_type: nc_fci_asr - file_key: land_pixel_percent - long_name: land_pixel_percentage_in_segment + nc_key: land_pixel_percent + units: '%' coordinates: - longitude - latitude water_pixel_percent: name: water_pixel_percent + standard_name: water_area_fraction resolution: 32000 file_type: nc_fci_asr - file_key: water_pixel_percent - long_name: water_pixel_percentage_in_segment + nc_key: water_pixel_percent + units: '%' coordinates: - longitude - latitude pixel_percentage: name: pixel_percentage + standard_name: water_area_fraction resolution: 32000 file_type: nc_fci_asr - file_key: pixel_percentage - long_name: pixel_percentage_used_in_segment + nc_key: pixel_percentage + units: '%' coordinates: - longitude - latitude reflectance_mean_all_vis04: name: reflectance_mean_all_vis04 + long_name: TOA Bidirectional Reflectance Segment mean at 0.44um (all pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_all + nc_key: reflectance_mean vis_channel_id: 0 + wavelength: [0.384, 0.444, 0.504] category_id: 0 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_clear_vis04: name: reflectance_mean_clear_vis04 + long_name: TOA Bidirectional Reflectance Segment mean at 0.44um (clear pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_clear + nc_key: reflectance_mean vis_channel_id: 0 + wavelength: [0.384, 0.444, 0.504] category_id: 1 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_cloudy_vis04: name: reflectance_mean_cloudy_vis04 + long_name: TOA Bidirectional Reflectance Segment mean at 0.44um (cloudy pixels) + standard_name: toa_bidirectional_reflectance 
resolution: 32000 - wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_cloudy + nc_key: reflectance_mean vis_channel_id: 0 + wavelength: [0.384, 0.444, 0.504] category_id: 2 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_all_vis05: name: reflectance_mean_all_vis05 + long_name: TOA Bidirectional Reflectance Segment mean at 0.51um (all pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_all + nc_key: reflectance_mean vis_channel_id: 1 + wavelength: [0.47, 0.51, 0.55] category_id: 0 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_clear_vis05: name: reflectance_mean_clear_vis05 + long_name: TOA Bidirectional Reflectance Segment mean at 0.51um (clear pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_clear + nc_key: reflectance_mean vis_channel_id: 1 + wavelength: [0.47, 0.51, 0.55] category_id: 1 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_cloudy_vis05: name: reflectance_mean_cloudy_vis05 + long_name: TOA Bidirectional Reflectance Segment mean at 0.51um (cloudy pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_cloudy + nc_key: reflectance_mean vis_channel_id: 1 + wavelength: [0.47, 0.51, 0.55] category_id: 2 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_all_vis06: name: reflectance_mean_all_vis06 + long_name: TOA Bidirectional Reflectance Segment mean at 0.64um (all pixels) + standard_name: toa_bidirectional_reflectance 
resolution: 32000 - wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_all + nc_key: reflectance_mean vis_channel_id: 2 + wavelength: [0.59, 0.64, 0.69] category_id: 0 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_clear_vis06: name: reflectance_mean_clear_vis06 + long_name: TOA Bidirectional Reflectance Segment mean at 0.64um (clear pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_clear + nc_key: reflectance_mean vis_channel_id: 2 + wavelength: [0.59, 0.64, 0.69] category_id: 1 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_cloudy_vis06: name: reflectance_mean_cloudy_vis06 + long_name: TOA Bidirectional Reflectance Segment mean at 0.64um (cloudy pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_cloudy + nc_key: reflectance_mean vis_channel_id: 2 + wavelength: [0.59, 0.64, 0.69] category_id: 2 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_all_vis08: name: reflectance_mean_all_vis08 + long_name: TOA Bidirectional Reflectance Segment mean at 0.86um (all pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_all + nc_key: reflectance_mean vis_channel_id: 3 + wavelength: [0.815, 0.865, 0.915] category_id: 0 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_clear_vis08: name: reflectance_mean_clear_vis08 + long_name: TOA Bidirectional Reflectance Segment mean at 0.86um (clear pixels) + standard_name: toa_bidirectional_reflectance 
resolution: 32000 - wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_clear + nc_key: reflectance_mean vis_channel_id: 3 + wavelength: [0.815, 0.865, 0.915] category_id: 1 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_cloudy_vis08: name: reflectance_mean_cloudy_vis08 + long_name: TOA Bidirectional Reflectance Segment mean at 0.86um (cloudy pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_cloudy + nc_key: reflectance_mean vis_channel_id: 3 + wavelength: [0.815, 0.865, 0.915] category_id: 2 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_all_vis09: name: reflectance_mean_all_vis09 + long_name: TOA Bidirectional Reflectance Segment mean at 0.91um (all pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_all + nc_key: reflectance_mean vis_channel_id: 4 + wavelength: [0.894, 0.914, 0.934] category_id: 0 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_clear_vis09: name: reflectance_mean_clear_vis09 + long_name: TOA Bidirectional Reflectance Segment mean at 0.91um (clear pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_clear + nc_key: reflectance_mean vis_channel_id: 4 + wavelength: [0.894, 0.914, 0.934] category_id: 1 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_cloudy_vis09: name: reflectance_mean_cloudy_vis09 + long_name: TOA Bidirectional Reflectance Segment mean at 0.91um (cloudy pixels) + standard_name: 
toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_cloudy + nc_key: reflectance_mean vis_channel_id: 4 + wavelength: [0.894, 0.914, 0.934] category_id: 2 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_all_nir13: name: reflectance_mean_all_nir13 + long_name: TOA Bidirectional Reflectance Segment mean at 1.38um (all pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_all + nc_key: reflectance_mean vis_channel_id: 5 + wavelength: [1.35, 1.38, 1.41] category_id: 0 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_clear_nir13: name: reflectance_mean_clear_nir13 + long_name: TOA Bidirectional Reflectance Segment mean at 1.38um (clear pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_clear + nc_key: reflectance_mean vis_channel_id: 5 + wavelength: [1.35, 1.38, 1.41] category_id: 1 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_cloudy_nir13: name: reflectance_mean_cloudy_nir13 + long_name: TOA Bidirectional Reflectance Segment mean at 1.38um (cloudy pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_cloudy + nc_key: reflectance_mean vis_channel_id: 5 + wavelength: [1.35, 1.38, 1.41] category_id: 2 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_all_nir16: name: reflectance_mean_all_nir16 + long_name: TOA Bidirectional Reflectance Segment mean at 1.61um (all pixels) + standard_name: 
toa_bidirectional_reflectance resolution: 32000 - wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_all + nc_key: reflectance_mean vis_channel_id: 6 + wavelength: [1.56, 1.61, 1.66] category_id: 0 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_clear_nir16: name: reflectance_mean_clear_nir16 + long_name: TOA Bidirectional Reflectance Segment mean at 1.61um (clear pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_clear + nc_key: reflectance_mean vis_channel_id: 6 + wavelength: [1.56, 1.61, 1.66] category_id: 1 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_cloudy_nir16: name: reflectance_mean_cloudy_nir16 + long_name: TOA Bidirectional Reflectance Segment mean at 1.61um (cloudy pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_cloudy + nc_key: reflectance_mean vis_channel_id: 6 + wavelength: [1.56, 1.61, 1.66] category_id: 2 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_all_nir22: name: reflectance_mean_all_nir22 + long_name: TOA Bidirectional Reflectance Segment mean at 2.25um (all pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_all + nc_key: reflectance_mean vis_channel_id: 7 + wavelength: [2.2, 2.25, 2.3] category_id: 0 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_clear_nir22: name: reflectance_mean_clear_nir22 + long_name: TOA Bidirectional Reflectance Segment mean at 2.25um (clear pixels) + standard_name: 
toa_bidirectional_reflectance resolution: 32000 - wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_clear + nc_key: reflectance_mean vis_channel_id: 7 + wavelength: [2.2, 2.25, 2.3] category_id: 1 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude reflectance_mean_cloudy_nir22: name: reflectance_mean_cloudy_nir22 + long_name: TOA Bidirectional Reflectance Segment mean at 2.25um (cloudy pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_cloudy + nc_key: reflectance_mean vis_channel_id: 7 + wavelength: [2.2, 2.25, 2.3] category_id: 2 + cell_method: area:mean + units: '%' coordinates: - longitude - latitude bt_mean_all_ir38: name: bt_mean_all_ir38 + long_name: TOA Brightness Temperature Segment mean at 3.80um (all pixels) + standard_name: toa_brightness_temperature resolution: 32000 - wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_all + nc_key: bt_mean ir_channel_id: 0 + wavelength: [3.4, 3.8, 4.2] category_id: 0 + cell_method: area:mean coordinates: - longitude - latitude bt_mean_clear_ir38: name: bt_mean_clear_ir38 + long_name: TOA Brightness Temperature Segment mean at 3.80um (clear pixels) + standard_name: toa_brightness_temperature resolution: 32000 - wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_clear + nc_key: bt_mean ir_channel_id: 0 + wavelength: [3.4, 3.8, 4.2] category_id: 1 + cell_method: area:mean coordinates: - longitude - latitude bt_mean_cloudy_ir38: name: bt_mean_cloudy_ir38 + long_name: TOA Brightness Temperature Segment mean at 3.80um (cloudy pixels) + standard_name: toa_brightness_temperature resolution: 32000 - wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_cloudy + nc_key: bt_mean ir_channel_id: 0 + 
wavelength: [3.4, 3.8, 4.2] category_id: 2 + cell_method: area:mean coordinates: - longitude - latitude bt_mean_all_wv63: name: bt_mean_all_wv63 + long_name: TOA Brightess Temperature Segment mean at 6.30um (all pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_all + nc_key: bt_mean ir_channel_id: 1 + wavelength: [5.3, 6.3, 7.3] category_id: 0 + cell_method: area:mean coordinates: - longitude - latitude bt_mean_clear_wv63: name: bt_mean_clear_wv63 + long_name: TOA Brightess Temperature Segment mean at 6.30um (clear pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_clear + nc_key: bt_mean ir_channel_id: 1 + wavelength: [5.3, 6.3, 7.3] category_id: 1 + cell_method: area:mean coordinates: - longitude - latitude bt_mean_cloudy_wv63: name: bt_mean_cloudy_wv63 + long_name: TOA Brightess Temperature Segment mean at 6.30um (cloudy pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_cloudy + nc_key: bt_mean ir_channel_id: 1 + wavelength: [5.3, 6.3, 7.3] category_id: 2 + cell_method: area:mean coordinates: - longitude - latitude bt_mean_all_wv73: name: bt_mean_all_wv73 + long_name: TOA Brightess Temperature Segment mean at 7.35um (all pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_all + nc_key: bt_mean ir_channel_id: 2 + wavelength: [6.85, 7.35, 7.85] category_id: 0 + cell_method: area:mean coordinates: - longitude - latitude bt_mean_clear_wv73: name: bt_mean_clear_wv73 + long_name: TOA Brightess Temperature Segment mean at 7.35um (clear pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [6.85, 7.35, 
7.85] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_clear + nc_key: bt_mean ir_channel_id: 2 + wavelength: [6.85, 7.35, 7.85] category_id: 1 + cell_method: area:mean coordinates: - longitude - latitude bt_mean_cloudy_wv73: name: bt_mean_cloudy_wv73 + long_name: TOA Brightess Temperature Segment mean at 7.35um (cloudy pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_cloudy + nc_key: bt_mean ir_channel_id: 2 + wavelength: [6.85, 7.35, 7.85] category_id: 2 + cell_method: area:mean coordinates: - longitude - latitude bt_mean_all_ir87: name: bt_mean_all_ir87 + long_name: TOA Brightess Temperature Segment mean at 8.70um (all pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_all + nc_key: bt_mean ir_channel_id: 3 + wavelength: [8.3, 8.7, 9.1] category_id: 0 + cell_method: area:mean coordinates: - longitude - latitude bt_mean_clear_ir87: name: bt_mean_clear_ir87 + long_name: TOA Brightess Temperature Segment mean at 8.70um (clear pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_clear + nc_key: bt_mean ir_channel_id: 3 + wavelength: [8.3, 8.7, 9.1] category_id: 1 + cell_method: area:mean coordinates: - longitude - latitude bt_mean_cloudy_ir87: name: bt_mean_cloudy_ir87 + long_name: TOA Brightess Temperature Segment mean at 8.70um (cloudy pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_cloudy + nc_key: bt_mean ir_channel_id: 3 + wavelength: [8.3, 8.7, 9.1] category_id: 2 + cell_method: area:mean coordinates: - longitude - latitude bt_mean_all_ir97: name: bt_mean_all_ir97 + long_name: TOA Brightess Temperature 
Segment mean at 9.66um (all pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_all + nc_key: bt_mean ir_channel_id: 4 + wavelength: [9.36, 9.66, 9.96] category_id: 0 + cell_method: area:mean coordinates: - longitude - latitude bt_mean_clear_ir97: name: bt_mean_clear_ir97 + long_name: TOA Brightess Temperature Segment mean at 9.66um (clear pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_clear + nc_key: bt_mean ir_channel_id: 4 + wavelength: [9.36, 9.66, 9.96] category_id: 1 + cell_method: area:mean coordinates: - longitude - latitude bt_mean_cloudy_ir97: name: bt_mean_cloudy_ir97 + long_name: TOA Brightess Temperature Segment mean at 9.66um (cloudy pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_cloudy + nc_key: bt_mean ir_channel_id: 4 + wavelength: [9.36, 9.66, 9.96] category_id: 2 + cell_method: area:mean coordinates: - longitude - latitude bt_mean_all_ir105: name: bt_mean_all_ir105 + long_name: TOA Brightess Temperature Segment mean at 10.50um (all pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_all + nc_key: bt_mean ir_channel_id: 5 + wavelength: [9.8, 10.5, 11.2] category_id: 0 + cell_method: area:mean coordinates: - longitude - latitude bt_mean_clear_ir105: name: bt_mean_clear_ir105 + long_name: TOA Brightess Temperature Segment mean at 10.50um (clear pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_clear + nc_key: bt_mean ir_channel_id: 5 + wavelength: [9.8, 10.5, 11.2] category_id: 1 + 
cell_method: area:mean coordinates: - longitude - latitude bt_mean_cloudy_ir105: name: bt_mean_cloudy_ir105 + long_name: TOA Brightess Temperature Segment mean at 10.50um (cloudy pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_cloudy + nc_key: bt_mean ir_channel_id: 5 + wavelength: [9.8, 10.5, 11.2] category_id: 2 + cell_method: area:mean coordinates: - longitude - latitude bt_mean_all_ir123: name: bt_mean_all_ir123 + long_name: TOA Brightess Temperature Segment mean at 12.30um (all pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_all + nc_key: bt_mean ir_channel_id: 6 + wavelength: [11.8, 12.3, 12.8] category_id: 0 + cell_method: area:mean coordinates: - longitude - latitude bt_mean_clear_ir123: name: bt_mean_clear_ir123 + long_name: TOA Brightess Temperature Segment mean at 12.30um (clear pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_clear + nc_key: bt_mean ir_channel_id: 6 + wavelength: [11.8, 12.3, 12.8] category_id: 1 + cell_method: area:mean coordinates: - longitude - latitude bt_mean_cloudy_ir123: name: bt_mean_cloudy_ir123 + long_name: TOA Brightess Temperature Segment mean at 12.30um (cloudy pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_cloudy + nc_key: bt_mean ir_channel_id: 6 + wavelength: [11.8, 12.3, 12.8] category_id: 2 + cell_method: area:mean coordinates: - longitude - latitude bt_mean_all_ir133: name: bt_mean_all_ir133 + long_name: TOA Brightess Temperature Segment mean at 13.30um (all pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [12.7, 13.3, 13.9] 
file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_all + nc_key: bt_mean ir_channel_id: 7 + wavelength: [12.7, 13.3, 13.9] category_id: 0 + cell_method: area:mean coordinates: - longitude - latitude bt_mean_clear_ir133: name: bt_mean_clear_ir133 + long_name: TOA Brightess Temperature Segment mean at 13.30um (clear pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_clear + nc_key: bt_mean ir_channel_id: 7 + wavelength: [12.7, 13.3, 13.9] category_id: 1 + cell_method: area:mean coordinates: - longitude - latitude bt_mean_cloudy_ir133: name: bt_mean_cloudy_ir133 + long_name: TOA Brightess Temperature Segment mean at 13.30um (cloudy pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_cloudy + nc_key: bt_mean ir_channel_id: 7 + wavelength: [12.7, 13.3, 13.9] category_id: 2 + cell_method: area:mean coordinates: - longitude - latitude quality_reflectance_all_vis04: name: quality_reflectance_all_vis04 + long_name: TOA Bidirectional Reflectance % Confidence at 0.44um (all pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_all + nc_key: quality_reflectance vis_channel_id: 0 + wavelength: [0.384, 0.444, 0.504] category_id: 0 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_reflectance_clear_vis04: name: quality_reflectance_clear_vis04 + long_name: TOA Bidirectional Reflectance % Confidence at 0.44um (clear pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_clear + nc_key: quality_reflectance vis_channel_id: 0 + wavelength: [0.384, 0.444, 0.504] 
category_id: 1 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_reflectance_cloudy_vis04: name: quality_reflectance_cloudy_vis04 + long_name: TOA Bidirectional Reflectance % Confidence at 0.44um (cloudy pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_cloudy + nc_key: quality_reflectance vis_channel_id: 0 + wavelength: [0.384, 0.444, 0.504] category_id: 2 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_reflectance_all_vis05: name: quality_reflectance_all_vis05 + long_name: TOA Bidirectional Reflectance % Confidence at 0.51um (all pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_all + nc_key: quality_reflectance vis_channel_id: 1 + wavelength: [0.47, 0.51, 0.55] category_id: 0 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_reflectance_clear_vis05: name: quality_reflectance_clear_vis05 + long_name: TOA Bidirectional Reflectance % Confidence at 0.51um (clear pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_clear + nc_key: quality_reflectance vis_channel_id: 1 + wavelength: [0.47, 0.51, 0.55] category_id: 1 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_reflectance_cloudy_vis05: name: quality_reflectance_cloudy_vis05 + long_name: TOA Bidirectional Reflectance % Confidence at 0.51um (cloudy pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_cloudy + nc_key: quality_reflectance vis_channel_id: 1 + wavelength: [0.47, 0.51, 0.55] category_id: 2 
fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_reflectance_all_vis06: name: quality_reflectance_all_vis06 + long_name: TOA Bidirectional Reflectance % Confidence at 0.64um (all pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_all + nc_key: quality_reflectance vis_channel_id: 2 + wavelength: [0.59, 0.64, 0.69] category_id: 0 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_reflectance_clear_vis06: name: quality_reflectance_clear_vis06 + long_name: TOA Bidirectional Reflectance % Confidence at 0.64um (clear pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_clear + nc_key: quality_reflectance vis_channel_id: 2 + wavelength: [0.59, 0.64, 0.69] category_id: 1 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_reflectance_cloudy_vis06: name: quality_reflectance_cloudy_vis06 + long_name: TOA Bidirectional Reflectance % Confidence at 0.64um (cloudy pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_cloudy + nc_key: quality_reflectance vis_channel_id: 2 + wavelength: [0.59, 0.64, 0.69] category_id: 2 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_reflectance_all_vis08: name: quality_reflectance_all_vis08 + long_name: TOA Bidirectional Reflectance % Confidence at 0.86um (all pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_all + nc_key: quality_reflectance vis_channel_id: 3 + wavelength: [0.815, 0.865, 0.915] category_id: 0 fill_value: -1 + units: '%' 
coordinates: - longitude - latitude quality_reflectance_clear_vis08: name: quality_reflectance_clear_vis08 + long_name: TOA Bidirectional Reflectance % Confidence at 0.86um (clear pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_clear + nc_key: quality_reflectance vis_channel_id: 3 + wavelength: [0.815, 0.865, 0.915] category_id: 1 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_reflectance_cloudy_vis08: name: quality_reflectance_cloudy_vis08 + long_name: TOA Bidirectional Reflectance % Confidence at 0.86um (cloudy pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_cloudy + nc_key: quality_reflectance vis_channel_id: 3 + wavelength: [0.815, 0.865, 0.915] category_id: 2 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_reflectance_all_vis09: name: quality_reflectance_all_vis09 + long_name: TOA Bidirectional Reflectance % Confidence at 0.91um (all pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_all + nc_key: quality_reflectance vis_channel_id: 4 + wavelength: [0.894, 0.914, 0.934] category_id: 0 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_reflectance_clear_vis09: name: quality_reflectance_clear_vis09 + long_name: TOA Bidirectional Reflectance % Confidence at 0.91um (clear pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_clear + nc_key: quality_reflectance vis_channel_id: 4 + wavelength: [0.894, 0.914, 0.934] category_id: 1 fill_value: -1 + units: '%' 
coordinates: - longitude - latitude quality_reflectance_cloudy_vis09: name: quality_reflectance_cloudy_vis09 + long_name: TOA Bidirectional Reflectance % Confidence at 0.91um (cloudy pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_cloudy + nc_key: quality_reflectance vis_channel_id: 4 + wavelength: [0.894, 0.914, 0.934] category_id: 2 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_reflectance_all_nir13: name: quality_reflectance_all_nir13 + long_name: TOA Bidirectional Reflectance % Confidence at 1.38um (all pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_all + nc_key: quality_reflectance vis_channel_id: 5 + wavelength: [1.35, 1.38, 1.41] category_id: 0 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_reflectance_clear_nir13: name: quality_reflectance_clear_nir13 + long_name: TOA Bidirectional Reflectance % Confidence at 1.38um (clear pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_clear + nc_key: quality_reflectance vis_channel_id: 5 + wavelength: [1.35, 1.38, 1.41] category_id: 1 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_reflectance_cloudy_nir13: name: quality_reflectance_cloudy_nir13 + long_name: TOA Bidirectional Reflectance % Confidence at 1.38um (cloudy pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_cloudy + nc_key: quality_reflectance vis_channel_id: 5 + wavelength: [1.35, 1.38, 1.41] category_id: 2 fill_value: -1 + units: '%' coordinates: - 
longitude - latitude quality_reflectance_all_nir16: name: quality_reflectance_all_nir16 + long_name: TOA Bidirectional Reflectance % Confidence at 1.61um (all pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_all + nc_key: quality_reflectance vis_channel_id: 6 + wavelength: [1.56, 1.61, 1.66] category_id: 0 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_reflectance_clear_nir16: name: quality_reflectance_clear_nir16 + long_name: TOA Bidirectional Reflectance % Confidence at 1.61um (clear pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_clear + nc_key: quality_reflectance vis_channel_id: 6 + wavelength: [1.56, 1.61, 1.66] category_id: 1 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_reflectance_cloudy_nir16: name: quality_reflectance_cloudy_nir16 + long_name: TOA Bidirectional Reflectance % Confidence at 1.61um (cloudy pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_cloudy + nc_key: quality_reflectance vis_channel_id: 6 + wavelength: [1.56, 1.61, 1.66] category_id: 2 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_reflectance_all_nir22: name: quality_reflectance_all_nir22 + long_name: TOA Bidirectional Reflectance % Confidence at 2.25um (all pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_all + nc_key: quality_reflectance vis_channel_id: 7 + wavelength: [2.2, 2.25, 2.3] category_id: 0 fill_value: -1 + units: '%' coordinates: - longitude - latitude 
quality_reflectance_clear_nir22: name: quality_reflectance_clear_nir22 + long_name: TOA Bidirectional Reflectance % Confidence at 2.25um (clear pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_clear + nc_key: quality_reflectance vis_channel_id: 7 + wavelength: [2.2, 2.25, 2.3] category_id: 1 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_reflectance_cloudy_nir22: name: quality_reflectance_cloudy_nir22 + long_name: TOA Bidirectional Reflectance % Confidence at 2.25um (cloudy pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_cloudy + nc_key: quality_reflectance vis_channel_id: 7 + wavelength: [2.2, 2.25, 2.3] category_id: 2 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_all_ir38: name: quality_bt_all_ir38 + long_name: TOA Brightess Temperature % Confidence at 3.80um (all pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_all + nc_key: quality_bt ir_channel_id: 0 + wavelength: [3.4, 3.8, 4.2] category_id: 0 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_clear_ir38: name: quality_bt_clear_ir38 + long_name: TOA Brightess Temperature % Confidence at 3.80um (clear pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_clear + nc_key: quality_bt ir_channel_id: 0 + wavelength: [3.4, 3.8, 4.2] category_id: 1 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_cloudy_ir38: name: quality_bt_cloudy_ir38 + long_name: TOA Brightess Temperature % Confidence at 3.80um (cloudy pixels) + 
standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_cloudy + nc_key: quality_bt ir_channel_id: 0 + wavelength: [3.4, 3.8, 4.2] category_id: 2 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_all_wv63: name: quality_bt_all_wv63 + long_name: TOA Brightess Temperature % Confidence at 6.30um (all pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_all + nc_key: quality_bt ir_channel_id: 1 + wavelength: [5.3, 6.3, 7.3] category_id: 0 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_clear_wv63: name: quality_bt_clear_wv63 + long_name: TOA Brightess Temperature % Confidence at 6.30um (clear pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_clear + nc_key: quality_bt ir_channel_id: 1 + wavelength: [5.3, 6.3, 7.3] category_id: 1 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_cloudy_wv63: name: quality_bt_cloudy_wv63 + long_name: TOA Brightess Temperature % Confidence at 6.30um (cloudy pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_cloudy + nc_key: quality_bt ir_channel_id: 1 + wavelength: [5.3, 6.3, 7.3] category_id: 2 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_all_wv73: name: quality_bt_all_wv73 + long_name: TOA Brightess Temperature % Confidence at 7.35um (all pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_all + nc_key: quality_bt ir_channel_id: 2 + wavelength: 
[6.85, 7.35, 7.85] category_id: 0 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_clear_wv73: name: quality_bt_clear_wv73 + long_name: TOA Brightess Temperature % Confidence at 7.35um (clear pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_clear + nc_key: quality_bt ir_channel_id: 2 + wavelength: [6.85, 7.35, 7.85] category_id: 1 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_cloudy_wv73: name: quality_bt_cloudy_wv73 + long_name: TOA Brightess Temperature % Confidence at 7.35um (cloudy pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_cloudy + nc_key: quality_bt ir_channel_id: 2 + wavelength: [6.85, 7.35, 7.85] category_id: 2 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_all_ir87: name: quality_bt_all_ir87 + long_name: TOA Brightess Temperature % Confidence at 8.70um (all pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_all + nc_key: quality_bt ir_channel_id: 3 + wavelength: [8.3, 8.7, 9.1] category_id: 0 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_clear_ir87: name: quality_bt_clear_ir87 + long_name: TOA Brightess Temperature % Confidence at 8.70um (clear pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_clear + nc_key: quality_bt ir_channel_id: 3 + wavelength: [8.3, 8.7, 9.1] category_id: 1 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_cloudy_ir87: name: quality_bt_cloudy_ir87 + long_name: TOA Brightess Temperature % Confidence at 
8.70um (cloudy pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_cloudy + nc_key: quality_bt ir_channel_id: 3 + wavelength: [8.3, 8.7, 9.1] category_id: 2 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_all_ir97: name: quality_bt_all_ir97 + long_name: TOA Brightess Temperature % Confidence at 9.66um (all pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_all + nc_key: quality_bt ir_channel_id: 4 + wavelength: [9.36, 9.66, 9.96] category_id: 0 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_clear_ir97: name: quality_bt_clear_ir97 + long_name: TOA Brightess Temperature % Confidence at 9.66um (clear pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_clear + nc_key: quality_bt ir_channel_id: 4 + wavelength: [9.36, 9.66, 9.96] category_id: 1 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_cloudy_ir97: name: quality_bt_cloudy_ir97 + long_name: TOA Brightess Temperature % Confidence at 9.66um (cloudy pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_cloudy + nc_key: quality_bt ir_channel_id: 4 + wavelength: [9.36, 9.66, 9.96] category_id: 2 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_all_ir105: name: quality_bt_all_ir105 + long_name: TOA Brightess Temperature % Confidence at 10.50um (all pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_all + nc_key: 
quality_bt ir_channel_id: 5 + wavelength: [9.8, 10.5, 11.2] category_id: 0 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_clear_ir105: name: quality_bt_clear_ir105 + long_name: TOA Brightess Temperature % Confidence at 10.50um (clear pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_clear + nc_key: quality_bt ir_channel_id: 5 + wavelength: [9.8, 10.5, 11.2] category_id: 1 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_cloudy_ir105: name: quality_bt_cloudy_ir105 + long_name: TOA Brightess Temperature % Confidence at 10.50um (cloudy pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_cloudy + nc_key: quality_bt ir_channel_id: 5 + wavelength: [9.8, 10.5, 11.2] category_id: 2 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_all_ir123: name: quality_bt_all_ir123 + long_name: TOA Brightess Temperature % Confidence at 12.30um (all pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_all + nc_key: quality_bt ir_channel_id: 6 + wavelength: [11.8, 12.3, 12.8] category_id: 0 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_clear_ir123: name: quality_bt_clear_ir123 + long_name: TOA Brightess Temperature % Confidence at 12.30um (clear pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_clear + nc_key: quality_bt ir_channel_id: 6 + wavelength: [11.8, 12.3, 12.8] category_id: 1 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_cloudy_ir123: name: 
quality_bt_cloudy_ir123 + long_name: TOA Brightess Temperature % Confidence at 12.30um (cloudy pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_cloudy + nc_key: quality_bt ir_channel_id: 6 + wavelength: [11.8, 12.3, 12.8] category_id: 2 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_all_ir133: name: quality_bt_all_ir133 + long_name: TOA Brightess Temperature % Confidence at 13.30um (all pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_all + nc_key: quality_bt ir_channel_id: 7 + wavelength: [12.7, 13.3, 13.9] category_id: 0 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_clear_ir133: name: quality_bt_clear_ir133 + long_name: TOA Brightess Temperature % Confidence at 13.30um (clear pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_clear + nc_key: quality_bt ir_channel_id: 7 + wavelength: [12.7, 13.3, 13.9] category_id: 1 fill_value: -1 + units: '%' coordinates: - longitude - latitude quality_bt_cloudy_ir133: name: quality_bt_cloudy_ir133 + long_name: TOA Brightess Temperature % Confidence at 13.30um (cloudy pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_cloudy + nc_key: quality_bt ir_channel_id: 7 + wavelength: [12.7, 13.3, 13.9] category_id: 2 fill_value: -1 + units: '%' coordinates: - longitude - latitude pixel_percentage_all: name: pixel_percentage_all + long_name: Percentage of FoR pixels used (all pixels) + standard_name: pixels_used_fraction resolution: 32000 file_type: nc_fci_asr - file_key: 
pixel_percentage - long_name: pixel_percentage_all + nc_key: pixel_percentage category_id: 0 + units: '%' coordinates: - longitude - latitude pixel_percentage_clear: name: pixel_percentage_clear + long_name: Percentage of FoR pixels used (clear pixels) + standard_name: pixels_used_fraction resolution: 32000 file_type: nc_fci_asr - file_key: pixel_percentage - long_name: pixel_percentage_clear + nc_key: pixel_percentage category_id: 1 + units: '%' coordinates: - longitude - latitude pixel_percentage_cloudy: name: pixel_percentage_cloudy + long_name: Percentage of FoR pixels used (cloudy pixels) + standard_name: pixels_used_fraction resolution: 32000 file_type: nc_fci_asr - file_key: pixel_percentage - long_name: pixel_percentage_cloudy + nc_key: pixel_percentage category_id: 2 + units: '%' coordinates: - longitude - latitude - - product_quality_asr: - name: product_quality_asr - file_type: nc_fci_asr - file_key: product_quality - long_name: product_quality_index - - product_completeness_asr: - name: product_completeness_asr - file_type: nc_fci_asr - file_key: product_completeness - long_name: product_completeness_index - - product_timeliness_asr: - name: product_timeliness_asr - file_type: nc_fci_asr - file_key: product_timeliness - long_name: product_timeliness_index - -# AMV Intermediate Product - intm_latitude: - name: intm_latitude - file_type: nc_fci_amvi - file_key: intm_latitude - standard_name: latitude - - intm_longitude: - name: intm_longitude - file_type: nc_fci_amvi - file_key: intm_longitude - standard_name: longitude - - intm_speed: - name: intm_speed - file_type: nc_fci_amvi - file_key: intm_speed - standard_name: wind_speed - coordinates: - - intm_longitude - - intm_latitude - - intm_u_component: - name: intm_u_component - file_type: nc_fci_amvi - file_key: intm_u_component - standard_name: wind_speed_horizontal_component - coordinates: - - intm_longitude - - intm_latitude - - intm_v_component: - name: intm_v_component - file_type: nc_fci_amvi - 
file_key: intm_v_component - standard_name: wind_speed_vertical_component - coordinates: - - intm_longitude - - intm_latitude - - intm_direction: - name: intm_direction - file_type: nc_fci_amvi - file_key: intm_direction - standard_name: wind_to_direction - coordinates: - - intm_longitude - - intm_latitude - - intm_pressure: - name: intm_pressure - file_type: nc_fci_amvi - file_key: intm_pressure - standard_name: wind_pressure - coordinates: - - intm_longitude - - intm_latitude - - intm_temperature: - name: intm_temperature - file_type: nc_fci_amvi - file_key: intm_temperature - standard_name: wind_temperature - coordinates: - - intm_longitude - - intm_latitude - - intm_target_type: - name: intm_target_type - file_type: nc_fci_amvi - file_key: target_type - standard_name: wind_target_type - coordinates: - - intm_longitude - - intm_latitude - - intm_wind_method: - name: intm_wind_method - file_type: nc_fci_amvi - file_key: wind_method - standard_name: wind_wind_method - coordinates: - - intm_longitude - - intm_latitude - -# AMV Final Product - channel_id: - name: channel_id - file_type: nc_fci_amv - file_key: channel_id - standard_name: channel_id - - amv_latitude: - name: latitude - file_type: nc_fci_amv - file_key: latitude - standard_name: latitude - - amv_longitude: - name: longitude - file_type: nc_fci_amv - file_key: longitude - standard_name: longitude - - speed: - name: speed - file_type: nc_fci_amv - file_key: speed - standard_name: wind_speed - coordinates: - - longitude - - latitude - - speed_u_component: - name: speed_u_component - file_type: nc_fci_amv - file_key: speed_u_component - standard_name: wind_speed_horizontal_component - coordinates: - - longitude - - latitude - - speed_v_component: - name: speed_v_component - file_type: nc_fci_amv - file_key: speed_v_component - standard_name: wind_speed_vertical_component - coordinates: - - longitude - - latitude - - direction: - name: direction - file_type: nc_fci_amv - file_key: direction - standard_name: 
wind_to_direction - coordinates: - - longitude - - latitude - - pressure: - name: pressure - file_type: nc_fci_amv - file_key: pressure - standard_name: wind_pressure - coordinates: - - longitude - - latitude - - temperature: - name: temperature - file_type: nc_fci_amv - file_key: temperature - standard_name: wind_temperature - coordinates: - - longitude - - latitude - - target_type: - name: target_type - file_type: nc_fci_amv - file_key: target_type - standard_name: wind_target_type - coordinates: - - longitude - - latitude - - wind_method: - name: wind_method - file_type: nc_fci_amv - file_key: wind_method - standard_name: wind_wind_method - coordinates: - - longitude - - latitude - - fcst_u: - name: fcst_u - file_type: nc_fci_amv - file_key: forecast_u_component - standard_name: wind_forecast_u_component - coordinates: - - longitude - - latitude - - fcst_v: - name: fcst_v - file_type: nc_fci_amv - file_key: forecast_v_component - standard_name: wind_forecast_v_component - coordinates: - - longitude - - latitude - - best_fit_pres: - name: best_fit_pres - file_type: nc_fci_amv - file_key: best_fit_pressure - standard_name: wind_best_fit_pressure - coordinates: - - longitude - - latitude - - best_fit_u: - name: best_fit_u - file_type: nc_fci_amv - file_key: best_fit_u_component - standard_name: wind_best_fit_u_component - coordinates: - - longitude - - latitude - - best_fit_v: - name: best_fit_v - file_type: nc_fci_amv - file_key: best_fit_v_component - standard_name: wind_best_fit_v_component - coordinates: - - longitude - - latitude - - qi: - name: qi - file_type: nc_fci_amv - file_key: overall_reliability - standard_name: wind_overall_reliability - coordinates: - - longitude - - latitude - - qi_excl_fcst: - name: qi_excl_fcst - file_type: nc_fci_amv - file_key: overall_reliability_exc_forecast - standard_name: wind_overall_reliability_exc_forecast - coordinates: - - longitude - - latitude - - product_quality: - name: product_quality - file_type: nc_fci_amv - 
file_key: product_quality - long_name: product_quality_index - - product_completeness: - name: product_completeness - file_type: nc_fci_amv - file_key: product_completeness - long_name: product_completeness_index - - product_timeliness: - name: product_timeliness - file_type: nc_fci_amv - file_key: product_timeliness - long_name: product_timeliness_index diff --git a/satpy/etc/readers/fy3a_mersi1_l1b.yaml b/satpy/etc/readers/fy3a_mersi1_l1b.yaml new file mode 100644 index 0000000000..7ce31300bb --- /dev/null +++ b/satpy/etc/readers/fy3a_mersi1_l1b.yaml @@ -0,0 +1,448 @@ +reader: + name: fy3a_mersi1_l1b + short_name: FY3A MERSI-1 l1b + long_name: FY-3A MERSI-1 L1B data in HDF5 format + description: FY-3A Medium Resolution Spectral Imager 1 (MERSI-1) L1B Reader + status: Beta + supports_fsspec: false + sensors: [mersi-1] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + +file_types: + fy3a_mersi1_l1b_1000: + file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B + rows_per_scan: 10 + file_patterns: + - 'FY3A_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_1000M_MS.{ext:3s}' + + fy3a_mersi1_l1b_250: + file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B + rows_per_scan: 40 + file_patterns: + - 'FY3A_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_0250M_MS.{ext:3s}' + +datasets: + '1': + name: '1' + wavelength: [0.445, 0.470, 0.495] + resolution: + 1000: + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_250_Aggr.1KM_RefSB + band_index: 0 + calibration_index: 0 + 250: + file_type: fy3a_mersi1_l1b_250 + file_key: EV_250_RefSB_b1 + calibration_index: 0 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '2': + name: '2' + wavelength: [0.525, 0.550, 0.575] + resolution: + 1000: + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_250_Aggr.1KM_RefSB + band_index: 1 + calibration_index: 1 + 250: + file_type: fy3a_mersi1_l1b_250 + file_key: 
EV_250_RefSB_b2 + calibration_index: 1 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '3': + name: '3' + wavelength: [0.625, 0.650, 0.675] + resolution: + 1000: + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_250_Aggr.1KM_RefSB + band_index: 2 + calibration_index: 2 + 250: + file_type: fy3a_mersi1_l1b_250 + file_key: EV_250_RefSB_b3 + calibration_index: 2 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '4': + name: '4' + wavelength: [0.840, 0.865, 0.890] + resolution: + 1000: + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_250_Aggr.1KM_RefSB + band_index: 3 + calibration_index: 3 + 250: + file_type: fy3a_mersi1_l1b_250 + file_key: EV_250_RefSB_b4 + calibration_index: 3 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '5': + name: '5' + wavelength: [10, 11.25, 12.5] + resolution: + 1000: + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_250_Aggr.1KM_Emissive + 250: + file_type: fy3a_mersi1_l1b_250 + file_key: EV_250_Emissive + coordinates: [longitude, latitude] + calibration: + brightness_temperature: + units: "K" + standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts + + '6': + name: '6' + wavelength: [1.615, 1.640, 1.665] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 0 + calibration_index: 4 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '7': + name: '7' + wavelength: 
[2.105, 2.130, 2.155] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 1 + calibration_index: 5 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '8': + name: '8' + wavelength: [0.402, 0.412, 0.422] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 2 + calibration_index: 6 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '9': + name: '9' + wavelength: [0.433, 0.443, 0.453] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 3 + calibration_index: 7 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '10': + name: '10' + wavelength: [0.480, 0.490, 0.500] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 4 + calibration_index: 8 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '11': + name: '11' + wavelength: [0.510, 0.520, 0.530] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 5 + calibration_index: 9 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '12': + name: '12' + wavelength: [0.555, 0.565, 0.575] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 6 + calibration_index: 10 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: 
toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '13': + name: '13' + wavelength: [0.640, 0.650, 0.660] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 7 + calibration_index: 11 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '14': + name: '14' + wavelength: [0.675, 0.685, 0.695] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 8 + calibration_index: 12 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '15': + name: '15' + wavelength: [0.755, 0.765, 0.775] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 9 + calibration_index: 13 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '16': + name: '16' + wavelength: [0.855, 0.865, 0.875] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 10 + calibration_index: 14 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '17': + name: '17' + wavelength: [0.895, 0.905, 0.915] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 11 + calibration_index: 15 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '18': + name: '18' + wavelength: [0.930, 0.940, 0.950] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 12 + 
calibration_index: 16 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '19': + name: '19' + wavelength: [0.970, 0.980, 0.990] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 13 + calibration_index: 17 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '20': + name: '20' + wavelength: [1.020, 1.030, 1.040] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 14 + calibration_index: 18 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + longitude: + name: longitude + units: degrees_east + standard_name: longitude + resolution: + 1000: + file_type: fy3a_mersi1_l1b_1000 + file_key: Longitude + 250: + file_type: fy3a_mersi1_l1b_250 + file_key: Longitude + + latitude: + name: latitude + units: degrees_north + standard_name: latitude + resolution: + 1000: + file_type: fy3a_mersi1_l1b_1000 + file_key: Latitude + 250: + file_type: fy3a_mersi1_l1b_250 + file_key: Latitude + + solar_zenith_angle: + name: solar_zenith_angle + units: degree + standard_name: solar_zenith_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3a_mersi1_l1b_1000 + file_key: SolarZenith + + solar_azimuth_angle: + name: solar_azimuth_angle + units: degree + standard_name: solar_azimuth_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3a_mersi1_l1b_1000 + file_key: SolarAzimuth + + satellite_zenith_angle: + name: satellite_zenith_angle + units: degree + standard_name: sensor_zenith_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3a_mersi1_l1b_1000 + file_key: 
SensorZenith + + satellite_azimuth_angle: + name: satellite_azimuth_angle + units: degree + standard_name: sensor_azimuth_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3a_mersi1_l1b_1000 + file_key: SensorAzimuth diff --git a/satpy/etc/readers/fy3b_mersi1_l1b.yaml b/satpy/etc/readers/fy3b_mersi1_l1b.yaml new file mode 100644 index 0000000000..5d2dc48f56 --- /dev/null +++ b/satpy/etc/readers/fy3b_mersi1_l1b.yaml @@ -0,0 +1,454 @@ +reader: + name: fy3b_mersi1_l1b + short_name: FY3B MERSI-1 l1b + long_name: FY-3B MERSI-1 L1B data in HDF5 format + description: FY-3B Medium Resolution Spectral Imager 1 (MERSI-1) L1B Reader + status: Beta + supports_fsspec: false + sensors: [mersi-1] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + +file_types: + fy3b_mersi1_l1b_1000: + file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B + rows_per_scan: 10 + file_patterns: + - 'FY3B_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_1000M_MS.{ext:3s}' + + fy3b_mersi1_l1b_250: + file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B + rows_per_scan: 40 + file_patterns: + - 'FY3B_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_0250M_MS.{ext:3s}' + + fy3b_mersi1_l1b_geo: + file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B + rows_per_scan: 40 + file_patterns: + - 'FY3B_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_GEOXX_MS.{ext:3s}' + +datasets: + '1': + name: '1' + wavelength: [0.445, 0.470, 0.495] + resolution: + 1000: + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_250_Aggr.1KM_RefSB + band_index: 0 + calibration_index: 0 + 250: + file_type: fy3b_mersi1_l1b_250 + file_key: EV_250_RefSB_b1 + calibration_index: 0 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '2': + name: '2' + wavelength: [0.525, 0.550, 0.575] + resolution: + 1000: + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_250_Aggr.1KM_RefSB + band_index: 1 + 
calibration_index: 1 + 250: + file_type: fy3b_mersi1_l1b_250 + file_key: EV_250_RefSB_b2 + calibration_index: 1 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '3': + name: '3' + wavelength: [0.625, 0.650, 0.675] + resolution: + 1000: + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_250_Aggr.1KM_RefSB + band_index: 2 + calibration_index: 2 + 250: + file_type: fy3b_mersi1_l1b_250 + file_key: EV_250_RefSB_b3 + calibration_index: 2 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '4': + name: '4' + wavelength: [0.840, 0.865, 0.890] + resolution: + 1000: + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_250_Aggr.1KM_RefSB + band_index: 3 + calibration_index: 3 + 250: + file_type: fy3b_mersi1_l1b_250 + file_key: EV_250_RefSB_b4 + calibration_index: 3 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '5': + name: '5' + wavelength: [10, 11.25, 12.5] + resolution: + 1000: + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_250_Aggr.1KM_Emissive + 250: + file_type: fy3b_mersi1_l1b_250 + file_key: EV_250_Emissive + coordinates: [longitude, latitude] + calibration: + brightness_temperature: + units: "K" + standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts + + '6': + name: '6' + wavelength: [1.615, 1.640, 1.665] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 0 + calibration_index: 4 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + 
units: "1" + standard_name: counts + + '7': + name: '7' + wavelength: [2.105, 2.130, 2.155] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 1 + calibration_index: 5 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '8': + name: '8' + wavelength: [0.402, 0.412, 0.422] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 2 + calibration_index: 6 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '9': + name: '9' + wavelength: [0.433, 0.443, 0.453] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 3 + calibration_index: 7 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '10': + name: '10' + wavelength: [0.480, 0.490, 0.500] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 4 + calibration_index: 8 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '11': + name: '11' + wavelength: [0.510, 0.520, 0.530] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 5 + calibration_index: 9 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '12': + name: '12' + wavelength: [0.555, 0.565, 0.575] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 6 + calibration_index: 10 + coordinates: [longitude, latitude] + 
calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '13': + name: '13' + wavelength: [0.640, 0.650, 0.660] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 7 + calibration_index: 11 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '14': + name: '14' + wavelength: [0.675, 0.685, 0.695] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 8 + calibration_index: 12 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '15': + name: '15' + wavelength: [0.755, 0.765, 0.775] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 9 + calibration_index: 13 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '16': + name: '16' + wavelength: [0.855, 0.865, 0.875] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 10 + calibration_index: 14 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '17': + name: '17' + wavelength: [0.895, 0.905, 0.915] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 11 + calibration_index: 15 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '18': + name: '18' + wavelength: [0.930, 0.940, 0.950] + resolution: 1000 + file_type: 
fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 12 + calibration_index: 16 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '19': + name: '19' + wavelength: [0.970, 0.980, 0.990] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 13 + calibration_index: 17 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '20': + name: '20' + wavelength: [1.020, 1.030, 1.040] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 14 + calibration_index: 18 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + longitude: + name: longitude + units: degrees_east + standard_name: longitude + resolution: + 1000: + file_type: fy3b_mersi1_l1b_1000 + file_key: Longitude + 250: + file_type: fy3b_mersi1_l1b_geo + file_key: Longitude + + latitude: + name: latitude + units: degrees_north + standard_name: latitude + resolution: + 1000: + file_type: fy3b_mersi1_l1b_1000 + file_key: Latitude + 250: + file_type: fy3b_mersi1_l1b_geo + file_key: Latitude + + solar_zenith_angle: + name: solar_zenith_angle + units: degree + standard_name: solar_zenith_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3b_mersi1_l1b_1000 + file_key: SolarZenith + + solar_azimuth_angle: + name: solar_azimuth_angle + units: degree + standard_name: solar_azimuth_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3b_mersi1_l1b_1000 + file_key: SolarAzimuth + + satellite_zenith_angle: + name: satellite_zenith_angle + units: degree + standard_name: sensor_zenith_angle + resolution: 1000 + coordinates: 
[longitude, latitude] + file_type: fy3b_mersi1_l1b_1000 + file_key: SensorZenith + + satellite_azimuth_angle: + name: satellite_azimuth_angle + units: degree + standard_name: sensor_azimuth_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3b_mersi1_l1b_1000 + file_key: SensorAzimuth diff --git a/satpy/etc/readers/fy3c_mersi1_l1b.yaml b/satpy/etc/readers/fy3c_mersi1_l1b.yaml new file mode 100644 index 0000000000..e797b52405 --- /dev/null +++ b/satpy/etc/readers/fy3c_mersi1_l1b.yaml @@ -0,0 +1,483 @@ +reader: + name: fy3c_mersi1_l1b + short_name: FY3C MERSI-1 l1b + long_name: FY-3C MERSI-1 L1B data in HDF5 format + description: FY-3C Medium Resolution Spectral Imager 1 (MERSI-1) L1B Reader + status: Beta + supports_fsspec: false + sensors: [mersi-1] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + +file_types: + fy3c_mersi1_l1b_1000: + file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B + rows_per_scan: 10 + file_patterns: + - 'FY3C_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_1000M_MS.{ext:3s}' + + fy3c_mersi1_l1b_250: + file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B + rows_per_scan: 40 + file_patterns: + - 'FY3C_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_0250M_MS.{ext:3s}' + + fy3c_mersi1_l1b_1000_geo: + file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B + rows_per_scan: 10 + file_patterns: + - 'FY3C_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_GEO1K_MS.{ext:3s}' + + fy3c_mersi1_l1b_250_geo: + file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B + rows_per_scan: 40 + file_patterns: + - 'FY3C_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_GEOQK_MS.{ext:3s}' + +datasets: + '1': + name: '1' + wavelength: [0.445, 0.470, 0.495] + resolution: + 1000: + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_250_Aggr.1KM_RefSB + band_index: 0 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 0 + 250: + file_type: fy3c_mersi1_l1b_250 + file_key: Data/EV_250_RefSB_b1 + calibration_key: 
Calibration/VIS_Cal_Coeff + calibration_index: 0 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '2': + name: '2' + wavelength: [0.525, 0.550, 0.575] + resolution: + 1000: + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_250_Aggr.1KM_RefSB + band_index: 1 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 1 + 250: + file_type: fy3c_mersi1_l1b_250 + file_key: Data/EV_250_RefSB_b2 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 1 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '3': + name: '3' + wavelength: [0.625, 0.650, 0.675] + resolution: + 1000: + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_250_Aggr.1KM_RefSB + band_index: 2 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 2 + 250: + file_type: fy3c_mersi1_l1b_250 + file_key: Data/EV_250_RefSB_b3 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 2 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '4': + name: '4' + wavelength: [0.840, 0.865, 0.890] + resolution: + 1000: + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_250_Aggr.1KM_RefSB + band_index: 3 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 3 + 250: + file_type: fy3c_mersi1_l1b_250 + file_key: Data/EV_250_RefSB_b4 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 3 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '5': + name: '5' + wavelength: [10, 11.25, 12.5] + resolution: + 1000: + file_type: fy3c_mersi1_l1b_1000 + file_key: 
Data/EV_250_Aggr.1KM_Emissive + 250: + file_type: fy3c_mersi1_l1b_250 + file_key: Data/EV_250_Emissive + coordinates: [longitude, latitude] + calibration: + brightness_temperature: + units: "K" + standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts + + '6': + name: '6' + wavelength: [1.615, 1.640, 1.665] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 0 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 4 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '7': + name: '7' + wavelength: [2.105, 2.130, 2.155] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 1 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 5 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '8': + name: '8' + wavelength: [0.402, 0.412, 0.422] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 2 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 6 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '9': + name: '9' + wavelength: [0.433, 0.443, 0.453] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 3 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 7 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '10': + name: '10' 
+ wavelength: [0.480, 0.490, 0.500] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 4 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 8 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '11': + name: '11' + wavelength: [0.510, 0.520, 0.530] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 5 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 9 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '12': + name: '12' + wavelength: [0.555, 0.565, 0.575] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 6 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 10 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '13': + name: '13' + wavelength: [0.640, 0.650, 0.660] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 7 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 11 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '14': + name: '14' + wavelength: [0.675, 0.685, 0.695] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 8 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 12 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '15': + name: 
'15' + wavelength: [0.755, 0.765, 0.775] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 9 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 13 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '16': + name: '16' + wavelength: [0.855, 0.865, 0.875] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 10 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 14 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '17': + name: '17' + wavelength: [0.895, 0.905, 0.915] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 11 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 15 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '18': + name: '18' + wavelength: [0.930, 0.940, 0.950] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 12 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 16 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '19': + name: '19' + wavelength: [0.970, 0.980, 0.990] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 13 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 17 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + 
'20': + name: '20' + wavelength: [1.020, 1.030, 1.040] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 14 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 18 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + longitude: + name: longitude + units: degrees_east + standard_name: longitude + resolution: + 1000: + file_type: fy3c_mersi1_l1b_1000_geo + file_key: Geolocation/Longitude + 250: + file_type: fy3c_mersi1_l1b_250_geo + file_key: Longitude + + latitude: + name: latitude + units: degrees_north + standard_name: latitude + resolution: + 1000: + file_type: fy3c_mersi1_l1b_1000_geo + file_key: Geolocation/Latitude + 250: + file_type: fy3c_mersi1_l1b_250_geo + file_key: Latitude + + solar_zenith_angle: + name: solar_zenith_angle + units: degree + standard_name: solar_zenith_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3c_mersi1_l1b_1000_geo + file_key: Geolocation/SolarZenith + + solar_azimuth_angle: + name: solar_azimuth_angle + units: degree + standard_name: solar_azimuth_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3c_mersi1_l1b_1000_geo + file_key: Geolocation/SolarAzimuth + + satellite_zenith_angle: + name: satellite_zenith_angle + units: degree + standard_name: sensor_zenith_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3c_mersi1_l1b_1000_geo + file_key: Geolocation/SensorZenith + + satellite_azimuth_angle: + name: satellite_azimuth_angle + units: degree + standard_name: sensor_azimuth_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3c_mersi1_l1b_1000_geo + file_key: Geolocation/SensorAzimuth diff --git a/satpy/etc/readers/generic_image.yaml b/satpy/etc/readers/generic_image.yaml index 07d1bdeb50..fb6e0aab5d 100644 --- 
a/satpy/etc/readers/generic_image.yaml +++ b/satpy/etc/readers/generic_image.yaml @@ -4,7 +4,7 @@ reader: long_name: Generic Images e.g. GeoTIFF description: generic image reader status: Nominal - supports_fsspec: false + supports_fsspec: true reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [images] default_channels: [image] diff --git a/satpy/etc/readers/ghi_l1.yaml b/satpy/etc/readers/ghi_l1.yaml index 59c8f35f70..08c438127b 100644 --- a/satpy/etc/readers/ghi_l1.yaml +++ b/satpy/etc/readers/ghi_l1.yaml @@ -4,7 +4,11 @@ reader: name: ghi_l1 + short_name: GHI FY4A L1 + long_name: FY-4A GHI Level 1 HDF5 format description: FY-4A GHI instrument HDF5 reader + status: Nominal + supports_fsspec: false sensors: [ghi] default_channels: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader diff --git a/satpy/etc/readers/goci2_l2_nc.yaml b/satpy/etc/readers/goci2_l2_nc.yaml new file mode 100644 index 0000000000..d11db0a5d1 --- /dev/null +++ b/satpy/etc/readers/goci2_l2_nc.yaml @@ -0,0 +1,838 @@ +reader: + name: goci2_l2_nc + short_name: GOCI-II L2 NetCDF4 + long_name: GK-2B GOCI-II Level 2 products in netCDF4 format from NOSC + status: Beta + supports_fsspec: true + sensors: ['goci2'] + reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader + # file pattern keys to sort files by with 'satpy.utils.group_files' + group_keys: ['start_time', 'platform_shortname', "slot"] + +file_types: + goci2_l2_kd: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_Kd.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_Kd.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_Kd.nc' + + goci2_l2_zsd: + file_reader: 
!!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_Zsd.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_Zsd.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_Zsd.nc' + + goci2_l2_chl: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_Chl.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_Chl.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_Chl.nc' + + goci2_l2_cdom: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_CDOM.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_CDOM.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_CDOM.nc' + + goci2_l2_tss: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_TSS.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_TSS.nc' + - 
'{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_TSS.nc' + + goci2_l2_ac: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_AC.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_AC.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_AC.nc' + + goci2_l2_iop: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_IOP.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_IOP.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_IOP.nc' + + goci2_l2_aod: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_AOD.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_AOD.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_AOD.nc' + + goci2_l2_mf: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_MF.nc' + - 
'{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_MF.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_MF.nc' + + goci2_l2_cf: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_CF.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_CF.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_CF.nc' + + goci2_l2_fa: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_FA.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_FA.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_FA.nc' + + goci2_l2_fgi: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_FGI.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_FGI.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_FGI.nc' + + goci2_l2_lsss: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - 
'{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_LSSS.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_LSSS.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_LSSS.nc' + + goci2_l2_pp: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_PP.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_PP.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_PP.nc' + + goci2_l2_ri: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_RI.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_RI.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_RI.nc' + + goci2_l2_ssc: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_SSC.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_SSC.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_SSC.nc' + 
+datasets: +# --- Navigation Data --- + latitude: + name: latitude + file_type: [goci2_l2_kd, goci2_l2_zsd, goci2_l2_chl, goci2_l2_cdom, goci2_l2_tss, goci2_l2_ac, goci2_l2_iop, goci2_l2_aod, goci2_l2_mf, goci2_l2_cf, goci2_l2_fa, goci2_l2_fgi, goci2_l2_lsss, goci2_l2_pp, goci2_l2_ri, goci2_l2_ssc] + file_key: latitude + standard_name: latitude + units: degrees_north + + longitude: + name: longitude + file_type: [goci2_l2_kd, goci2_l2_zsd, goci2_l2_chl, goci2_l2_cdom, goci2_l2_tss, goci2_l2_ac, goci2_l2_iop, goci2_l2_aod, goci2_l2_mf, goci2_l2_cf, goci2_l2_fa, goci2_l2_fgi, goci2_l2_lsss, goci2_l2_pp, goci2_l2_ri, goci2_l2_ssc] + file_key: longitude + standard_name: longitude + units: degrees_east + +# --- Ocean Color Products --- + # --- Diffuse attenuation coefficient --- + Kd_380: + name: Kd_380 + long_name: Diffuse attenuation coefficient at 380 nm + file_type: goci2_l2_kd + file_key: Kd_380 + coordinates: [longitude, latitude] + units: m-1 + + Kd_412: + name: Kd_412 + long_name: Diffuse attenuation coefficient at 412 nm + file_type: goci2_l2_kd + file_key: Kd_412 + coordinates: [longitude, latitude] + units: m-1 + + Kd_443: + name: Kd_443 + long_name: Diffuse attenuation coefficient at 443 nm + file_type: goci2_l2_kd + file_key: Kd_443 + coordinates: [longitude, latitude] + units: m-1 + + Kd_490: + name: Kd_490 + long_name: Diffuse attenuation coefficient at 490 nm + file_type: goci2_l2_kd + file_key: Kd_490 + coordinates: [longitude, latitude] + units: m-1 + + Kd_510: + name: Kd_510 + long_name: Diffuse attenuation coefficient at 510 nm + file_type: goci2_l2_kd + file_key: Kd_510 + coordinates: [longitude, latitude] + units: m-1 + + Kd_555: + name: Kd_555 + long_name: Diffuse attenuation coefficient at 555 nm + file_type: goci2_l2_kd + file_key: Kd_555 + coordinates: [longitude, latitude] + units: m-1 + + Kd_620: + name: Kd_620 + long_name: Diffuse attenuation coefficient at 620 nm + file_type: goci2_l2_kd + file_key: Kd_620 + coordinates: [longitude, 
latitude] + units: m-1 + + Kd_660: + name: Kd_660 + long_name: Diffuse attenuation coefficient at 660 nm + file_type: goci2_l2_kd + file_key: Kd_660 + coordinates: [longitude, latitude] + units: m-1 + + Kd_680: + name: Kd_680 + long_name: Diffuse attenuation coefficient at 680 nm + file_type: goci2_l2_kd + file_key: Kd_680 + coordinates: [longitude, latitude] + units: m-1 + + Kd_709: + name: Kd_709 + long_name: Diffuse attenuation coefficient at 709 nm + file_type: goci2_l2_kd + file_key: Kd_709 + coordinates: [longitude, latitude] + units: m-1 + + Kd_745: + name: Kd_745 + long_name: Diffuse attenuation coefficient at 745 nm + file_type: goci2_l2_kd + file_key: Kd_745 + coordinates: [longitude, latitude] + units: m-1 + + Kd_865: + name: Kd_865 + long_name: Diffuse attenuation coefficient at 865 nm + file_type: goci2_l2_kd + file_key: Kd_865 + coordinates: [longitude, latitude] + units: m-1 + + # --- Other OC products --- + Secchi_disk_depth: + name: Zsd + file_type: goci2_l2_zsd + file_key: Zsd + coordinates: [longitude, latitude] + units: m + + Chlorophyll-a_concentration: + name: Chl + file_type: goci2_l2_chl + file_key: Chl + coordinates: [longitude, latitude] + units: mg m-3 + + Colored_Dissolved_Organic_Matter: + name: CDOM + file_type: goci2_l2_cdom + file_key: CDOM + coordinates: [longitude, latitude] + units: m-1 + + Total_Suspended_Sediments_concentration: + name: TSS + file_type: goci2_l2_tss + file_key: TSS + coordinates: [longitude, latitude] + units: g m-3 + +# --- Atmospheric Correction Products --- + # --- Rayleigh-corrected reflectance --- + RhoC_380: + name: RhoC_380 + sensor: goci2 + wavelength: [0.37, 0.38, 0.39] + long_name: Rayleigh-corrected reflectance at 380 nm + file_type: goci2_l2_ac + file_key: RhoC_380 + coordinates: [longitude, latitude] + + RhoC_412: + name: RhoC_412 + sensor: goci2 + wavelength: [0.402, 0.412, 0.422] + long_name: Rayleigh-corrected reflectance at 412 nm + file_type: goci2_l2_ac + file_key: RhoC_412 + coordinates:
[longitude, latitude] + + RhoC_443: + name: RhoC_443 + sensor: goci2 + wavelength: [0.433, 0.443, 0.453] + long_name: Rayleigh-corrected reflectance at 443 nm + file_type: goci2_l2_ac + file_key: RhoC_443 + coordinates: [longitude, latitude] + + RhoC_490: + name: RhoC_490 + sensor: goci2 + wavelength: [0.48, 0.49, 0.50] + long_name: Rayleigh-corrected reflectance at 490 nm + file_type: goci2_l2_ac + file_key: RhoC_490 + coordinates: [longitude, latitude] + + RhoC_510: + name: RhoC_510 + sensor: goci2 + wavelength: [0.50, 0.51, 0.52] + long_name: Rayleigh-corrected reflectance at 510 nm + file_type: goci2_l2_ac + file_key: RhoC_510 + coordinates: [longitude, latitude] + + RhoC_555: + name: RhoC_555 + sensor: goci2 + wavelength: [0.545, 0.555, 0.565] + long_name: Rayleigh-corrected reflectance at 555 nm + file_type: goci2_l2_ac + file_key: RhoC_555 + coordinates: [longitude, latitude] + + RhoC_620: + name: RhoC_620 + sensor: goci2 + wavelength: [0.61, 0.62, 0.63] + long_name: Rayleigh-corrected reflectance at 620 nm + file_type: goci2_l2_ac + file_key: RhoC_620 + coordinates: [longitude, latitude] + + RhoC_660: + name: RhoC_660 + sensor: goci2 + wavelength: [0.65, 0.66, 0.67] + long_name: Rayleigh-corrected reflectance at 660 nm + file_type: goci2_l2_ac + file_key: RhoC_660 + coordinates: [longitude, latitude] + + RhoC_680: + name: RhoC_680 + sensor: goci2 + wavelength: [0.675, 0.680, 0.685] + long_name: Rayleigh-corrected reflectance at 680 nm + file_type: goci2_l2_ac + file_key: RhoC_680 + coordinates: [longitude, latitude] + + RhoC_709: + name: RhoC_709 + sensor: goci2 + wavelength: [0.704, 0.709, 0.714] + long_name: Rayleigh-corrected reflectance at 709 nm + file_type: goci2_l2_ac + file_key: RhoC_709 + coordinates: [longitude, latitude] + + RhoC_745: + name: RhoC_745 + sensor: goci2 + wavelength: [0.735, 0.745, 0.755] + long_name: Rayleigh-corrected reflectance at 745 nm + file_type: goci2_l2_ac + file_key: RhoC_745 + coordinates: [longitude, latitude] + + 
RhoC_865: + name: RhoC_865 + sensor: goci2 + wavelength: [0.845, 0.865, 0.885] + long_name: Rayleigh-corrected reflectance at 865 nm + file_type: goci2_l2_ac + file_key: RhoC_865 + coordinates: [longitude, latitude] + + # --- Remote sensing reflectance --- + Rrs_380: + name: Rrs_380 + sensor: goci2 + wavelength: [0.37, 0.38, 0.39] + long_name: Remote sensing reflectance at 380 nm, KOSC standard algorithm + file_type: goci2_l2_ac + file_key: Rrs_380 + coordinates: [longitude, latitude] + units: sr-1 + + Rrs_412: + name: Rrs_412 + sensor: goci2 + wavelength: [0.402, 0.412, 0.422] + long_name: Remote sensing reflectance at 412 nm, KOSC standard algorithm + file_type: goci2_l2_ac + file_key: Rrs_412 + coordinates: [longitude, latitude] + units: sr-1 + + Rrs_443: + name: Rrs_443 + sensor: goci2 + wavelength: [0.433, 0.443, 0.453] + long_name: Remote sensing reflectance at 443 nm, KOSC standard algorithm + file_type: goci2_l2_ac + file_key: Rrs_443 + coordinates: [longitude, latitude] + units: sr-1 + + Rrs_490: + name: Rrs_490 + sensor: goci2 + wavelength: [0.48, 0.49, 0.50] + long_name: Remote sensing reflectance at 490 nm, KOSC standard algorithm + file_type: goci2_l2_ac + file_key: Rrs_490 + coordinates: [longitude, latitude] + units: sr-1 + + Rrs_510: + name: Rrs_510 + sensor: goci2 + wavelength: [0.50, 0.51, 0.52] + long_name: Remote sensing reflectance at 510 nm, KOSC standard algorithm + file_type: goci2_l2_ac + file_key: Rrs_510 + coordinates: [longitude, latitude] + units: sr-1 + + Rrs_555: + name: Rrs_555 + sensor: goci2 + wavelength: [0.545, 0.555, 0.565] + long_name: Remote sensing reflectance at 555 nm, KOSC standard algorithm + file_type: goci2_l2_ac + file_key: Rrs_555 + coordinates: [longitude, latitude] + units: sr-1 + + Rrs_620: + name: Rrs_620 + sensor: goci2 + wavelength: [0.61, 0.62, 0.63] + long_name: Remote sensing reflectance at 620 nm, KOSC standard algorithm + file_type: goci2_l2_ac + file_key: Rrs_620 + coordinates: [longitude, latitude] + 
units: sr-1 + + Rrs_660: + name: Rrs_660 + sensor: goci2 + wavelength: [0.65, 0.66, 0.67] + long_name: Remote sensing reflectance at 660 nm, KOSC standard algorithm + file_type: goci2_l2_ac + file_key: Rrs_660 + coordinates: [longitude, latitude] + units: sr-1 + + Rrs_680: + name: Rrs_680 + sensor: goci2 + wavelength: [0.675, 0.680, 0.685] + long_name: Remote sensing reflectance at 680 nm, KOSC standard algorithm + file_type: goci2_l2_ac + file_key: Rrs_680 + coordinates: [longitude, latitude] + units: sr-1 + + Rrs_709: + name: Rrs_709 + sensor: goci2 + wavelength: [0.704, 0.709, 0.714] + long_name: Remote sensing reflectance at 709 nm, KOSC standard algorithm + file_type: goci2_l2_ac + file_key: Rrs_709 + coordinates: [longitude, latitude] + units: sr-1 + + Rrs_745: + name: Rrs_745 + sensor: goci2 + wavelength: [0.735, 0.745, 0.755] + long_name: Remote sensing reflectance at 745 nm, KOSC standard algorithm + file_type: goci2_l2_ac + file_key: Rrs_745 + coordinates: [longitude, latitude] + units: sr-1 + + Rrs_865: + name: Rrs_865 + sensor: goci2 + wavelength: [0.845, 0.865, 0.885] + long_name: Remote sensing reflectance at 865 nm, KOSC standard algorithm + file_type: goci2_l2_ac + file_key: Rrs_865 + coordinates: [longitude, latitude] + units: sr-1 + +# --- Inherent Optical Properties products --- + # --- Absorption coefficient --- + a_total_380: + name: a_total_380 + long_name: Spectral absorption coefficient at 380 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: a_total_380 + coordinates: [longitude, latitude] + units: m-1 + + a_total_412: + name: a_total_412 + long_name: Spectral absorption coefficient at 412 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: a_total_412 + coordinates: [longitude, latitude] + units: m-1 + + a_total_443: + name: a_total_443 + long_name: Spectral absorption coefficient at 443 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: a_total_443 + coordinates: [longitude, latitude] + units: m-1 + + a_total_490: + name: 
a_total_490 + long_name: Spectral absorption coefficient at 490 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: a_total_490 + coordinates: [longitude, latitude] + units: m-1 + + a_total_510: + name: a_total_510 + long_name: Spectral absorption coefficient at 510 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: a_total_510 + coordinates: [longitude, latitude] + units: m-1 + + a_total_555: + name: a_total_555 + long_name: Spectral absorption coefficient at 555 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: a_total_555 + coordinates: [longitude, latitude] + units: m-1 + + a_total_620: + name: a_total_620 + long_name: Spectral absorption coefficient at 620 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: a_total_620 + coordinates: [longitude, latitude] + units: m-1 + + a_total_660: + name: a_total_660 + long_name: Spectral absorption coefficient at 660 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: a_total_660 + coordinates: [longitude, latitude] + units: m-1 + + a_total_680: + name: a_total_680 + long_name: Spectral absorption coefficient at 680 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: a_total_680 + coordinates: [longitude, latitude] + units: m-1 + + a_total_709: + name: a_total_709 + long_name: Spectral absorption coefficient at 709 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: a_total_709 + coordinates: [longitude, latitude] + units: m-1 + + a_total_745: + name: a_total_745 + long_name: Spectral absorption coefficient at 745 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: a_total_745 + coordinates: [longitude, latitude] + units: m-1 + + a_total_865: + name: a_total_865 + long_name: Spectral absorption coefficient at 865 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: a_total_865 + coordinates: [longitude, latitude] + units: m-1 + + # --- Backscattering coefficient --- + bb_total_380: + name: bb_total_380 + long_name: Spectral backscattering coefficient at 380 nm, QAA version 6 + 
file_type: goci2_l2_iop + file_key: bb_total_380 + coordinates: [longitude, latitude] + units: m-1 + + bb_total_412: + name: bb_total_412 + long_name: Spectral backscattering coefficient at 412 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: bb_total_412 + coordinates: [longitude, latitude] + units: m-1 + + bb_total_443: + name: bb_total_443 + long_name: Spectral backscattering coefficient at 443 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: bb_total_443 + coordinates: [longitude, latitude] + units: m-1 + + bb_total_490: + name: bb_total_490 + long_name: Spectral backscattering coefficient at 490 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: bb_total_490 + coordinates: [longitude, latitude] + units: m-1 + + bb_total_510: + name: bb_total_510 + long_name: Spectral backscattering coefficient at 510 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: bb_total_510 + coordinates: [longitude, latitude] + units: m-1 + + bb_total_555: + name: bb_total_555 + long_name: Spectral backscattering coefficient at 555 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: bb_total_555 + coordinates: [longitude, latitude] + units: m-1 + + bb_total_620: + name: bb_total_620 + long_name: Spectral backscattering coefficient at 620 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: bb_total_620 + coordinates: [longitude, latitude] + units: m-1 + + bb_total_660: + name: bb_total_660 + long_name: Spectral backscattering coefficient at 660 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: bb_total_660 + coordinates: [longitude, latitude] + units: m-1 + + bb_total_680: + name: bb_total_680 + long_name: Spectral backscattering coefficient at 680 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: bb_total_680 + coordinates: [longitude, latitude] + units: m-1 + + bb_total_709: + name: bb_total_709 + long_name: Spectral backscattering coefficient at 709 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: bb_total_709 + coordinates: 
[longitude, latitude] + units: m-1 + + bb_total_745: + name: bb_total_745 + long_name: Spectral backscattering coefficient at 745 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: bb_total_745 + coordinates: [longitude, latitude] + units: m-1 + + bb_total_865: + name: bb_total_865 + long_name: Spectral backscattering coefficient at 865 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: bb_total_865 + coordinates: [longitude, latitude] + units: m-1 + + # --- Other IOP output --- + a_dg_443: + name: a_dg_443 + long_name: Spectral absorption coefficient of detritus and gelbstoff at 443 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: a_dg_443 + coordinates: [longitude, latitude] + units: m-1 + + a_chl_443: + name: a_chl_443 + long_name: Spectral absorption coefficient of chlorophyll-a at 443 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: a_chl_443 + coordinates: [longitude, latitude] + units: m-1 + + bb_p_555: + name: bb_p_555 + long_name: Spectral backscattering coefficient of particle at 555 nm, QAA version 6 + file_type: goci2_l2_iop + file_key: bb_p_555 + coordinates: [longitude, latitude] + units: m-1 + +# --- Aerosol products --- + AOD_550: + name: AOD_550 + long_name: Aerosol Optical Depth at 550 nm + file_type: goci2_l2_aod + file_key: Aerosol_Optical_Depth + coordinates: [longitude, latitude] + + Aerosol_Type: + name: Aerosol_Type + long_name: Aerosol type; 1 = Dust, 2 = Non-absorbing Coarse, 3 = Mixture, 4 = High-absorbing Fine, 5 = Moderate-absorbing Fine, 6 = Non-absorbing Fine + file_type: goci2_l2_aod + file_key: Aerosol_Type + coordinates: [longitude, latitude] + + Angstrom_Exponent: + name: Angstrom_Exponent + long_name: Calculated Angstrom Exponent between 440 and 870 nm + file_type: goci2_l2_aod + file_key: Angstrom_Exponent + coordinates: [longitude, latitude] + + Fine_Mode_Fraction: + name: Fine_Mode_Fraction + long_name: Fine Mode Fraction at 550 nm + file_type: goci2_l2_aod + file_key: Fine_Mode_Fraction + coordinates: 
[longitude, latitude] + + Single_Scattering_Albedo: + name: Single_Scattering_Albedo + long_name: Single Scattering Albedo at 440 nm + file_type: goci2_l2_aod + file_key: Single_Scattering_Albedo + coordinates: [longitude, latitude] + +# --- Ocean Products --- + MF: + name: MF + long_name: Marine fog existence (Yes/No/Possible), Machine learning based KOSC Algorithm + file_type: goci2_l2_mf + file_key: MF + coordinates: [longitude, latitude] + + CF: + name: CF + long_name: Chlorophyll(-a) Front, CF + file_type: goci2_l2_cf + file_key: CF + coordinates: [longitude, latitude] + units: mg m-3 km-1 + + FA: + name: FA + long_name: Subpixel area fraction covered by floating brown algae or green algae + file_type: goci2_l2_fa + file_key: FA + coordinates: [longitude, latitude] + + FGI: + name: FGI + long_name: Fishing ground index for chub mackerel + file_type: goci2_l2_fgi + file_key: FGI + coordinates: [longitude, latitude] + + SSS: + name: SSS + long_name: Sea Surface Salinity, Neural network algorithm + file_type: goci2_l2_lsss + file_key: sss + coordinates: [longitude, latitude] + units: psu + + PP: + name: PP + long_name: Primary Production + file_type: goci2_l2_pp + file_key: PP + coordinates: [longitude, latitude] + units: PP unit + + RI: + name: RI + long_name: Red Tide Index + file_type: goci2_l2_ri + file_key: RI + coordinates: [longitude, latitude] + + SSC_direction: + name: SSC_direction + long_name: Sea Surface Current direction + file_type: goci2_l2_ssc + file_key: SSC_direction + coordinates: [longitude, latitude] + units: degree + + SSC_speed: + name: SSC_speed + long_name: Sea Surface Current speed + file_type: goci2_l2_ssc + file_key: SSC_speed + coordinates: [longitude, latitude] + units: m s-1 + + SSC_u: + name: SSC_u + long_name: Sea Surface Current u-component + file_type: goci2_l2_ssc + file_key: SSC_u + coordinates: [longitude, latitude] + units: m s-1 + + SSC_v: + name: SSC_v + long_name: Sea Surface Current v-component + file_type: goci2_l2_ssc
+ file_key: SSC_v + coordinates: [longitude, latitude] + units: m s-1 diff --git a/satpy/etc/readers/meris_nc_sen3.yaml b/satpy/etc/readers/meris_nc_sen3.yaml index ba3d02969a..28d5597665 100644 --- a/satpy/etc/readers/meris_nc_sen3.yaml +++ b/satpy/etc/readers/meris_nc_sen3.yaml @@ -1,6 +1,10 @@ reader: - description: NC Reader for MERIS data (Sentinel 3 like format) name: meris_nc_sen3 + short_name: MERIS Sentinel 3 + long_name: Sentinel 3 MERIS NetCDF format + description: NC Reader for MERIS data (Sentinel 3 like format) + status: Beta + supports_fsspec: false sensors: [meris] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader diff --git a/satpy/etc/readers/mersi2_l1b.yaml b/satpy/etc/readers/mersi2_l1b.yaml index 78bd861169..3e0ecb390c 100644 --- a/satpy/etc/readers/mersi2_l1b.yaml +++ b/satpy/etc/readers/mersi2_l1b.yaml @@ -468,8 +468,6 @@ datasets: counts: units: "1" standard_name: counts - - # Not sure how to get radiance for BT channels '20': name: '20' wavelength: [3.710, 3.800, 3.890] @@ -484,6 +482,9 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts @@ -501,6 +502,9 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts @@ -518,6 +522,9 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts @@ -535,6 +542,9 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength counts: 
units: "1" standard_name: counts @@ -558,6 +568,9 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts @@ -581,6 +594,9 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts diff --git a/satpy/etc/readers/mersi_ll_l1b.yaml b/satpy/etc/readers/mersi_ll_l1b.yaml index 7c572885c7..47b6d432b0 100644 --- a/satpy/etc/readers/mersi_ll_l1b.yaml +++ b/satpy/etc/readers/mersi_ll_l1b.yaml @@ -1,6 +1,10 @@ reader: - description: FY-3E Medium Resolution Spectral Imager - Low Light (MERSI-LL) L1B Reader name: mersi_ll_l1b + short_name: MERSI Low Light FY3E L1B + long_name: FY-3E MERSI Low Light Level 1B + description: FY-3E Medium Resolution Spectral Imager - Low Light (MERSI-LL) L1B Reader + status: Nominal + supports_fsspec: true sensors: [mersi-ll] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader @@ -49,11 +53,18 @@ datasets: 1000: file_type: mersi_ll_l1b_1000 file_key: Data/EV_1KM_LL + calibration_key: Calibration/Solar_Irradiance_LL coordinates: [longitude, latitude] calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts '2': name: '2' wavelength: [3.710, 3.800, 3.890] @@ -72,6 +83,9 @@ datasets: radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts '3': name: '3' wavelength: [3.9725, 4.050, 4.1275] @@ -90,6 +104,9 @@ datasets: radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: 
counts '4': name: '4' wavelength: [6.950, 7.20, 7.450] @@ -108,6 +125,9 @@ datasets: radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts '5': name: '5' wavelength: [8.400, 8.550, 8.700] @@ -126,6 +146,9 @@ datasets: radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts '6': name: '6' wavelength: [10.300, 10.800, 11.300] @@ -149,6 +172,9 @@ datasets: radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts '7': name: '7' wavelength: [11.500, 12.000, 12.500] @@ -172,6 +198,9 @@ datasets: radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts longitude: name: longitude diff --git a/satpy/etc/readers/mersi_rm_l1b.yaml b/satpy/etc/readers/mersi_rm_l1b.yaml new file mode 100644 index 0000000000..fa70ad57a5 --- /dev/null +++ b/satpy/etc/readers/mersi_rm_l1b.yaml @@ -0,0 +1,275 @@ +reader: + name: mersi_rm_l1b + short_name: MERSI-RM l1b + long_name: MERSI-RM L1B data in HDF5 format + description: FY-3G Medium Resolution Spectral Imager - Rainfall Measurement (MERSI-RM) L1B Reader + status: Beta + supports_fsspec: false + sensors: [mersi-rm] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + +file_types: + mersi_rm_l1b_500: + file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B + rows_per_scan: 10 + file_patterns: + # From National Meteorological Satellite Center + - '{platform_shortname}_MERSI_GRAN_L1_{start_time:%Y%m%d_%H%M}_0500M_V1.{ext:3s}' + + mersi_rm_l1b_500_geo: + file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B + rows_per_scan: 10 + file_patterns: + # From National Meteorological Satellite Center + - '{platform_shortname}_MERSI_GRAN_L1_{start_time:%Y%m%d_%H%M}_GEOHK_V1.{ext:3s}' + +# NOTE: 
Min/max wavelengths are defined here as the wavelength associated with a 1% SRF. +datasets: + '1': + name: '1' + wavelength: [0.60, 0.648, 0.70] + resolution: 500 + file_type: mersi_rm_l1b_500 + file_key: Data/EV_Reflectance + band_index: 0 + calibration_key: Calibration/RSB_Cal_Coeff + calibration_index: 0 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts + '2': + name: '2' + wavelength: [0.82, 0.862, 0.91] + resolution: 500 + file_type: mersi_rm_l1b_500 + file_key: Data/EV_Reflectance + band_index: 1 + calibration_key: Calibration/RSB_Cal_Coeff + calibration_index: 0 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts + '3': + name: '3' + wavelength: [0.89, 0.935, 0.97] + resolution: 500 + file_type: mersi_rm_l1b_500 + file_key: Data/EV_Reflectance + band_index: 2 + calibration_key: Calibration/RSB_Cal_Coeff + calibration_index: 0 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts + '4': + name: '4' + wavelength: [1.33, 1.377, 1.42] + resolution: 500 + file_type: mersi_rm_l1b_500 + file_key: Data/EV_Reflectance + band_index: 3 + calibration_key: Calibration/RSB_Cal_Coeff + calibration_index: 0 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: 
toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts + '5': + name: '5' + wavelength: [1.58, 1.638, 1.69] + resolution: 500 + file_type: mersi_rm_l1b_500 + file_key: Data/EV_Reflectance + band_index: 4 + calibration_key: Calibration/RSB_Cal_Coeff + calibration_index: 0 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts + '6': + name: '6' + wavelength: [3.64, 3.809, 3.99] + resolution: 500 + file_type: mersi_rm_l1b_500 + file_key: Data/EV_Emissive + band_index: 0 + calibration_key: Calibration/IR_Cal_Coeff + calibration_index: 2 + coordinates: [longitude, latitude] + calibration: + brightness_temperature: + units: "K" + standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength + '7': + name: '7' + wavelength: [10.08, 10.736, 11.62] + resolution: 500 + file_type: mersi_rm_l1b_500 + file_key: Data/EV_Emissive + band_index: 1 + calibration_key: Calibration/IR_Cal_Coeff + calibration_index: 2 + coordinates: [longitude, latitude] + calibration: + brightness_temperature: + units: "K" + standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength + '8': + name: '8' + wavelength: [11.31, 12.019, 12.81] + resolution: 500 + file_type: mersi_rm_l1b_500 + file_key: Data/EV_Emissive + band_index: 2 + calibration_key: Calibration/IR_Cal_Coeff + calibration_index: 2 + coordinates: [longitude, latitude] + calibration: + brightness_temperature: + units: "K" + standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength + + longitude: + name: longitude + units: degrees_east + standard_name: 
longitude + resolution: 500 + file_type: mersi_rm_l1b_500_geo + file_key: Geolocation/Longitude + + latitude: + name: latitude + units: degrees_north + standard_name: latitude + resolution: 500 + file_type: mersi_rm_l1b_500_geo + file_key: Geolocation/Latitude + + solar_zenith_angle: + name: solar_zenith_angle + units: degree + standard_name: solar_zenith_angle + resolution: 500 + coordinates: [longitude, latitude] + file_type: mersi_rm_l1b_500_geo + file_key: Geolocation/SolarZenith + + solar_azimuth_angle: + name: solar_azimuth_angle + units: degree + standard_name: solar_azimuth_angle + resolution: 500 + coordinates: [longitude, latitude] + file_type: mersi_rm_l1b_500_geo + file_key: Geolocation/SolarAzimuth + + satellite_zenith_angle: + name: satellite_zenith_angle + units: degree + standard_name: sensor_zenith_angle + resolution: 500 + coordinates: [longitude, latitude] + file_type: mersi_rm_l1b_500_geo + file_key: Geolocation/SensorZenith + + satellite_azimuth_angle: + name: satellite_azimuth_angle + units: degree + standard_name: sensor_azimuth_angle + resolution: 500 + coordinates: [longitude, latitude] + file_type: mersi_rm_l1b_500_geo + file_key: Geolocation/SensorAzimuth + + moon_zenith_angle: + name: moon_zenith_angle + units: degree + standard_name: moon_zenith_angle + resolution: 500 + coordinates: [longitude, latitude] + file_type: mersi_rm_l1b_500_geo + file_key: Geolocation/MoonZenith + + moon_azimuth_angle: + name: moon_azimuth_angle + units: degree + standard_name: moon_azimuth_angle + resolution: 500 + coordinates: [longitude, latitude] + file_type: mersi_rm_l1b_500_geo + file_key: Geolocation/MoonAzimuth + + altitude: + name: altitude + units: degree + standard_name: altitude + resolution: 500 + coordinates: [longitude, latitude] + file_type: mersi_rm_l1b_500_geo + file_key: Geolocation/Altitude + + landcover: + name: landcover + units: degree + standard_name: landcover + resolution: 500 + coordinates: [longitude, latitude] + file_type: 
mersi_rm_l1b_500_geo + file_key: Geolocation/LandCover + + landseamask: + name: landseamask + units: degree + standard_name: landseamask + resolution: 500 + coordinates: [longitude, latitude] + file_type: mersi_rm_l1b_500_geo + file_key: Geolocation/LandSeaMask diff --git a/satpy/etc/readers/msi_safe.yaml b/satpy/etc/readers/msi_safe.yaml index d93d269782..cc39c26a74 100644 --- a/satpy/etc/readers/msi_safe.yaml +++ b/satpy/etc/readers/msi_safe.yaml @@ -1,8 +1,8 @@ reader: name: msi_safe - short_name: MSI SAFE - long_name: Sentinel-2 A and B MSI data in SAFE format - description: SAFE Reader for MSI data (Sentinel-2) + short_name: MSI SAFE L1C + long_name: Sentinel-2 A and B MSI L1C data in SAFE format + description: SAFE Reader for MSI L1C data (Sentinel-2) status: Nominal supports_fsspec: false sensors: [msi] @@ -10,23 +10,21 @@ reader: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: - safe_granule: + l1c_safe_granule: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C - file_patterns: ['{fmission_id:3s}_MSIL1C_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}.jp2'] - requires: [safe_metadata, safe_tile_metadata] - safe_tile_metadata: + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}.jp2'] + requires: [l1c_safe_metadata, l1c_safe_tile_metadata] + l1c_safe_tile_metadata: file_reader: 
!!python/name:satpy.readers.msi_safe.SAFEMSITileMDXML - file_patterns: ['{fmission_id:3s}_MSIL1C_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] - safe_metadata: + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] + l1c_safe_metadata: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIMDXML - file_patterns: ['{fmission_id:3s}_MSIL1C_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL1C.xml'] - + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL1C.xml'] datasets: - B01: name: B01 - sensor: MSI + sensor: msi wavelength: [0.415, 0.443, 0.470] resolution: 60 calibration: @@ -36,11 +34,14 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 - file_type: safe_granule + counts: + standard_name: counts + units: "1" + file_type: l1c_safe_granule B02: name: B02 - sensor: MSI + sensor: msi wavelength: [0.440, 0.490, 0.540] resolution: 10 calibration: @@ -50,11 +51,14 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 - file_type: safe_granule + counts: + standard_name: counts + units: "1" + file_type: l1c_safe_granule B03: name: B03 - sensor: MSI + sensor: msi wavelength: [0.540, 0.560, 0.580] resolution: 
10 calibration: @@ -64,11 +68,14 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 - file_type: safe_granule + counts: + standard_name: counts + units: "1" + file_type: l1c_safe_granule B04: name: B04 - sensor: MSI + sensor: msi wavelength: [0.645, 0.665, 0.685] resolution: 10 calibration: @@ -78,11 +85,14 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 - file_type: safe_granule + counts: + standard_name: counts + units: "1" + file_type: l1c_safe_granule B05: name: B05 - sensor: MSI + sensor: msi wavelength: [0.695, 0.705, 0.715] resolution: 20 calibration: @@ -92,11 +102,14 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 - file_type: safe_granule + counts: + standard_name: counts + units: "1" + file_type: l1c_safe_granule B06: name: B06 - sensor: MSI + sensor: msi wavelength: [0.731, 0.740, 0.749] resolution: 20 calibration: @@ -106,11 +119,14 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 - file_type: safe_granule + counts: + standard_name: counts + units: "1" + file_type: l1c_safe_granule B07: name: B07 - sensor: MSI + sensor: msi wavelength: [0.764, 0.783, 0.802] resolution: 20 calibration: @@ -120,11 +136,14 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 - file_type: safe_granule + counts: + standard_name: counts + units: "1" + file_type: l1c_safe_granule B08: name: B08 - sensor: MSI + sensor: msi wavelength: [0.780, 0.842, 0.905] resolution: 10 calibration: @@ -134,11 +153,14 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 - file_type: safe_granule + counts: + standard_name: counts + units: "1" + file_type: l1c_safe_granule B8A: name: B8A - sensor: MSI + sensor: msi wavelength: [0.855, 0.865, 0.875] resolution: 20 calibration: @@ 
-148,11 +170,14 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 - file_type: safe_granule + counts: + standard_name: counts + units: "1" + file_type: l1c_safe_granule B09: name: B09 - sensor: MSI + sensor: msi wavelength: [0.935, 0.945, 0.955] resolution: 60 calibration: @@ -162,11 +187,14 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 - file_type: safe_granule + counts: + standard_name: counts + units: "1" + file_type: l1c_safe_granule B10: name: B10 - sensor: MSI + sensor: msi wavelength: [1.365, 1.375, 1.385] resolution: 60 calibration: @@ -176,11 +204,14 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 - file_type: safe_granule + counts: + standard_name: counts + units: "1" + file_type: l1c_safe_granule B11: name: B11 - sensor: MSI + sensor: msi wavelength: [1.565, 1.610, 1.655] resolution: 20 calibration: @@ -190,11 +221,14 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 - file_type: safe_granule + counts: + standard_name: counts + units: "1" + file_type: l1c_safe_granule B12: name: B12 - sensor: MSI + sensor: msi wavelength: [2.100, 2.190, 2.280] resolution: 20 calibration: @@ -204,31 +238,33 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 - file_type: safe_granule - + counts: + standard_name: counts + units: "1" + file_type: l1c_safe_granule solar_zenith_angle: name: solar_zenith_angle resolution: [10, 20, 60] - file_type: safe_tile_metadata + file_type: l1c_safe_tile_metadata xml_tag: Sun_Angles_Grid/Zenith solar_azimuth_angle: name: solar_azimuth_angle resolution: [10, 20, 60] - file_type: safe_tile_metadata + file_type: l1c_safe_tile_metadata xml_tag: Sun_Angles_Grid/Azimuth satellite_azimuth_angle: name: satellite_azimuth_angle resolution: [10, 20, 60] - file_type: safe_tile_metadata 
+ file_type: l1c_safe_tile_metadata xml_tag: Viewing_Incidence_Angles_Grids xml_item: Azimuth satellite_zenith_angle: name: satellite_zenith_angle resolution: [10, 20, 60] - file_type: safe_tile_metadata + file_type: l1c_safe_tile_metadata xml_tag: Viewing_Incidence_Angles_Grids xml_item: Zenith diff --git a/satpy/etc/readers/msi_safe_l2a.yaml b/satpy/etc/readers/msi_safe_l2a.yaml new file mode 100644 index 0000000000..f4c6e4221a --- /dev/null +++ b/satpy/etc/readers/msi_safe_l2a.yaml @@ -0,0 +1,345 @@ +reader: + name: msi_safe_l2a + short_name: MSI SAFE L2A + long_name: Sentinel-2 A and B MSI L2A data in SAFE format + description: SAFE Reader for MSI L2A data (Sentinel-2) + status: Nominal + supports_fsspec: false + sensors: [msi] + default_channels: [] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + data_identification_keys: + name: + required: true + wavelength: + type: !!python/name:satpy.dataset.dataid.WavelengthRange + resolution: + transitive: false + calibration: + enum: + - reflectance + - radiance + - counts + - aerosol_thickness + - water_vapor + transitive: true + modifiers: + default: [] + type: !!python/name:satpy.dataset.dataid.ModifierTuple + +file_types: + l2a_safe_granule_10m: + file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R10m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_10m.jp2'] + requires: [l2a_safe_metadata, l2a_safe_tile_metadata] + l2a_safe_granule_20m: + file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C + file_patterns: 
['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R20m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_20m.jp2'] + requires: [l2a_safe_metadata, l2a_safe_tile_metadata] + l2a_safe_granule_60m: + file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R60m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_60m.jp2'] + requires: [l2a_safe_metadata, l2a_safe_tile_metadata] + l2a_safe_tile_metadata: + file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSITileMDXML + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] + l2a_safe_metadata: + file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIMDXML + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL2A.xml'] + +datasets: + B01: + name: B01 + sensor: msi + wavelength: [0.415, 0.443, 0.470] + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + 
standard_name: boa_bidirectional_reflectance + units: "%" + radiance: + standard_name: boa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + B02: + name: B02 + sensor: msi + wavelength: [0.440, 0.490, 0.540] + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + resolution: + 10: {file_type: l2a_safe_granule_10m} + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: boa_bidirectional_reflectance + units: "%" + radiance: + standard_name: boa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + B03: + name: B03 + sensor: msi + wavelength: [0.540, 0.560, 0.580] + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + resolution: + 10: {file_type: l2a_safe_granule_10m} + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: boa_bidirectional_reflectance + units: "%" + radiance: + standard_name: boa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + B04: + name: B04 + sensor: msi + wavelength: [0.645, 0.665, 0.685] + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + resolution: + 10: {file_type: l2a_safe_granule_10m} + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: boa_bidirectional_reflectance + units: "%" + radiance: + standard_name: boa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + B05: + name: B05 + sensor: msi + wavelength: [0.695, 0.705, 0.715] + modifiers: [esa_sunz_corrected] + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: boa_bidirectional_reflectance + units: "%" + radiance: + 
standard_name: boa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + B06: + name: B06 + sensor: msi + wavelength: [0.731, 0.740, 0.749] + modifiers: [esa_sunz_corrected] + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: boa_bidirectional_reflectance + units: "%" + radiance: + standard_name: boa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + B07: + name: B07 + sensor: msi + wavelength: [0.764, 0.783, 0.802] + modifiers: [esa_sunz_corrected] + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: boa_bidirectional_reflectance + units: "%" + radiance: + standard_name: boa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + B08: + name: B08 + sensor: msi + wavelength: [0.780, 0.842, 0.905] + modifiers: [esa_sunz_corrected] + resolution: + 10: {file_type: l2a_safe_granule_10m} + calibration: + reflectance: + standard_name: boa_bidirectional_reflectance + units: "%" + radiance: + standard_name: boa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + B8A: + name: B8A + sensor: msi + wavelength: [0.855, 0.865, 0.875] + modifiers: [esa_sunz_corrected] + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: boa_bidirectional_reflectance + units: "%" + radiance: + standard_name: boa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + B09: + name: B09 + sensor: msi + wavelength: [0.935, 0.945, 0.955] + modifiers: [esa_sunz_corrected] + resolution: + 60: {file_type: l2a_safe_granule_60m} + calibration: + 
reflectance: + standard_name: boa_bidirectional_reflectance + units: "%" + radiance: + standard_name: boa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + B11: + name: B11 + sensor: msi + wavelength: [1.565, 1.610, 1.655] + modifiers: [esa_sunz_corrected] + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: boa_bidirectional_reflectance + units: "%" + radiance: + standard_name: boa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + B12: + name: B12 + sensor: msi + wavelength: [2.100, 2.190, 2.280] + modifiers: [esa_sunz_corrected] + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: boa_bidirectional_reflectance + units: "%" + radiance: + standard_name: boa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + AOT: + name: AOT + sensor: msi + resolution: + 10: {file_type: l2a_safe_granule_10m} + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + aerosol_thickness: + standard_name: aerosol_optical_thickness + units: "1" + counts: + standard_name: counts + units: "1" + + WVP: + name: WVP + sensor: msi + resolution: + 10: {file_type: l2a_safe_granule_10m} + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + water_vapor: + standard_name: water_vapor + units: cm + counts: + standard_name: counts + units: "1" + + SCL: + name: SCL + sensor: msi + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + counts: + standard_name: counts + units: "1" + + solar_zenith_angle: + name: solar_zenith_angle + resolution: [10, 20, 60] + file_type: l2a_safe_tile_metadata + xml_tag: 
Sun_Angles_Grid/Zenith + + solar_azimuth_angle: + name: solar_azimuth_angle + resolution: [10, 20, 60] + file_type: l2a_safe_tile_metadata + xml_tag: Sun_Angles_Grid/Azimuth + + satellite_azimuth_angle: + name: satellite_azimuth_angle + resolution: [10, 20, 60] + file_type: l2a_safe_tile_metadata + xml_tag: Viewing_Incidence_Angles_Grids + xml_item: Azimuth + + satellite_zenith_angle: + name: satellite_zenith_angle + resolution: [10, 20, 60] + file_type: l2a_safe_tile_metadata + xml_tag: Viewing_Incidence_Angles_Grids + xml_item: Zenith diff --git a/satpy/etc/readers/nwcsaf-geo.yaml b/satpy/etc/readers/nwcsaf-geo.yaml index e22ae09fc1..c003d6f6e7 100644 --- a/satpy/etc/readers/nwcsaf-geo.yaml +++ b/satpy/etc/readers/nwcsaf-geo.yaml @@ -77,6 +77,11 @@ datasets: resolution: 3000 file_type: nc_nwcsaf_cma + cma_quality: + name: cma_quality + resolution: 3000 + file_type: nc_nwcsaf_cma + cma_pal: name: cma_pal resolution: 3000 @@ -129,6 +134,11 @@ datasets: resolution: 3000 file_type: nc_nwcsaf_ct + ct_status_flag: + name: ct_status_flag + resolution: 3000 + file_type: nc_nwcsaf_ct + ct_pal: name: ct_pal resolution: 3000 @@ -370,7 +380,7 @@ datasets: file_type: nc_nwcsaf_crr-ph crrph_pal: - name: crrph_intensity_pal + name: crrph_pal resolution: 3000 file_type: nc_nwcsaf_crr-ph @@ -390,7 +400,7 @@ datasets: file_type: nc_nwcsaf_crr-ph crrph_status_flag: - name: crrph_status + name: crrph_status_flag resolution: 3000 file_type: nc_nwcsaf_crr-ph @@ -689,11 +699,13 @@ datasets: file_type: nc_nwcsaf_rdt # ----ASII products in multiple files ------------ + # until v2018 asii_turb_trop_prob: name: asii_turb_trop_prob resolution: 3000 file_type: [nc_nwcsaf_asii_tf, nc_nwcsaf_asii] + # until v2018 asii_turb_prob_pal: name: asii_turb_prob_pal resolution: 3000 @@ -701,6 +713,24 @@ datasets: # ----ASII-TF product ------------ + # v2021 onwards + asiitf_prob: + name: asiitf_prob + resolution: 3000 + file_type: nc_nwcsaf_asii_tf + + # v2021 onwards + asiitf_prob_pal: + name: 
asiitf_prob_pal + file_type: nc_nwcsaf_asii_tf + + # v2021 onwards + asiitf_status_flag: + name: asiitf_status_flag + resolution: 3000 + file_type: nc_nwcsaf_asii_tf + + # until v2018 asii_turb_prob_status_flag: name: asii_turb_trop_prob_status_flag resolution: 3000 @@ -718,11 +748,30 @@ datasets: # ----ASII-GW product ------------ + # v2021 onwards + asiigw_wv_prob: + name: asiigw_wv_prob + resolution: 3000 + file_type: nc_nwcsaf_asii_gw + + # v2021 onwards + asiigw_status_flag: + name: asiigw_status_flag + resolution: 3000 + file_type: nc_nwcsaf_asii_gw + + # v2021 onwards + asiigw_wv_prob_pal: + name: asiigw_wv_prob_pal + file_type: nc_nwcsaf_asii_gw + + # until v2018 asii_turb_wave_prob: name: asii_turb_wave_prob resolution: 3000 file_type: nc_nwcsaf_asii_gw + # until v2018 asii_turb_wave_prob_status_flag: name: asii_turb_wave_prob_status_flag resolution: 3000 diff --git a/satpy/etc/readers/sar-c_safe.yaml b/satpy/etc/readers/sar-c_safe.yaml index 4e45ca8584..a14a401af9 100644 --- a/satpy/etc/readers/sar-c_safe.yaml +++ b/satpy/etc/readers/sar-c_safe.yaml @@ -7,7 +7,7 @@ reader: supports_fsspec: false sensors: [sar-c] default_channels: [] - reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + reader: !!python/name:satpy.readers.sar_c_safe.SAFESARReader data_identification_keys: name: required: true @@ -40,19 +40,15 @@ reader: file_types: safe_measurement: - file_reader: !!python/name:satpy.readers.sar_c_safe.SAFEGRD file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/measurement/{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.tiff'] requires: [safe_calibration, safe_noise, safe_annotation] 
safe_calibration: - file_reader: !!python/name:satpy.readers.sar_c_safe.SAFEXMLCalibration file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/annotation/calibration/calibration-{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.xml'] requires: [safe_annotation] safe_noise: - file_reader: !!python/name:satpy.readers.sar_c_safe.SAFEXMLNoise file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/annotation/calibration/noise-{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.xml'] requires: [safe_annotation] safe_annotation: - file_reader: !!python/name:satpy.readers.sar_c_safe.SAFEXMLAnnotation file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/annotation/{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.xml'] diff --git a/satpy/etc/readers/sgli_l1b.yaml b/satpy/etc/readers/sgli_l1b.yaml index 9f8108510f..4cb86890c4 100644 --- a/satpy/etc/readers/sgli_l1b.yaml +++ b/satpy/etc/readers/sgli_l1b.yaml @@ -1,7 +1,11 @@ reader: + name: sgli_l1b 
+ short_name: SGLI GCOM-C L1B + long_name: GCOM-C SGLI Level 1B HDF5 format description: Reader for SGLI data + status: Beta + supports_fsspec: false reference: https://gportal.jaxa.jp/gpr/assets/mng_upload/GCOM-C/SGLI_Level1_Product_Format_Description_en.pdf - name: sgli_l1b sensors: [sgli] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader diff --git a/satpy/etc/readers/slstr_l2.yaml b/satpy/etc/readers/slstr_l2.yaml deleted file mode 100644 index 7924cb198a..0000000000 --- a/satpy/etc/readers/slstr_l2.yaml +++ /dev/null @@ -1,63 +0,0 @@ -reader: - name: slstr_l2 - short_name: SLSTR l2 - long_name: Sentinel-3 SLSTR Level 2 data in netCDF format - description: NC Reader for Sentinel-3 SLSTR Level 2 data - status: defunct - supports_fsspec: false - sensors: [slstr_l2] - default_channels: [] - reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader - -file_types: - SLSTRB: - file_reader: !!python/name:satpy.readers.slstr_l2.SLSTRL2FileHandler - file_patterns: ['{start_time:%Y%m%d%H%M%S}-{generating_centre:3s}-{type_id:3s}_GHRSST-SSTskin-SLSTR{something:1s}-{end_time:%Y%m%d%H%M%S}-{version}.nc', - '{mission_id:3s}_SL_{processing_level:1s}_WST____{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3.tar'] - -datasets: - longitude: - name: longitude - resolution: 1000 - view: nadir - file_type: SLSTRB - standard_name: lon - units: degree - - latitude: - name: latitude - resolution: 1000 - view: nadir - file_type: SLSTRB - standard_name: lat - units: degree - - sea_surface_temperature: - name: sea_surface_temperature - sensor: slstr_l2 - coordinates: [longitude, latitude] - file_type: SLSTRB - resolution: 1000 - view: nadir - units: kelvin - standard_name: sea_surface_temperature - - sea_ice_fraction: - name: sea_ice_fraction - sensor: slstr_l2 - coordinates: [longitude, latitude] - 
file_type: SLSTRB - resolution: 1000 - view: nadir - units: "%" - standard_name: sea_ice_fraction - - # Quality estimation 0-5: no data, cloud, worst, low, acceptable, best - quality_level: - name: quality_level - sensor: slstr_l2 - coordinates: [longitude, latitude] - file_type: SLSTRB - resolution: 1000 - view: nadir - standard_name: quality_level diff --git a/satpy/etc/readers/viirs_compact.yaml b/satpy/etc/readers/viirs_compact.yaml index 31f4201930..5dca3abbaa 100644 --- a/satpy/etc/readers/viirs_compact.yaml +++ b/satpy/etc/readers/viirs_compact.yaml @@ -1,7 +1,7 @@ reader: name: viirs_compact short_name: VIIRS Compact - long_name: SNPP VIIRS SDR data in HDF5 Compact format + long_name: JPSS VIIRS SDR data in HDF5 Compact format description: Generic Eumetsat Compact VIIRS Reader status: Nominal supports_fsspec: false diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 37f36934b8..4c4c91a91f 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -1,6 +1,10 @@ reader: - description: VIIRS NOAA Enterprise EDR product reader name: viirs_edr + short_name: VIIRS EDR + long_name: JPSS VIIRS EDR NetCDF format + description: VIIRS NOAA Enterprise EDR product reader + status: Beta + supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [viirs] group_keys: ['platform_shortname'] diff --git a/satpy/etc/readers/viirs_l1b.yaml b/satpy/etc/readers/viirs_l1b.yaml index f078c4247d..8f5a417acf 100644 --- a/satpy/etc/readers/viirs_l1b.yaml +++ b/satpy/etc/readers/viirs_l1b.yaml @@ -1,7 +1,7 @@ reader: name: viirs_l1b short_name: VIIRS l1b - long_name: SNPP VIIRS Level 1b data in netCDF4 format + long_name: JPSS VIIRS Level 1b data in netCDF4 format description: Generic NASA VIIRS L1B Reader status: Nominal supports_fsspec: false @@ -481,7 +481,7 @@ datasets: resolution: 743 coordinates: [dnb_lon, dnb_lat] file_type: vgeod - file_key: geolocation_data/solar_zenith + 
file_key: geolocation_data/sensor_zenith DNB_LZA: name: dnb_lunar_zenith_angle standard_name: lunar_zenith_angle diff --git a/satpy/etc/readers/viirs_l2.yaml b/satpy/etc/readers/viirs_l2.yaml new file mode 100644 index 0000000000..d4dceccccc --- /dev/null +++ b/satpy/etc/readers/viirs_l2.yaml @@ -0,0 +1,126 @@ +reader: + name: viirs_l2 + short_name: VIIRS L2 + long_name: SNPP VIIRS Level 2 data in netCDF4 format + description: Generic NASA VIIRS L2 Reader + status: Alpha + supports_fsspec: false + reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader + sensors: [viirs] + +file_types: + cldprop_l2_viirs: + file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler + file_patterns: + - 'CLDPROP_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' + cldmsk_l2_viirs: + file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler + file_patterns: + - 'CLDMSK_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' + aerdb_l2_viirs: + file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler + file_patterns: + - 'AERDB_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' + - 'AERDB_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.nrt.nc' + cldir_l2_viirs: + file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler + file_patterns: + - 'CLDIR_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' + aerdt_l2_viirs: + file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler + file_patterns: + - 'AERDT_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' + fsnrad_l2_viirs: + file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler + file_patterns: + - 
'FSNRAD_L2_VIIRS_CRIS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' + - 'FSNRAD_L2_VIIRS_CRIS_SS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' + +datasets: + cld_lon: + name: cld_lon + resolution: 1000 + file_type: [cldmsk_l2_viirs, cldprop_l2_viirs] + file_key: geolocation_data/longitude + units: degrees + standard_name: longitude + cld_lat: + name: cld_lat + resolution: 1000 + file_type: [cldmsk_l2_viirs, cldprop_l2_viirs] + file_key: geolocation_data/latitude + units: degrees + standard_name: latitude + aerdb_lon: + name: aerdb_lon + resolution: 1000 + file_type: [aerdb_l2_viirs] + file_key: Longitude + units: degrees + standard_name: longitude + aerdb_lat: + name: aerdb_lat + resolution: 1000 + file_type: [aerdb_l2_viirs] + file_key: Latitude + units: degrees + standard_name: latitude + aerdt_lon: + name: aerdt_lon + resolution: 1000 + file_type: [aerdt_l2_viirs] + file_key: longitude + units: degrees + standard_name: longitude + aerdt_lat: + name: aerdt_lat + resolution: 1000 + file_type: [aerdt_l2_viirs] + file_key: latitude + units: degrees + standard_name: latitude + +################################## +# Datasets in file cldmsk_l2_viirs +################################## + Clear_Sky_Confidence: + name: Clear_Sky_Confidence + long_name: VIIRS Clear Sky Confidence + units: "1" + coordinates: [cld_lon, cld_lat] + file_key: geophysical_data/Clear_Sky_Confidence + file_type: cldmsk_l2_viirs + standard_name: clear_sky_confidence + +################################### +# Datasets in file cldprop_l2_viirs +################################### + Cloud_Top_Height: + name: Cloud_Top_Height + long_name: Cloud Top Height from NOAA CLAVR-x AWG algorithm + units: m + coordinates: [cld_lon,cld_lat] + file_key: geophysical_data/Cloud_Top_Height + file_type: cldprop_l2_viirs + standard_name: cloud_top_height + +########################################## +# Datasets in files 
aerdb_l2_viirs +########################################## + Angstrom_Exponent_Land_Ocean_Best_Estimate: + name: Angstrom_Exponent_Land_Ocean_Best_Estimate + long_name: Deep Blue/SOAR Angstrom exponent over land and ocean + units: "1" + coordinates: [aerdb_lon,aerdb_lat] + file_key: Angstrom_Exponent_Land_Ocean_Best_Estimate + file_type: [aerdb_l2_viirs] + standard_name: angstrom_exponent_land_ocean_best_estimate + + Aerosol_Optical_Thickness_550_Land_Ocean: + name: Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate + long_name: Deep Blue/SOAR aerosol optical thickness at 550 nm over land and ocean + units: "1" + coordinates: [aerdb_lon,aerdb_lat] + file_key: Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate + file_type: [aerdb_l2_viirs] + standard_name: aerosol_optical_thickness_550_land_ocean diff --git a/satpy/etc/readers/viirs_sdr.yaml b/satpy/etc/readers/viirs_sdr.yaml index e85c7f4f70..70f2c5f34a 100644 --- a/satpy/etc/readers/viirs_sdr.yaml +++ b/satpy/etc/readers/viirs_sdr.yaml @@ -1,7 +1,7 @@ reader: name: viirs_sdr short_name: VIIRS SDR - long_name: SNPP VIIRS data in HDF5 SDR format + long_name: JPSS VIIRS data in HDF5 SDR format description: VIIRS SDR Reader status: Nominal supports_fsspec: false diff --git a/satpy/etc/readers/viirs_vgac_l1c_nc.yaml b/satpy/etc/readers/viirs_vgac_l1c_nc.yaml index bf3c254f80..33dc9571d2 100644 --- a/satpy/etc/readers/viirs_vgac_l1c_nc.yaml +++ b/satpy/etc/readers/viirs_vgac_l1c_nc.yaml @@ -247,8 +247,13 @@ datasets: units: degrees_east nc_key: lon - time: - name: time + scanline_timestamps: + name: scanline_timestamps resolution: 5000 file_type: vgac_nc nc_key: time + + proj_time0: + name: proj_time0 + file_type: vgac_nc + nc_key: proj_time0 diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 1471ba3669..5ea8530612 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -18,11 +18,11 @@ """Utilties for getting various angles for a dataset..""" from __future__ import 
annotations +import datetime as dt import hashlib import os import shutil import warnings -from datetime import datetime from functools import update_wrapper from glob import glob from typing import Any, Callable, Optional, Union @@ -45,7 +45,7 @@ # pyorbital's get_observer_look function. # The difference is on the order of 1e-10 at most as time changes so we force # it to a single time for easier caching. It is *only* used if caching. -STATIC_EARTH_INERTIAL_DATETIME = datetime(2000, 1, 1, 12, 0, 0) +STATIC_EARTH_INERTIAL_DATETIME = dt.datetime(2000, 1, 1, 12, 0, 0) DEFAULT_UNCACHE_TYPES = (SwathDefinition, xr.DataArray, da.Array) HASHABLE_GEOMETRIES = (AreaDefinition, StackedAreaDefinition) @@ -263,7 +263,7 @@ def _hash_args(*args, unhashable_types=DEFAULT_UNCACHE_TYPES): raise TypeError(f"Unhashable type ({type(arg)}).") if isinstance(arg, HASHABLE_GEOMETRIES): arg = hash(arg) - elif isinstance(arg, datetime): + elif isinstance(arg, dt.datetime): arg = arg.isoformat(" ") hashable_args.append(arg) arg_hash = hashlib.sha1() # nosec @@ -274,7 +274,7 @@ def _hash_args(*args, unhashable_types=DEFAULT_UNCACHE_TYPES): def _sanitize_observer_look_args(*args): new_args = [] for arg in args: - if isinstance(arg, datetime): + if isinstance(arg, dt.datetime): new_args.append(STATIC_EARTH_INERTIAL_DATETIME) elif isinstance(arg, (float, np.float64, np.float32)): # Round floating point numbers to nearest tenth. 
Numpy types don't @@ -448,7 +448,7 @@ def _cos_zen_ndarray(lons, lats, utc_time): return pyob_cos_zen(utc_time, lons, lats) -def _get_sun_azimuth_ndarray(lons: np.ndarray, lats: np.ndarray, start_time: datetime) -> np.ndarray: +def _get_sun_azimuth_ndarray(lons: np.ndarray, lats: np.ndarray, start_time: dt.datetime) -> np.ndarray: with ignore_invalid_float_warnings(): suna = get_alt_az(start_time, lons, lats)[1] suna = np.rad2deg(suna) diff --git a/satpy/multiscene/_blend_funcs.py b/satpy/multiscene/_blend_funcs.py index 49869a0418..7478648140 100644 --- a/satpy/multiscene/_blend_funcs.py +++ b/satpy/multiscene/_blend_funcs.py @@ -1,6 +1,5 @@ from __future__ import annotations -from datetime import datetime from typing import Callable, Iterable, Mapping, Optional, Sequence import pandas as pd @@ -13,7 +12,6 @@ def stack( data_arrays: Sequence[xr.DataArray], weights: Optional[Sequence[xr.DataArray]] = None, - combine_times: bool = True, blend_type: str = "select_with_weights" ) -> xr.DataArray: """Combine a series of datasets in different ways. 
@@ -39,19 +37,18 @@ def stack( """ if weights: - return _stack_with_weights(data_arrays, weights, combine_times, blend_type) - return _stack_no_weights(data_arrays, combine_times) + return _stack_with_weights(data_arrays, weights, blend_type) + return _stack_no_weights(data_arrays) def _stack_with_weights( datasets: Sequence[xr.DataArray], weights: Sequence[xr.DataArray], - combine_times: bool, blend_type: str ) -> xr.DataArray: blend_func = _get_weighted_blending_func(blend_type) filled_weights = list(_fill_weights_for_invalid_dataset_pixels(datasets, weights)) - return blend_func(datasets, filled_weights, combine_times) + return blend_func(datasets, filled_weights) def _get_weighted_blending_func(blend_type: str) -> Callable: @@ -84,10 +81,9 @@ def _fill_weights_for_invalid_dataset_pixels( def _stack_blend_by_weights( datasets: Sequence[xr.DataArray], weights: Sequence[xr.DataArray], - combine_times: bool ) -> xr.DataArray: """Stack datasets blending overlap using weights.""" - attrs = _combine_stacked_attrs([data_arr.attrs for data_arr in datasets], combine_times) + attrs = _combine_stacked_attrs([data_arr.attrs for data_arr in datasets]) overlays = [] for weight, overlay in zip(weights, datasets): @@ -109,14 +105,13 @@ def _stack_blend_by_weights( def _stack_select_by_weights( datasets: Sequence[xr.DataArray], weights: Sequence[xr.DataArray], - combine_times: bool ) -> xr.DataArray: """Stack datasets selecting pixels using weights.""" indices = da.argmax(da.dstack(weights), axis=-1) if "bands" in datasets[0].dims: indices = [indices] * datasets[0].sizes["bands"] - attrs = _combine_stacked_attrs([data_arr.attrs for data_arr in datasets], combine_times) + attrs = _combine_stacked_attrs([data_arr.attrs for data_arr in datasets]) dims = datasets[0].dims coords = datasets[0].coords selected_array = xr.DataArray(da.choose(indices, datasets), dims=dims, coords=coords, attrs=attrs) @@ -125,7 +120,6 @@ def _stack_select_by_weights( def _stack_no_weights( datasets: 
Sequence[xr.DataArray], - combine_times: bool ) -> xr.DataArray: base = datasets[0].copy() collected_attrs = [base.attrs] @@ -136,32 +130,13 @@ def _stack_no_weights( except KeyError: base = base.where(data_arr.isnull(), data_arr) - attrs = _combine_stacked_attrs(collected_attrs, combine_times) + attrs = _combine_stacked_attrs(collected_attrs) base.attrs = attrs return base -def _combine_stacked_attrs(collected_attrs: Sequence[Mapping], combine_times: bool) -> dict: - attrs = combine_metadata(*collected_attrs) - if combine_times and ("start_time" in attrs or "end_time" in attrs): - new_start, new_end = _get_combined_start_end_times(collected_attrs) - if new_start: - attrs["start_time"] = new_start - if new_end: - attrs["end_time"] = new_end - return attrs - - -def _get_combined_start_end_times(metadata_objects: Iterable[Mapping]) -> tuple[datetime | None, datetime | None]: - """Get the start and end times attributes valid for the entire dataset series.""" - start_time = None - end_time = None - for md_obj in metadata_objects: - if "start_time" in md_obj and (start_time is None or md_obj["start_time"] < start_time): - start_time = md_obj["start_time"] - if "end_time" in md_obj and (end_time is None or md_obj["end_time"] > end_time): - end_time = md_obj["end_time"] - return start_time, end_time +def _combine_stacked_attrs(collected_attrs: Sequence[Mapping]) -> dict: + return combine_metadata(*collected_attrs) def timeseries(datasets): diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 21554ba465..67dac0c06b 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -15,15 +15,17 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """Shared objects of the various reader classes.""" + from __future__ import annotations +import datetime as dt import logging import os import pathlib import pickle # nosec B403 import warnings -from datetime import datetime, timedelta from functools import total_ordering import yaml @@ -39,7 +41,9 @@ # Old Name -> New Name PENDING_OLD_READER_NAMES = {"fci_l1c_fdhsi": "fci_l1c_nc", "viirs_l2_cloud_mask_nc": "viirs_edr"} -OLD_READER_NAMES: dict[str, str] = {} +OLD_READER_NAMES: dict[str, str] = { + "slstr_l2": "ghrsst_l2", +} def group_files(files_to_sort, reader=None, time_threshold=10, @@ -211,7 +215,7 @@ def _get_sorted_file_groups(all_file_keys, time_threshold): # noqa: D417 # interest of sorting flat_keys = ((v[0], rn, v[1]) for (rn, vL) in all_file_keys.items() for v in vL) prev_key = None - threshold = timedelta(seconds=time_threshold) + threshold = dt.timedelta(seconds=time_threshold) # file_groups is sorted, because dictionaries are sorted by insertion # order in Python 3.7+ file_groups = {} @@ -220,7 +224,7 @@ def _get_sorted_file_groups(all_file_keys, time_threshold): # noqa: D417 if prev_key is None: is_new_group = True prev_key = gk - elif isinstance(gk[0], datetime): + elif isinstance(gk[0], dt.datetime): # datetimes within threshold difference are "the same time" is_new_group = (gk[0] - prev_key[0]) > threshold else: @@ -573,7 +577,7 @@ def load_readers(filenames=None, reader=None, reader_kwargs=None): continue loadables = reader_instance.select_files_from_pathnames(readers_files) if loadables: - reader_instance.create_filehandlers( + reader_instance.create_storage_items( loadables, fh_kwargs=reader_kwargs_without_filter[None if reader is None else reader[idx]]) reader_instances[reader_instance.name] = reader_instance @@ -778,7 +782,7 @@ def _get_compression(file): return None -def open_file_or_filename(unknown_file_thing): +def open_file_or_filename(unknown_file_thing, mode=None): """Try to open the provided file "thing" if needed, otherwise 
return the filename or Path. This wraps the logic of getting something like an fsspec OpenFile object @@ -792,7 +796,10 @@ def open_file_or_filename(unknown_file_thing): f_obj = unknown_file_thing else: try: - f_obj = unknown_file_thing.open() + if mode is None: + f_obj = unknown_file_thing.open() + else: + f_obj = unknown_file_thing.open(mode=mode) except AttributeError: f_obj = unknown_file_thing return f_obj diff --git a/satpy/readers/_geos_area.py b/satpy/readers/_geos_area.py index 03dabfa9a0..8a89b091c9 100644 --- a/satpy/readers/_geos_area.py +++ b/satpy/readers/_geos_area.py @@ -72,6 +72,7 @@ def get_area_extent(pdict): coff: Column offset factor loff: Line offset factor scandir: 'N2S' for standard (N->S), 'S2N' for inverse (S->N) + h: Altitude of satellite above the Earth's surface (m) Returns: aex: An area extent for the scene diff --git a/satpy/readers/aapp_l1b.py b/satpy/readers/aapp_l1b.py index e502a9da64..6e3072b4d0 100644 --- a/satpy/readers/aapp_l1b.py +++ b/satpy/readers/aapp_l1b.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reader for aapp level 1b data. 
Options for loading: @@ -24,9 +25,10 @@ https://nwp-saf.eumetsat.int/site/download/documentation/aapp/NWPSAF-MF-UD-003_Formats_v8.0.pdf """ + +import datetime as dt import functools import logging -from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -102,14 +104,14 @@ def _set_filedata_layout(self): @property def start_time(self): """Get the time of the first observation.""" - return datetime(self._data["scnlinyr"][0], 1, 1) + timedelta( + return dt.datetime(self._data["scnlinyr"][0], 1, 1) + dt.timedelta( days=int(self._data["scnlindy"][0]) - 1, milliseconds=int(self._data["scnlintime"][0])) @property def end_time(self): """Get the time of the final observation.""" - return datetime(self._data["scnlinyr"][-1], 1, 1) + timedelta( + return dt.datetime(self._data["scnlinyr"][-1], 1, 1) + dt.timedelta( days=int(self._data["scnlindy"][-1]) - 1, milliseconds=int(self._data["scnlintime"][-1])) @@ -129,10 +131,10 @@ def _get_platform_name(self, platform_names_lookup): def read(self): """Read the data.""" - tic = datetime.now() + tic = dt.datetime.now() header = np.memmap(self.filename, dtype=self._header_type, mode="r", shape=(1, )) data = np.memmap(self.filename, dtype=self._scan_type, offset=self._header_offset, mode="r") - logger.debug("Reading time %s", str(datetime.now() - tic)) + logger.debug("Reading time %s", str(dt.datetime.now() - tic)) self._header = header self._data = data diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 107382d7ba..ecfd20a830 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -15,12 +15,13 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """Advance Baseline Imager reader base class for the Level 1b and l2+ reader.""" +import datetime as dt import logging import math from contextlib import suppress -from datetime import datetime import dask import numpy as np @@ -291,12 +292,12 @@ def _get_areadef_fixedgrid(self, key): @property def start_time(self): """Start time of the current file's observations.""" - return datetime.strptime(self.nc.attrs["time_coverage_start"], "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(self.nc.attrs["time_coverage_start"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """End time of the current file's observations.""" - return datetime.strptime(self.nc.attrs["time_coverage_end"], "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(self.nc.attrs["time_coverage_end"], "%Y-%m-%dT%H:%M:%S.%fZ") def spatial_resolution_to_number(self): """Convert the 'spatial_resolution' global attribute to meters.""" diff --git a/satpy/readers/abi_l2_nc.py b/satpy/readers/abi_l2_nc.py index 2324d3e1fd..5e8ae1c7bb 100644 --- a/satpy/readers/abi_l2_nc.py +++ b/satpy/readers/abi_l2_nc.py @@ -43,7 +43,7 @@ def get_dataset(self, key, info): self._remove_problem_attrs(variable) # convert to satpy standard units - if variable.attrs["units"] == "1" and key["calibration"] == "reflectance": + if variable.attrs["units"] == "1" and key.get("calibration") == "reflectance": variable *= 100.0 variable.attrs["units"] = "%" diff --git a/satpy/readers/acspo.py b/satpy/readers/acspo.py index 8a8262af33..90356f46e2 100644 --- a/satpy/readers/acspo.py +++ b/satpy/readers/acspo.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """ACSPO SST Reader. 
See the following page for more information: @@ -22,8 +23,9 @@ https://podaac.jpl.nasa.gov/dataset/VIIRS_NPP-OSPO-L2P-v2.3 """ + +import datetime as dt import logging -from datetime import datetime import numpy as np @@ -83,7 +85,7 @@ def get_shape(self, ds_id, ds_info): @staticmethod def _parse_datetime(datestr): - return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") + return dt.datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") @property def start_time(self): diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index 313e5ccab5..cedc626408 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Advanced Himawari Imager (AHI) standard format data reader. References: @@ -58,10 +59,10 @@ """ +import datetime as dt import logging import os import warnings -from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -414,65 +415,33 @@ def start_time(self): @property def end_time(self): """Get the nominal end time.""" - return self.nominal_start_time + return self.nominal_end_time @property def observation_start_time(self): """Get the observation start time.""" - return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_start_time"].item())) + return dt.datetime(1858, 11, 17) + dt.timedelta(days=float(self.basic_info["observation_start_time"].item())) @property def observation_end_time(self): """Get the observation end time.""" - return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_end_time"].item())) + return dt.datetime(1858, 11, 17) + dt.timedelta(days=float(self.basic_info["observation_end_time"].item())) + + @property + def _timeline(self): + return "{:04d}".format(self.basic_info["observation_timeline"][0]) @property def nominal_start_time(self): """Time this band was nominally to be recorded.""" - return 
self._modify_observation_time_for_nominal(self.observation_start_time) + calc = _NominalTimeCalculator(self._timeline, self.observation_area) + return calc.get_nominal_start_time(self.observation_start_time) @property def nominal_end_time(self): """Get the nominal end time.""" - return self._modify_observation_time_for_nominal(self.observation_end_time) - - @staticmethod - def _is_valid_timeline(timeline): - """Check that the `observation_timeline` value is not a fill value.""" - if int(timeline[:2]) > 23: - return False - return True - - def _modify_observation_time_for_nominal(self, observation_time): - """Round observation time to a nominal time based on known observation frequency. - - AHI observations are split into different sectors including Full Disk - (FLDK), Japan (JP) sectors, and smaller regional (R) sectors. Each - sector is observed at different frequencies (ex. every 10 minutes, - every 2.5 minutes, and every 30 seconds). This method will take the - actual observation time and round it to the nearest interval for this - sector. So if the observation time is 13:32:48 for the "JP02" sector - which is the second Japan observation where every Japan observation is - 2.5 minutes apart, then the result should be 13:32:30. 
- - """ - timeline = "{:04d}".format(self.basic_info["observation_timeline"][0]) - if not self._is_valid_timeline(timeline): - warnings.warn( - "Observation timeline is fill value, not rounding observation time.", - stacklevel=3 - ) - return observation_time - - if self.observation_area == "FLDK": - dt = 0 - else: - observation_frequency_seconds = {"JP": 150, "R3": 150, "R4": 30, "R5": 30}[self.observation_area[:2]] - dt = observation_frequency_seconds * (int(self.observation_area[2:]) - 1) - - return observation_time.replace( - hour=int(timeline[:2]), minute=int(timeline[2:4]) + dt//60, - second=dt % 60, microsecond=0) + calc = _NominalTimeCalculator(self._timeline, self.observation_area) + return calc.get_nominal_end_time(self.nominal_start_time) def get_dataset(self, key, info): """Get the dataset.""" @@ -775,3 +744,96 @@ def _ir_calibrate(self, data): c2_ = self._header["calibration"]["c2_rad2tb_conversion"][0] return (c0_ + c1_ * Te_ + c2_ * Te_ ** 2).clip(0) + + +class _NominalTimeCalculator: + """Get time when a scan was nominally to be recorded.""" + + def __init__(self, timeline, area): + """Initialize the nominal timestamp calculator. + + Args: + timeline (str): Observation timeline (four characters HHMM) + area (str): Observation area (four characters, e.g. 
FLDK) + """ + self.timeline = self._parse_timeline(timeline) + self.area = area + + def _parse_timeline(self, timeline): + try: + return dt.datetime.strptime(timeline, "%H%M").time() + except ValueError: + return None + + def get_nominal_start_time(self, observation_start_time): + """Get nominal start time of the scan.""" + return self._modify_observation_time_for_nominal(observation_start_time) + + def get_nominal_end_time(self, nominal_start_time): + """Get nominal end time of the scan.""" + freq = self._observation_frequency + return nominal_start_time + dt.timedelta(minutes=freq // 60, + seconds=freq % 60) + + def _modify_observation_time_for_nominal(self, observation_time): + """Round observation time to a nominal time based on known observation frequency. + + AHI observations are split into different sectors including Full Disk + (FLDK), Japan (JP) sectors, and smaller regional (R) sectors. Each + sector is observed at different frequencies (ex. every 10 minutes, + every 2.5 minutes, and every 30 seconds). This method will take the + actual observation time and round it to the nearest interval for this + sector. So if the observation time is 13:32:48 for the "JP02" sector + which is the second Japan observation where every Japan observation is + 2.5 minutes apart, then the result should be 13:32:30. + """ + if not self.timeline: + warnings.warn( + "Observation timeline is fill value, not rounding observation time.", + stacklevel=3 + ) + return observation_time + timeline = self._get_closest_timeline(observation_time) + offset = self._get_offset_relative_to_timeline() + return timeline + dt.timedelta(minutes=offset//60, seconds=offset % 60) + + def _get_closest_timeline(self, observation_time): + """Find the closest timeline for the given observation time. + + Needs to check surrounding days because the observation might start + a little bit before the planned time. 
+ + Observation start time: 2022-12-31 23:59 + Timeline: 0000 + => Nominal start time: 2023-01-01 00:00 + """ + delta_days = [-1, 0, 1] + surrounding_dates = [ + (observation_time + dt.timedelta(days=delta)).date() + for delta in delta_days + ] + timelines = [ + dt.datetime.combine(date, self.timeline) + for date in surrounding_dates + ] + diffs = [ + abs((timeline - observation_time)) + for timeline in timelines + ] + argmin = np.argmin(diffs) + return timelines[argmin] + + def _get_offset_relative_to_timeline(self): + if self.area == "FLDK": + return 0 + sector_repeat = int(self.area[2:]) - 1 + return self._observation_frequency * sector_repeat + + @property + def _observation_frequency(self): + frequencies = {"FLDK": 600, "JP": 150, "R3": 150, "R4": 30, "R5": 30} + area = self.area + if area != "FLDK": + # e.g. JP01, JP02 etc + area = area[:2] + return frequencies[area] diff --git a/satpy/readers/ahi_l1b_gridded_bin.py b/satpy/readers/ahi_l1b_gridded_bin.py index 33289aee11..9c06e3c55b 100644 --- a/satpy/readers/ahi_l1b_gridded_bin.py +++ b/satpy/readers/ahi_l1b_gridded_bin.py @@ -37,7 +37,7 @@ import dask.array as da import numpy as np import xarray as xr -from appdirs import AppDirs +from platformdirs import AppDirs from pyresample import geometry from satpy.readers.file_handlers import BaseFileHandler diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index d6e6caa887..74872d5410 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reader for Himawari L2 cloud products from NOAA's big data programme. For more information about the data, see: . @@ -43,8 +44,8 @@ supported. These include the CldHgtFlag and the CloudMaskPacked variables. 
""" +import datetime as dt import logging -from datetime import datetime import xarray as xr @@ -82,14 +83,14 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Start timestamp of the dataset.""" - dt = self.nc.attrs["time_coverage_start"] - return datetime.strptime(dt, "%Y-%m-%dT%H:%M:%SZ") + date_str = self.nc.attrs["time_coverage_start"] + return dt.datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%SZ") @property def end_time(self): """End timestamp of the dataset.""" - dt = self.nc.attrs["time_coverage_end"] - return datetime.strptime(dt, "%Y-%m-%dT%H:%M:%SZ") + date_str = self.nc.attrs["time_coverage_end"] + return dt.datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%SZ") def get_dataset(self, key, info): """Load a dataset.""" diff --git a/satpy/readers/ami_l1b.py b/satpy/readers/ami_l1b.py index db8c8444d8..6841189eef 100644 --- a/satpy/readers/ami_l1b.py +++ b/satpy/readers/ami_l1b.py @@ -15,10 +15,11 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """Advanced Meteorological Imager reader for the Level 1b NetCDF4 format.""" +import datetime as dt import logging -from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -117,14 +118,14 @@ def __init__(self, filename, filename_info, filetype_info, @property def start_time(self): """Get observation start time.""" - base = datetime(2000, 1, 1, 12, 0, 0) - return base + timedelta(seconds=self.nc.attrs["observation_start_time"]) + base = dt.datetime(2000, 1, 1, 12, 0, 0) + return base + dt.timedelta(seconds=self.nc.attrs["observation_start_time"]) @property def end_time(self): """Get observation end time.""" - base = datetime(2000, 1, 1, 12, 0, 0) - return base + timedelta(seconds=self.nc.attrs["observation_end_time"]) + base = dt.datetime(2000, 1, 1, 12, 0, 0) + return base + dt.timedelta(seconds=self.nc.attrs["observation_end_time"]) def get_area_def(self, dsid): """Get area definition for this file.""" diff --git a/satpy/readers/amsr2_l2_gaasp.py b/satpy/readers/amsr2_l2_gaasp.py index 54a3769747..21442f6f3a 100644 --- a/satpy/readers/amsr2_l2_gaasp.py +++ b/satpy/readers/amsr2_l2_gaasp.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """GCOM-W1 AMSR2 Level 2 files from the GAASP software. GAASP output files are in the NetCDF4 format. 
Software is provided by NOAA @@ -36,8 +37,8 @@ """ +import datetime as dt import logging -from datetime import datetime from typing import Tuple import numpy as np @@ -94,7 +95,7 @@ def start_time(self): return self.filename_info["start_time"] except KeyError: time_str = self.nc.attrs["time_coverage_start"] - return datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): @@ -103,7 +104,7 @@ def end_time(self): return self.filename_info["end_time"] except KeyError: time_str = self.nc.attrs["time_coverage_end"] - return datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ") @property def sensor_names(self): diff --git a/satpy/readers/ascat_l2_soilmoisture_bufr.py b/satpy/readers/ascat_l2_soilmoisture_bufr.py index a5f77fd7eb..9619977e89 100644 --- a/satpy/readers/ascat_l2_soilmoisture_bufr.py +++ b/satpy/readers/ascat_l2_soilmoisture_bufr.py @@ -15,14 +15,15 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """ASCAT Soil moisture product reader for BUFR messages. Based on the IASI L2 SO2 BUFR reader. 
""" +import datetime as dt import logging -from datetime import datetime import dask.array as da import numpy as np @@ -82,7 +83,7 @@ def extract_msg_date_extremes(self, bufr, date_min=None, date_max=None): minutes = np.resize(ec.codes_get_array(bufr, "minute"), size) seconds = np.resize(ec.codes_get_array(bufr, "second"), size) for year, month, day, hour, minute, second in zip(years, months, days, hours, minutes, seconds): - time_stamp = datetime(year, month, day, hour, minute, second) + time_stamp = dt.datetime(year, month, day, hour, minute, second) date_min = time_stamp if not date_min else min(date_min, time_stamp) date_max = time_stamp if not date_max else max(date_max, time_stamp) return date_min, date_max diff --git a/satpy/readers/atms_l1b_nc.py b/satpy/readers/atms_l1b_nc.py index 95d48b81cd..4b8587c824 100644 --- a/satpy/readers/atms_l1b_nc.py +++ b/satpy/readers/atms_l1b_nc.py @@ -12,6 +12,7 @@ # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . + """Advanced Technology Microwave Sounder (ATMS) Level 1B product reader. 
The format is explained in the `ATMS L1B Product User Guide`_ @@ -21,8 +22,8 @@ """ +import datetime as dt import logging -from datetime import datetime from satpy.readers.netcdf_utils import NetCDF4FileHandler @@ -43,12 +44,12 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): @property def start_time(self): """Get observation start time.""" - return datetime.strptime(self["/attr/time_coverage_start"], DATE_FMT) + return dt.datetime.strptime(self["/attr/time_coverage_start"], DATE_FMT) @property def end_time(self): """Get observation end time.""" - return datetime.strptime(self["/attr/time_coverage_end"], DATE_FMT) + return dt.datetime.strptime(self["/attr/time_coverage_end"], DATE_FMT) @property def platform_name(self): diff --git a/satpy/readers/avhrr_l1b_gaclac.py b/satpy/readers/avhrr_l1b_gaclac.py index cfc3e1283e..47f0d97283 100644 --- a/satpy/readers/avhrr_l1b_gaclac.py +++ b/satpy/readers/avhrr_l1b_gaclac.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reading and calibrating GAC and LAC AVHRR data. Uses Pygac under the hood. 
See the `Pygac Documentation`_ for supported data @@ -29,8 +30,8 @@ https://pygac.readthedocs.io/en/stable """ +import datetime as dt import logging -from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -93,20 +94,23 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417 self.first_valid_lat = None self.last_valid_lat = None self._start_time = filename_info["start_time"] - self._end_time = datetime.combine(filename_info["start_time"].date(), - filename_info["end_time"].time()) + self._end_time = dt.datetime.combine(filename_info["start_time"].date(), + filename_info["end_time"].time()) if self._end_time < self._start_time: - self._end_time += timedelta(days=1) + self._end_time += dt.timedelta(days=1) self.platform_id = filename_info["platform_id"] - if self.platform_id in ["NK", "NL", "NM", "NN", "NP", "M1", "M2", - "M3"]: + + if len(self.platform_id) == 3: + self.reader_kwargs["header_date"] = dt.date(2000, 1, 1) + + if self._is_avhrr3(): if filename_info.get("transfer_mode") == "GHRR": self.reader_class = GACKLMReader else: self.reader_class = LACKLMReader self.chn_dict = AVHRR3_CHANNEL_NAMES self.sensor = "avhrr-3" - elif self.platform_id in ["NC", "ND", "NF", "NH", "NJ"]: + elif self._is_avhrr2(): if filename_info.get("transfer_mode") == "GHRR": self.reader_class = GACPODReader else: @@ -122,6 +126,16 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417 self.sensor = "avhrr" self.filename_info = filename_info + def _is_avhrr2(self): + return self.platform_id in ["NC", "NE", "NF", "NG", "NH", "ND", "NJ", + "N07", "N08", "N09", "N10", "N11", "N12", "N14"] + + def _is_avhrr3(self): + return self.platform_id in ["NK", "NL", "NM", "NN", "NP", + "N15", "N16", "N17", "N18", "N19", + "M1", "M2", "M3", + "MOB", "MOA", "MOC"] + def read_raw_data(self): """Create a pygac reader and read raw data from the file.""" if self.reader is None: @@ -171,8 +185,8 @@ def get_dataset(self, key, info): # 
Update start/end time using the actual scanline timestamps times = self.reader.get_times() - self._start_time = times[0].astype(datetime) - self._end_time = times[-1].astype(datetime) + self._start_time = times[0].astype(dt.datetime) + self._end_time = times[-1].astype(dt.datetime) # Select user-defined scanlines and/or strip invalid coordinates if (self.start_line is not None or self.end_line is not None @@ -210,8 +224,8 @@ def slice(self, data, times): # noqa: A003 """ sliced = self._slice(data) times = self._slice(times) - self._start_time = times[0].astype(datetime) - self._end_time = times[-1].astype(datetime) + self._start_time = times[0].astype(dt.datetime) + self._end_time = times[-1].astype(dt.datetime) return sliced, times def _slice(self, data): diff --git a/satpy/readers/caliop_l2_cloud.py b/satpy/readers/caliop_l2_cloud.py index 54dd100ffc..e088dfd853 100644 --- a/satpy/readers/caliop_l2_cloud.py +++ b/satpy/readers/caliop_l2_cloud.py @@ -16,12 +16,13 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . # type: ignore + """Interface to CALIOP L2 HDF4 cloud products.""" +import datetime as dt import logging import os.path import re -from datetime import datetime from pyhdf.SD import SD, SDC @@ -56,7 +57,7 @@ def get_end_time(self): mda_dict = self.filehandle.attributes() core_mda = mda_dict["coremetadata"] end_time_str = self.parse_metadata_string(core_mda) - self._end_time = datetime.strptime(end_time_str, "%Y-%m-%dT%H:%M:%SZ") + self._end_time = dt.datetime.strptime(end_time_str, "%Y-%m-%dT%H:%M:%SZ") @staticmethod def parse_metadata_string(metadata_string): diff --git a/satpy/readers/clavrx.py b/satpy/readers/clavrx.py index 4303456c04..c355a1f0ba 100644 --- a/satpy/readers/clavrx.py +++ b/satpy/readers/clavrx.py @@ -17,10 +17,12 @@ # satpy. If not, see . 
"""Interface to CLAVR-X HDF4 products.""" +from __future__ import annotations + import logging import os from glob import glob -from typing import Optional, Union +from typing import Optional import netCDF4 import numpy as np @@ -69,6 +71,20 @@ "abi": 2004, } +CHANNEL_ALIASES = { + "abi": {"refl_0_47um_nom": {"name": "C01", "wavelength": 0.47, "modifiers": ("sunz_corrected",)}, + "refl_0_65um_nom": {"name": "C02", "wavelength": 0.64, "modifiers": ("sunz_corrected",)}, + "refl_0_86um_nom": {"name": "C03", "wavelength": 0.865, "modifiers": ("sunz_corrected",)}, + "refl_1_38um_nom": {"name": "C04", "wavelength": 1.38, "modifiers": ("sunz_corrected",)}, + "refl_1_60um_nom": {"name": "C05", "wavelength": 1.61, "modifiers": ("sunz_corrected",)}, + "refl_2_10um_nom": {"name": "C06", "wavelength": 2.25, "modifiers": ("sunz_corrected",)}, + }, + "viirs": {"refl_0_65um_nom": {"name": "I01", "wavelength": 0.64, "modifiers": ("sunz_corrected",)}, + "refl_1_38um_nom": {"name": "M09", "wavelength": 1.38, "modifiers": ("sunz_corrected",)}, + "refl_1_60um_nom": {"name": "I03", "wavelength": 1.61, "modifiers": ("sunz_corrected",)} + } +} + def _get_sensor(sensor: str) -> str: """Get the sensor.""" @@ -94,9 +110,30 @@ def _get_rows_per_scan(sensor: str) -> Optional[int]: return None +def _scale_data(data_arr: xr.DataArray | int, scale_factor: float, add_offset: float) -> xr.DataArray: + """Scale data, if needed.""" + scaling_needed = not (scale_factor == 1.0 and add_offset == 0.0) + if scaling_needed: + data_arr = data_arr * np.float32(scale_factor) + np.float32(add_offset) + return data_arr + + class _CLAVRxHelper: """A base class for the CLAVRx File Handlers.""" + @staticmethod + def _get_nadir_resolution(sensor, filename_info_resolution): + """Get nadir resolution.""" + for k, v in NADIR_RESOLUTION.items(): + if sensor.startswith(k): + return v + if filename_info_resolution is None: + return None + if isinstance(filename_info_resolution, str) and 
filename_info_resolution.startswith("m"): + return int(filename_info_resolution[:-1]) + else: + return int(filename_info_resolution) + @staticmethod def _remove_attributes(attrs: dict) -> dict: """Remove attributes that described data before scaling.""" @@ -107,14 +144,6 @@ def _remove_attributes(attrs: dict) -> dict: attrs.pop(attr_key, None) return attrs - @staticmethod - def _scale_data(data_arr: Union[xr.DataArray, int], scale_factor: float, add_offset: float) -> xr.DataArray: - """Scale data, if needed.""" - scaling_needed = not (scale_factor == 1.0 and add_offset == 0.0) - if scaling_needed: - data_arr = data_arr * scale_factor + add_offset - return data_arr - @staticmethod def _get_data(data, dataset_id: dict) -> xr.DataArray: """Get a dataset.""" @@ -123,28 +152,31 @@ def _get_data(data, dataset_id: dict) -> xr.DataArray: attrs = data.attrs.copy() - fill = attrs.get("_FillValue") + # don't need these attributes after applied. factor = attrs.pop("scale_factor", (np.ones(1, dtype=data.dtype))[0]) offset = attrs.pop("add_offset", (np.zeros(1, dtype=data.dtype))[0]) + flag_values = data.attrs.get("flag_values", [None]) valid_range = attrs.get("valid_range", [None]) - if isinstance(valid_range, np.ndarray): - attrs["valid_range"] = valid_range.tolist() - flags = not data.attrs.get("SCALED", 1) and any(data.attrs.get("flag_values", [None])) - if not flags: + if isinstance(valid_range, np.ndarray): + valid_range = valid_range.tolist() + attrs["valid_range"] = valid_range + + flags = not data.attrs.get("SCALED", 1) and any(flag_values) + if flags: + fill = attrs.get("_FillValue", None) + if isinstance(flag_values, np.ndarray) or isinstance(flag_values, list): + data = data.where((data >= flag_values[0]) & (data <= flag_values[-1]), fill) + else: + fill = attrs.pop("_FillValue", None) data = data.where(data != fill) - data = _CLAVRxHelper._scale_data(data, factor, offset) - # don't need _FillValue if it has been applied. 
- attrs.pop("_FillValue", None) - - if all(valid_range): - valid_min = _CLAVRxHelper._scale_data(valid_range[0], factor, offset) - valid_max = _CLAVRxHelper._scale_data(valid_range[1], factor, offset) - if flags: - data = data.where((data >= valid_min) & (data <= valid_max), fill) - else: + data = _scale_data(data, factor, offset) + + if valid_range[0] is not None: + valid_min = _scale_data(valid_range[0], factor, offset) + valid_max = _scale_data(valid_range[1], factor, offset) data = data.where((data >= valid_min) & (data <= valid_max)) - attrs["valid_range"] = [valid_min, valid_max] + attrs["valid_range"] = [valid_min, valid_max] data.attrs = _CLAVRxHelper._remove_attributes(attrs) @@ -183,23 +215,27 @@ def _read_pug_fixed_grid(projection_coordinates: netCDF4.Variable, distance_mult return proj_dict @staticmethod - def _find_input_nc(filename: str, l1b_base: str) -> str: + def _find_input_nc(filename: str, sensor: str, l1b_base: str) -> str: dirname = os.path.dirname(filename) l1b_filename = os.path.join(dirname, l1b_base + ".nc") if os.path.exists(l1b_filename): return str(l1b_filename) - glob_pat = os.path.join(dirname, l1b_base + "*R20*.nc") + if sensor == "AHI": + glob_pat = os.path.join(dirname, l1b_base + "*R20*.nc") + else: + glob_pat = os.path.join(dirname, l1b_base + "*.nc") + LOG.debug("searching for {0}".format(glob_pat)) found_l1b_filenames = list(glob(glob_pat)) if len(found_l1b_filenames) == 0: - raise IOError("Could not find navigation donor for {0}" - " in same directory as CLAVR-x data".format(l1b_base)) + fp = os.path.join(dirname, l1b_base) + raise IOError(f"Missing navigation donor {fp}") LOG.debug("Candidate nav donors: {0}".format(repr(found_l1b_filenames))) return found_l1b_filenames[0] @staticmethod - def _read_axi_fixed_grid(filename: str, l1b_attr) -> geometry.AreaDefinition: + def _read_axi_fixed_grid(filename: str, sensor: str, l1b_attr) -> geometry.AreaDefinition: """Read a fixed grid. 
CLAVR-x does not transcribe fixed grid parameters to its output @@ -214,7 +250,7 @@ def _read_axi_fixed_grid(filename: str, l1b_attr) -> geometry.AreaDefinition: """ LOG.debug(f"looking for corresponding input file for {l1b_attr}" " to act as fixed grid navigation donor") - l1b_path = _CLAVRxHelper._find_input_nc(filename, l1b_attr) + l1b_path = _CLAVRxHelper._find_input_nc(filename, sensor, l1b_attr) LOG.info(f"CLAVR-x does not include fixed-grid parameters, use input file {l1b_path} as donor") l1b = netCDF4.Dataset(l1b_path) proj = None @@ -236,13 +272,13 @@ def _read_axi_fixed_grid(filename: str, l1b_attr) -> geometry.AreaDefinition: area_extent, ncols, nlines = _CLAVRxHelper._area_extent(x, y, h) area = geometry.AreaDefinition( - "ahi_geos", - "AHI L2 file area", - "ahi_geos", - proj, - ncols, - nlines, - np.asarray(area_extent)) + f"{sensor}_geos", + f"{sensor.upper()} L2 file area", + f"{sensor}_geos", + proj, + ncols, + nlines, + area_extent) return area @@ -284,6 +320,11 @@ def __init__(self, filename, filename_info, filetype_info): filename_info, filetype_info) + self.sensor = _get_sensor(self.file_content.get("/attr/sensor")) + self.platform = _get_platform(self.file_content.get("/attr/platform")) + self.resolution = _CLAVRxHelper._get_nadir_resolution(self.sensor, + self.filename_info.get("resolution")) + @property def start_time(self): """Get the start time.""" @@ -295,7 +336,7 @@ def end_time(self): return self.filename_info.get("end_time", self.start_time) def get_dataset(self, dataset_id, ds_info): - """Get a dataset.""" + """Get a dataset for Polar Sensors.""" var_name = ds_info.get("file_key", dataset_id["name"]) data = self[var_name] data = _CLAVRxHelper._get_data(data, dataset_id) @@ -303,61 +344,61 @@ def get_dataset(self, dataset_id, ds_info): data.attrs, ds_info) return data - def get_nadir_resolution(self, sensor): - """Get nadir resolution.""" - for k, v in NADIR_RESOLUTION.items(): - if sensor.startswith(k): - return v - res = 
self.filename_info.get("resolution") - if res.endswith("m"): - return int(res[:-1]) - elif res is not None: - return int(res) + def _available_aliases(self, ds_info, current_var): + """Add alias if there is a match.""" + new_info = ds_info.copy() + alias_info = CHANNEL_ALIASES.get(self.sensor).get(current_var, None) + if alias_info is not None: + alias_info.update({"file_key": current_var}) + new_info.update(alias_info) + yield True, new_info def available_datasets(self, configured_datasets=None): - """Automatically determine datasets provided by this file.""" - self.sensor = _get_sensor(self.file_content.get("/attr/sensor")) - self.platform = _get_platform(self.file_content.get("/attr/platform")) - - nadir_resolution = self.get_nadir_resolution(self.sensor) - coordinates = ("longitude", "latitude") + """Add more information if this reader can provide it.""" handled_variables = set() - - # update previously configured datasets for is_avail, ds_info in (configured_datasets or []): - this_res = ds_info.get("resolution") - this_coords = ds_info.get("coordinates") # some other file handler knows how to load this if is_avail is not None: yield is_avail, ds_info + new_info = ds_info.copy() # don't change input + this_res = ds_info.get("resolution") var_name = ds_info.get("file_key", ds_info["name"]) matches = self.file_type_matches(ds_info["file_type"]) # we can confidently say that we can provide this dataset and can # provide more info - if matches and var_name in self and this_res != nadir_resolution: + if matches and var_name in self and this_res != self.resolution: handled_variables.add(var_name) - new_info = ds_info.copy() # don't mess up the above yielded - new_info["resolution"] = nadir_resolution - if self._is_polar() and this_coords is None: - new_info["coordinates"] = coordinates + new_info["resolution"] = self.resolution + if self._is_polar(): + new_info["coordinates"] = ds_info.get("coordinates", ("longitude", "latitude")) yield True, new_info elif is_avail 
is None: # if we didn't know how to handle this dataset and no one else did # then we should keep it going down the chain yield is_avail, ds_info - # add new datasets + # get data from file dynamically + yield from self._dynamic_datasets() + + def _dynamic_datasets(self): + """Get data from file and build aliases.""" for var_name, val in self.file_content.items(): if isinstance(val, SDS): ds_info = { "file_type": self.filetype_info["file_type"], - "resolution": nadir_resolution, + "resolution": self.resolution, "name": var_name, } if self._is_polar(): ds_info["coordinates"] = ["longitude", "latitude"] + + # always yield what we have yield True, ds_info + if CHANNEL_ALIASES.get(self.sensor) is not None: + # yield variable as it is + # yield any associated aliases + yield from self._available_aliases(ds_info, var_name) def get_shape(self, dataset_id, ds_info): """Get the shape.""" @@ -376,7 +417,7 @@ def get_area_def(self, key): return super(CLAVRXHDF4FileHandler, self).get_area_def(key) l1b_att = str(self.file_content.get("/attr/L1B", None)) - area_def = _CLAVRxHelper._read_axi_fixed_grid(self.filename, l1b_att) + area_def = _CLAVRxHelper._read_axi_fixed_grid(self.filename, self.sensor, l1b_att) return area_def @@ -402,25 +443,37 @@ def __init__(self, filename, filename_info, filetype_info): self.platform = _get_platform( self.filename_info.get("platform_shortname", None)) self.sensor = _get_sensor(self.nc.attrs.get("sensor", None)) + self.resolution = _CLAVRxHelper._get_nadir_resolution(self.sensor, + self.filename_info.get("resolution")) # coordinates need scaling and valid_range (mask_and_scale won't work on valid_range) self.nc.coords["latitude"] = _CLAVRxHelper._get_data(self.nc.coords["latitude"], {"name": "latitude"}) self.nc.coords["longitude"] = _CLAVRxHelper._get_data(self.nc.coords["longitude"], {"name": "longitude"}) - def _get_ds_info_for_data_arr(self, var_name): + def _dynamic_dataset_info(self, var_name): + """Set data name and, if applicable, 
aliases.""" ds_info = { "file_type": self.filetype_info["file_type"], "name": var_name, } - return ds_info + yield True, ds_info + + if CHANNEL_ALIASES.get(self.sensor) is not None: + alias_info = ds_info.copy() + channel_info = CHANNEL_ALIASES.get(self.sensor).get(var_name, None) + if channel_info is not None: + channel_info["file_key"] = var_name + alias_info.update(channel_info) + yield True, alias_info - def _is_2d_yx_data_array(self, data_arr): + @staticmethod + def _is_2d_yx_data_array(data_arr): has_y_dim = data_arr.dims[0] == "y" has_x_dim = data_arr.dims[1] == "x" return has_y_dim and has_x_dim - def _available_new_datasets(self, handled_vars): + def _available_file_datasets(self, handled_vars): """Metadata for available variables other than BT.""" possible_vars = list(self.nc.items()) + list(self.nc.coords.items()) for var_name, data_arr in possible_vars: @@ -433,8 +486,7 @@ def _available_new_datasets(self, handled_vars): # we need 'traditional' y/x dimensions currently continue - ds_info = self._get_ds_info_for_data_arr(var_name) - yield True, ds_info + yield from self._dynamic_dataset_info(var_name) def available_datasets(self, configured_datasets=None): """Dynamically discover what variables can be loaded from this file. @@ -450,17 +502,23 @@ def available_datasets(self, configured_datasets=None): # we don't know any more information than the previous # file handler so let's yield early yield is_avail, ds_info - continue - if self.file_type_matches(ds_info["file_type"]): + + matches = self.file_type_matches(ds_info["file_type"]) + if matches and ds_info.get("resolution") != self.resolution: + # reader knows something about this dataset (file type matches) + # add any information that this reader can add. 
+ new_info = ds_info.copy() + if self.resolution is not None: + new_info["resolution"] = self.resolution handled_vars.add(ds_info["name"]) - yield self.file_type_matches(ds_info["file_type"]), ds_info - yield from self._available_new_datasets(handled_vars) + yield True, new_info + yield from self._available_file_datasets(handled_vars) def _is_polar(self): l1b_att, inst_att = (str(self.nc.attrs.get("L1B", None)), str(self.nc.attrs.get("sensor", None))) - return (inst_att != "AHI" and "GOES" not in inst_att) or (l1b_att is None) + return (inst_att not in ["ABI", "AHI"] and "GOES" not in inst_att) or (l1b_att is None) def get_area_def(self, key): """Get the area definition of the data at hand.""" @@ -468,11 +526,11 @@ def get_area_def(self, key): return super(CLAVRXNetCDFFileHandler, self).get_area_def(key) l1b_att = str(self.nc.attrs.get("L1B", None)) - return _CLAVRxHelper._read_axi_fixed_grid(self.filename, l1b_att) + return _CLAVRxHelper._read_axi_fixed_grid(self.filename, self.sensor, l1b_att) def get_dataset(self, dataset_id, ds_info): - """Get a dataset.""" - var_name = ds_info.get("name", dataset_id["name"]) + """Get a dataset for supported geostationary sensors.""" + var_name = ds_info.get("file_key", dataset_id["name"]) data = self[var_name] data = _CLAVRxHelper._get_data(data, dataset_id) data.attrs = _CLAVRxHelper.get_metadata(self.sensor, self.platform, @@ -481,5 +539,6 @@ def get_dataset(self, dataset_id, ds_info): def __getitem__(self, item): """Wrap around `self.nc[item]`.""" + # Check if "item" is an alias: data = self.nc[item] return data diff --git a/satpy/readers/electrol_hrit.py b/satpy/readers/electrol_hrit.py index c773850a73..62f99fb0a4 100644 --- a/satpy/readers/electrol_hrit.py +++ b/satpy/readers/electrol_hrit.py @@ -24,8 +24,8 @@ """ +import datetime as dt import logging -from datetime import datetime import numpy as np import xarray as xr @@ -299,7 +299,7 @@ def get_dataset(self, key, info): def calibrate(self, data, calibration): 
"""Calibrate the data.""" - tic = datetime.now() + tic = dt.datetime.now() if calibration == "counts": res = data elif calibration in ["radiance", "brightness_temperature"]: @@ -311,7 +311,7 @@ def calibrate(self, data, calibration): res.attrs["standard_name"] = calibration res.attrs["calibration"] = calibration - logger.debug("Calibration time " + str(datetime.now() - tic)) + logger.debug("Calibration time " + str(dt.datetime.now() - tic)) return res @staticmethod diff --git a/satpy/readers/epic_l1b_h5.py b/satpy/readers/epic_l1b_h5.py index 3fb8f69c01..0d993b0b6c 100644 --- a/satpy/readers/epic_l1b_h5.py +++ b/satpy/readers/epic_l1b_h5.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """File handler for DSCOVR EPIC L1B data in hdf5 format. The ``epic_l1b_h5`` reader reads and calibrates EPIC L1B image data in hdf5 format. @@ -37,8 +38,8 @@ """ +import datetime as dt import logging -from datetime import datetime import dask.array as da import numpy as np @@ -74,13 +75,13 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Get the start time.""" - start_time = datetime.strptime(self.file_content["/attr/begin_time"], "%Y-%m-%d %H:%M:%S") + start_time = dt.datetime.strptime(self.file_content["/attr/begin_time"], "%Y-%m-%d %H:%M:%S") return start_time @property def end_time(self): """Get the end time.""" - end_time = datetime.strptime(self.file_content["/attr/end_time"], "%Y-%m-%d %H:%M:%S") + end_time = dt.datetime.strptime(self.file_content["/attr/end_time"], "%Y-%m-%d %H:%M:%S") return end_time @staticmethod diff --git a/satpy/readers/eum_base.py b/satpy/readers/eum_base.py index 3cbbb46433..fe4579301d 100644 --- a/satpy/readers/eum_base.py +++ b/satpy/readers/eum_base.py @@ -17,7 +17,7 @@ # satpy. If not, see . 
"""Utilities for EUMETSAT satellite data.""" -from datetime import datetime, timedelta +import datetime as dt import numpy as np @@ -44,9 +44,9 @@ def timecds2datetime(tcds): except (KeyError, ValueError): pass - reference = datetime(1958, 1, 1) - delta = timedelta(days=days, milliseconds=milliseconds, - microseconds=microseconds) + reference = dt.datetime(1958, 1, 1) + delta = dt.timedelta(days=days, milliseconds=milliseconds, + microseconds=microseconds) return reference + delta diff --git a/satpy/readers/eum_l2_grib.py b/satpy/readers/eum_l2_grib.py index c3cc7e61c4..543aa71c30 100644 --- a/satpy/readers/eum_l2_grib.py +++ b/satpy/readers/eum_l2_grib.py @@ -22,6 +22,7 @@ https://navigator.eumetsat.int/ """ +import datetime as dt import logging import dask.array as da @@ -32,7 +33,9 @@ from satpy.readers.eum_base import get_service_mode from satpy.readers.fci_base import calculate_area_extent as fci_calculate_area_extent from satpy.readers.file_handlers import BaseFileHandler -from satpy.readers.seviri_base import PLATFORM_DICT, REPEAT_CYCLE_DURATION, REPEAT_CYCLE_DURATION_RSS +from satpy.readers.seviri_base import PLATFORM_DICT as SEVIRI_PLATFORM_DICT +from satpy.readers.seviri_base import REPEAT_CYCLE_DURATION as SEVIRI_REPEAT_CYCLE_DURATION +from satpy.readers.seviri_base import REPEAT_CYCLE_DURATION_RSS as SEVIRI_REPEAT_CYCLE_DURATION_RSS from satpy.readers.seviri_base import calculate_area_extent as seviri_calculate_area_extent from satpy.utils import get_legacy_chunk_size @@ -60,7 +63,7 @@ def __init__(self, filename, filename_info, filetype_info): if "seviri" in self.filetype_info["file_type"]: self.sensor = "seviri" - self.PLATFORM_NAME = PLATFORM_DICT[self.filename_info["spacecraft"]] + self.PLATFORM_NAME = SEVIRI_PLATFORM_DICT[self.filename_info["spacecraft"]] elif "fci" in self.filetype_info["file_type"]: self.sensor = "fci" self.PLATFORM_NAME = f"MTG-i{self.filename_info['spacecraft_id']}" @@ -74,8 +77,11 @@ def start_time(self): @property def 
end_time(self): """Return the sensing end time.""" - delta = REPEAT_CYCLE_DURATION_RSS if self._ssp_lon == 9.5 else REPEAT_CYCLE_DURATION - return self.start_time + delta + if self.sensor == "seviri": + delta = SEVIRI_REPEAT_CYCLE_DURATION_RSS if self._ssp_lon == 9.5 else SEVIRI_REPEAT_CYCLE_DURATION + return self.start_time + dt.timedelta(minutes=delta) + elif self.sensor == "fci": + return self.filename_info["end_time"] def get_area_def(self, dataset_id): """Return the area definition for a dataset.""" @@ -282,13 +288,8 @@ def _get_attributes(self): "projection_longitude": self._ssp_lon } - attributes = { - "orbital_parameters": orbital_parameters, - "sensor": self.sensor - } - - - attributes["platform_name"] = self.PLATFORM_NAME + attributes = {"orbital_parameters": orbital_parameters, "sensor": self.sensor, + "platform_name": self.PLATFORM_NAME} return attributes diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index 0c7b9fb8cc..83e58d73a5 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Interface to MTG-FCI L1c NetCDF files. This module defines the :class:`FCIL1cNCFileHandler` file handler, to @@ -29,7 +30,9 @@ .. note:: This reader currently supports Full Disk High Spectral Resolution Imagery - (FDHSI) and High Spatial Resolution Fast Imagery (HRFI) data in full-disc ("FD") scanning mode. + (FDHSI), High Spatial Resolution Fast Imagery (HRFI) data in full-disc ("FD") scanning mode. + In addition it also supports the L1C format for the African dissemination ("AF"), where each file + contains the masked full-dic of a single channel see `AF PUG`_. If the user provides a list of both FDHSI and HRFI files from the same repeat cycle to the Satpy ``Scene``, Satpy will automatically read the channels from the source with the finest resolution, i.e. 
from the HRFI files for the vis_06, nir_22, ir_38, and ir_105 channels. @@ -104,6 +107,7 @@ If you use ``hdf5plugin``, make sure to add the line ``import hdf5plugin`` at the top of your script. +.. _AF PUG: https://www-cdn.eumetsat.int/files/2022-07/MTG%20EUMETCast%20Africa%20Product%20User%20Guide%20%5BAfricaPUG%5D_v2E.pdf .. _PUG: https://www-cdn.eumetsat.int/files/2020-07/pdf_mtg_fci_l1_pug.pdf .. _EUMETSAT: https://www.eumetsat.int/mtg-flexible-combined-imager # noqa: E501 .. _test data releases: https://www.eumetsat.int/mtg-test-data @@ -111,8 +115,8 @@ from __future__ import absolute_import, division, print_function, unicode_literals +import datetime as dt import logging -from datetime import timedelta from functools import cached_property import dask.array as da @@ -146,11 +150,13 @@ HIGH_RES_GRID_INFO = {"fci_l1c_hrfi": {"grid_type": "500m", "grid_width": 22272}, "fci_l1c_fdhsi": {"grid_type": "1km", - "grid_width": 11136}} + "grid_width": 11136}, + } LOW_RES_GRID_INFO = {"fci_l1c_hrfi": {"grid_type": "1km", "grid_width": 11136}, "fci_l1c_fdhsi": {"grid_type": "2km", - "grid_width": 5568}} + "grid_width": 5568}, + } def _get_aux_data_name_from_dsname(dsname): @@ -218,7 +224,7 @@ def rc_period_min(self): As RSS is not yet implemeted and error will be raised if RSS are to be read """ - if not self.filename_info["coverage"] == "FD": + if self.filename_info["coverage"] not in ["FD","AF"]: raise NotImplementedError(f"coverage for {self.filename_info['coverage']} not supported by this reader") return 2.5 return 10 @@ -227,12 +233,13 @@ def rc_period_min(self): def nominal_start_time(self): """Get nominal start time.""" rc_date = self.observation_start_time.replace(hour=0, minute=0, second=0, microsecond=0) - return rc_date + timedelta(minutes=(self.filename_info["repeat_cycle_in_day"]-1)*self.rc_period_min) + return rc_date + dt.timedelta( + minutes=(self.filename_info["repeat_cycle_in_day"]-1)*self.rc_period_min) @property def nominal_end_time(self): """Get 
nominal end time.""" - return self.nominal_start_time + timedelta(minutes=self.rc_period_min) + return self.nominal_start_time + dt.timedelta(minutes=self.rc_period_min) @property def observation_start_time(self): @@ -272,29 +279,28 @@ def get_segment_position_info(self): Note: in the FCI terminology, a segment is actually called "chunk". To avoid confusion with the dask concept of chunk, and to be consistent with SEVIRI, we opt to use the word segment. + + Note: This function is not used for the African data as it contains only one segment. """ + file_type = self.filetype_info["file_type"] vis_06_measured_path = self.get_channel_measured_group_path("vis_06") ir_105_measured_path = self.get_channel_measured_group_path("ir_105") - - file_type = self.filetype_info["file_type"] - segment_position_info = { - HIGH_RES_GRID_INFO[file_type]["grid_type"]: { - "start_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item(), - "end_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item(), - "segment_height": self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item() - - self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item() + 1, - "grid_width": HIGH_RES_GRID_INFO[file_type]["grid_width"] - }, - LOW_RES_GRID_INFO[file_type]["grid_type"]: { - "start_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item(), - "end_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item(), - "segment_height": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item() - - self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item() + 1, - "grid_width": LOW_RES_GRID_INFO[file_type]["grid_width"] - } - } - + HIGH_RES_GRID_INFO[file_type]["grid_type"]: { + "start_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item(), + "end_position_row": 
self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item(), + "segment_height": self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item() - + self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item() + 1, + "grid_width": HIGH_RES_GRID_INFO[file_type]["grid_width"] + }, + LOW_RES_GRID_INFO[file_type]["grid_type"]: { + "start_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item(), + "end_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item(), + "segment_height": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item() - + self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item() + 1, + "grid_width": LOW_RES_GRID_INFO[file_type]["grid_width"] + } + } return segment_position_info def get_dataset(self, key, info=None): @@ -397,9 +403,13 @@ def orbital_param(self): actual_subsat_lon = float(np.nanmean(self._get_aux_data_lut_vector("subsatellite_longitude"))) actual_subsat_lat = float(np.nanmean(self._get_aux_data_lut_vector("subsatellite_latitude"))) actual_sat_alt = float(np.nanmean(self._get_aux_data_lut_vector("platform_altitude"))) - nominal_and_proj_subsat_lon = float( - self.get_and_cache_npxr("data/mtg_geos_projection/attr/longitude_of_projection_origin")) - nominal_and_proj_subsat_lat = 0 + # The "try" is a temporary part of the code as long as the AF data are not modified + try : + nominal_and_proj_subsat_lon = float( + self.get_and_cache_npxr("data/mtg_geos_projection/attr/longitude_of_projection_origin")) + except ValueError: + nominal_and_proj_subsat_lon = 0.0 + nominal_and_proj_subsat_lat = 0.0 nominal_and_proj_sat_alt = float( self.get_and_cache_npxr("data/mtg_geos_projection/attr/perspective_point_height")) @@ -551,7 +561,11 @@ def get_area_def(self, key): a = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/semi_major_axis")) h = 
float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/perspective_point_height")) rf = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/inverse_flattening")) - lon_0 = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/longitude_of_projection_origin")) + # The "try" is a temporary part of the code as long as the AF data are not modified + try: + lon_0 = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/longitude_of_projection_origin")) + except ValueError: + lon_0 = 0.0 sweep = str(self.get_and_cache_npxr("data/mtg_geos_projection/attr/sweep_angle_axis")) area_extent, nlines, ncols = self.calc_area_extent(key) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index cb5924b448..8971eb4996 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -18,6 +18,7 @@ import logging from contextlib import suppress +import netCDF4 import numpy as np import xarray as xr from pyresample import geometry @@ -87,13 +88,45 @@ def _set_attributes(self, variable, dataset_info, segmented=False): else: xdim, ydim = "number_of_columns", "number_of_rows" - if dataset_info["file_key"] not in ["product_quality", "product_completeness", "product_timeliness"]: - variable = variable.rename({ydim: "y", xdim: "x"}) + if dataset_info["nc_key"] not in ["product_quality", "product_completeness", "product_timeliness"]: + variable = variable.swap_dims({ydim: "y", xdim: "x"}) variable.attrs.setdefault("units", None) + if "unit" in variable.attrs: + # Need to convert this attribute to the expected satpy entry + variable.attrs.update({"units": variable.attrs["unit"]}) + del variable.attrs["unit"] + variable.attrs.update(dataset_info) variable.attrs.update(self._get_global_attributes()) + import_enum_information = dataset_info.get("import_enum_information", False) + if import_enum_information: + variable = self._add_flag_values_and_meanings(self.filename, dataset_info["nc_key"], variable) + + if variable.attrs["units"] == 
"none": + variable.attrs.update({"units": None}) + + return variable + + @staticmethod + def _add_flag_values_and_meanings(filename, key, variable): + """Build flag values and meaning from enum datatype.""" + nc_dataset = netCDF4.Dataset(filename, "r") + # This currently assumes a flat netCDF file + data_type = nc_dataset.variables[key].datatype + if hasattr(data_type, "enum_dict"): + enum = data_type.enum_dict + flag_values = [] + flag_meanings = [] + for meaning, value in enum.items(): + flag_values.append(value) + flag_meanings.append(meaning) + + variable.attrs["flag_values"] = flag_values + variable.attrs["flag_meanings"] = flag_meanings + nc_dataset.close() + return variable def _slice_dataset(self, variable, dataset_info, dimensions): @@ -154,7 +187,6 @@ def __init__(self, filename, filename_info, filetype_info, with_area_definition= self._projection = self.nc["mtg_geos_projection"] self.multi_dims = {"maximum_number_of_layers": "layer", "number_of_vis_channels": "vis_channel_id"} - def get_area_def(self, key): """Return the area definition.""" try: @@ -163,8 +195,8 @@ def get_area_def(self, key): raise NotImplementedError def get_dataset(self, dataset_id, dataset_info): - """Get dataset using the file_key in dataset_info.""" - var_key = dataset_info["file_key"] + """Get dataset using the nc_key in dataset_info.""" + var_key = dataset_info["nc_key"] par_name = dataset_info["name"] logger.debug("Reading in file to get dataset with key %s.", var_key) @@ -196,7 +228,7 @@ def get_dataset(self, dataset_id, dataset_info): @staticmethod def _decode_clm_test_data(variable, dataset_info): - if dataset_info["file_key"] != "cloud_mask_cmrt6_test_result": + if dataset_info["nc_key"] != "cloud_mask_cmrt6_test_result": variable = variable.astype("uint32") variable.values = (variable.values >> dataset_info["extract_byte"] << 31 >> 31).astype("int8") @@ -237,9 +269,9 @@ def _get_area_extent(self): area_extent_pixel_center = make_ext(ll_x, ur_x, ll_y, ur_y, h) # Shift area 
extent by half a pixel to get the area extent w.r.t. the dataset/pixel corners - scale_factor = (x[1:]-x[0:-1]).values.mean() + scale_factor = (x[1:] - x[0:-1]).values.mean() res = abs(scale_factor) * h - area_extent = tuple(i + res/2 if i > 0 else i - res/2 for i in area_extent_pixel_center) + area_extent = tuple(i + res / 2 if i > 0 else i - res / 2 for i in area_extent_pixel_center) return area_extent @@ -326,8 +358,8 @@ def get_area_def(self, key): raise NotImplementedError def get_dataset(self, dataset_id, dataset_info): - """Get dataset using the file_key in dataset_info.""" - var_key = dataset_info["file_key"] + """Get dataset using the nc_key in dataset_info.""" + var_key = dataset_info["nc_key"] logger.debug("Reading in file to get dataset with key %s.", var_key) try: @@ -404,8 +436,10 @@ def _modify_area_extent(stand_area_extent): return area_extent + class FciL2NCAMVFileHandler(FciL2CommonFunctions, BaseFileHandler): """Reader class for FCI L2 AMV products in NetCDF4 format.""" + def __init__(self, filename, filename_info, filetype_info): """Open the NetCDF file with xarray and prepare for dataset reading.""" super().__init__(filename, filename_info, filetype_info) @@ -439,13 +473,13 @@ def _get_global_attributes(self): "spacecraft_name": self.spacecraft_name, "sensor": self.sensor_name, "platform_name": self.spacecraft_name, - "channel":self.filename_info["channel"] + "channel": self.filename_info["channel"] } return attributes def get_dataset(self, dataset_id, dataset_info): - """Get dataset using the file_key in dataset_info.""" - var_key = dataset_info["file_key"] + """Get dataset using the nc_key in dataset_info.""" + var_key = dataset_info["nc_key"] logger.debug("Reading in file to get dataset with key %s.", var_key) try: diff --git a/satpy/readers/file_handlers.py b/satpy/readers/file_handlers.py index 66a028eb4c..b844732a2e 100644 --- a/satpy/readers/file_handlers.py +++ b/satpy/readers/file_handlers.py @@ -112,10 +112,9 @@ def combine_info(self, 
all_infos): """ combined_info = combine_metadata(*all_infos) - new_dict = self._combine(all_infos, min, "start_time", "start_orbit") - new_dict.update(self._combine(all_infos, max, "end_time", "end_orbit")) + new_dict = self._combine(all_infos, min, "start_orbit") + new_dict.update(self._combine(all_infos, max, "end_orbit")) new_dict.update(self._combine_orbital_parameters(all_infos)) - new_dict.update(self._combine_time_parameters(all_infos)) try: area = SwathDefinition(lons=np.ma.vstack([info["area"].lons for info in all_infos]), @@ -145,27 +144,6 @@ def _combine_orbital_parameters(self, all_infos): orb_params_comb.update(self._combine(orb_params, np.mean, *keys)) return {"orbital_parameters": orb_params_comb} - def _combine_time_parameters(self, all_infos): - time_params = [info.get("time_parameters", {}) for info in all_infos] - if not all(time_params): - return {} - # Collect all available keys - time_params_comb = {} - for d in time_params: - time_params_comb.update(d) - - start_keys = ( - "nominal_start_time", - "observation_start_time", - ) - end_keys = ( - "nominal_end_time", - "observation_end_time", - ) - time_params_comb.update(self._combine(time_params, min, *start_keys)) - time_params_comb.update(self._combine(time_params, max, *end_keys)) - return {"time_parameters": time_params_comb} - @property def start_time(self): """Get start time.""" diff --git a/satpy/readers/fy4_base.py b/satpy/readers/fy4_base.py index 144e559858..160b5795dd 100644 --- a/satpy/readers/fy4_base.py +++ b/satpy/readers/fy4_base.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Base reader for the L1 HDF data from the AGRI and GHI instruments aboard the FengYun-4A/B satellites. 
The files read by this reader are described in the official Real Time Data Service: @@ -23,11 +24,12 @@ """ +import datetime as dt import logging -from datetime import datetime import dask.array as da import numpy as np +import numpy.typing as npt import xarray as xr from satpy._compat import cached_property @@ -86,7 +88,7 @@ def scale(dn, slope, offset): return ref - def apply_lut(self, data, lut): + def _apply_lut(self, data: xr.DataArray, lut: npt.NDArray[np.float32]) -> xr.DataArray: """Calibrate digital number (DN) by applying a LUT. Args: @@ -96,8 +98,16 @@ def apply_lut(self, data, lut): Calibrated quantity """ # append nan to the end of lut for fillvalue + fill_value = data.attrs.get("FillValue") + if fill_value is not None and fill_value.item() <= lut.shape[0] - 1: + # If LUT includes the fill_value, remove that entry and everything + # after it. + # Ex. C07 has a LUT of 65536 elements, but fill value is 65535 + # This is considered a bug in the input file format + lut = lut[:fill_value.item()] + lut = np.append(lut, np.nan) - data.data = da.where(data.data > lut.shape[0], lut.shape[0] - 1, data.data) + data.data = da.where(data.data >= lut.shape[0], lut.shape[0] - 1, data.data) res = data.data.map_blocks(self._getitem, lut, dtype=lut.dtype) res = xr.DataArray(res, dims=data.dims, attrs=data.attrs, coords=data.coords) @@ -138,8 +148,8 @@ def calibrate(self, data, ds_info, ds_name, file_key): raise NotImplementedError("Calibration to radiance is not supported.") # Apply range limits, but not for counts or we convert to float! 
if calibration != "counts": - data = data.where((data >= min(data.attrs["valid_range"])) & - (data <= max(data.attrs["valid_range"]))) + data = data.where((data >= min(ds_info["valid_range"])) & + (data <= max(ds_info["valid_range"]))) else: data.attrs["_FillValue"] = data.attrs["FillValue"].item() return data @@ -182,7 +192,7 @@ def calibrate_to_bt(self, data, ds_info, ds_name): lut = self[lut_key] # the value of dn is the index of brightness_temperature - data = self.apply_lut(data, lut) + data = self._apply_lut(data, lut.compute().data) ds_info["valid_range"] = lut.attrs["valid_range"] return data @@ -191,20 +201,20 @@ def start_time(self): """Get the start time.""" start_time = self["/attr/Observing Beginning Date"] + "T" + self["/attr/Observing Beginning Time"] + "Z" try: - return datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ") except ValueError: # For some data there is no sub-second component - return datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%SZ") + return dt.datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%SZ") @property def end_time(self): """Get the end time.""" end_time = self["/attr/Observing Ending Date"] + "T" + self["/attr/Observing Ending Time"] + "Z" try: - return datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ") except ValueError: # For some data there is no sub-second component - return datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%SZ") + return dt.datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%SZ") def get_area_def(self, key): """Get the area definition.""" diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index 4f34c1fde8..6b3ceb5e0a 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -16,16 +16,14 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . - """GERB L2 HR HDF5 reader. 
A reader for the Top of Atmosphere outgoing fluxes from the Geostationary Earth Radiation Budget instrument aboard the Meteosat Second Generation satellites. """ - +import datetime as dt import logging -from datetime import timedelta from satpy.readers.hdf5_utils import HDF5FileHandler from satpy.resample import get_area_def @@ -55,7 +53,7 @@ class GERB_HR_FileHandler(HDF5FileHandler): @property def end_time(self): """Get end time.""" - return self.start_time + timedelta(minutes=15) + return self.start_time + dt.timedelta(minutes=15) @property def start_time(self): diff --git a/satpy/readers/ghrsst_l2.py b/satpy/readers/ghrsst_l2.py index 6c4005623e..d407d49f14 100644 --- a/satpy/readers/ghrsst_l2.py +++ b/satpy/readers/ghrsst_l2.py @@ -14,12 +14,13 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reader for the GHRSST level-2 formatted data.""" +import datetime as dt import os import tarfile from contextlib import suppress -from datetime import datetime from functools import cached_property import xarray as xr @@ -39,9 +40,9 @@ def __init__(self, filename, filename_info, filetype_info, engine=None): self._engine = engine self._tarfile = None - self.filename_info["start_time"] = datetime.strptime( + self.filename_info["start_time"] = dt.datetime.strptime( self.nc.start_time, "%Y%m%dT%H%M%SZ") - self.filename_info["end_time"] = datetime.strptime( + self.filename_info["end_time"] = dt.datetime.strptime( self.nc.stop_time, "%Y%m%dT%H%M%SZ") @cached_property diff --git a/satpy/readers/ghrsst_l3c_sst.py b/satpy/readers/ghrsst_l3c_sst.py index ef1dd220a9..8960275995 100644 --- a/satpy/readers/ghrsst_l3c_sst.py +++ b/satpy/readers/ghrsst_l3c_sst.py @@ -16,10 +16,11 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
# type: ignore + """An OSISAF SST reader for the netCDF GHRSST format.""" +import datetime as dt import logging -from datetime import datetime import numpy as np @@ -37,7 +38,7 @@ class GHRSST_OSISAFL2(NetCDF4FileHandler): """Reader for the OSISAF SST GHRSST format.""" def _parse_datetime(self, datestr): - return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") + return dt.datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") def get_area_def(self, area_id, area_info): """Override abstract baseclass method.""" diff --git a/satpy/readers/glm_l2.py b/satpy/readers/glm_l2.py index ceb11a33bc..7f1e77cd50 100644 --- a/satpy/readers/glm_l2.py +++ b/satpy/readers/glm_l2.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Geostationary Lightning Mapper reader for the Level 2 format from glmtools. More information about `glmtools` and the files it produces can be found on @@ -23,8 +24,9 @@ https://github.com/deeplycloudy/glmtools """ + +import datetime as dt import logging -from datetime import datetime import numpy as np @@ -52,12 +54,12 @@ def sensor(self): @property def start_time(self): """Start time of the current file's observations.""" - return datetime.strptime(self.nc.attrs["time_coverage_start"], "%Y-%m-%dT%H:%M:%SZ") + return dt.datetime.strptime(self.nc.attrs["time_coverage_start"], "%Y-%m-%dT%H:%M:%SZ") @property def end_time(self): """End time of the current file's observations.""" - return datetime.strptime(self.nc.attrs["time_coverage_end"], "%Y-%m-%dT%H:%M:%SZ") + return dt.datetime.strptime(self.nc.attrs["time_coverage_end"], "%Y-%m-%dT%H:%M:%SZ") def _is_category_product(self, data_arr): # if after autoscaling we still have an integer diff --git a/satpy/readers/goci2_l2_nc.py b/satpy/readers/goci2_l2_nc.py new file mode 100644 index 0000000000..0679be41ff --- /dev/null +++ b/satpy/readers/goci2_l2_nc.py @@ -0,0 +1,91 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 
2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . + +"""Reader for GK-2B GOCI-II L2 products from NOSC. + +For more information about the data, see: +""" + +import datetime as dt +import logging + +import xarray as xr + +from satpy.readers.netcdf_utils import NetCDF4FileHandler + +logger = logging.getLogger(__name__) + +GROUPS_MAP = { + "goci2_l2_ac": ["geophysical_data/RhoC", "geophysical_data/Rrs", "navigation_data"], + "goci2_l2_iop": [ + "geophysical_data/a_total", + "geophysical_data/bb_total", + "navigation_data", + ], +} + + +class GOCI2L2NCFileHandler(NetCDF4FileHandler): + """File handler for GOCI-II L2 official data in netCDF format.""" + + def __init__(self, filename, filename_info, filetype_info): + """Initialize the reader.""" + super().__init__(filename, filename_info, filetype_info) + + self.attrs = self["/attrs"] + self.nc = self._merge_navigation_data(filetype_info["file_type"]) + + # Read metadata which are common to all datasets + self.nlines = self.nc.sizes["number_of_lines"] + self.ncols = self.nc.sizes["pixels_per_line"] + self.coverage = filename_info["coverage"] + + def _merge_navigation_data(self, filetype): + """Merge navigation data and geophysical data.""" + if filetype in GROUPS_MAP.keys(): + groups = GROUPS_MAP[filetype] + else: + groups = ["geophysical_data", "navigation_data"] + return xr.merge([self[group] for group in groups]) + + @property + 
def start_time(self): + """Start timestamp of the dataset.""" + date_str = self.attrs["observation_start_time"] + return dt.datetime.strptime(date_str, "%Y%m%d_%H%M%S") + + @property + def end_time(self): + """End timestamp of the dataset.""" + date_str = self.attrs["observation_end_time"] + return dt.datetime.strptime(date_str, "%Y%m%d_%H%M%S") + + def get_dataset(self, key, info): + """Load a dataset.""" + var = info["file_key"] + logger.debug("Reading in get_dataset %s.", var) + variable = self.nc[var] + + variable = variable.rename({"number_of_lines": "y", "pixels_per_line": "x"}) + + # Some products may miss lon/lat standard_name, use name as base name if it is not already present + if variable.attrs.get("standard_name", None) is None: + variable.attrs.update({"standard_name": variable.name}) + + variable.attrs.update(key.to_dict()) + return variable diff --git a/satpy/readers/goes_imager_hrit.py b/satpy/readers/goes_imager_hrit.py index d90ebb4a72..401274debb 100644 --- a/satpy/readers/goes_imager_hrit.py +++ b/satpy/readers/goes_imager_hrit.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """GOES HRIT format reader. 
References: @@ -24,8 +25,8 @@ """ +import datetime as dt import logging -from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -116,21 +117,21 @@ class CalibrationError(Exception): ("msecs", "u1")]) -def make_sgs_time(sgs_time_array: ArrayLike) -> datetime: +def make_sgs_time(sgs_time_array: ArrayLike) -> dt.datetime: """Make sgs time.""" epoch_year = _epoch_year_from_sgs_time(sgs_time_array) doy_offset = _epoch_doy_offset_from_sgs_time(sgs_time_array) return epoch_year + doy_offset -def _epoch_year_from_sgs_time(sgs_time_array: ArrayLike) -> datetime: +def _epoch_year_from_sgs_time(sgs_time_array: ArrayLike) -> dt.datetime: century = sgs_time_array["century"].astype(np.int64) year = sgs_time_array["year"].astype(np.int64) year = ((century >> 4) * 1000 + (century & 15) * 100 + (year >> 4) * 10 + (year & 15)) - return datetime(int(year), 1, 1) + return dt.datetime(int(year), 1, 1) -def _epoch_doy_offset_from_sgs_time(sgs_time_array: ArrayLike) -> timedelta: +def _epoch_doy_offset_from_sgs_time(sgs_time_array: ArrayLike) -> dt.timedelta: doy1 = sgs_time_array["doy1"].astype(np.int64) doy_hours = sgs_time_array["doy_hours"].astype(np.int64) hours_mins = sgs_time_array["hours_mins"].astype(np.int64) @@ -143,7 +144,7 @@ def _epoch_doy_offset_from_sgs_time(sgs_time_array: ArrayLike) -> timedelta: mins = ((hours_mins & 15) * 10 + (mins_secs >> 4)) secs = ((mins_secs & 15) * 10 + (secs_msecs >> 4)) msecs = ((secs_msecs & 15) * 100 + (msecs >> 4) * 10 + (msecs & 15)) - return timedelta( + return dt.timedelta( days=int(doy - 1), hours=int(hours), minutes=int(mins), @@ -426,7 +427,7 @@ def _get_calibration_params(self): def calibrate(self, data, calibration): """Calibrate the data.""" logger.debug("Calibration") - tic = datetime.now() + tic = dt.datetime.now() if calibration == "counts": return data if calibration == "reflectance": @@ -437,7 +438,7 @@ def calibrate(self, data, calibration): raise NotImplementedError("Don't know how to 
calibrate to " + str(calibration)) - logger.debug("Calibration time " + str(datetime.now() - tic)) + logger.debug("Calibration time " + str(dt.datetime.now() - tic)) return res def _calibrate(self, data): diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py index 1b88919886..1c5b513f36 100644 --- a/satpy/readers/goes_imager_nc.py +++ b/satpy/readers/goes_imager_nc.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reader for GOES 8-15 imager data in netCDF format. Supports netCDF files from both NOAA-CLASS and EUMETSAT. @@ -223,10 +224,10 @@ .. _[SCHED-E]: http://www.ospo.noaa.gov/Operations/GOES/east/imager-routine.html """ +import datetime as dt import logging import re from abc import abstractmethod -from datetime import datetime, timedelta import numpy as np import pyresample.geometry @@ -593,11 +594,11 @@ } # (nlines, ncols) SCAN_DURATION = { - FULL_DISC: timedelta(minutes=26), - NORTH_HEMIS_WEST: timedelta(minutes=10, seconds=5), - SOUTH_HEMIS_WEST: timedelta(minutes=6, seconds=54), - NORTH_HEMIS_EAST: timedelta(minutes=14, seconds=15), - SOUTH_HEMIS_EAST: timedelta(minutes=4, seconds=49) + FULL_DISC: dt.timedelta(minutes=26), + NORTH_HEMIS_WEST: dt.timedelta(minutes=10, seconds=5), + SOUTH_HEMIS_WEST: dt.timedelta(minutes=6, seconds=54), + NORTH_HEMIS_EAST: dt.timedelta(minutes=14, seconds=15), + SOUTH_HEMIS_EAST: dt.timedelta(minutes=4, seconds=49) } # Source: [SCHED-W], [SCHED-E] @@ -730,10 +731,15 @@ def _get_area_def_uniform_sampling(self, lon0, channel): @property def start_time(self): """Start timestamp of the dataset.""" - dt = self.nc["time"].dt - return datetime(year=int(dt.year.item()), month=int(dt.month.item()), day=int(dt.day.item()), - hour=int(dt.hour.item()), minute=int(dt.minute.item()), - second=int(dt.second.item()), microsecond=int(dt.microsecond.item())) + timestamp = self.nc["time"].dt + return dt.datetime( + 
year=int(timestamp.year.item()), + month=int(timestamp.month.item()), + day=int(timestamp.day.item()), + hour=int(timestamp.hour.item()), + minute=int(timestamp.minute.item()), + second=int(timestamp.second.item()), + microsecond=int(timestamp.microsecond.item())) @property def end_time(self): @@ -1018,11 +1024,11 @@ def get_dataset(self, key, info): elif "latitude" in key["name"]: data = self.geo_data["lat"] else: - tic = datetime.now() + tic = dt.datetime.now() data = self.calibrate(self.nc["data"].isel(time=0), calibration=key["calibration"], channel=key["name"]) - logger.debug("Calibration time: {}".format(datetime.now() - tic)) + logger.debug("Calibration time: {}".format(dt.datetime.now() - tic)) # Mask space pixels data = data.where(self.meta["earth_mask"]) @@ -1076,11 +1082,11 @@ def get_dataset(self, key, info): """Load dataset designated by the given key from file.""" logger.debug("Reading dataset {}".format(key["name"])) - tic = datetime.now() + tic = dt.datetime.now() data = self.calibrate(self.nc["data"].isel(time=0), calibration=key["calibration"], channel=key["name"]) - logger.debug("Calibration time: {}".format(datetime.now() - tic)) + logger.debug("Calibration time: {}".format(dt.datetime.now() - tic)) # Mask space pixels data = data.where(self.meta["earth_mask"]) diff --git a/satpy/readers/gpm_imerg.py b/satpy/readers/gpm_imerg.py index 7bc65ac4c6..4463be31b9 100644 --- a/satpy/readers/gpm_imerg.py +++ b/satpy/readers/gpm_imerg.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reader for GPM imerg data on half-hourly timesteps. 
References: @@ -23,8 +24,8 @@ """ +import datetime as dt import logging -from datetime import datetime import dask.array as da import h5py @@ -49,22 +50,22 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Find the start time from filename info.""" - return datetime(self.finfo["date"].year, - self.finfo["date"].month, - self.finfo["date"].day, - self.finfo["start_time"].hour, - self.finfo["start_time"].minute, - self.finfo["start_time"].second) + return dt.datetime(self.finfo["date"].year, + self.finfo["date"].month, + self.finfo["date"].day, + self.finfo["start_time"].hour, + self.finfo["start_time"].minute, + self.finfo["start_time"].second) @property def end_time(self): """Find the end time from filename info.""" - return datetime(self.finfo["date"].year, - self.finfo["date"].month, - self.finfo["date"].day, - self.finfo["end_time"].hour, - self.finfo["end_time"].minute, - self.finfo["end_time"].second) + return dt.datetime(self.finfo["date"].year, + self.finfo["date"].month, + self.finfo["date"].day, + self.finfo["end_time"].hour, + self.finfo["end_time"].minute, + self.finfo["end_time"].second) def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" diff --git a/satpy/readers/grib.py b/satpy/readers/grib.py index dadccce77a..4372226c12 100644 --- a/satpy/readers/grib.py +++ b/satpy/readers/grib.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Generic Reader for GRIB2 files. Currently this reader depends on the `pygrib` python package. The `eccodes` @@ -22,8 +23,9 @@ of writing. 
""" + +import datetime as dt import logging -from datetime import datetime import dask.array as da import numpy as np @@ -105,7 +107,7 @@ def _create_dataset_ids(self, keys): @staticmethod def _convert_datetime(msg, date_key, time_key, date_format="%Y%m%d%H%M"): date_str = "{:d}{:04d}".format(msg[date_key], msg[time_key]) - return datetime.strptime(date_str, date_format) + return dt.datetime.strptime(date_str, date_format) @property def start_time(self): diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index 37fe714435..3fd920c01f 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -15,15 +15,16 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Base HDF-EOS reader.""" from __future__ import annotations +import datetime as dt import logging import re from ast import literal_eval from contextlib import suppress -from datetime import datetime import numpy as np import xarray as xr @@ -182,7 +183,7 @@ def start_time(self): try: date = (self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEBEGINNINGDATE"]["VALUE"] + " " + self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEBEGINNINGTIME"]["VALUE"]) - return datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") + return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") except KeyError: return self._start_time_from_filename() @@ -195,7 +196,7 @@ def end_time(self): try: date = (self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEENDINGDATE"]["VALUE"] + " " + self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEENDINGTIME"]["VALUE"]) - return datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") + return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") except KeyError: return self.start_time diff --git a/satpy/readers/hrit_base.py b/satpy/readers/hrit_base.py index bf53d84a65..d0b9ee44db 100644 --- a/satpy/readers/hrit_base.py +++ b/satpy/readers/hrit_base.py @@ -15,6 +15,7 @@ # # You should have 
received a copy of the GNU General Public License along with # satpy. If not, see . + """HRIT/LRIT format reader. This module is the base module for all HRIT-based formats. Here, you will find @@ -28,10 +29,10 @@ """ +import datetime as dt import logging import os from contextlib import contextmanager, nullcontext -from datetime import timedelta from io import BytesIO from subprocess import PIPE, Popen # nosec B404 @@ -176,7 +177,7 @@ def __init__(self, filename, filename_info, filetype_info, hdr_info): self.hdr_info = hdr_info self._get_hd(self.hdr_info) self._start_time = filename_info["start_time"] - self._end_time = self._start_time + timedelta(minutes=15) + self._end_time = self._start_time + dt.timedelta(minutes=15) def _get_hd(self, hdr_info): """Open the file, read and get the basic file header info and set the mda dictionary.""" diff --git a/satpy/readers/hrit_jma.py b/satpy/readers/hrit_jma.py index 0c88faf46b..bfdd5da93b 100644 --- a/satpy/readers/hrit_jma.py +++ b/satpy/readers/hrit_jma.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """HRIT format reader for JMA data. Introduction @@ -107,8 +108,8 @@ .. 
_AHI sample data: https://www.data.jma.go.jp/mscweb/en/himawari89/space_segment/sample_hrit.html """ +import datetime as dt import logging -from datetime import datetime import numpy as np import xarray as xr @@ -453,7 +454,7 @@ def _interp(arr, cal): def calibrate(self, data, calibration): """Calibrate the data.""" - tic = datetime.now() + tic = dt.datetime.now() if calibration == "counts": return data @@ -466,17 +467,17 @@ def calibrate(self, data, calibration): dims=data.dims, attrs=data.attrs, coords=data.coords) res = res.where(data < 65535) - logger.debug("Calibration time " + str(datetime.now() - tic)) + logger.debug("Calibration time " + str(dt.datetime.now() - tic)) return res @property def start_time(self): """Get start time of the scan.""" if self._use_acquisition_time_as_start_time: - return self.acq_time[0].astype(datetime) + return self.acq_time[0].astype(dt.datetime) return self._start_time @property def end_time(self): """Get end time of the scan.""" - return self.acq_time[-1].astype(datetime) + return self.acq_time[-1].astype(dt.datetime) diff --git a/satpy/readers/hrpt.py b/satpy/readers/hrpt.py index c4862e8169..cac8b9cd3d 100644 --- a/satpy/readers/hrpt.py +++ b/satpy/readers/hrpt.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reading and calibrating hrpt avhrr data. 
Todo: @@ -29,8 +30,8 @@ """ +import datetime as dt import logging -from datetime import datetime import dask.array as da import numpy as np @@ -130,7 +131,7 @@ def __init__(self, filename, filename_info, filetype_info): self.channels = {i: None for i in AVHRR_CHANNEL_NAMES} self.units = {i: "counts" for i in AVHRR_CHANNEL_NAMES} - self.year = filename_info.get("start_time", datetime.utcnow()).year + self.year = filename_info.get("start_time", dt.datetime.utcnow()).year @cached_property def times(self): @@ -272,10 +273,10 @@ def _get_avhrr_tiepoints(self, scan_points, scanline_nb): def start_time(self): """Get the start time.""" return time_seconds(self._data["timecode"][0, np.newaxis, :], - self.year).astype(datetime)[0] + self.year).astype(dt.datetime)[0] @property def end_time(self): """Get the end time.""" return time_seconds(self._data["timecode"][-1, np.newaxis, :], - self.year).astype(datetime)[0] + self.year).astype(dt.datetime)[0] diff --git a/satpy/readers/hsaf_grib.py b/satpy/readers/hsaf_grib.py index a041bf0c73..b8238f17a5 100644 --- a/satpy/readers/hsaf_grib.py +++ b/satpy/readers/hsaf_grib.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """A reader for files produced by the Hydrology SAF. Currently this reader depends on the `pygrib` python package. The `eccodes` @@ -22,8 +23,9 @@ of writing. 
""" + +import datetime as dt import logging -from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -68,7 +70,7 @@ def __init__(self, filename, filename_info, filetype_info): @staticmethod def _get_datetime(msg): dtstr = str(msg["dataDate"]) + str(msg["dataTime"]).zfill(4) - return datetime.strptime(dtstr, "%Y%m%d%H%M") + return dt.datetime.strptime(dtstr, "%Y%m%d%H%M") @property def analysis_time(self): @@ -151,7 +153,7 @@ def get_dataset(self, ds_id, ds_info): flen = len(self.filename) timedelt = self.filename[flen-10:flen-8] ds_info["start_time"] = (ds_info["end_time"] - - timedelta(hours=int(timedelt))) + dt.timedelta(hours=int(timedelt))) else: ds_info["start_time"] = ds_info["end_time"] fill = msg["missingValue"] diff --git a/satpy/readers/hsaf_h5.py b/satpy/readers/hsaf_h5.py index 478b91ce2d..25b42ec6a5 100644 --- a/satpy/readers/hsaf_h5.py +++ b/satpy/readers/hsaf_h5.py @@ -15,9 +15,11 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """A reader for HDF5 Snow Cover (SC) file produced by the Hydrology SAF.""" + +import datetime as dt import logging -from datetime import timedelta import dask.array as da import h5py @@ -47,7 +49,7 @@ def __init__(self, filename, filename_info, filetype_info): @property def end_time(self): """Get end time.""" - return self.start_time + timedelta(hours=23, minutes=59, seconds=59) + return self.start_time + dt.timedelta(hours=23, minutes=59, seconds=59) @property def start_time(self): diff --git a/satpy/readers/hy2_scat_l2b_h5.py b/satpy/readers/hy2_scat_l2b_h5.py index 929d7dc934..dae6e44bf6 100644 --- a/satpy/readers/hy2_scat_l2b_h5.py +++ b/satpy/readers/hy2_scat_l2b_h5.py @@ -21,7 +21,7 @@ Also handle the HDF5 files from NSOAS, based on a file example. 
""" -from datetime import datetime +import datetime as dt import numpy as np import xarray as xr @@ -35,14 +35,14 @@ class HY2SCATL2BH5FileHandler(HDF5FileHandler): @property def start_time(self): """Time for first observation.""" - return datetime.strptime(self["/attr/Range_Beginning_Time"], - "%Y%m%dT%H:%M:%S") + return dt.datetime.strptime(self["/attr/Range_Beginning_Time"], + "%Y%m%dT%H:%M:%S") @property def end_time(self): """Time for final observation.""" - return datetime.strptime(self["/attr/Range_Ending_Time"], - "%Y%m%dT%H:%M:%S") + return dt.datetime.strptime(self["/attr/Range_Ending_Time"], + "%Y%m%dT%H:%M:%S") @property def platform_name(self): diff --git a/satpy/readers/iasi_l2_so2_bufr.py b/satpy/readers/iasi_l2_so2_bufr.py index b5088aa041..d66edb7995 100644 --- a/satpy/readers/iasi_l2_so2_bufr.py +++ b/satpy/readers/iasi_l2_so2_bufr.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + r"""IASI L2 SO2 BUFR format reader. Introduction @@ -84,8 +85,8 @@ # TDB: this reader is based on iasi_l2.py and seviri_l2_bufr.py +import datetime as dt import logging -from datetime import datetime import dask.array as da import numpy as np @@ -154,7 +155,7 @@ def get_start_end_date(self): minute = ec.codes_get(bufr, "minute") second = ec.codes_get(bufr, "second") - obs_time = datetime(year=year, month=month, day=day, hour=hour, minute=minute, second=second) + obs_time = dt.datetime(year=year, month=month, day=day, hour=hour, minute=minute, second=second) if i == 0: start_time = obs_time diff --git a/satpy/readers/ici_l1b_nc.py b/satpy/readers/ici_l1b_nc.py index b063c51c4f..a5fd23c23b 100644 --- a/satpy/readers/ici_l1b_nc.py +++ b/satpy/readers/ici_l1b_nc.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . + """EUMETSAT EPS-SG Ice Cloud Imager (ICI) Level 1B products reader. 
The format is explained in the @@ -26,8 +27,8 @@ """ +import datetime as dt import logging -from datetime import datetime from enum import Enum from functools import cached_property @@ -77,12 +78,12 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): def start_time(self): """Get observation start time.""" try: - start_time = datetime.strptime( + start_time = dt.datetime.strptime( self["/attr/sensing_start_time_utc"], "%Y%m%d%H%M%S.%f", ) except ValueError: - start_time = datetime.strptime( + start_time = dt.datetime.strptime( self["/attr/sensing_start_time_utc"], "%Y-%m-%d %H:%M:%S.%f", ) @@ -92,12 +93,12 @@ def start_time(self): def end_time(self): """Get observation end time.""" try: - end_time = datetime.strptime( + end_time = dt.datetime.strptime( self["/attr/sensing_end_time_utc"], "%Y%m%d%H%M%S.%f", ) except ValueError: - end_time = datetime.strptime( + end_time = dt.datetime.strptime( self["/attr/sensing_end_time_utc"], "%Y-%m-%d %H:%M:%S.%f", ) @@ -197,7 +198,10 @@ def _interpolate_geo( n_subs = longitude.n_subs lons = da.zeros((n_scan.size, n_samples, horns.size)) lats = da.zeros((n_scan.size, n_samples, horns.size)) - n_subs = np.linspace(0, n_samples - 1, n_subs.size).astype(int) + n_subs = np.append( + np.arange(0, n_samples, np.ceil(n_samples / n_subs.size)), + n_samples - 1 + ).astype(int) for horn in horns.values: satint = GeoInterpolator( (longitude.values[:, :, horn], latitude.values[:, :, horn]), diff --git a/satpy/readers/insat3d_img_l1b_h5.py b/satpy/readers/insat3d_img_l1b_h5.py index 205f4d17b2..41ddee5df6 100644 --- a/satpy/readers/insat3d_img_l1b_h5.py +++ b/satpy/readers/insat3d_img_l1b_h5.py @@ -1,6 +1,7 @@ """File handler for Insat 3D L1B data in hdf5 format.""" + +import datetime as dt from contextlib import suppress -from datetime import datetime from functools import cached_property import dask.array as da @@ -120,13 +121,15 @@ class Insat3DIMGL1BH5FileHandler(BaseFileHandler): @property def start_time(self): 
"""Get the start time.""" - start_time = datetime.strptime(self.datatree.attrs["Acquisition_Start_Time"], "%d-%b-%YT%H:%M:%S") + start_time = dt.datetime.strptime( + self.datatree.attrs["Acquisition_Start_Time"], "%d-%b-%YT%H:%M:%S") return start_time @property def end_time(self): """Get the end time.""" - end_time = datetime.strptime(self.datatree.attrs["Acquisition_End_Time"], "%d-%b-%YT%H:%M:%S") + end_time = dt.datetime.strptime( + self.datatree.attrs["Acquisition_End_Time"], "%d-%b-%YT%H:%M:%S") return end_time @cached_property diff --git a/satpy/readers/li_base_nc.py b/satpy/readers/li_base_nc.py index 848306e77c..cefbcc7e55 100644 --- a/satpy/readers/li_base_nc.py +++ b/satpy/readers/li_base_nc.py @@ -191,12 +191,12 @@ import xarray as xr from pyproj import Proj -from satpy.readers.netcdf_utils import NetCDF4FileHandler +from satpy.readers.netcdf_utils import NetCDF4FsspecFileHandler logger = logging.getLogger(__name__) -class LINCFileHandler(NetCDF4FileHandler): +class LINCFileHandler(NetCDF4FsspecFileHandler): """Base class used as parent for the concrete LI reader classes.""" def __init__(self, filename, filename_info, filetype_info, cache_handle=True): @@ -371,6 +371,9 @@ def inverse_projection(self, azimuth, elevation, proj_dict): azimuth = azimuth.values * point_height elevation = elevation.values * point_height + # In the MTG world, azimuth is defined as positive towards west, while proj expects it positive towards east + azimuth *= -1 + lon, lat = projection(azimuth, elevation, inverse=True) return np.stack([lon.astype(azimuth.dtype), lat.astype(elevation.dtype)]) diff --git a/satpy/readers/li_l2_nc.py b/satpy/readers/li_l2_nc.py index 4fe0826380..c172e99a0f 100644 --- a/satpy/readers/li_l2_nc.py +++ b/satpy/readers/li_l2_nc.py @@ -13,18 +13,52 @@ # You should have received a copy of the GNU General Public License # along with satpy. If not, see . -"""MTG Lighting Imager (LI) L2 unified reader. 
+"""MTG Lightning Imager (LI) Level-2 (L2) unified reader. This reader supports reading all the products from the LI L2 processing level: - * L2-LE - * L2-LGR - * L2-AFA - * L2-LEF - * L2-LFL - * L2-AF - * L2-AFR +Point products: + * L2-LE Lightning Events + * L2-LEF Lightning Events Filtered + * L2-LFL Lightning Flashes + * L2-LGR Lightning Groups +Accumulated products: + * L2-AF Accumulated Flashes + * L2-AFA Accumulated Flash Area + * L2-AFR Accumulated Flash Radiance + +Per default, the unified LI L2 reader returns the data either as an 1-D array +or as a 2-D array depending on the product type. + +Point-based products (LE, LEF, LFL, LGR) are "classic" lightning products +consisting of values with attached latitude and longitude coordinates. +Hence, these products are provided by the reader as 1-D arrays, +with a ``pyresample.geometry.SwathDefinition`` area +attribute containing the points lat-lon coordinates. + +Accumulated products (AF, AFA, AFR) are the result of temporal accumulation +of events (e.g. over 30 seconds), and are gridded in the FCI 2km geostationary +projection grid, in order to facilitate the synergistic usage together with FCI. +Compared to the point products, the gridded products also give information +about the spatial extent of the lightning activity. +Hence, these products are provided by the reader as 2-D arrays in the FCI 2km +grid as per intended usage, with a ``pyresample.geometry.AreaDefinition`` area +attribute containing the grid geolocation information. +In this way, the products can directly be overlaid to FCI data. +If needed, the accumulated products can also be accessed as 1-d array by +setting the reader kwarg ``with_area_definition=False``, +e.g.:: + + scn = Scene(filenames=filenames, reader="li_l2_nc", reader_kwargs={'with_area_definition': False}) + +For both 1-d and 2-d products, the lat-lon coordinates of the points/grid pixels +can be accessed using e.g. +``scn['dataset_name'].attrs['area'].get_lonlats()``. 
+ +See the LI L2 Product User Guide `PUG`_ for more information. + +.. _PUG: https://www-dr.eumetsat.int/media/49348 """ @@ -46,7 +80,7 @@ class LIL2NCFileHandler(LINCFileHandler): """Implementation class for the unified LI L2 satpy reader.""" - def __init__(self, filename, filename_info, filetype_info, with_area_definition=False): + def __init__(self, filename, filename_info, filetype_info, with_area_definition=True): """Initialize LIL2NCFileHandler.""" super(LIL2NCFileHandler, self).__init__(filename, filename_info, filetype_info) @@ -73,7 +107,7 @@ def get_area_def(self, dsid): if var_with_swath_coord and self.with_area_def: return get_area_def("mtg_fci_fdss_2km") - raise NotImplementedError("Area definition is not supported for accumulated products.") + raise NotImplementedError("Area definition is not supported for non-accumulated products.") def is_var_with_swath_coord(self, dsid): """Check if the variable corresponding to this dataset is listed as variable with swath coordinates.""" diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index 7070131f51..5b1a960031 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -15,16 +15,18 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reader for the FY-3D MERSI-2 L1B file format. The files for this reader are HDF5 and come in four varieties; band data and geolocation data, both at 250m and 1000m resolution. -This reader was tested on FY-3D MERSI-2 data, but should work on future -platforms as well assuming no file format changes. +This reader was tested on FY-3A/B/C MERSI-1, FY-3D MERSI-2, FY-3E MERSI-LL and FY-3G MERSI-RM data, +but should work on future platforms as well assuming no file format changes. 
""" -from datetime import datetime + +import datetime as dt import dask.array as da import numpy as np @@ -33,10 +35,17 @@ from satpy.readers.hdf5_utils import HDF5FileHandler N_TOT_IR_CHANS_LL = 6 +PLATFORMS_INSTRUMENTS = {"FY-3A": "mersi-1", + "FY-3B": "mersi-1", + "FY-3C": "mersi-1", + "FY-3D": "mersi-2", + "FY-3E": "mersi-ll", + "FY-3F": "mersi-3", + "FY-3G": "mersi-rm"} class MERSIL1B(HDF5FileHandler): - """MERSI-2/MERSI-LL L1B file reader.""" + """MERSI-1/MERSI-2/MERSI-LL/MERSI-RM L1B file reader.""" def _strptime(self, date_attr, time_attr): """Parse date/time strings.""" @@ -44,7 +53,7 @@ def _strptime(self, date_attr, time_attr): time = self[time_attr] # "18:27:39.720" # cuts off microseconds because of unknown meaning # is .720 == 720 microseconds or 720000 microseconds - return datetime.strptime(date + " " + time.split(".")[0], "%Y-%m-%d %H:%M:%S") + return dt.datetime.strptime(date + " " + time.split(".")[0], "%Y-%m-%d %H:%M:%S") @property def start_time(self): @@ -59,12 +68,20 @@ def end_time(self): @property def sensor_name(self): """Map sensor name to Satpy 'standard' sensor names.""" - file_sensor = self["/attr/Sensor Identification Code"] - sensor = { - "MERSI": "mersi-2", - "MERSI LL": "mersi-ll", - }.get(file_sensor, file_sensor) - return sensor + return PLATFORMS_INSTRUMENTS.get(self.platform_name) + + @property + def platform_name(self): + """Platform name.""" + return self["/attr/Satellite Name"] + + def get_refl_mult(self): + """Get reflectance multiplier.""" + if self.sensor_name == "mersi-rm": + # MERSI-RM has reflectance in the range 0-1, so we need to convert + return 100. + else: + return 1. 
def _get_single_slope_intercept(self, slope, intercept, cal_index): try: @@ -75,7 +92,9 @@ def _get_single_slope_intercept(self, slope, intercept, cal_index): return slope[cal_index], intercept[cal_index] def _get_coefficients(self, cal_key, cal_index): - coeffs = self[cal_key][cal_index] + """Get VIS calibration coeffs from calibration datasets.""" + # Only one VIS band for MERSI-LL + coeffs = self[cal_key][cal_index] if self.sensor_name != "mersi-ll" else self[cal_key] slope = coeffs.attrs.pop("Slope", None) intercept = coeffs.attrs.pop("Intercept", None) if slope is not None: @@ -84,40 +103,59 @@ def _get_coefficients(self, cal_key, cal_index): coeffs = coeffs * slope + intercept return coeffs + def _get_coefficients_mersi1(self, cal_index): + """Get VIS calibration coeffs from attributes. Only for MERSI-1 on FY-3A/B.""" + try: + # This is found in the actual file. + coeffs = self["/attr/VIR_Cal_Coeff"] + except KeyError: + # This is in the official manual. + coeffs = self["/attr/VIS_Cal_Coeff"] + coeffs = coeffs.reshape(19, 3) + coeffs = coeffs[cal_index].tolist() + return coeffs + + def _get_dn_corrections(self, data, band_index, dataset_id, attrs): + """Use slope and intercept to get DN corrections.""" + slope = attrs.pop("Slope", None) + intercept = attrs.pop("Intercept", None) + if slope is not None and dataset_id.get("calibration") != "counts": + if band_index is not None and slope.size > 1: + slope = slope[band_index] + intercept = intercept[band_index] + # There's a bug in slope for MERSI-1 IR band + slope = 0.01 if self.sensor_name == "mersi-1" and dataset_id["name"] == "5" else slope + data = data * slope + intercept + return data + def get_dataset(self, dataset_id, ds_info): """Load data variable and metadata and calibrate if needed.""" file_key = ds_info.get("file_key", dataset_id["name"]) band_index = ds_info.get("band_index") data = self[file_key] - if band_index is not None: - data = data[band_index] - if data.ndim >= 2: - data = 
data.rename({data.dims[-2]: "y", data.dims[-1]: "x"}) + data = data[band_index] if band_index is not None else data + data = data.rename({data.dims[-2]: "y", data.dims[-1]: "x"}) if data.ndim >= 2 else data + attrs = data.attrs.copy() # avoid contaminating other band loading attrs.update(ds_info) if "rows_per_scan" in self.filetype_info: attrs.setdefault("rows_per_scan", self.filetype_info["rows_per_scan"]) data = self._mask_data(data, dataset_id, attrs) - - slope = attrs.pop("Slope", None) - intercept = attrs.pop("Intercept", None) - if slope is not None and dataset_id.get("calibration") != "counts": - if band_index is not None: - slope = slope[band_index] - intercept = intercept[band_index] - data = data * slope + intercept + data = self._get_dn_corrections(data, band_index, dataset_id, attrs) if dataset_id.get("calibration") == "reflectance": - coeffs = self._get_coefficients(ds_info["calibration_key"], - ds_info["calibration_index"]) - data = coeffs[0] + coeffs[1] * data + coeffs[2] * data ** 2 + data = self._get_ref_dataset(data, ds_info) + + elif dataset_id.get("calibration") == "radiance": + data = self._get_rad_dataset(data, ds_info, dataset_id) + elif dataset_id.get("calibration") == "brightness_temperature": - calibration_index = ds_info["calibration_index"] # Converts um^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data) # to SI units m^-1, mW*m^-3*str^-1. wave_number = 1. 
/ (dataset_id["wavelength"][1] / 1e6) - + # MERSI-1 doesn't have additional corrections + calibration_index = None if self.sensor_name == "mersi-1" else ds_info["calibration_index"] data = self._get_bt_dataset(data, calibration_index, wave_number) data.attrs = attrs @@ -128,7 +166,7 @@ def get_dataset(self, dataset_id, ds_info): data.attrs[key] = val.decode("utf8") data.attrs.update({ - "platform_name": self["/attr/Satellite Name"], + "platform_name": self.platform_name, "sensor": self.sensor_name, }) @@ -136,7 +174,8 @@ def get_dataset(self, dataset_id, ds_info): def _mask_data(self, data, dataset_id, attrs): """Mask the data using fill_value and valid_range attributes.""" - fill_value = attrs.pop("FillValue", np.nan) # covered by valid_range + fill_value = attrs.pop("_FillValue", np.nan) if self.platform_name in ["FY-3A", "FY-3B"] else \ + attrs.pop("FillValue", np.nan) # covered by valid_range valid_range = attrs.pop("valid_range", None) if dataset_id.get("calibration") == "counts": # preserve integer type of counts if possible @@ -144,17 +183,74 @@ def _mask_data(self, data, dataset_id, attrs): new_fill = data.dtype.type(fill_value) else: new_fill = np.nan - if valid_range is not None: + try: # Due to a bug in the valid_range upper limit in the 10.8(24) and 12.0(25) # in the HDF data, this is hardcoded here. 
- if dataset_id["name"] in ["24", "25"] and valid_range[1] == 4095: - valid_range[1] = 25000 + valid_range[1] = 25000 if self.sensor_name == "mersi-2" and dataset_id["name"] in ["24", "25"] and \ + valid_range[1] == 4095 else valid_range[1] + # Similar bug also found in MERSI-1 + valid_range[1] = 25000 if self.sensor_name == "mersi-1" and dataset_id["name"] == "5" and \ + valid_range[1] == 4095 else valid_range[1] # typically bad_values == 65535, saturated == 65534 # dead detector == 65533 data = data.where((data >= valid_range[0]) & (data <= valid_range[1]), new_fill) + return data + # valid_range could be None + except TypeError: + return data + + def _get_ref_dataset(self, data, ds_info): + """Get the dataset as reflectance. + + For MERSI-1/2/RM, coefficients will be as:: + + Reflectance = coeffs_1 + coeffs_2 * DN + coeffs_3 * DN ** 2 + + For MERSI-LL, the DN value is in radiance and the reflectance could be calculated by:: + + Reflectance = Rad * pi / E0 * 100 + + Here E0 represents the solar irradiance of the specific band and is the coefficient. + + """ + # Only FY-3A/B stores VIS calibration coefficients in attributes + coeffs = self._get_coefficients_mersi1(ds_info["calibration_index"]) if self.platform_name in ["FY-3A", + "FY-3B"] else self._get_coefficients(ds_info["calibration_key"], ds_info.get("calibration_index", None)) + data = coeffs[0] + coeffs[1] * data + coeffs[2] * data ** 2 if self.sensor_name != "mersi-ll" else \ + data * np.pi / coeffs[0] * 100 + + data = data * self.get_refl_mult() return data + def _get_rad_dataset(self, data, ds_info, datset_id): + """Get the dataset as radiance. + + For MERSI-2/RM VIS bands, this could be calculated by:: + + Rad = Reflectance / 100 * E0 / pi + + For MERSI-2, E0 is in the attribute "Solar_Irradiance". + For MERSI-RM, E0 is in the calibration dataset "Solar_Irradiance". + However we can't find the way to retrieve this value from MERSI-1. + + For MERSI-LL VIS band, it has already been stored in DN values. 
+ After applying slope and intercept, we just get it. And Same way for IR bands, no matter which sensor it is. + + """ + mersi_2_vis = [str(i) for i in range(1, 20)] + mersi_rm_vis = [str(i) for i in range(1, 6)] + + if self.sensor_name == "mersi-2" and datset_id["name"] in mersi_2_vis: + E0 = self["/attr/Solar_Irradiance"] + rad = self._get_ref_dataset(data, ds_info) / 100 * E0[mersi_2_vis.index(datset_id["name"])] / np.pi + elif self.sensor_name == "mersi-rm" and datset_id["name"] in mersi_rm_vis: + E0 = self._get_coefficients("Calibration/Solar_Irradiance", mersi_rm_vis.index(datset_id["name"])) + rad = self._get_ref_dataset(data, ds_info) / 100 * E0 / np.pi + else: + rad = data + return rad + def _get_bt_dataset(self, data, calibration_index, wave_number): """Get the dataset as brightness temperature. @@ -172,19 +268,21 @@ def _get_bt_dataset(self, data, calibration_index, wave_number): """ # pass the dask array bt_data = rad2temp(wave_number, data.data * 1e-5) # brightness temperature - if isinstance(bt_data, np.ndarray): - # old versions of pyspectral produce numpy arrays - data.data = da.from_array(bt_data, chunks=data.data.chunks) - else: - # new versions of pyspectral can do dask arrays - data.data = bt_data + + # old versions of pyspectral produce numpy arrays + # new versions of pyspectral can do dask arrays + data.data = da.from_array(bt_data, chunks=data.data.chunks) if isinstance(bt_data, np.ndarray) else bt_data # Some BT bands seem to have 0 in the first 10 columns # and it is an invalid measurement, so let's mask data = data.where(data != 0) # additional corrections from the file - if self.sensor_name == "mersi-2": + if self.sensor_name == "mersi-1": + # https://img.nsmc.org.cn/PORTAL/NSMC/DATASERVICE/SRF/FY3C/FY3C_MERSI_SRF.rar + corr_coeff_a = 1.0047 + corr_coeff_b = -0.8549 + elif self.sensor_name == "mersi-2": corr_coeff_a = float(self["/attr/TBB_Trans_Coefficient_A"][calibration_index]) corr_coeff_b = 
float(self["/attr/TBB_Trans_Coefficient_B"][calibration_index]) elif self.sensor_name == "mersi-ll": @@ -195,9 +293,13 @@ def _get_bt_dataset(self, data, calibration_index, wave_number): corr_coeff_b = coeffs[calibration_index + N_TOT_IR_CHANS_LL] except KeyError: return data + else: + # MERSI-RM has no correction coefficients + corr_coeff_a = 0 if corr_coeff_a != 0: - data = (data - corr_coeff_b) / corr_coeff_a + data = (data - corr_coeff_b) / corr_coeff_a if self.sensor_name != "mersi-1" else \ + data * corr_coeff_a + corr_coeff_b # some bands have 0 counts for the first N columns and # seem to be invalid data points data = data.where(data != 0) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index 1131e40a96..b041436a74 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -28,13 +28,14 @@ reader_kwargs={'mask_saturated': False}) scene.load(['B01']) -L1B format description for the files read here: +L1C/L2A format description for the files read here: - https://sentinels.copernicus.eu/documents/247904/0/Sentinel-2-product-specifications-document-V14-9.pdf/ + https://sentinels.copernicus.eu/documents/247904/685211/S2-PDGS-TAS-DI-PSD-V14.9.pdf/3d3b6c9c-4334-dcc4-3aa7-f7c0deffbaf7?t=1643013091529 """ import logging +from datetime import datetime import dask.array as da import defusedxml.ElementTree as ET @@ -63,12 +64,13 @@ def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, mask_s super(SAFEMSIL1C, self).__init__(filename, filename_info, filetype_info) del mask_saturated - self._start_time = filename_info["observation_time"] - self._end_time = filename_info["observation_time"] self._channel = filename_info["band_name"] + self.process_level = filename_info["process_level"] self._tile_mda = tile_mda self._mda = mda self.platform_name = PLATFORMS[filename_info["fmission_id"]] + self._start_time = self._tile_mda.start_time() + self._end_time = filename_info["observation_time"] def get_dataset(self, key, info): 
"""Load a dataset.""" @@ -76,7 +78,10 @@ def get_dataset(self, key, info): return logger.debug("Reading %s.", key["name"]) + proj = self._read_from_file(key) + if proj is None: + return proj.attrs = info.copy() proj.attrs["units"] = "%" proj.attrs["platform_name"] = self.platform_name @@ -89,6 +94,10 @@ def _read_from_file(self, key): return self._mda.calibrate_to_reflectances(proj, self._channel) if key["calibration"] == "radiance": return self._mda.calibrate_to_radiances(proj, self._channel) + if key["calibration"] == "counts": + return self._mda._sanitize_data(proj) + if key["calibration"] in ["aerosol_thickness", "water_vapor"]: + return self._mda.calibrate_to_atmospheric(proj, self._channel) @property def start_time(self): @@ -104,6 +113,7 @@ def get_area_def(self, dsid): """Get the area def.""" if self._channel != dsid["name"]: return + return self._tile_mda.get_area_def(dsid) @@ -117,6 +127,7 @@ def __init__(self, filename, filename_info, filetype_info, mask_saturated=True): self._end_time = filename_info["observation_time"] self.root = ET.parse(self.filename) self.tile = filename_info["dtile_number"] + self.process_level = filename_info["process_level"] self.platform_name = PLATFORMS[filename_info["fmission_id"]] self.mask_saturated = mask_saturated import bottleneck # noqa @@ -138,10 +149,23 @@ class SAFEMSIMDXML(SAFEMSIXMLMetadata): def calibrate_to_reflectances(self, data, band_name): """Calibrate *data* using the radiometric information for the metadata.""" - quantification = int(self.root.find(".//QUANTIFICATION_VALUE").text) + quantification = int(self.root.find(".//QUANTIFICATION_VALUE").text) if self.process_level == "L1C" else \ + int(self.root.find(".//BOA_QUANTIFICATION_VALUE").text) data = self._sanitize_data(data) return (data + self.band_offset(band_name)) / quantification * 100 + def calibrate_to_atmospheric(self, data, band_name): + """Calibrate L2A AOT/WVP product.""" + atmospheric_bands = ["AOT", "WVP"] + if self.process_level == "L1C": + 
return + elif self.process_level == "L2A" and band_name not in atmospheric_bands: + return + + quantification = float(self.root.find(f".//{band_name}_QUANTIFICATION_VALUE").text) + data = self._sanitize_data(data) + return data / quantification + def _sanitize_data(self, data): data = data.where(data != self.no_data) if self.mask_saturated: @@ -170,7 +194,8 @@ def band_indices(self): @cached_property def band_offsets(self): """Get the band offsets from the metadata.""" - offsets = self.root.find(".//Radiometric_Offset_List") + offsets = self.root.find(".//Radiometric_Offset_List") if self.process_level == "L1C" else \ + self.root.find(".//BOA_ADD_OFFSET_VALUES_LIST") if offsets is not None: band_offsets = {int(off.attrib["band_id"]): float(off.text) for off in offsets} else: @@ -267,6 +292,11 @@ def _shape(self, resolution): cols = int(self.geocoding.find('Size[@resolution="' + str(resolution) + '"]/NCOLS').text) return cols, rows + def start_time(self): + """Get the observation time from the tile metadata.""" + timestr = self.root.find(".//SENSING_TIME").text + return datetime.strptime(timestr, "%Y-%m-%dT%H:%M:%S.%fZ") + @staticmethod def _do_interp(minterp, xcoord, ycoord): interp_points2 = np.vstack((ycoord.ravel(), xcoord.ravel())) diff --git a/satpy/readers/msu_gsa_l1b.py b/satpy/readers/msu_gsa_l1b.py index c4e45aa333..4a4ff3518f 100644 --- a/satpy/readers/msu_gsa_l1b.py +++ b/satpy/readers/msu_gsa_l1b.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reader for the Arctica-M1 MSU-GS/A data. The files for this reader are HDF5 and contain channel data at 1km resolution @@ -24,7 +25,8 @@ This reader was tested on sample data provided by EUMETSAT. 
""" -from datetime import datetime + +import datetime as dt import numpy as np @@ -38,7 +40,7 @@ class MSUGSAFileHandler(HDF5FileHandler): def start_time(self): """Time for timeslot scan start.""" dtstr = self["/attr/timestamp_without_timezone"] - return datetime.strptime(dtstr, "%Y-%m-%dT%H:%M:%S") + return dt.datetime.strptime(dtstr, "%Y-%m-%dT%H:%M:%S") @property def satellite_altitude(self): diff --git a/satpy/readers/mws_l1b.py b/satpy/readers/mws_l1b.py index 372a59ac37..1dc076e68f 100644 --- a/satpy/readers/mws_l1b.py +++ b/satpy/readers/mws_l1b.py @@ -1,24 +1,25 @@ # Copyright (c) 2022 Pytroll Developers - +# # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. - +# # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. - +# # You should have received a copy of the GNU General Public License # along with this program. If not, see . + """Reader for the EPS-SG Microwave Sounder (MWS) level-1b data. 
Documentation: https://www.eumetsat.int/media/44139 """ +import datetime as dt import logging -from datetime import datetime import dask.array as da import numpy as np @@ -101,13 +102,13 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Get start time.""" - return datetime.strptime(self["/attr/sensing_start_time_utc"], + return dt.datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y-%m-%d %H:%M:%S.%f") @property def end_time(self): """Get end time.""" - return datetime.strptime(self["/attr/sensing_end_time_utc"], + return dt.datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y-%m-%d %H:%M:%S.%f") @property diff --git a/satpy/readers/netcdf_utils.py b/satpy/readers/netcdf_utils.py index cb5c38d1cf..c8b8a3f85f 100644 --- a/satpy/readers/netcdf_utils.py +++ b/satpy/readers/netcdf_utils.py @@ -258,13 +258,7 @@ def collect_cache_vars(self, cache_var_size): cache_vars = self._collect_cache_var_names(cache_var_size) for var_name in cache_vars: v = self.file_content[var_name] - try: - arr = xr.DataArray( - v[:], dims=v.dimensions, attrs=v.__dict__, name=v.name) - except ValueError: - # Handle scalars for h5netcdf backend - arr = xr.DataArray( - v.__array__(), dims=v.dimensions, attrs=v.__dict__, name=v.name) + arr = get_data_as_xarray(v) self.cached_file_content[var_name] = arr def _collect_cache_var_names(self, cache_var_size): @@ -380,6 +374,24 @@ def _compose_replacement_names(variable_name_replacements, var, variable_names): variable_names.append(var.format(**{key: val})) +def get_data_as_xarray(variable): + """Get data in variable as xr.DataArray.""" + try: + attrs = variable.attrs + except AttributeError: + # netCDF4 backend requires usage of __dict__ to get the attributes + attrs = variable.__dict__ + try: + data = variable[:] + except (ValueError, IndexError): + # Handle scalars for h5netcdf backend + data = variable.__array__() + + arr = xr.DataArray(data, dims=variable.dimensions, attrs=attrs, 
name=variable.name) + + return arr + + class NetCDF4FsspecFileHandler(NetCDF4FileHandler): """NetCDF4 file handler using fsspec to read files remotely.""" diff --git a/satpy/readers/nwcsaf_msg2013_hdf5.py b/satpy/readers/nwcsaf_msg2013_hdf5.py index 40a6441655..a3bc9ca168 100644 --- a/satpy/readers/nwcsaf_msg2013_hdf5.py +++ b/satpy/readers/nwcsaf_msg2013_hdf5.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reader for the old NWCSAF/Geo (v2013 and earlier) cloud product format. References: @@ -27,8 +28,8 @@ """ +import datetime as dt import logging -from datetime import datetime import h5py import numpy as np @@ -127,7 +128,7 @@ def get_area_def(self, dsid): @property def start_time(self): """Return the start time of the object.""" - return datetime.strptime(self.file_content["/attr/IMAGE_ACQUISITION_TIME"], "%Y%m%d%H%M") + return dt.datetime.strptime(self.file_content["/attr/IMAGE_ACQUISITION_TIME"], "%Y%m%d%H%M") def get_area_extent(cfac, lfac, coff, loff, numcols, numlines): diff --git a/satpy/readers/nwcsaf_nc.py b/satpy/readers/nwcsaf_nc.py index e9809bdce5..64a284200d 100644 --- a/satpy/readers/nwcsaf_nc.py +++ b/satpy/readers/nwcsaf_nc.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Nowcasting SAF common PPS&MSG NetCDF/CF format reader. 
References: @@ -22,11 +23,11 @@ """ +import datetime as dt import functools import logging import os from contextlib import suppress -from datetime import datetime import dask.array as da import numpy as np @@ -435,9 +436,9 @@ def read_nwcsaf_time(time_value): try: # MSG: try: - return datetime.strptime(time_value, "%Y-%m-%dT%H:%M:%SZ") + return dt.datetime.strptime(time_value, "%Y-%m-%dT%H:%M:%SZ") except TypeError: # Remove this in summer 2024 (this is not needed since h5netcdf 0.14) - return datetime.strptime(time_value.astype(str), "%Y-%m-%dT%H:%M:%SZ") + return dt.datetime.strptime(time_value.astype(str), "%Y-%m-%dT%H:%M:%SZ") except ValueError: # PPS: - return datetime.strptime(time_value, "%Y%m%dT%H%M%S%fZ") + return dt.datetime.strptime(time_value, "%Y%m%dT%H%M%S%fZ") diff --git a/satpy/readers/oceancolorcci_l3_nc.py b/satpy/readers/oceancolorcci_l3_nc.py index 075e885b36..d38e91c9e6 100644 --- a/satpy/readers/oceancolorcci_l3_nc.py +++ b/satpy/readers/oceancolorcci_l3_nc.py @@ -23,8 +23,9 @@ are supported and both the merged product files (OC_PRODUCTS) and single product (RRS, CHLOR_A, IOP, K_490) are supported. 
""" + +import datetime as dt import logging -from datetime import datetime import dask.array as da import numpy as np @@ -41,7 +42,7 @@ class OCCCIFileHandler(NetCDF4FileHandler): @staticmethod def _parse_datetime(datestr): """Parse datetime.""" - return datetime.strptime(datestr, "%Y%m%d%H%MZ") + return dt.datetime.strptime(datestr, "%Y%m%d%H%MZ") @property def start_time(self): diff --git a/satpy/readers/olci_nc.py b/satpy/readers/olci_nc.py index 84b21c3284..a6637b4b8e 100644 --- a/satpy/readers/olci_nc.py +++ b/satpy/readers/olci_nc.py @@ -70,17 +70,27 @@ class BitFlags: def __init__(self, value, flag_list=None): """Init the flags.""" self._value = value - flag_list = flag_list or ["INVALID", "WATER", "LAND", "CLOUD", "SNOW_ICE", - "INLAND_WATER", "TIDAL", "COSMETIC", "SUSPECT", - "HISOLZEN", "SATURATED", "MEGLINT", "HIGHGLINT", - "WHITECAPS", "ADJAC", "WV_FAIL", "PAR_FAIL", - "AC_FAIL", "OC4ME_FAIL", "OCNN_FAIL", - "Extra_1", - "KDM_FAIL", - "Extra_2", - "CLOUD_AMBIGUOUS", "CLOUD_MARGIN", "BPAC_ON", "WHITE_SCATT", - "LOWRW", "HIGHRW"] - self.meaning = {f: i for i, f in enumerate(flag_list)} + + if flag_list is None: + try: + meanings = value.attrs["flag_meanings"].split() + masks = value.attrs["flag_masks"] + except (AttributeError, KeyError): + meanings = ["INVALID", "WATER", "LAND", "CLOUD", "SNOW_ICE", + "INLAND_WATER", "TIDAL", "COSMETIC", "SUSPECT", + "HISOLZEN", "SATURATED", "MEGLINT", "HIGHGLINT", + "WHITECAPS", "ADJAC", "WV_FAIL", "PAR_FAIL", + "AC_FAIL", "OC4ME_FAIL", "OCNN_FAIL", + "Extra_1", + "KDM_FAIL", + "Extra_2", + "CLOUD_AMBIGUOUS", "CLOUD_MARGIN", "BPAC_ON", "WHITE_SCATT", + "LOWRW", "HIGHRW"] + self.meaning = {meaning: mask for mask, meaning in enumerate(meanings)} + else: + self.meaning = {meaning: int(np.log2(mask)) for meaning, mask in zip(meanings, masks)} + else: + self.meaning = {meaning: mask for mask, meaning in enumerate(flag_list)} def __getitem__(self, item): """Get the item.""" diff --git a/satpy/readers/omps_edr.py 
b/satpy/readers/omps_edr.py index 5421ae2cd2..12ef7d0ce4 100644 --- a/satpy/readers/omps_edr.py +++ b/satpy/readers/omps_edr.py @@ -15,16 +15,18 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Interface to OMPS EDR format.""" + +import datetime as dt import logging -from datetime import datetime, timedelta import numpy as np from satpy.readers.hdf5_utils import HDF5FileHandler -NO_DATE = datetime(1958, 1, 1) -EPSILON_TIME = timedelta(days=2) +NO_DATE = dt.datetime(1958, 1, 1) +EPSILON_TIME = dt.timedelta(days=2) LOG = logging.getLogger(__name__) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 56d4773a43..1356471524 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -15,8 +15,8 @@ # satpy. If not, see . """A reader for OSI-SAF level 3 products in netCDF format.""" +import datetime as dt import logging -from datetime import datetime from satpy.readers.netcdf_utils import NetCDF4FileHandler @@ -197,7 +197,7 @@ def _get_platname(self): def _parse_datetime(datestr): for dt_format in ("%Y-%m-%d %H:%M:%S","%Y%m%dT%H%M%SZ", "%Y-%m-%dT%H:%M:%SZ"): try: - return datetime.strptime(datestr, dt_format) + return dt.datetime.strptime(datestr, dt_format) except ValueError: continue raise ValueError(f"Unsupported date format: {datestr}") diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 19e5396b61..a5ec535462 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -36,6 +36,11 @@ import functools import logging +import warnings +from collections import defaultdict +from datetime import timezone as tz +from functools import cached_property +from pathlib import Path from threading import Lock import defusedxml.ElementTree as ET @@ -43,10 +48,15 @@ import rasterio import xarray as xr from dask import array as da -from dask.base import tokenize +from geotiepoints.geointerpolator import lonlat2xyz, xyz2lonlat 
+from geotiepoints.interpolator import MultipleGridInterpolator from xarray import DataArray +from satpy.dataset.data_dict import DatasetDict +from satpy.dataset.dataid import DataID +from satpy.readers import open_file_or_filename from satpy.readers.file_handlers import BaseFileHandler +from satpy.readers.yaml_reader import GenericYAMLReader from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) @@ -92,21 +102,18 @@ class SAFEXML(BaseFileHandler): """XML file reader for the SAFE format.""" def __init__(self, filename, filename_info, filetype_info, - header_file=None): + header_file=None, image_shape=None): """Init the xml filehandler.""" - super(SAFEXML, self).__init__(filename, filename_info, filetype_info) + super().__init__(filename, filename_info, filetype_info) - self._start_time = filename_info["start_time"] - self._end_time = filename_info["end_time"] + self._start_time = filename_info["start_time"].replace(tzinfo=tz.utc) + self._end_time = filename_info["end_time"].replace(tzinfo=tz.utc) self._polarization = filename_info["polarization"] - self.root = ET.parse(self.filename) - self.hdr = {} - if header_file is not None: - self.hdr = header_file.get_metadata() - else: - self.hdr = self.get_metadata() - self._image_shape = (self.hdr["product"]["imageAnnotation"]["imageInformation"]["numberOfLines"], - self.hdr["product"]["imageAnnotation"]["imageInformation"]["numberOfSamples"]) + if isinstance(self.filename, str): + self.filename = Path(self.filename) + with self.filename.open() as fd: + self.root = ET.parse(fd) + self._image_shape = image_shape def get_metadata(self): """Convert the xml metadata to dict.""" @@ -133,6 +140,14 @@ def __init__(self, filename, filename_info, filetype_info, self.get_incidence_angle = functools.lru_cache(maxsize=10)( self._get_incidence_angle_uncached ) + self.hdr = self.get_metadata() + self._image_shape = (self.hdr["product"]["imageAnnotation"]["imageInformation"]["numberOfLines"], + 
self.hdr["product"]["imageAnnotation"]["imageInformation"]["numberOfSamples"]) + + @property + def image_shape(self): + """Return the image shape of this dataset.""" + return self._image_shape def get_dataset(self, key, info, chunks=None): """Load a dataset.""" @@ -148,13 +163,13 @@ def _get_incidence_angle_uncached(self, chunks): return incidence_angle.expand(self._image_shape, chunks=chunks) -class SAFEXMLCalibration(SAFEXML): +class Calibrator(SAFEXML): """XML file reader for the SAFE format, Calibration file.""" def __init__(self, filename, filename_info, filetype_info, - header_file=None): + header_file=None, image_shape=None): """Init the XML calibration reader.""" - super().__init__(filename, filename_info, filetype_info, header_file) + super().__init__(filename, filename_info, filetype_info, header_file, image_shape) self.get_calibration = functools.lru_cache(maxsize=10)( self._get_calibration_uncached ) @@ -182,14 +197,22 @@ def _get_calibration_vector(self, calibration_name, chunks): calibration_vector = XMLArray(self.root, ".//calibrationVector", calibration_name) return calibration_vector.expand(self._image_shape, chunks=chunks) + def __call__(self, dn, calibration_type, chunks=None): + """Calibrate the data.""" + logger.debug("Reading calibration data.") + cal = self.get_calibration(calibration_type, chunks=chunks) + cal_constant = self.get_calibration_constant() + logger.debug("Calibrating.") + data = ((dn + cal_constant) / (cal ** 2)).clip(min=0) + return data -class SAFEXMLNoise(SAFEXML): +class Denoiser(SAFEXML): """XML file reader for the SAFE format, Noise file.""" def __init__(self, filename, filename_info, filetype_info, - header_file=None): + header_file=None, image_shape=None): """Init the xml filehandler.""" - super().__init__(filename, filename_info, filetype_info, header_file) + super().__init__(filename, filename_info, filetype_info, header_file, image_shape) self.azimuth_noise_reader = AzimuthNoiseReader(self.root, self._image_shape) 
self.get_noise_correction = functools.lru_cache(maxsize=10)( @@ -223,6 +246,14 @@ def read_range_noise_array(self, chunks): range_noise = XMLArray(self.root, ".//noiseRangeVector", "noiseRangeLut") return range_noise.expand(self._image_shape, chunks) + def __call__(self, dn, chunks): + """Denoise the data.""" + logger.debug("Reading noise data.") + noise = self.get_noise_correction(chunks=chunks).fillna(0) + dn = dn - noise + return dn + + class AzimuthNoiseReader: """Class to parse and read azimuth-noise data. @@ -360,13 +391,6 @@ def _fill_dask_pieces(dask_pieces, shape, chunks): dask_pieces.append(new_piece) -def interpolate_slice(slice_rows, slice_cols, interpolator): - """Interpolate the given slice of the larger array.""" - fine_rows = np.arange(slice_rows.start, slice_rows.stop, slice_rows.step) - fine_cols = np.arange(slice_cols.start, slice_cols.stop, slice_cols.step) - return interpolator(fine_cols, fine_rows) - - class _AzimuthBlock: """Implementation of an single azimuth-noise block.""" @@ -479,37 +503,6 @@ def interpolate_xml_array(self, shape, chunks): return interpolate_xarray_linear(xpoints, ypoints, self.data, shape, chunks=chunks) -def interpolate_xarray(xpoints, ypoints, values, shape, - blocksize=CHUNK_SIZE): - """Interpolate, generating a dask array.""" - from scipy.interpolate import RectBivariateSpline - - vchunks = range(0, shape[0], blocksize) - hchunks = range(0, shape[1], blocksize) - - token = tokenize(blocksize, xpoints, ypoints, values, shape) - name = "interpolate-" + token - - spline = RectBivariateSpline(xpoints, ypoints, values.T) - - def interpolator(xnew, ynew): - """Interpolator function.""" - return spline(xnew, ynew).T - - dskx = {(name, i, j): (interpolate_slice, - slice(vcs, min(vcs + blocksize, shape[0])), - slice(hcs, min(hcs + blocksize, shape[1])), - interpolator) - for i, vcs in enumerate(vchunks) - for j, hcs in enumerate(hchunks) - } - - res = da.Array(dskx, name, shape=list(shape), - chunks=(blocksize, blocksize), - 
dtype=values.dtype) - return DataArray(res, dims=("y", "x")) - - def intp(grid_x, grid_y, interpolator): """Interpolate.""" return interpolator((grid_y, grid_x)) @@ -547,24 +540,20 @@ class SAFEGRD(BaseFileHandler): block size. """ - def __init__(self, filename, filename_info, filetype_info, calfh, noisefh, annotationfh): + def __init__(self, filename, filename_info, filetype_info, calibrator, denoiser): """Init the grd filehandler.""" - super(SAFEGRD, self).__init__(filename, filename_info, - filetype_info) - - self._start_time = filename_info["start_time"] - self._end_time = filename_info["end_time"] + super().__init__(filename, filename_info, filetype_info) + self._start_time = filename_info["start_time"].replace(tzinfo=tz.utc) + self._end_time = filename_info["end_time"].replace(tzinfo=tz.utc) self._polarization = filename_info["polarization"] self._mission_id = filename_info["mission_id"] - self.calibration = calfh - self.noise = noisefh - self.annotation = annotationfh + self.calibrator = calibrator + self.denoiser = denoiser self.read_lock = Lock() - self.filehandle = rasterio.open(self.filename, "r", sharing=False) self.get_lonlatalts = functools.lru_cache(maxsize=2)( self._get_lonlatalts_uncached ) @@ -585,11 +574,7 @@ def get_dataset(self, key, info): data.attrs.update(info) else: - data = xr.open_dataset(self.filename, engine="rasterio", - chunks={"band": 1, "y": CHUNK_SIZE, "x": CHUNK_SIZE})["band_data"].squeeze() - data = data.assign_coords(x=np.arange(len(data.coords["x"])), - y=np.arange(len(data.coords["y"]))) - data = self._calibrate_and_denoise(data, key) + data = self._calibrate_and_denoise(self._data, key) data.attrs.update(info) data.attrs.update({"platform_name": self._mission_id}) @@ -597,6 +582,17 @@ def get_dataset(self, key, info): return data + @cached_property + def _data(self): + data = xr.open_dataarray(open_file_or_filename(self.filename, mode="rb"), engine="rasterio", + chunks="auto" + ).squeeze() + self.chunks = data.data.chunksize 
+ data = data.assign_coords(x=np.arange(len(data.coords["x"])), + y=np.arange(len(data.coords["y"]))) + + return data + @staticmethod def _change_quantity(data, quantity): """Change quantity to dB if needed.""" @@ -610,11 +606,9 @@ def _change_quantity(data, quantity): def _calibrate_and_denoise(self, data, key): """Calibrate and denoise the data.""" - chunks = CHUNK_SIZE - dn = self._get_digital_number(data) - dn = self._denoise(dn, chunks) - data = self._calibrate(dn, chunks, key) + dn = self.denoiser(dn, self.chunks) + data = self.calibrator(dn, key["calibration"], self.chunks) return data @@ -625,22 +619,6 @@ def _get_digital_number(self, data): dn = data * data return dn - def _denoise(self, dn, chunks): - """Denoise the data.""" - logger.debug("Reading noise data.") - noise = self.noise.get_noise_correction(chunks=chunks).fillna(0) - dn = dn - noise - return dn - - def _calibrate(self, dn, chunks, key): - """Calibrate the data.""" - logger.debug("Reading calibration data.") - cal = self.calibration.get_calibration(key["calibration"], chunks=chunks) - cal_constant = self.calibration.get_calibration_constant() - logger.debug("Calibrating.") - data = ((dn + cal_constant) / (cal ** 2)).clip(min=0) - return data - def _get_lonlatalts_uncached(self): """Obtain GCPs and construct latitude and longitude arrays. @@ -650,16 +628,19 @@ def _get_lonlatalts_uncached(self): Returns: coordinates (tuple): A tuple with longitude and latitude arrays """ - band = self.filehandle + shape = self._data.shape (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), (gcps, crs) = self.get_gcps() - # FIXME: do interpolation on cartesian coordinates if the area is - # problematic. 
+ fine_points = [np.arange(size) for size in shape] + x, y, z = lonlat2xyz(gcp_lons, gcp_lats) + interpolator = MultipleGridInterpolator((ypoints, xpoints), x, y, z, gcp_alts) + hx, hy, hz, altitudes = interpolator.interpolate(fine_points, method="cubic", chunks=self.chunks) + longitudes, latitudes = xyz2lonlat(hx, hy, hz) - longitudes = interpolate_xarray(xpoints, ypoints, gcp_lons, band.shape) - latitudes = interpolate_xarray(xpoints, ypoints, gcp_lats, band.shape) - altitudes = interpolate_xarray(xpoints, ypoints, gcp_alts, band.shape) + altitudes = xr.DataArray(altitudes, dims=["y", "x"]) + longitudes = xr.DataArray(longitudes, dims=["y", "x"]) + latitudes = xr.DataArray(latitudes, dims=["y", "x"]) longitudes.attrs["gcps"] = gcps longitudes.attrs["crs"] = crs @@ -682,9 +663,12 @@ def get_gcps(self): gcp_coords (tuple): longitude and latitude 1d arrays """ - gcps = self.filehandle.gcps + gcps = self._data.coords["spatial_ref"].attrs["gcps"] + crs = self._data.rio.crs - gcp_array = np.array([(p.row, p.col, p.x, p.y, p.z) for p in gcps[0]]) + gcp_list = [(feature["properties"]["row"], feature["properties"]["col"], *feature["geometry"]["coordinates"]) + for feature in gcps["features"]] + gcp_array = np.array(gcp_list) ypoints = np.unique(gcp_array[:, 0]) xpoints = np.unique(gcp_array[:, 1]) @@ -693,7 +677,9 @@ def get_gcps(self): gcp_lats = gcp_array[:, 3].reshape(ypoints.shape[0], xpoints.shape[0]) gcp_alts = gcp_array[:, 4].reshape(ypoints.shape[0], xpoints.shape[0]) - return (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), gcps + rio_gcps = [rasterio.control.GroundControlPoint(*gcp) for gcp in gcp_list] + + return (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), (rio_gcps, crs) @property def start_time(self): @@ -704,3 +690,109 @@ def start_time(self): def end_time(self): """Get the end time.""" return self._end_time + + +class SAFESARReader(GenericYAMLReader): + """A reader for SAFE SAR-C data for Sentinel 1 satellites.""" + + def __init__(self, config, 
filter_parameters=None): + """Set up the SAR reader.""" + super().__init__(config) + self.filter_parameters = filter_parameters + self.files_by_type = defaultdict(list) + self.storage_items = [] + + @property + def start_time(self): + """Get the start time.""" + return self.storage_items.values()[0].filename_info["start_time"].replace(tzinfo=tz.utc) + + @property + def end_time(self): + """Get the end time.""" + return self.storage_items.values()[0].filename_info["end_time"].replace(tzinfo=tz.utc) + + def load(self, dataset_keys, **kwargs): + """Load some data.""" + if kwargs: + warnings.warn(f"Don't know how to handle kwargs {kwargs}") + datasets = DatasetDict() + for key in dataset_keys: + for handler in self.storage_items.values(): + val = handler.get_dataset(key, info=dict()) + if val is not None: + val.attrs["start_time"] = handler.start_time + if key["name"] not in ["longitude", "latitude"]: + lonlats = self.load([DataID(self._id_keys, name="longitude", polarization=key["polarization"]), + DataID(self._id_keys, name="latitude", polarization=key["polarization"])]) + gcps = val.coords["spatial_ref"].attrs["gcps"] + from pyresample.future.geometry import SwathDefinition + val.attrs["area"] = SwathDefinition(lonlats["longitude"], lonlats["latitude"], + attrs=dict(gcps=gcps)) + datasets[key] = val + continue + return datasets + + def create_storage_items(self, files, **kwargs): + """Create the storage items.""" + self.files_by_type = self._get_files_by_type(files) + image_shapes = self._get_image_shapes() + calibrators = self._create_calibrators(image_shapes) + denoisers = self._create_denoisers(image_shapes) + measurement_handlers = self._create_measurement_handlers(calibrators, denoisers) + + self.storage_items = measurement_handlers + + + def _get_files_by_type(self, files): + files_by_type = defaultdict(list) + for file_type, type_info in self.config["file_types"].items(): + files_by_type[file_type].extend(self.filename_items_for_filetype(files, type_info)) + 
return files_by_type + + + def _get_image_shapes(self): + image_shapes = dict() + for annotation_file, annotation_info in self.files_by_type["safe_annotation"]: + annotation_fh = SAFEXMLAnnotation(annotation_file, + filename_info=annotation_info, + filetype_info=None) + image_shapes[annotation_info["polarization"]] = annotation_fh.image_shape + return image_shapes + + + def _create_calibrators(self, image_shapes): + calibrators = dict() + for calibration_file, calibration_info in self.files_by_type["safe_calibration"]: + polarization = calibration_info["polarization"] + calibrators[polarization] = Calibrator(calibration_file, + filename_info=calibration_info, + filetype_info=None, + image_shape=image_shapes[polarization]) + + return calibrators + + + def _create_denoisers(self, image_shapes): + denoisers = dict() + for noise_file, noise_info in self.files_by_type["safe_noise"]: + polarization = noise_info["polarization"] + denoisers[polarization] = Denoiser(noise_file, + filename_info=noise_info, + filetype_info=None, + image_shape=image_shapes[polarization]) + + return denoisers + + + def _create_measurement_handlers(self, calibrators, denoisers): + measurement_handlers = dict() + for measurement_file, measurement_info in self.files_by_type["safe_measurement"]: + polarization = measurement_info["polarization"] + measurement_handlers[polarization] = SAFEGRD(measurement_file, + filename_info=measurement_info, + calibrator=calibrators[polarization], + denoiser=denoisers[polarization], + filetype_info=None) + + return measurement_handlers diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index 1dfd68a206..9f742272a1 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -177,12 +177,12 @@ """ import itertools -import json import logging import xarray as xr from pyresample import AreaDefinition +import satpy.cf.decoding from satpy.dataset.dataid import WavelengthRange from satpy.readers.file_handlers import 
BaseFileHandler from satpy.utils import get_legacy_chunk_size @@ -311,9 +311,7 @@ def get_dataset(self, ds_id, ds_info): if name != ds_id["name"]: data = data.rename(ds_id["name"]) data.attrs.update(nc.attrs) # For now add global attributes to all datasets - if "orbital_parameters" in data.attrs: - data.attrs["orbital_parameters"] = _str2dict(data.attrs["orbital_parameters"]) - + data.attrs = satpy.cf.decoding.decode_attrs(data.attrs) return data def get_area_def(self, dataset_id): @@ -327,10 +325,3 @@ def get_area_def(self, dataset_id): # with the yaml_reader NotImplementedError is raised. logger.debug("No AreaDefinition to load from nc file. Falling back to SwathDefinition.") raise NotImplementedError - - -def _str2dict(val): - """Convert string to dictionary.""" - if isinstance(val, str): - val = json.loads(val) - return val diff --git a/satpy/readers/scatsat1_l2b.py b/satpy/readers/scatsat1_l2b.py index 886ce458b3..d14665759d 100644 --- a/satpy/readers/scatsat1_l2b.py +++ b/satpy/readers/scatsat1_l2b.py @@ -17,7 +17,7 @@ # type: ignore """ScatSat-1 L2B Reader, distributed by Eumetsat in HDF5 format.""" -from datetime import datetime +import datetime as dt import h5py @@ -34,8 +34,10 @@ def __init__(self, filename, filename_info, filetype_info): self.h5f = h5py.File(self.filename, "r") h5data = self.h5f["science_data"] - self.filename_info["start_time"] = datetime.strptime(h5data.attrs["Range Beginning Date"], "%Y-%jT%H:%M:%S.%f") - self.filename_info["end_time"] = datetime.strptime(h5data.attrs["Range Ending Date"], "%Y-%jT%H:%M:%S.%f") + self.filename_info["start_time"] = dt.datetime.strptime( + h5data.attrs["Range Beginning Date"], "%Y-%jT%H:%M:%S.%f") + self.filename_info["end_time"] = dt.datetime.strptime( + h5data.attrs["Range Ending Date"], "%Y-%jT%H:%M:%S.%f") self.lons = None self.lats = None diff --git a/satpy/readers/scmi.py b/satpy/readers/scmi.py index a4b8620f8b..fe19c63d8d 100644 --- a/satpy/readers/scmi.py +++ b/satpy/readers/scmi.py @@ -15,6 
+15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """SCMI NetCDF4 Reader. SCMI files are typically used for data for the ABI instrument onboard the @@ -40,9 +41,9 @@ """ +import datetime as dt import logging import os -from datetime import datetime import numpy as np import xarray as xr @@ -273,7 +274,7 @@ def get_area_def(self, key): @property def start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs["start_date_time"], "%Y%j%H%M%S") + return dt.datetime.strptime(self.nc.attrs["start_date_time"], "%Y%j%H%M%S") @property def end_time(self): diff --git a/satpy/readers/seadas_l2.py b/satpy/readers/seadas_l2.py index 03fa648330..24ee429fda 100644 --- a/satpy/readers/seadas_l2.py +++ b/satpy/readers/seadas_l2.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reader for SEADAS L2 products. This reader currently only supports MODIS and VIIRS Chlorophyll A from SEADAS. 
@@ -28,7 +29,7 @@ """ -from datetime import datetime +import datetime as dt from .hdf4_utils import HDF4FileHandler from .netcdf_utils import NetCDF4FileHandler @@ -66,13 +67,13 @@ def _platform_name(self): def start_time(self): """Get the starting observation time of this file's data.""" start_time = self[self.start_time_attr_name] - return datetime.strptime(start_time[:-3], self.time_format) + return dt.datetime.strptime(start_time[:-3], self.time_format) @property def end_time(self): """Get the ending observation time of this file's data.""" end_time = self[self.end_time_attr_name] - return datetime.strptime(end_time[:-3], self.time_format) + return dt.datetime.strptime(end_time[:-3], self.time_format) @property def sensor_names(self): diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index d2ed5c3847..1207f6a55d 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Common functionality for SEVIRI L1.5 data readers. Introduction @@ -186,8 +187,8 @@ """ from __future__ import annotations +import datetime as dt import warnings -from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -389,7 +390,7 @@ # To obtain the slope for the calibration, one should use the routine get_seviri_meirink_slope # Epoch for the MEIRINK re-calibration -MEIRINK_EPOCH = datetime(2000, 1, 1) +MEIRINK_EPOCH = dt.datetime(2000, 1, 1) MEIRINK_COEFS: dict[str, dict[int, dict[str, tuple[float, float]]]] = {} MEIRINK_COEFS["2023"] = {} @@ -1095,17 +1096,17 @@ def mask_bad_quality(data, line_validity, line_geometric_quality, line_radiometr return data -def round_nom_time(dt, time_delta): +def round_nom_time(date, time_delta): """Round a datetime object to a multiple of a timedelta. - dt : datetime.datetime object, default now. + date : datetime.datetime object, default now. 
time_delta : timedelta object, we round to a multiple of this, default 1 minute. adapted for SEVIRI from: https://stackoverflow.com/questions/3463930/how-to-round-the-minute-of-a-datetime-object-python """ - seconds = (dt - dt.min).seconds + seconds = (date - date.min).seconds round_to = time_delta.total_seconds() rounding = (seconds + round_to / 2) // round_to * round_to - return dt + timedelta(0, rounding - seconds, - dt.microsecond) + return date + dt.timedelta(0, rounding - seconds, - date.microsecond) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 3b3aa82277..f65faa8ecc 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + r"""SEVIRI Level 1.5 HRIT format reader. Introduction @@ -213,8 +214,8 @@ from __future__ import division import copy +import datetime as dt import logging -from datetime import timedelta import dask.array as da import numpy as np @@ -528,14 +529,14 @@ def nominal_start_time(self): """Get the start time and round it according to scan law.""" tm = self.prologue["ImageAcquisition"][ "PlannedAcquisitionTime"]["TrueRepeatCycleStart"] - return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) + return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration)) @property def nominal_end_time(self): """Get the end time and round it according to scan law.""" tm = self.prologue["ImageAcquisition"][ "PlannedAcquisitionTime"]["PlannedRepeatCycleEnd"] - return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) + return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration)) @property def observation_start_time(self): @@ -765,8 +766,8 @@ def _update_attrs(self, res, info): res.attrs["standard_name"] = info["standard_name"] res.attrs["platform_name"] = 
self.platform_name res.attrs["sensor"] = "seviri" - res.attrs["nominal_start_time"] = self.nominal_start_time, - res.attrs["nominal_end_time"] = self.nominal_end_time, + res.attrs["nominal_start_time"] = self.nominal_start_time + res.attrs["nominal_end_time"] = self.nominal_end_time res.attrs["time_parameters"] = { "nominal_start_time": self.nominal_start_time, "nominal_end_time": self.nominal_end_time, diff --git a/satpy/readers/seviri_l1b_icare.py b/satpy/readers/seviri_l1b_icare.py index 2024c46532..4d3243f5c8 100644 --- a/satpy/readers/seviri_l1b_icare.py +++ b/satpy/readers/seviri_l1b_icare.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + r"""Interface to SEVIRI L1B data from ICARE (Lille). Introduction @@ -69,7 +70,8 @@ ancillary_variables: [] """ -from datetime import datetime + +import datetime as dt import numpy as np @@ -169,9 +171,9 @@ def end_time(self): attr = str(attr.astype(str)) # In some versions milliseconds are present, sometimes not. try: - endacq = datetime.strptime(attr, "%Y-%m-%dT%H:%M:%SZ") + endacq = dt.datetime.strptime(attr, "%Y-%m-%dT%H:%M:%SZ") except ValueError: - endacq = datetime.strptime(attr, "%Y-%m-%dT%H:%M:%S.%fZ") + endacq = dt.datetime.strptime(attr, "%Y-%m-%dT%H:%M:%S.%fZ") return endacq @property @@ -182,9 +184,9 @@ def start_time(self): attr = str(attr.astype(str)) # In some versions milliseconds are present, sometimes not. 
try: - stacq = datetime.strptime(attr, "%Y-%m-%dT%H:%M:%SZ") + stacq = dt.datetime.strptime(attr, "%Y-%m-%dT%H:%M:%SZ") except ValueError: - stacq = datetime.strptime(attr, "%Y-%m-%dT%H:%M:%S.%fZ") + stacq = dt.datetime.strptime(attr, "%Y-%m-%dT%H:%M:%S.%fZ") return stacq @property diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index 361dd1bb50..976cb7c338 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + r"""SEVIRI Level 1.5 native format reader. Introduction @@ -97,9 +98,9 @@ https://www-cdn.eumetsat.int/files/2020-04/pdf_fg15_msg-native-format-15.pdf """ +import datetime as dt import logging import warnings -from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -207,13 +208,13 @@ def _repeat_cycle_duration(self): def nominal_start_time(self): """Get the repeat cycle nominal start time from file header and round it to expected nominal time slot.""" tm = self.header["15_DATA_HEADER"]["ImageAcquisition"]["PlannedAcquisitionTime"]["TrueRepeatCycleStart"] - return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) + return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration)) @property def nominal_end_time(self): """Get the repeat cycle nominal end time from file header and round it to expected nominal time slot.""" tm = self.header["15_DATA_HEADER"]["ImageAcquisition"]["PlannedAcquisitionTime"]["PlannedRepeatCycleEnd"] - return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) + return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration)) @property def observation_start_time(self): @@ -609,7 +610,7 @@ def _get_hrv_channel(self): def calibrate(self, data, dataset_id): """Calibrate the data.""" - tic = datetime.now() + tic = 
dt.datetime.now() channel_name = dataset_id["name"] calib = SEVIRICalibrationHandler( platform_id=self.platform_id, @@ -619,7 +620,7 @@ def calibrate(self, data, dataset_id): scan_time=self.observation_start_time ) res = calib.calibrate(data, dataset_id["calibration"]) - logger.debug("Calibration time " + str(datetime.now() - tic)) + logger.debug("Calibration time " + str(dt.datetime.now() - tic)) return res def _get_calib_coefs(self, channel_name): diff --git a/satpy/readers/seviri_l1b_nc.py b/satpy/readers/seviri_l1b_nc.py index 22b55eceda..fd19634fda 100644 --- a/satpy/readers/seviri_l1b_nc.py +++ b/satpy/readers/seviri_l1b_nc.py @@ -17,9 +17,8 @@ # satpy. If not, see . """SEVIRI netcdf format reader.""" -import datetime +import datetime as dt import logging -from datetime import timedelta import numpy as np @@ -67,7 +66,7 @@ def __init__(self, filename, filename_info, filetype_info, self.ext_calib_coefs = ext_calib_coefs or {} self.mask_bad_quality_scan_lines = mask_bad_quality_scan_lines self.mda = {} - self.reference = datetime.datetime(1958, 1, 1) + self.reference = dt.datetime(1958, 1, 1) self.get_metadata() @property @@ -82,13 +81,13 @@ def _repeat_cycle_duration(self): def nominal_start_time(self): """Read the repeat cycle nominal start time from metadata and round it to expected nominal time slot.""" tm = self.deltaSt - return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) + return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration)) @property def nominal_end_time(self): """Read the repeat cycle nominal end time from metadata and round it to expected nominal time slot.""" tm = self.deltaEnd - return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) + return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration)) @property def observation_start_time(self): @@ -146,11 +145,11 @@ def get_metadata(self): # self.mda['hrv_number_of_lines'] = 
int(self.nc.dims['num_rows_hrv']) # self.mda['hrv_number_of_columns'] = int(self.nc.dims['num_columns_hrv']) - self.deltaSt = self.reference + datetime.timedelta( + self.deltaSt = self.reference + dt.timedelta( days=int(self.nc.attrs["true_repeat_cycle_start_day"]), milliseconds=int(self.nc.attrs["true_repeat_cycle_start_mi_sec"])) - self.deltaEnd = self.reference + datetime.timedelta( + self.deltaEnd = self.reference + dt.timedelta( days=int(self.nc.attrs["planned_repeat_cycle_end_day"]), milliseconds=int(self.nc.attrs["planned_repeat_cycle_end_mi_sec"])) diff --git a/satpy/readers/seviri_l2_bufr.py b/satpy/readers/seviri_l2_bufr.py index 02aa0c2767..a48a7e00d6 100644 --- a/satpy/readers/seviri_l2_bufr.py +++ b/satpy/readers/seviri_l2_bufr.py @@ -23,8 +23,9 @@ https://navigator.eumetsat.int/ """ + +import datetime as dt import logging -from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -95,7 +96,7 @@ def __init__(self, filename, filename_info, filetype_info, with_area_definition= else: # Product was retrieved from the EUMETSAT Data Center timeStr = self.get_attribute("typicalDate")+self.get_attribute("typicalTime") - buf_start_time = datetime.strptime(timeStr, "%Y%m%d%H%M%S") + buf_start_time = dt.datetime.strptime(timeStr, "%Y%m%d%H%M%S") sc_id = self.get_attribute("satelliteIdentifier") self.mpef_header = {} self.mpef_header["NominalTime"] = buf_start_time @@ -120,7 +121,7 @@ def start_time(self): @property def end_time(self): """Return the repeat cycle end time.""" - return self.start_time + timedelta(minutes=15) + return self.start_time + dt.timedelta(minutes=15) @property def platform_name(self): diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 1e2a64f783..079f93d2f3 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -13,6 +13,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """GCOM-C SGLI L1b reader. 
GCOM-C has an imager instrument: SGLI @@ -27,8 +28,8 @@ """ +import datetime as dt import logging -from datetime import datetime import dask.array as da import h5py @@ -63,13 +64,13 @@ def __init__(self, filename, filename_info, filetype_info): def start_time(self): """Get the start time.""" the_time = self.h5file["Global_attributes"].attrs["Scene_start_time"].item() - return datetime.strptime(the_time.decode("ascii"), "%Y%m%d %H:%M:%S.%f") + return dt.datetime.strptime(the_time.decode("ascii"), "%Y%m%d %H:%M:%S.%f") @property def end_time(self): """Get the end time.""" the_time = self.h5file["Global_attributes"].attrs["Scene_end_time"].item() - return datetime.strptime(the_time.decode("ascii"), "%Y%m%d %H:%M:%S.%f") + return dt.datetime.strptime(the_time.decode("ascii"), "%Y%m%d %H:%M:%S.%f") def get_dataset(self, key, info): """Get the dataset from the file.""" diff --git a/satpy/readers/slstr_l1b.py b/satpy/readers/slstr_l1b.py index 02aae9f72b..3353ade4d3 100644 --- a/satpy/readers/slstr_l1b.py +++ b/satpy/readers/slstr_l1b.py @@ -15,13 +15,14 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """SLSTR L1b reader.""" +import datetime as dt import logging import os import re import warnings -from datetime import datetime import dask.array as da import numpy as np @@ -95,12 +96,12 @@ def get_dataset(self, key, info): @property def start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" - return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") class NCSLSTR1B(BaseFileHandler): @@ -224,12 +225,12 @@ def get_dataset(self, key, info): @property def start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" - return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") class NCSLSTRAngles(BaseFileHandler): @@ -326,12 +327,12 @@ def get_dataset(self, key, info): @property def start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" - return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") class NCSLSTRFlag(BaseFileHandler): @@ -376,9 +377,9 @@ def get_dataset(self, key, info): @property def start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") + return 
dt.datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" - return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") diff --git a/satpy/readers/smos_l2_wind.py b/satpy/readers/smos_l2_wind.py index 4a909ee2e4..c4d349ea67 100644 --- a/satpy/readers/smos_l2_wind.py +++ b/satpy/readers/smos_l2_wind.py @@ -24,8 +24,8 @@ SMOS_WIND_DS_PDD_20191107_signed.pdf """ +import datetime as dt import logging -from datetime import datetime import numpy as np from pyresample.geometry import AreaDefinition @@ -41,12 +41,12 @@ class SMOSL2WINDFileHandler(NetCDF4FileHandler): @property def start_time(self): """Get start time.""" - return datetime.strptime(self["/attr/time_coverage_start"], "%Y-%m-%dT%H:%M:%S Z") + return dt.datetime.strptime(self["/attr/time_coverage_start"], "%Y-%m-%dT%H:%M:%S Z") @property def end_time(self): """Get end time.""" - return datetime.strptime(self["/attr/time_coverage_end"], "%Y-%m-%dT%H:%M:%S Z") + return dt.datetime.strptime(self["/attr/time_coverage_end"], "%Y-%m-%dT%H:%M:%S Z") @property def platform_shortname(self): diff --git a/satpy/readers/tropomi_l2.py b/satpy/readers/tropomi_l2.py index 768ca70948..2d571e2f12 100644 --- a/satpy/readers/tropomi_l2.py +++ b/satpy/readers/tropomi_l2.py @@ -29,8 +29,8 @@ """ +import datetime as dt import logging -from datetime import datetime import dask.array as da import numpy as np @@ -65,12 +65,12 @@ def platform_shortname(self): @property def time_coverage_start(self): """Get time_coverage_start.""" - return datetime.strptime(self["/attr/time_coverage_start"], DATE_FMT) + return dt.datetime.strptime(self["/attr/time_coverage_start"], DATE_FMT) @property def time_coverage_end(self): """Get time_coverage_end.""" - return datetime.strptime(self["/attr/time_coverage_end"], DATE_FMT) + return 
dt.datetime.strptime(self["/attr/time_coverage_end"], DATE_FMT) @property def sensor(self): diff --git a/satpy/readers/vii_base_nc.py b/satpy/readers/vii_base_nc.py index 83056189dc..07c5f6749a 100644 --- a/satpy/readers/vii_base_nc.py +++ b/satpy/readers/vii_base_nc.py @@ -19,8 +19,8 @@ """EUMETSAT EPS-SG Visible/Infrared Imager (VII) readers base class.""" +import datetime as dt import logging -from datetime import datetime from geotiepoints.viiinterpolator import tie_points_geo_interpolation, tie_points_interpolation @@ -213,18 +213,18 @@ def _get_global_attributes(self): def start_time(self): """Get observation start time.""" try: - start_time = datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y%m%d%H%M%S.%f") + start_time = dt.datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y%m%d%H%M%S.%f") except ValueError: - start_time = datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y-%m-%d %H:%M:%S.%f") + start_time = dt.datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y-%m-%d %H:%M:%S.%f") return start_time @property def end_time(self): """Get observation end time.""" try: - end_time = datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y%m%d%H%M%S.%f") + end_time = dt.datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y%m%d%H%M%S.%f") except ValueError: - end_time = datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y-%m-%d %H:%M:%S.%f") + end_time = dt.datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y-%m-%d %H:%M:%S.%f") return end_time @property diff --git a/satpy/readers/vii_l1b_nc.py b/satpy/readers/vii_l1b_nc.py index 2dbcb63eda..804c2481fa 100644 --- a/satpy/readers/vii_l1b_nc.py +++ b/satpy/readers/vii_l1b_nc.py @@ -47,7 +47,7 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): self._bt_conversion_a = self["data/calibration_data/bt_conversion_a"].values self._bt_conversion_b = self["data/calibration_data/bt_conversion_b"].values self._channel_cw_thermal = 
self["data/calibration_data/channel_cw_thermal"].values - self._integrated_solar_irradiance = self["data/calibration_data/Band_averaged_solar_irradiance"].values + self._integrated_solar_irradiance = self["data/calibration_data/band_averaged_solar_irradiance"].values # Computes the angle factor for reflectance calibration as inverse of cosine of solar zenith angle # (the values in the product file are on tie points and in degrees, # therefore interpolation and conversion to radians are required) diff --git a/satpy/readers/viirs_atms_sdr_base.py b/satpy/readers/viirs_atms_sdr_base.py index 159a84a070..b55368e92b 100644 --- a/satpy/readers/viirs_atms_sdr_base.py +++ b/satpy/readers/viirs_atms_sdr_base.py @@ -18,8 +18,8 @@ """Common utilities for reading VIIRS and ATMS SDR data.""" +import datetime as dt import logging -from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -27,8 +27,8 @@ from satpy.readers.hdf5_utils import HDF5FileHandler -NO_DATE = datetime(1958, 1, 1) -EPSILON_TIME = timedelta(days=2) +NO_DATE = dt.datetime(1958, 1, 1) +EPSILON_TIME = dt.timedelta(days=2) LOG = logging.getLogger(__name__) @@ -106,7 +106,7 @@ def _parse_datetime(self, datestr, timestr): timestr = str(timestr.data.compute().astype(str)) datetime_str = datestr + timestr - time_val = datetime.strptime(datetime_str, "%Y%m%d%H%M%S.%fZ") + time_val = dt.datetime.strptime(datetime_str, "%Y%m%d%H%M%S.%fZ") if abs(time_val - NO_DATE) < EPSILON_TIME: # catch rare case when SDR files have incorrect date raise ValueError("Datetime invalid {}".format(time_val)) diff --git a/satpy/readers/viirs_compact.py b/satpy/readers/viirs_compact.py index af3a4ce766..bb3bd83b71 100644 --- a/satpy/readers/viirs_compact.py +++ b/satpy/readers/viirs_compact.py @@ -29,9 +29,9 @@ """ +import datetime as dt import logging from contextlib import suppress -from datetime import datetime, timedelta import dask.array as da import h5py @@ -173,10 +173,10 @@ def start_time(self): 
@property def end_time(self): """Get the end time.""" - end_time = datetime.combine(self.start_time.date(), + end_time = dt.datetime.combine(self.start_time.date(), self.finfo["end_time"].time()) if end_time < self.start_time: - end_time += timedelta(days=1) + end_time += dt.timedelta(days=1) return end_time def read_geo(self, key, info): diff --git a/satpy/readers/viirs_l1b.py b/satpy/readers/viirs_l1b.py index 510a37165d..7dd3079dbb 100644 --- a/satpy/readers/viirs_l1b.py +++ b/satpy/readers/viirs_l1b.py @@ -17,8 +17,8 @@ # satpy. If not, see . """Interface to VIIRS L1B format.""" +import datetime as dt import logging -from datetime import datetime import numpy as np @@ -32,7 +32,7 @@ class VIIRSL1BFileHandler(NetCDF4FileHandler): def _parse_datetime(self, datestr): """Parse datetime.""" - return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%S.000Z") + return dt.datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%S.000Z") @property def start_orbit_number(self): diff --git a/satpy/readers/viirs_l2.py b/satpy/readers/viirs_l2.py new file mode 100644 index 0000000000..7a54b3e10c --- /dev/null +++ b/satpy/readers/viirs_l2.py @@ -0,0 +1,177 @@ +"""Interface to VIIRS L2 format. + +This reader implements the support of L2 files generated using the VIIRS instrument on SNPP and NOAA satellite files. +The intent of this reader is to be able to reproduce images from L2 layers in NASA worldview with identical colormaps. + +Currently a subset of four of these layers are supported +1. Deep Blue Aerosol Angstrom Exponent (Land and Ocean) +2. Clear Sky Confidence +3. Cloud Top Height +4. 
Deep Blue Aerosol Optical Thickness (Land and Ocean) +""" + +import datetime as dt +import logging + +import numpy as np + +from satpy.readers.netcdf_utils import NetCDF4FileHandler + +LOG = logging.getLogger(__name__) + + +class VIIRSL2FileHandler(NetCDF4FileHandler): + """NetCDF File Handler for VIIRS L2 Products.""" + def _parse_datetime(self, datestr): + """Parse datetime.""" + return dt.datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%S.000Z") + + @property + def start_time(self): + """Get start time.""" + return self._parse_datetime(self["/attr/time_coverage_start"]) + + @property + def end_time(self): + """Get end time.""" + return self._parse_datetime(self["/attr/time_coverage_end"]) + + @property + def start_orbit_number(self): + """Get start orbit number.""" + try: + return int(self["/attr/orbit_number"]) + except KeyError: + return int(self["/attr/OrbitNumber"]) + + @property + def end_orbit_number(self): + """Get end orbit number.""" + try: + return int(self["/attr/orbit_number"]) + except KeyError: + return int(self["/attr/OrbitNumber"]) + + @property + def platform_name(self): + """Get platform name.""" + try: + res = self.get("/attr/platform", self.filename_info["platform_shortname"]) + except KeyError: + res = "Unknown" + + return { + "JPSS-1": "NOAA-20", + "NP": "Suomi-NPP", + "J1": "NOAA-20", + "J2": "NOAA-21", + "JPSS-2": "NOAA-21", + }.get(res, res) + + @property + def sensor_name(self): + """Get sensor name.""" + return self["/attr/instrument"].lower() + + def _get_dataset_file_units(self, ds_info, var_path): + file_units = ds_info.get("units") + if file_units is None: + file_units = self.get(var_path + "/attr/units") + if file_units == "none" or file_units == "None": + file_units = "1" + return file_units + + def _get_dataset_valid_range(self, ds_info, var_path): + valid_min = self.get(var_path + "/attr/valid_min") + valid_max = self.get(var_path + "/attr/valid_max") + if not valid_min and not valid_max: + valid_range = self.get(var_path + 
"/attr/valid_range") + if valid_range is not None: + valid_min = valid_range[0] + valid_max = valid_range[1] + scale_factor = self.get(var_path + "/attr/scale_factor") + scale_offset = self.get(var_path + "/attr/add_offset") + return valid_min, valid_max, scale_factor, scale_offset + + def get_metadata(self, dataset_id, ds_info): + """Get metadata.""" + var_path = ds_info.get("file_key", ds_info["name"]) + file_units = self._get_dataset_file_units(ds_info, var_path) + + # Get extra metadata + i = getattr(self[var_path], "attrs", {}) + i.update(ds_info) + i.update(dataset_id.to_dict()) + i.update( + { + "file_units": file_units, + "platform_name": self.platform_name, + "sensor": self.sensor_name, + "start_orbit": self.start_orbit_number, + "end_orbit": self.end_orbit_number, + } + ) + i.update(dataset_id.to_dict()) + return i + + def adjust_scaling_factors(self, factors, file_units, output_units): + """Adjust scaling factors.""" + if factors is None or factors[0] is None: + factors = [1, 0] + if file_units == output_units: + LOG.debug("File units and output units are the same (%s)", file_units) + return factors + factors = np.array(factors) + return factors + + def available_datasets(self, configured_datasets=None): + """Generate dataset info and their availablity. + + See + :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets` + for details. 
+ + """ + for is_avail, ds_info in configured_datasets or []: + if is_avail is not None: + yield is_avail, ds_info + continue + ft_matches = self.file_type_matches(ds_info["file_type"]) + if ft_matches is None: + yield None, ds_info + continue + var_path = ds_info.get("file_key", ds_info["name"]) + yield var_path in self, ds_info + + def get_dataset(self, ds_id, ds_info): + """Get DataArray for specified dataset.""" + var_path = ds_info.get("file_key", ds_info["name"]) + metadata = self.get_metadata(ds_id, ds_info) + ( + valid_min, + valid_max, + scale_factor, + scale_offset, + ) = self._get_dataset_valid_range(ds_info, var_path) + data = self[var_path] + # For aerdb Longitude and Latitude datasets have coordinates + # This check is needed to work with yaml_reader + if "long_name" in metadata and metadata["long_name"] == "Longitude": + data.coords["Latitude"].attrs["standard_name"] = "latitude" + elif "long_name" in metadata and metadata["long_name"] == "Latitude": + data.coords["Longitude"].attrs["standard_name"] = "longitude" + + data.attrs.update(metadata) + if valid_min is not None and valid_max is not None: + data = data.where((data >= valid_min) & (data <= valid_max)) + factors = (scale_factor, scale_offset) + factors = self.adjust_scaling_factors( + factors, metadata["file_units"], ds_info.get("units") + ) + if factors[0] != 1 or factors[1] != 0: + data *= factors[0] + data += factors[1] + # rename dimensions to correspond to satpy's 'y' and 'x' standard + if "number_of_lines" in data.dims: + data = data.rename({"number_of_lines": "y", "number_of_pixels": "x"}) + return data diff --git a/satpy/readers/viirs_sdr.py b/satpy/readers/viirs_sdr.py index eef02f7777..28854b185d 100644 --- a/satpy/readers/viirs_sdr.py +++ b/satpy/readers/viirs_sdr.py @@ -28,10 +28,11 @@ - http://npp.gsfc.nasa.gov/science/sciencedocuments/082012/474-00001-03_CDFCBVolIII_RevC.pdf """ + +import datetime as dt import logging import os.path from contextlib import suppress -from datetime 
import datetime, timedelta from glob import glob import numpy as np @@ -39,8 +40,8 @@ from satpy.readers.viirs_atms_sdr_base import ATMS_DATASET_KEYS, DATASET_KEYS, VIIRS_DATASET_KEYS, JPSS_SDR_FileHandler from satpy.readers.yaml_reader import FileYAMLReader -NO_DATE = datetime(1958, 1, 1) -EPSILON_TIME = timedelta(days=2) +NO_DATE = dt.datetime(1958, 1, 1) +EPSILON_TIME = dt.timedelta(days=2) LOG = logging.getLogger(__name__) diff --git a/satpy/readers/viirs_vgac_l1c_nc.py b/satpy/readers/viirs_vgac_l1c_nc.py index 0fa8ddf782..2f43ffd2a2 100644 --- a/satpy/readers/viirs_vgac_l1c_nc.py +++ b/satpy/readers/viirs_vgac_l1c_nc.py @@ -15,8 +15,8 @@ # satpy. If not, see . """Reading VIIRS VGAC data.""" +import datetime as dt import logging -from datetime import datetime import numpy as np import xarray as xr @@ -63,16 +63,43 @@ def convert_to_bt(self, data, data_lut, scale_factor): def fix_radiances_not_in_percent(self, data): """Scale radiances to percent. This was not done in first version of data.""" - return 100*data + return 100 * data def set_time_attrs(self, data): """Set time from attributes.""" if "StartTime" in data.attrs: - data.attrs["start_time"] = datetime.strptime(data.attrs["StartTime"], "%Y-%m-%dT%H:%M:%S") - data.attrs["end_time"] = datetime.strptime(data.attrs["EndTime"], "%Y-%m-%dT%H:%M:%S") + data.attrs["start_time"] = dt.datetime.strptime(data.attrs["StartTime"], "%Y-%m-%dT%H:%M:%S") + data.attrs["end_time"] = dt.datetime.strptime(data.attrs["EndTime"], "%Y-%m-%dT%H:%M:%S") self._end_time = data.attrs["end_time"] self._start_time = data.attrs["start_time"] + def dt64_to_datetime(self, dt64): + """Conversion of numpy.datetime64 to datetime objects.""" + if isinstance(dt64, np.datetime64): + return dt64.astype(dt.datetime) + return dt64 + + def extract_time_data(self, data, nc): + """Decode time data.""" + reference_time = np.datetime64(dt.datetime.strptime(nc["proj_time0"].attrs["units"], + "days since %d/%m/%YT%H:%M:%S")) + delta_part_of_day, 
delta_full_days = np.modf(nc["proj_time0"].values) + delta_full_days = np.timedelta64(delta_full_days.astype(np.int64), "D").astype("timedelta64[us]") + delta_part_of_day = delta_part_of_day * np.timedelta64(1, "D").astype("timedelta64[us]") + delta_hours = data.values * np.timedelta64(1, "h").astype("timedelta64[us]") + time_data = xr.DataArray(reference_time + delta_full_days + delta_part_of_day + delta_hours, + coords=data.coords, attrs={"long_name": "Scanline time"}) + return time_data + + def decode_time_variable(self, data, file_key, nc): + """Decide if time data should be decoded.""" + if file_key != "time": + return data + if data.attrs["units"] == "hours since proj_time0": + return self.extract_time_data(data, nc) + else: + raise AttributeError('Unit of time variable in VGAC nc file is not "hours since proj_time0"') + def get_dataset(self, key, yaml_info): """Get dataset.""" logger.debug("Getting data for: %s", yaml_info["name"]) @@ -82,6 +109,7 @@ def get_dataset(self, key, yaml_info): file_key = yaml_info.get("nc_key", name) data = nc[file_key] data = self.calibrate(data, yaml_info, file_key, nc) + data = self.decode_time_variable(data, file_key, nc) data.attrs.update(nc.attrs) # For now add global attributes to all datasets data.attrs.update(yaml_info) self.set_time_attrs(data) diff --git a/satpy/readers/virr_l1b.py b/satpy/readers/virr_l1b.py index 260666ff8b..23e0339c93 100644 --- a/satpy/readers/virr_l1b.py +++ b/satpy/readers/virr_l1b.py @@ -40,8 +40,8 @@ """ +import datetime as dt import logging -from datetime import datetime import dask.array as da import numpy as np @@ -162,10 +162,10 @@ def _correct_slope(self, slope): def start_time(self): """Get starting observation time.""" start_time = self["/attr/Observing Beginning Date"] + "T" + self["/attr/Observing Beginning Time"] + "Z" - return datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get 
ending observation time.""" end_time = self["/attr/Observing Ending Date"] + "T" + self["/attr/Observing Ending Time"] + "Z" - return datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ") diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py index 5444d7e16f..5bbaba4a6c 100644 --- a/satpy/readers/yaml_reader.py +++ b/satpy/readers/yaml_reader.py @@ -337,7 +337,125 @@ def _build_id_permutations(self, dataset, id_keys): return id_kwargs -class FileYAMLReader(AbstractYAMLReader, DataDownloadMixin): +class GenericYAMLReader(AbstractYAMLReader): + """A Generic YAML-based reader.""" + + def __init__(self, config_dict, filter_parameters=None, filter_filenames=True): + """Set up the yaml reader.""" + super().__init__(config_dict) + self.filter_parameters = filter_parameters or {} + self.filter_filenames = self.info.get("filter_filenames", filter_filenames) + + def filter_selected_filenames(self, filenames): + """Filter provided files based on metadata in the filename.""" + if not isinstance(filenames, set): + # we perform set operations later on to improve performance + filenames = set(filenames) + for _, filetype_info in self.sorted_filetype_items(): + filename_iter = self.filename_items_for_filetype(filenames, + filetype_info) + if self.filter_filenames: + filename_iter = self.filter_filenames_by_info(filename_iter) + + for fn, _ in filename_iter: + yield fn + + def sorted_filetype_items(self): + """Sort the instance's filetypes in using order.""" + processed_types = [] + file_type_items = deque(self.config["file_types"].items()) + while len(file_type_items): + filetype, filetype_info = file_type_items.popleft() + + requirements = filetype_info.get("requires") + if requirements is not None: + # requirements have not been processed yet -> wait + missing = [req for req in requirements + if req not in processed_types] + if missing: + file_type_items.append((filetype, filetype_info)) + 
continue + + processed_types.append(filetype) + yield filetype, filetype_info + + @staticmethod + def filename_items_for_filetype(filenames, filetype_info): + """Iterate over the filenames matching *filetype_info*.""" + if not isinstance(filenames, set): + # we perform set operations later on to improve performance + filenames = set(filenames) + for pattern in filetype_info["file_patterns"]: + matched_files = set() + matches = _match_filenames(filenames, pattern) + for filename in matches: + try: + filename_info = parse( + pattern, _get_filebase(filename, pattern)) + except ValueError: + logger.debug("Can't parse %s with %s.", filename, pattern) + continue + matched_files.add(filename) + yield filename, filename_info + filenames -= matched_files + + def filter_filenames_by_info(self, filename_items): + """Filter out file using metadata from the filenames. + + Currently only uses start and end time. If only start time is available + from the filename, keep all the filename that have a start time before + the requested end time. 
+ """ + for filename, filename_info in filename_items: + fend = filename_info.get("end_time") + fstart = filename_info.setdefault("start_time", fend) + if fend and fend < fstart: + # correct for filenames with 1 date and 2 times + fend = fend.replace(year=fstart.year, + month=fstart.month, + day=fstart.day) + filename_info["end_time"] = fend + if self.metadata_matches(filename_info): + yield filename, filename_info + + def metadata_matches(self, sample_dict, file_handler=None): + """Check that file metadata matches filter_parameters of this reader.""" + # special handling of start/end times + if not self.time_matches( + sample_dict.get("start_time"), sample_dict.get("end_time")): + return False + for key, val in self.filter_parameters.items(): + if key != "area" and key not in sample_dict: + continue + + if key in ["start_time", "end_time"]: + continue + elif key == "area" and file_handler: + if not self.check_file_covers_area(file_handler, val): + logger.info("Filtering out %s based on area", + file_handler.filename) + break + elif key in sample_dict and val != sample_dict[key]: + # don't use this file + break + else: + # all the metadata keys are equal + return True + return False + + def time_matches(self, fstart, fend): + """Check that a file's start and end time mtach filter_parameters of this reader.""" + start_time = self.filter_parameters.get("start_time") + end_time = self.filter_parameters.get("end_time") + fend = fend or fstart + if start_time and fend and fend < start_time: + return False + if end_time and fstart and fstart > end_time: + return False + return True + + +class FileYAMLReader(GenericYAMLReader, DataDownloadMixin): """Primary reader base class that is configured by a YAML file. 
This class uses the idea of per-file "file handler" objects to read file @@ -359,12 +477,10 @@ def __init__(self, filter_filenames=True, **kwargs): """Set up initial internal storage for loading file data.""" - super(FileYAMLReader, self).__init__(config_dict) + super().__init__(config_dict, filter_parameters, filter_filenames) self.file_handlers = {} self.available_ids = {} - self.filter_filenames = self.info.get("filter_filenames", filter_filenames) - self.filter_parameters = filter_parameters or {} self.register_data_files() @property @@ -450,45 +566,6 @@ def find_required_filehandlers(self, requirements, filename_info): # filetype! return req_fh - def sorted_filetype_items(self): - """Sort the instance's filetypes in using order.""" - processed_types = [] - file_type_items = deque(self.config["file_types"].items()) - while len(file_type_items): - filetype, filetype_info = file_type_items.popleft() - - requirements = filetype_info.get("requires") - if requirements is not None: - # requirements have not been processed yet -> wait - missing = [req for req in requirements - if req not in processed_types] - if missing: - file_type_items.append((filetype, filetype_info)) - continue - - processed_types.append(filetype) - yield filetype, filetype_info - - @staticmethod - def filename_items_for_filetype(filenames, filetype_info): - """Iterate over the filenames matching *filetype_info*.""" - if not isinstance(filenames, set): - # we perform set operations later on to improve performance - filenames = set(filenames) - for pattern in filetype_info["file_patterns"]: - matched_files = set() - matches = _match_filenames(filenames, pattern) - for filename in matches: - try: - filename_info = parse( - pattern, _get_filebase(filename, pattern)) - except ValueError: - logger.debug("Can't parse %s with %s.", filename, pattern) - continue - matched_files.add(filename) - yield filename, filename_info - filenames -= matched_files - def _new_filehandler_instances(self, filetype_info, 
filename_items, fh_kwargs=None): """Generate new filehandler instances.""" requirements = filetype_info.get("requires") @@ -512,61 +589,6 @@ def _new_filehandler_instances(self, filetype_info, filename_items, fh_kwargs=No yield filetype_cls(filename, filename_info, filetype_info, *req_fh, **fh_kwargs) - def time_matches(self, fstart, fend): - """Check that a file's start and end time mtach filter_parameters of this reader.""" - start_time = self.filter_parameters.get("start_time") - end_time = self.filter_parameters.get("end_time") - fend = fend or fstart - if start_time and fend and fend < start_time: - return False - if end_time and fstart and fstart > end_time: - return False - return True - - def metadata_matches(self, sample_dict, file_handler=None): - """Check that file metadata matches filter_parameters of this reader.""" - # special handling of start/end times - if not self.time_matches( - sample_dict.get("start_time"), sample_dict.get("end_time")): - return False - for key, val in self.filter_parameters.items(): - if key != "area" and key not in sample_dict: - continue - - if key in ["start_time", "end_time"]: - continue - elif key == "area" and file_handler: - if not self.check_file_covers_area(file_handler, val): - logger.info("Filtering out %s based on area", - file_handler.filename) - break - elif key in sample_dict and val != sample_dict[key]: - # don't use this file - break - else: - # all the metadata keys are equal - return True - return False - - def filter_filenames_by_info(self, filename_items): - """Filter out file using metadata from the filenames. - - Currently only uses start and end time. If only start time is available - from the filename, keep all the filename that have a start time before - the requested end time. 
- """ - for filename, filename_info in filename_items: - fend = filename_info.get("end_time") - fstart = filename_info.setdefault("start_time", fend) - if fend and fend < fstart: - # correct for filenames with 1 date and 2 times - fend = fend.replace(year=fstart.year, - month=fstart.month, - day=fstart.day) - filename_info["end_time"] = fend - if self.metadata_matches(filename_info): - yield filename, filename_info - def filter_fh_by_metadata(self, filehandlers): """Filter out filehandlers using provide filter parameters.""" for filehandler in filehandlers: @@ -575,20 +597,6 @@ def filter_fh_by_metadata(self, filehandlers): if self.metadata_matches(filehandler.metadata, filehandler): yield filehandler - def filter_selected_filenames(self, filenames): - """Filter provided files based on metadata in the filename.""" - if not isinstance(filenames, set): - # we perform set operations later on to improve performance - filenames = set(filenames) - for _, filetype_info in self.sorted_filetype_items(): - filename_iter = self.filename_items_for_filetype(filenames, - filetype_info) - if self.filter_filenames: - filename_iter = self.filter_filenames_by_info(filename_iter) - - for fn, _ in filename_iter: - yield fn - def _new_filehandlers_for_filetype(self, filetype_info, filenames, fh_kwargs=None): """Create filehandlers for a given filetype.""" filename_iter = self.filename_items_for_filetype(filenames, @@ -603,6 +611,11 @@ def _new_filehandlers_for_filetype(self, filetype_info, filenames, fh_kwargs=Non filtered_iter = self.filter_fh_by_metadata(filehandler_iter) return list(filtered_iter) + + def create_storage_items(self, files, **kwargs): + """Create the storage items.""" + return self.create_filehandlers(files, **kwargs) + def create_filehandlers(self, filenames, fh_kwargs=None): """Organize the filenames into file types and create file handlers.""" filenames = list(OrderedDict.fromkeys(filenames)) diff --git a/satpy/resample.py b/satpy/resample.py index 
8b8f67dabf..50de0723f4 100644 --- a/satpy/resample.py +++ b/satpy/resample.py @@ -135,7 +135,12 @@ For examples of area definitions, see the file ``etc/areas.yaml`` that is included with Satpy and where all the area definitions shipped with Satpy are -defined. +defined. The section below gives an overview of these area definitions. + +Area definitions included in Satpy +---------------------------------- + +.. include:: /area_def_list.rst """ import hashlib diff --git a/satpy/tests/cf_tests/test_attrs.py b/satpy/tests/cf_tests/test_attrs.py index 082dea602c..4772d5e428 100644 --- a/satpy/tests/cf_tests/test_attrs.py +++ b/satpy/tests/cf_tests/test_attrs.py @@ -24,14 +24,14 @@ class TestCFAttributeEncoding: def test__encode_nc_attrs(self): """Test attributes encoding.""" - from satpy.cf.attrs import _encode_nc_attrs + from satpy.cf.attrs import encode_attrs_to_cf from satpy.tests.cf_tests._test_data import get_test_attrs from satpy.tests.utils import assert_dict_array_equality attrs, expected, _ = get_test_attrs() # Test encoding - encoded = _encode_nc_attrs(attrs) + encoded = encode_attrs_to_cf(attrs) assert_dict_array_equality(expected, encoded) # Test decoding of json-encoded attributes diff --git a/satpy/tests/cf_tests/test_decoding.py b/satpy/tests/cf_tests/test_decoding.py new file mode 100644 index 0000000000..51c1bfecaf --- /dev/null +++ b/satpy/tests/cf_tests/test_decoding.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. 
See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . + +"""Tests for CF decoding.""" + +import datetime as dt + +import pytest + +import satpy.cf.decoding + + +class TestDecodeAttrs: + """Test decoding of CF-encoded attributes.""" + + @pytest.fixture() + def attrs(self): + """Get CF-encoded attributes.""" + return { + "my_integer": 0, + "my_float": 0.0, + "my_list": [1, 2, 3], + "my_timestamp1": "2000-01-01", + "my_timestamp2": "2000-01-01 12:15:33", + "my_timestamp3": "2000-01-01 12:15:33.123456", + "my_dict": '{"a": {"b": [1, 2, 3]}, "c": {"d": "2000-01-01 12:15:33.123456"}}' + } + + @pytest.fixture() + def expected(self): + """Get expected decoded results.""" + return { + "my_integer": 0, + "my_float": 0.0, + "my_list": [1, 2, 3], + "my_timestamp1": dt.datetime(2000, 1, 1), + "my_timestamp2": dt.datetime(2000, 1, 1, 12, 15, 33), + "my_timestamp3": dt.datetime(2000, 1, 1, 12, 15, 33, 123456), + "my_dict": {"a": {"b": [1, 2, 3]}, + "c": {"d": dt.datetime(2000, 1, 1, 12, 15, 33, 123456)}} + } + + def test_decoding(self, attrs, expected): + """Test decoding of CF-encoded attributes.""" + res = satpy.cf.decoding.decode_attrs(attrs) + assert res == expected + + def test_decoding_doesnt_modify_original(self, attrs): + """Test that decoding doesn't modify the original attributes.""" + satpy.cf.decoding.decode_attrs(attrs) + assert isinstance(attrs["my_dict"], str) diff --git a/satpy/tests/compositor_tests/test_viirs.py b/satpy/tests/compositor_tests/test_viirs.py index 1641e4248b..95ff3e0d39 100644 --- a/satpy/tests/compositor_tests/test_viirs.py +++ b/satpy/tests/compositor_tests/test_viirs.py @@ -15,9 +15,10 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """Tests for VIIRS compositors.""" -from datetime import datetime +import datetime as dt import dask.array as da import numpy as np @@ -52,7 +53,7 @@ def dnb(self, area): c01 = xr.DataArray(dnb, dims=("y", "x"), attrs={"name": "DNB", "area": area, - "start_time": datetime(2020, 1, 1, 12, 0, 0)}) + "start_time": dt.datetime(2020, 1, 1, 12, 0, 0)}) return c01 @pytest.fixture() @@ -66,7 +67,7 @@ def sza(self, area): c02 = xr.DataArray(sza, dims=("y", "x"), attrs={"name": "solar_zenith_angle", "area": area, - "start_time": datetime(2020, 1, 1, 12, 0, 0)}) + "start_time": dt.datetime(2020, 1, 1, 12, 0, 0)}) return c02 @pytest.fixture() @@ -79,7 +80,7 @@ def lza(self, area): c03 = xr.DataArray(lza, dims=("y", "x"), attrs={"name": "lunar_zenith_angle", "area": area, - "start_time": datetime(2020, 1, 1, 12, 0, 0) + "start_time": dt.datetime(2020, 1, 1, 12, 0, 0) }) return c03 diff --git a/satpy/tests/features/steps/steps-load.py b/satpy/tests/features/steps/steps-load.py index 7e2d1829a2..d83ac24754 100644 --- a/satpy/tests/features/steps/steps-load.py +++ b/satpy/tests/features/steps/steps-load.py @@ -45,13 +45,13 @@ def step_impl_data_available(context): @when(u"user loads the data without providing a config file") def step_impl_user_loads_no_config(context): """Load the data without a config.""" - from datetime import datetime + import datetime as dt from satpy import Scene, find_files_and_readers os.chdir("/tmp/") readers_files = find_files_and_readers(sensor="viirs", - start_time=datetime(2015, 3, 11, 11, 20), - end_time=datetime(2015, 3, 11, 11, 26)) + start_time=dt.datetime(2015, 3, 11, 11, 20), + end_time=dt.datetime(2015, 3, 11, 11, 26)) scn = Scene(filenames=readers_files) scn.load(["M02"]) context.scene = scn @@ -73,13 +73,13 @@ def step_impl_items_not_available(context): @when(u"user wants to know what data is available") def step_impl_user_checks_availability(context): """Check availability.""" - from datetime import datetime + import datetime as dt from 
satpy import Scene, find_files_and_readers os.chdir("/tmp/") reader_files = find_files_and_readers(sensor="viirs", - start_time=datetime(2015, 3, 11, 11, 20), - end_time=datetime(2015, 3, 11, 11, 26)) + start_time=dt.datetime(2015, 3, 11, 11, 20), + end_time=dt.datetime(2015, 3, 11, 11, 26)) scn = Scene(filenames=reader_files) context.available_dataset_ids = scn.available_dataset_ids() diff --git a/satpy/tests/modifier_tests/test_angles.py b/satpy/tests/modifier_tests/test_angles.py index ecf0805ca8..b4f5430436 100644 --- a/satpy/tests/modifier_tests/test_angles.py +++ b/satpy/tests/modifier_tests/test_angles.py @@ -14,10 +14,11 @@ # A PARTICULAR PURPOSE. See the GNU General Public License for more details. """Tests for the angles in modifiers.""" + import contextlib +import datetime as dt import warnings from copy import deepcopy -from datetime import datetime, timedelta from glob import glob from typing import Optional, Union from unittest import mock @@ -74,7 +75,7 @@ def _get_angle_test_data(area_def: Optional[Union[AreaDefinition, StackedAreaDef "satellite_nominal_longitude": 10.0, "satellite_nominal_latitude": 0.0, } - stime = datetime(2020, 1, 1, 12, 0, 0) + stime = dt.datetime(2020, 1, 1, 12, 0, 0) data = da.zeros(shape, chunks=chunks) vis = xr.DataArray(data, dims=dims, @@ -113,7 +114,7 @@ def _similar_sat_pos_datetime(orig_data, lon_offset=0.04): new_data = orig_data.copy() old_lon = new_data.attrs["orbital_parameters"]["satellite_nominal_longitude"] new_data.attrs["orbital_parameters"]["satellite_nominal_longitude"] = old_lon + lon_offset - new_data.attrs["start_time"] = new_data.attrs["start_time"] + timedelta(hours=36) + new_data.attrs["start_time"] = new_data.attrs["start_time"] + dt.timedelta(hours=36) return new_data @@ -372,15 +373,13 @@ def test_relative_azimuth_calculation(self): def test_solazi_correction(self): """Test that solar azimuth angles are corrected into the right range.""" - from datetime import datetime - from satpy.modifiers.angles 
import _get_sun_azimuth_ndarray lats = np.array([-80, 40, 0, 40, 80]) lons = np.array([-80, 40, 0, 40, 80]) - dt = datetime(2022, 1, 5, 12, 50, 0) + date = dt.datetime(2022, 1, 5, 12, 50, 0) - azi = _get_sun_azimuth_ndarray(lats, lons, dt) + azi = _get_sun_azimuth_ndarray(lats, lons, date) assert np.all(azi > 0) diff --git a/satpy/tests/modifier_tests/test_crefl.py b/satpy/tests/modifier_tests/test_crefl.py index dc9f4a232a..27c9847030 100644 --- a/satpy/tests/modifier_tests/test_crefl.py +++ b/satpy/tests/modifier_tests/test_crefl.py @@ -13,8 +13,8 @@ # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. """Tests for the CREFL ReflectanceCorrector modifier.""" +import datetime as dt from contextlib import contextmanager -from datetime import datetime from unittest import mock import numpy as np @@ -82,8 +82,8 @@ def _make_viirs_xarray(data, area, name, standard_name, wavelength=None, units=" "resolution": 371, "name": name, "standard_name": standard_name, "platform_name": "Suomi-NPP", "polarization": None, "sensor": "viirs", "units": units, - "start_time": datetime(2012, 2, 25, 18, 1, 24, 570942), - "end_time": datetime(2012, 2, 25, 18, 11, 21, 175760), "area": area, + "start_time": dt.datetime(2012, 2, 25, 18, 1, 24, 570942), + "end_time": dt.datetime(2012, 2, 25, 18, 11, 21, 175760), "area": area, "ancillary_variables": [] }) @@ -259,8 +259,8 @@ def test_reflectance_corrector_viirs(self, tmpdir, url, dem_mock_cm, dem_sds): assert res.attrs["platform_name"] == "Suomi-NPP" assert res.attrs["sensor"] == "viirs" assert res.attrs["units"] == "%" - assert res.attrs["start_time"] == datetime(2012, 2, 25, 18, 1, 24, 570942) - assert res.attrs["end_time"] == datetime(2012, 2, 25, 18, 11, 21, 175760) + assert res.attrs["start_time"] == dt.datetime(2012, 2, 25, 18, 1, 24, 570942) + assert res.attrs["end_time"] == dt.datetime(2012, 2, 25, 18, 11, 21, 175760) assert 
res.attrs["area"] == area assert res.attrs["ancillary_variables"] == [] data = res.values @@ -304,8 +304,8 @@ def make_xarray(name, calibration, wavelength=None, modifiers=None, resolution=1 "calibration": calibration, "resolution": resolution, "name": name, "coordinates": ["longitude", "latitude"], "platform_name": "EOS-Aqua", "polarization": None, "sensor": "modis", - "units": "%", "start_time": datetime(2012, 8, 13, 18, 46, 1, 439838), - "end_time": datetime(2012, 8, 13, 18, 57, 47, 746296), "area": area, + "units": "%", "start_time": dt.datetime(2012, 8, 13, 18, 46, 1, 439838), + "end_time": dt.datetime(2012, 8, 13, 18, 57, 47, 746296), "area": area, "ancillary_variables": [] }) @@ -327,8 +327,8 @@ def make_xarray(name, calibration, wavelength=None, modifiers=None, resolution=1 assert res.attrs["platform_name"] == "EOS-Aqua" assert res.attrs["sensor"] == "modis" assert res.attrs["units"] == "%" - assert res.attrs["start_time"] == datetime(2012, 8, 13, 18, 46, 1, 439838) - assert res.attrs["end_time"] == datetime(2012, 8, 13, 18, 57, 47, 746296) + assert res.attrs["start_time"] == dt.datetime(2012, 8, 13, 18, 46, 1, 439838) + assert res.attrs["end_time"] == dt.datetime(2012, 8, 13, 18, 57, 47, 746296) assert res.attrs["area"] == area assert res.attrs["ancillary_variables"] == [] data = res.values diff --git a/satpy/tests/multiscene_tests/test_blend.py b/satpy/tests/multiscene_tests/test_blend.py index f9d7e35462..c003106dea 100644 --- a/satpy/tests/multiscene_tests/test_blend.py +++ b/satpy/tests/multiscene_tests/test_blend.py @@ -19,7 +19,7 @@ """Unit tests for blending datasets with the Multiscene object.""" -from datetime import datetime +import datetime as dt import dask.array as da import numpy as np @@ -101,8 +101,8 @@ def cloud_type_data_array1(test_area, data_type, image_mode): "satellite_nominal_longitude": 0.0, "satellite_nominal_latitude": 0, } - data_arr.attrs["start_time"] = datetime(2023, 1, 16, 11, 9, 17) - data_arr.attrs["end_time"] = 
datetime(2023, 1, 16, 11, 12, 22) + data_arr.attrs["start_time"] = dt.datetime(2023, 1, 16, 11, 9, 17) + data_arr.attrs["end_time"] = dt.datetime(2023, 1, 16, 11, 12, 22) data_arr.attrs["_satpy_id"] = dsid1 return data_arr @@ -127,8 +127,8 @@ def cloud_type_data_array2(test_area, data_type, image_mode): data_arr.attrs["sensor"] = {"avhrr-3"} data_arr.attrs["units"] = "1" data_arr.attrs["long_name"] = "SAFNWC PPS CT Cloud Type" - data_arr.attrs["start_time"] = datetime(2023, 1, 16, 11, 12, 57, 500000) - data_arr.attrs["end_time"] = datetime(2023, 1, 16, 11, 28, 1, 900000) + data_arr.attrs["start_time"] = dt.datetime(2023, 1, 16, 11, 12, 57, 500000) + data_arr.attrs["end_time"] = dt.datetime(2023, 1, 16, 11, 28, 1, 900000) data_arr.attrs["_satpy_id"] = dsid1 return data_arr @@ -152,8 +152,8 @@ def scene1_with_weights(cloud_type_data_array1, test_area): modifiers=() ) scene[dsid2] = _create_test_int8_dataset(name="geo-cma", area=test_area, values=2) - scene[dsid2].attrs["start_time"] = datetime(2023, 1, 16, 11, 9, 17) - scene[dsid2].attrs["end_time"] = datetime(2023, 1, 16, 11, 12, 22) + scene[dsid2].attrs["start_time"] = dt.datetime(2023, 1, 16, 11, 9, 17) + scene[dsid2].attrs["end_time"] = dt.datetime(2023, 1, 16, 11, 12, 22) wgt2 = _create_test_dataset(name="geo-cma-wgt", area=test_area, values=0) @@ -176,8 +176,8 @@ def scene2_with_weights(cloud_type_data_array2, test_area): modifiers=() ) scene[dsid2] = _create_test_int8_dataset(name="polar-cma", area=test_area, values=4) - scene[dsid2].attrs["start_time"] = datetime(2023, 1, 16, 11, 12, 57, 500000) - scene[dsid2].attrs["end_time"] = datetime(2023, 1, 16, 11, 28, 1, 900000) + scene[dsid2].attrs["start_time"] = dt.datetime(2023, 1, 16, 11, 12, 57, 500000) + scene[dsid2].attrs["end_time"] = dt.datetime(2023, 1, 16, 11, 28, 1, 900000) wgt2 = _create_test_dataset(name="polar-cma-wgt", area=test_area, values=1) return scene, [wgt1, wgt2] @@ -223,8 +223,8 @@ def test_blend_two_scenes_using_stack(self, 
multi_scene_and_weights, groups, xr.testing.assert_equal(result, expected.compute()) _check_stacked_metadata(result, "CloudType") - assert result.attrs["start_time"] == datetime(2023, 1, 16, 11, 9, 17) - assert result.attrs["end_time"] == datetime(2023, 1, 16, 11, 28, 1, 900000) + assert result.attrs["start_time"] == dt.datetime(2023, 1, 16, 11, 9, 17) + assert result.attrs["end_time"] == dt.datetime(2023, 1, 16, 11, 28, 1, 900000) def test_blend_two_scenes_bad_blend_type(self, multi_scene_and_weights, groups): """Test exception is raised when bad 'blend_type' is used.""" @@ -245,10 +245,9 @@ def test_blend_two_scenes_bad_blend_type(self, multi_scene_and_weights, groups): ("select_with_weights", _get_expected_stack_select), ("blend_with_weights", _get_expected_stack_blend), ]) - @pytest.mark.parametrize("combine_times", [False, True]) def test_blend_two_scenes_using_stack_weighted(self, multi_scene_and_weights, groups, scene1_with_weights, scene2_with_weights, - combine_times, blend_func, exp_result_func): + blend_func, exp_result_func): """Test stacking two scenes using weights. 
Here we test that the start and end times can be combined so that they @@ -266,7 +265,7 @@ def test_blend_two_scenes_using_stack_weighted(self, multi_scene_and_weights, gr multi_scene.group(simple_groups) weights = [weights[0][0], weights[1][0]] - stack_func = partial(stack, weights=weights, blend_type=blend_func, combine_times=combine_times) + stack_func = partial(stack, weights=weights, blend_type=blend_func) weighted_blend = multi_scene.blend(blend_function=stack_func) expected = exp_result_func(scene1, scene2) @@ -275,12 +274,8 @@ def test_blend_two_scenes_using_stack_weighted(self, multi_scene_and_weights, gr np.testing.assert_allclose(result.data, expected.data) _check_stacked_metadata(result, "CloudType") - if combine_times: - assert result.attrs["start_time"] == datetime(2023, 1, 16, 11, 9, 17) - assert result.attrs["end_time"] == datetime(2023, 1, 16, 11, 28, 1, 900000) - else: - assert result.attrs["start_time"] == datetime(2023, 1, 16, 11, 11, 7, 250000) - assert result.attrs["end_time"] == datetime(2023, 1, 16, 11, 20, 11, 950000) + assert result.attrs["start_time"] == dt.datetime(2023, 1, 16, 11, 9, 17) + assert result.attrs["end_time"] == dt.datetime(2023, 1, 16, 11, 28, 1, 900000) @pytest.fixture() def datasets_and_weights(self): @@ -291,23 +286,23 @@ def datasets_and_weights(self): shape[1], shape[0], [-200, -200, 200, 200]) ds1 = xr.DataArray(da.ones(shape, chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area}) + attrs={"start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": area}) ds2 = xr.DataArray(da.ones(shape, chunks=-1) * 2, dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1, 1, 0, 0), "area": area}) + attrs={"start_time": dt.datetime(2018, 1, 1, 1, 0, 0), "area": area}) ds3 = xr.DataArray(da.ones(shape, chunks=-1) * 3, dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1, 1, 0, 0), "area": area}) + attrs={"start_time": dt.datetime(2018, 1, 1, 1, 0, 0), "area": area}) ds4 = 
xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "time"), - attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area}) + attrs={"start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": area}) ds5 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "time"), - attrs={"start_time": datetime(2018, 1, 1, 1, 0, 0), "area": area}) + attrs={"start_time": dt.datetime(2018, 1, 1, 1, 0, 0), "area": area}) wgt1 = xr.DataArray(da.ones(shape, chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area}) + attrs={"start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": area}) wgt2 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area}) + attrs={"start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": area}) wgt3 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area}) + attrs={"start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": area}) datastruct = {"shape": shape, "area": area, @@ -329,7 +324,7 @@ def test_blend_function_stack_weighted(self, datasets_and_weights, line, column) input_data["weights"][1][line, :] = 2 input_data["weights"][2][:, column] = 2 - stack_with_weights = partial(stack, weights=input_data["weights"], combine_times=False) + stack_with_weights = partial(stack, weights=input_data["weights"]) blend_result = stack_with_weights(input_data["datasets"][0:3]) ds1 = input_data["datasets"][0] @@ -397,9 +392,9 @@ class TestTemporalRGB: @pytest.fixture() def nominal_data(self): """Return the input arrays for the nominal use case.""" - da1 = xr.DataArray([1, 0, 0], attrs={"start_time": datetime(2023, 5, 22, 9, 0, 0)}) - da2 = xr.DataArray([0, 1, 0], attrs={"start_time": datetime(2023, 5, 22, 10, 0, 0)}) - da3 = xr.DataArray([0, 0, 1], attrs={"start_time": datetime(2023, 5, 22, 11, 0, 0)}) + da1 = xr.DataArray([1, 0, 0], attrs={"start_time": dt.datetime(2023, 5, 22, 9, 0, 0)}) + da2 
= xr.DataArray([0, 1, 0], attrs={"start_time": dt.datetime(2023, 5, 22, 10, 0, 0)}) + da3 = xr.DataArray([0, 0, 1], attrs={"start_time": dt.datetime(2023, 5, 22, 11, 0, 0)}) return [da1, da2, da3] @@ -427,7 +422,7 @@ def test_extra_datasets(self, nominal_data, expected_result): """Test that only the first three arrays affect the usage.""" from satpy.multiscene import temporal_rgb - da4 = xr.DataArray([0, 0, 1], attrs={"start_time": datetime(2023, 5, 22, 12, 0, 0)}) + da4 = xr.DataArray([0, 0, 1], attrs={"start_time": dt.datetime(2023, 5, 22, 12, 0, 0)}) res = temporal_rgb(nominal_data + [da4,]) diff --git a/satpy/tests/multiscene_tests/test_save_animation.py b/satpy/tests/multiscene_tests/test_save_animation.py index 7ec1a53df8..67158c2334 100644 --- a/satpy/tests/multiscene_tests/test_save_animation.py +++ b/satpy/tests/multiscene_tests/test_save_animation.py @@ -15,17 +15,18 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Unit tests for saving animations using Multiscene.""" # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path +import datetime as dt import os import shutil import tempfile import unittest -from datetime import datetime from unittest import mock import pytest @@ -63,12 +64,12 @@ def test_save_mp4_distributed(self): scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time for ds_id in ["ds1", "ds2", "ds3"]: - scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12) if ds_id == "ds3": continue - scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] 
= dt.datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = os.path.join( @@ -125,12 +126,12 @@ def test_save_mp4_no_distributed(self): scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time for ds_id in ["ds1", "ds2", "ds3"]: - scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12) if ds_id == "ds3": continue - scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = os.path.join( @@ -165,12 +166,12 @@ def test_save_datasets_simple(self): scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time for ds_id in ["ds1", "ds2", "ds3"]: - scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12) if ds_id == "ds3": continue - scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) client_mock = mock.MagicMock() @@ -198,12 +199,12 @@ def test_save_datasets_distributed_delayed(self): scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time for ds_id in ["ds1", "ds2", "ds3"]: - scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2) + 
scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12) if ds_id == "ds3": continue - scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) client_mock = mock.MagicMock() @@ -233,12 +234,12 @@ def test_save_datasets_distributed_source_target(self): scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time for ds_id in ["ds1", "ds2", "ds3"]: - scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12) if ds_id == "ds3": continue - scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) client_mock = mock.MagicMock() @@ -313,12 +314,12 @@ def test_save_mp4(smg, tmp_path): scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time for ds_id in ["ds1", "ds2", "ds3"]: - scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12) if ds_id == "ds3": continue - scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = str(tmp_path / diff --git 
a/satpy/tests/reader_tests/_li_test_utils.py b/satpy/tests/reader_tests/_li_test_utils.py index 32107006fc..5ac9730dee 100644 --- a/satpy/tests/reader_tests/_li_test_utils.py +++ b/satpy/tests/reader_tests/_li_test_utils.py @@ -14,12 +14,13 @@ # along with satpy. If not, see . """Common utility modules used for LI mock-oriented unit tests.""" -from datetime import datetime +import datetime as dt import numpy as np import xarray as xr from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler +from satpy.tests.utils import RANDOM_GEN # mapping of netcdf type code to numpy data type: TYPE_MAP = { @@ -44,7 +45,7 @@ def l2_le_schema(settings=None): nfilters = settings.get("num_filters", 2) def rand_u16(num): - return np.random.randint(low=0, high=np.iinfo(np.uint16).max - 1, size=num, dtype=np.uint16) + return RANDOM_GEN.integers(low=0, high=np.iinfo(np.uint16).max - 1, size=num, dtype=np.uint16) return { "providers": settings.get("providers", {}), @@ -100,7 +101,7 @@ def rand_u16(num): "scale_factor": 0.004, "add_offset": 0.0, "long_name": "L2 filter results", - "default_data": lambda: np.random.randint(low=0, high=255, size=(nobs, nfilters), dtype=np.uint8) + "default_data": lambda: RANDOM_GEN.integers(low=0, high=255, size=(nobs, nfilters), dtype=np.uint8) }, "epoch_time": { "format": "f8", @@ -127,8 +128,8 @@ def rand_u16(num): def l2_lef_schema(settings=None): """Define schema for LI L2 LEF product.""" - epoch_ts = datetime(2000, 1, 1, 0, 0, 0, 0) - start_time = datetime.now() + epoch_ts = dt.datetime(2000, 1, 1, 0, 0, 0, 0) + start_time = dt.datetime.now() start_ts = (start_time - epoch_ts).total_seconds() settings = settings or {} @@ -212,13 +213,13 @@ def l2_lef_schema(settings=None): "long_name": "Radiance of Flash", "standard_name": "radiance", "units": "mW.m-2.sr-1", - "default_data": lambda: np.clip(np.round(np.random.normal(500, 100, nobs)), 1, 2 ** 16 - 1) + "default_data": lambda: np.clip(np.round(RANDOM_GEN.normal(500, 100, nobs)), 1, 
2 ** 16 - 1) }, "event_filter_qa": { "format": "u1", "shape": ("events",), "long_name": "L2 event pre-filtering quality assurance value", - "default_data": lambda: np.random.randint(1, 2 ** 8 - 1, nobs) + "default_data": lambda: RANDOM_GEN.integers(1, 2 ** 8 - 1, nobs) }, "epoch_time": { "format": "f8", @@ -232,21 +233,21 @@ def l2_lef_schema(settings=None): "shape": ("events",), "long_name": "Time offset from epoch time", "units": "seconds", - "default_data": lambda: np.random.uniform(1, 2 ** 31 - 1, nobs) + "default_data": lambda: RANDOM_GEN.uniform(1, 2 ** 31 - 1, nobs) }, "detector_row": { "format": "u2", "shape": ("events",), "long_name": "Detector row position of event pixel", "units": "1", - "default_data": lambda: np.random.randint(1, 1000, nobs) + "default_data": lambda: RANDOM_GEN.integers(1, 1000, nobs) }, "detector_column": { "format": "u2", "shape": ("events",), "long_name": "Detector column position of event pixel", "units": "1", - "default_data": lambda: np.random.randint(1, 1000, nobs) + "default_data": lambda: RANDOM_GEN.integers(1, 1000, nobs) }, } } @@ -287,9 +288,9 @@ def l2_lfl_schema(settings=None): settings = settings or {} nobs = settings.get("num_obs", 1234) - epoch = datetime(2000, 1, 1) - stime = (datetime(2019, 1, 1) - epoch).total_seconds() - etime = (datetime(2019, 1, 2) - epoch).total_seconds() + epoch = dt.datetime(2000, 1, 1) + stime = (dt.datetime(2019, 1, 1) - epoch).total_seconds() + etime = (dt.datetime(2019, 1, 2) - epoch).total_seconds() return { "providers": settings.get("providers", {}), @@ -328,7 +329,7 @@ def l2_lfl_schema(settings=None): "long_name": "Radiance of Flash", "standard_name": "radiance", "units": "mW.m-2.sr-1", - "default_data": lambda: np.round(np.random.normal(500, 100, nobs)) + "default_data": lambda: np.round(RANDOM_GEN.normal(500, 100, nobs)) }, "flash_duration": { "format": "u2", @@ -343,7 +344,7 @@ def l2_lfl_schema(settings=None): "shape": ("flashes",), "long_name": "L2 filtered flash confidence", 
"standard_name": "flash_filter_confidence", - "default_data": lambda: np.clip(np.round(np.random.normal(20, 10, nobs)), 1, 2 ** 7 - 1) + "default_data": lambda: np.clip(np.round(RANDOM_GEN.normal(20, 10, nobs)), 1, 2 ** 7 - 1) }, "flash_footprint": { "format": "u2", @@ -351,7 +352,7 @@ def l2_lfl_schema(settings=None): "long_name": "Flash footprint size", "standard_name": "flash_footprint", "units": "L1 grid pixels", - "default_data": lambda: np.maximum(1, np.round(np.random.normal(5, 3, nobs))) + "default_data": lambda: np.maximum(1, np.round(RANDOM_GEN.normal(5, 3, nobs))) }, "flash_id": { "format": "u4", @@ -367,7 +368,7 @@ def l2_lfl_schema(settings=None): "units": "seconds since 2000-01-01 00:00:00.0", "standard_name": "time", "precision": "1 millisecond", - "default_data": lambda: np.random.uniform(stime, etime, nobs) + "default_data": lambda: RANDOM_GEN.uniform(stime, etime, nobs) }, "l1b_geolocation_warning": { "format": "i1", @@ -437,7 +438,7 @@ def l2_af_schema(settings=None): "flash_accumulation": { "format": "u2", "shape": ("pixels",), - "default_data": lambda: np.clip(np.round(np.random.normal(1, 2, nobs)), 1, 2 ** 16 - 1) + "default_data": lambda: np.clip(np.round(RANDOM_GEN.normal(1, 2, nobs)), 1, 2 ** 16 - 1) }, "mtg_geos_projection": mtg_geos_projection(), "x": fci_grid_definition("X", nobs), @@ -495,7 +496,7 @@ def l2_afr_schema(settings=None): "long_name": "Area averaged flash radiance accumulation", "grid_mapping": "mtg_geos_projection", "coordinate": "sparse: x y", - "default_data": lambda: np.random.randint(low=1, high=6548, size=(120), dtype=np.int16) + "default_data": lambda: RANDOM_GEN.integers(low=1, high=6548, size=(120), dtype=np.int16) }, "accumulation_start_times": { "format": "f4", @@ -521,9 +522,13 @@ def accumulation_dimensions(nacc, nobs): def fci_grid_definition(axis, nobs): """FCI grid definition on X or Y axis.""" + scale_factor = 5.58871526031607e-5 + add_offset = -0.15561777642350116 if axis == "X": long_name = "azimuth angle 
encoded as column" standard_name = "projection_x_coordinate" + scale_factor *= -1 + add_offset *= -1 else: long_name = "zenith angle encoded as row" standard_name = "projection_y_coordinate" @@ -531,14 +536,14 @@ def fci_grid_definition(axis, nobs): return { "format": "i2", "shape": ("pixels",), - "add_offset": -0.155619516, + "add_offset": add_offset, "axis": axis, "long_name": long_name, - "scale_factor": 5.58878e-5, + "scale_factor": scale_factor, "standard_name": standard_name, "units": "radian", "valid_range": np.asarray([1, 5568]), - "default_data": lambda: np.clip(np.round(np.random.normal(2000, 500, nobs)), 1, 2 ** 16 - 1) + "default_data": lambda: np.clip(np.round(RANDOM_GEN.normal(2000, 500, nobs)), 1, 2 ** 16 - 1) } @@ -548,12 +553,12 @@ def mtg_geos_projection(): "format": "i4", "shape": ("accumulations",), "grid_mapping_name": "geostationary", - "inverse_flattening": 298.2572221, + "inverse_flattening": 298.257223563, "latitude_of_projection_origin": 0, "longitude_of_projection_origin": 0, - "perspective_point_height": 42164000, - "semi_major_axis": 6378169, - "semi_minor_axis": 6356583.8, + "perspective_point_height": 3.57864e7, + "semi_major_axis": 6378137.0, + "semi_minor_axis": 6356752.31424518, "sweep_angle_axis": "y", "long_name": "MTG geostationary projection", "default_data": lambda: -2147483647 diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index 6dc4bf2d05..d663f7b9d9 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -15,10 +15,12 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """MODIS L1b and L2 test fixtures.""" + from __future__ import annotations -from datetime import datetime, timedelta +import datetime as dt from typing import Optional import numpy as np @@ -216,13 +218,13 @@ def _get_l1b_geo_variable_info(filename: str, def generate_nasa_l1b_filename(prefix): """Generate a filename that follows NASA MODIS L1b convention.""" - now = datetime.now() + now = dt.datetime.now() return f"{prefix}_A{now:%y%j_%H%M%S}_{now:%Y%j%H%M%S}.hdf" def generate_imapp_filename(suffix): """Generate a filename that follows IMAPP MODIS L1b convention.""" - now = datetime.now() + now = dt.datetime.now() return f"t1.{now:%y%j.%H%M}.{suffix}.hdf" @@ -275,8 +277,8 @@ def _add_variable_to_file(h, var_name, var_info): def _create_core_metadata(file_shortname: str) -> str: - beginning_date = datetime.now() - ending_date = beginning_date + timedelta(minutes=5) + beginning_date = dt.datetime.now() + ending_date = beginning_date + dt.timedelta(minutes=5) core_metadata_header = "GROUP = INVENTORYMETADATA\nGROUPTYPE = MASTERGROUP\n\n" \ 'GROUP = RANGEDATETIME\n\nOBJECT = RANGEBEGINNINGDATE\nNUM_VAL = 1\nVALUE = "{}"\n' \ "END_OBJECT = RANGEBEGINNINGDATE\n\nOBJECT = RANGEBEGINNINGTIME\n" \ @@ -593,7 +595,7 @@ def _get_mask_byte1_variable_info() -> dict: def generate_nasa_l2_filename(prefix: str) -> str: """Generate a file name that follows MODIS 35 L2 convention in a temporary directory.""" - now = datetime.now() + now = dt.datetime.now() return f"{prefix}_L2.A{now:%Y%j.%H%M}.061.{now:%Y%j%H%M%S}.hdf" @@ -614,7 +616,7 @@ def modis_l2_nasa_mod35_file(tmpdir_factory) -> list[str]: def generate_nasa_l3_filename(prefix: str) -> str: """Generate a file name that follows MODIS 09 L3 convention in a temporary directory.""" - now = datetime.now() + now = dt.datetime.now() return f"{prefix}.A{now:%Y%j}.061.{now:%Y%j%H%M%S}.hdf" diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py index 
d4998a67f9..47f5f92c8e 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py @@ -22,7 +22,7 @@ import dask import numpy as np import pytest -from pytest_lazyfixture import lazy_fixture +from pytest_lazy_fixtures import lf as lazy_fixture from satpy import Scene, available_readers from satpy.tests.utils import CustomScheduler, make_dataid diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l2.py b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py index 8876decb59..a30bfc392d 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l2.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py @@ -23,7 +23,7 @@ import dask.array as da import numpy as np import pytest -from pytest_lazyfixture import lazy_fixture +from pytest_lazy_fixtures import lf as lazy_fixture from satpy import Scene, available_readers from satpy.tests.utils import CustomScheduler, make_dataid diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py index de8ff682a1..ca6c5e353a 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py @@ -23,7 +23,7 @@ import numpy as np import pytest from pyresample import geometry -from pytest_lazyfixture import lazy_fixture +from pytest_lazy_fixtures import lf as lazy_fixture from satpy import Scene, available_readers diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index 969c497410..ac82512a2a 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -15,10 +15,12 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """The abi_l1b reader tests package.""" + from __future__ import annotations -from datetime import datetime +import datetime as dt from pathlib import Path from typing import Any, Callable from unittest import mock @@ -29,7 +31,7 @@ import numpy.typing as npt import pytest import xarray as xr -from pytest_lazyfixture import lazy_fixture +from pytest_lazy_fixtures import lf as lazy_fixture from satpy import DataQuery from satpy.readers.abi_l1b import NC_ABI_L1B @@ -372,8 +374,8 @@ def test_get_dataset(self, c01_data_arr): "timeline_ID": None, "suffix": "suffix", "units": "W m-2 um-1 sr-1", - "start_time": datetime(2017, 9, 20, 17, 30, 40, 800000), - "end_time": datetime(2017, 9, 20, 17, 41, 17, 500000), + "start_time": dt.datetime(2017, 9, 20, 17, 30, 40, 800000), + "end_time": dt.datetime(2017, 9, 20, 17, 41, 17, 500000), } res = c01_data_arr diff --git a/satpy/tests/reader_tests/test_abi_l2_nc.py b/satpy/tests/reader_tests/test_abi_l2_nc.py index 4b8d3a9578..1f2d2dc409 100644 --- a/satpy/tests/reader_tests/test_abi_l2_nc.py +++ b/satpy/tests/reader_tests/test_abi_l2_nc.py @@ -14,7 +14,9 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
+ """The abi_l2_nc reader tests package.""" + import contextlib from typing import Optional from unittest import mock @@ -96,23 +98,36 @@ def _create_mcmip_dataset(): return ds1 +def _create_aod_dataset(): + ds1 = _create_cmip_dataset("AOD") + ds1["AOD"].attrs["units"] = "1" + return ds1 + + class Test_NC_ABI_L2_get_dataset: """Test get dataset function of the NC_ABI_L2 reader.""" - def test_get_dataset(self): + @pytest.mark.parametrize( + ("obs_type", "ds_func", "var_name", "var_attrs"), + [ + ("ACHA", _create_cmip_dataset, "HT", {"units": "m"}), + ("AOD", _create_aod_dataset, "AOD", {"units": "1"}), + ] + ) + def test_get_dataset(self, obs_type, ds_func, var_name, var_attrs): """Test basic L2 load.""" from satpy.tests.utils import make_dataid - key = make_dataid(name="HT") - with _create_reader_for_fake_data("ACHA", _create_cmip_dataset()) as reader: - res = reader.get_dataset(key, {"file_key": "HT"}) + key = make_dataid(name=var_name) + with _create_reader_for_fake_data(obs_type, ds_func()) as reader: + res = reader.get_dataset(key, {"file_key": var_name}) exp_data = np.array([[2 * 0.3052037, np.nan], [32768 * 0.3052037, 32767 * 0.3052037]]) exp_attrs = {"instrument_ID": None, "modifiers": (), - "name": "HT", - "observation_type": "ACHA", + "name": var_name, + "observation_type": obs_type, "orbital_slot": None, "platform_name": "GOES-16", "platform_shortname": "G16", @@ -122,7 +137,8 @@ def test_get_dataset(self): "scene_id": None, "sensor": "abi", "timeline_ID": None, - "units": "m"} + } + exp_attrs.update(var_attrs) np.testing.assert_allclose(res.data, exp_data, equal_nan=True) _compare_subdict(res.attrs, exp_attrs) @@ -151,7 +167,7 @@ class TestMCMIPReading: @mock.patch("satpy.readers.abi_base.xr") def test_mcmip_get_dataset(self, xr_, product, exp_metadata): """Test getting channel from MCMIP file.""" - from datetime import datetime + import datetime as dt from pyresample.geometry import AreaDefinition @@ -183,8 +199,8 @@ def test_mcmip_get_dataset(self, 
xr_, product, exp_metadata): "scene_id": None, "sensor": "abi", "timeline_ID": None, - "start_time": datetime(2017, 9, 20, 17, 30, 40, 800000), - "end_time": datetime(2017, 9, 20, 17, 41, 17, 500000), + "start_time": dt.datetime(2017, 9, 20, 17, 30, 40, 800000), + "end_time": dt.datetime(2017, 9, 20, 17, 41, 17, 500000), "ancillary_variables": [], } exp_attrs.update(exp_metadata) diff --git a/satpy/tests/reader_tests/test_acspo.py b/satpy/tests/reader_tests/test_acspo.py index 723d1dbecd..b85232bad4 100644 --- a/satpy/tests/reader_tests/test_acspo.py +++ b/satpy/tests/reader_tests/test_acspo.py @@ -17,8 +17,8 @@ # satpy. If not, see . """Module for testing the satpy.readers.acspo module.""" +import datetime as dt import os -from datetime import datetime, timedelta from unittest import mock import numpy as np @@ -43,7 +43,7 @@ class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" - dt = filename_info.get("start_time", datetime(2016, 1, 1, 12, 0, 0)) + date = filename_info.get("start_time", dt.datetime(2016, 1, 1, 12, 0, 0)) sat, inst = { "VIIRS_NPP": ("NPP", "VIIRS"), "VIIRS_N20": ("N20", "VIIRS"), @@ -53,8 +53,8 @@ def get_test_content(self, filename, filename_info, filetype_info): "/attr/platform": sat, "/attr/sensor": inst, "/attr/spatial_resolution": "742 m at nadir", - "/attr/time_coverage_start": dt.strftime("%Y%m%dT%H%M%SZ"), - "/attr/time_coverage_end": (dt + timedelta(minutes=6)).strftime("%Y%m%dT%H%M%SZ"), + "/attr/time_coverage_start": date.strftime("%Y%m%dT%H%M%SZ"), + "/attr/time_coverage_end": (date + dt.timedelta(minutes=6)).strftime("%Y%m%dT%H%M%SZ"), } file_content["lat"] = DEFAULT_LAT_DATA diff --git a/satpy/tests/reader_tests/test_agri_l1.py b/satpy/tests/reader_tests/test_agri_l1.py index 66395a8cee..3de679796c 100644 --- a/satpy/tests/reader_tests/test_agri_l1.py +++ b/satpy/tests/reader_tests/test_agri_l1.py @@ -56,71 +56,65 @@ class 
FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" - def make_test_data(self, cwl, ch, prefix, dims, file_type): + def _make_cal_data(self, cwl, ch, dims): """Make test data.""" - if prefix == "CAL": - data = xr.DataArray( - da.from_array((np.arange(10.) + 1.) / 10., [dims[0] * dims[1]]), - attrs={ - "Slope": np.array(1.), "Intercept": np.array(0.), - "FillValue": np.array(-65535.0), - "units": "NUL", - "center_wavelength": "{}um".format(cwl).encode("utf-8"), - "band_names": "band{}(band number is range from 1 to 14)" - .format(ch).encode("utf-8"), - "long_name": "Calibration table of {}um Channel".format(cwl).encode("utf-8"), - "valid_range": np.array([0, 1.5]), - }, - dims="_const") - - elif prefix == "NOM": - data = xr.DataArray( - da.from_array(np.arange(10, dtype=np.uint16).reshape((2, 5)) + 1, - [dim for dim in dims]), - attrs={ - "Slope": np.array(1.), "Intercept": np.array(0.), - "FillValue": np.array(65535), - "units": "DN", - "center_wavelength": "{}um".format(cwl).encode("utf-8"), - "band_names": "band{}(band number is range from 1 to 14)" - .format(ch).encode("utf-8"), - "long_name": "Calibration table of {}um Channel".format(cwl).encode("utf-8"), - "valid_range": np.array([0, 4095]), - }, - dims=("_RegLength", "_RegWidth")) - - elif prefix == "GEO": - data = xr.DataArray( - da.from_array(np.arange(0., 360., 36., dtype=np.float32).reshape((2, 5)), - [dim for dim in dims]), - attrs={ - "Slope": np.array(1.), "Intercept": np.array(0.), - "FillValue": np.array(65535.), - "units": "NUL", - "band_names": "NUL", - "valid_range": np.array([0., 360.]), - }, - dims=("_RegLength", "_RegWidth")) - - elif prefix == "COEF": - if file_type == "500": - data = self._create_coeff_array(1) - - elif file_type == "1000": - data = self._create_coeff_array(3) - - elif file_type == "2000": - data = self._create_coeff_array(7) - - elif file_type == "4000": - data = self._create_coeff_array(14) + return xr.DataArray( + da.from_array((np.arange(10.) 
+ 1.) / 10., [dims[0] * dims[1]]), + attrs={ + "Slope": np.array(1.), "Intercept": np.array(0.), + "FillValue": np.array(-65535.0), + "units": "NUL", + "center_wavelength": "{}um".format(cwl).encode("utf-8"), + "band_names": "band{}(band number is range from 1 to 14)" + .format(ch).encode("utf-8"), + "long_name": "Calibration table of {}um Channel".format(cwl).encode("utf-8"), + "valid_range": np.array([0, 1.5]), + }, + dims="_const") + + def _make_nom_data(self, cwl, ch, dims): + # Add +1 to check that values beyond the LUT are clipped + data_np = np.arange(10, dtype=np.uint16).reshape((2, 5)) + 1 + fill_value = 65535 + valid_max = 4095 + if ch == 7: + # mimic C07 bug where the fill value is in the LUT + fill_value = 9 # at index [1, 3] (second to last element) + valid_max = 8 + return xr.DataArray( + da.from_array(data_np, chunks=[dim for dim in dims]), + attrs={ + "Slope": np.array(1.), "Intercept": np.array(0.), + "FillValue": np.array(fill_value), + "units": "DN", + "center_wavelength": "{}um".format(cwl).encode("utf-8"), + "band_names": "band{}(band number is range from 1 to 14)" + .format(ch).encode("utf-8"), + "long_name": "Calibration table of {}um Channel".format(cwl).encode("utf-8"), + "valid_range": np.array([0, valid_max]), + }, + dims=("_RegLength", "_RegWidth")) - return data + def _make_geo_data(self, dims): + return xr.DataArray( + da.from_array(np.arange(0., 360., 36., dtype=np.float32).reshape((2, 5)), + [dim for dim in dims]), + attrs={ + "Slope": np.array(1.), "Intercept": np.array(0.), + "FillValue": np.array(65535.), + "units": "NUL", + "band_names": "NUL", + "valid_range": np.array([0., 360.]), + }, + dims=("_RegLength", "_RegWidth")) - def _create_coeff_array(self, nb_channels): + def _create_coeffs_array(self, channel_numbers: list[int]) -> xr.DataArray: + # make coefficients consistent between file types + all_possible_coeffs = (np.arange(14 * 2).reshape((14, 2)) + 1.0) / np.array([1E4, 1E2]) + # get the coefficients for the specific 
channels this resolution has + these_coeffs = all_possible_coeffs[[chan_num - 1 for chan_num in channel_numbers]] data = xr.DataArray( - da.from_array((np.arange(nb_channels * 2).reshape((nb_channels, 2)) + 1.) / - np.array([1E4, 1E2]), [nb_channels, 2]), + da.from_array(these_coeffs, chunks=[len(channel_numbers), 2]), attrs={ "Slope": 1., "Intercept": 0., "FillValue": 0, @@ -132,60 +126,46 @@ def _create_coeff_array(self, nb_channels): dims=("_num_channel", "_coefs")) return data - def _create_channel_data(self, chs, cwls, file_type): + def _create_channel_data(self, chs, cwls): dim_0 = 2 dim_1 = 5 data = {} - for index, _cwl in enumerate(cwls): - data["CALChannel" + "%02d" % chs[index]] = self.make_test_data(cwls[index], chs[index], "CAL", - [dim_0, dim_1], file_type) - data["Calibration/CALChannel" + "%02d" % chs[index]] = self.make_test_data(cwls[index], chs[index], "CAL", - [dim_0, dim_1], file_type) - data["NOMChannel" + "%02d" % chs[index]] = self.make_test_data(cwls[index], chs[index], "NOM", - [dim_0, dim_1], file_type) - data["Data/NOMChannel" + "%02d" % chs[index]] = self.make_test_data(cwls[index], chs[index], "NOM", - [dim_0, dim_1], file_type) - data["CALIBRATION_COEF(SCALE+OFFSET)"] = self.make_test_data(cwls[index], chs[index], "COEF", - [dim_0, dim_1], file_type) - data["Calibration/CALIBRATION_COEF(SCALE+OFFSET)"] = self.make_test_data(cwls[index], chs[index], "COEF", - [dim_0, dim_1], file_type) + for chan_num, chan_wl in zip(chs, cwls): + cal_data = self._make_cal_data(chan_wl, chan_num, [dim_0, dim_1]) + data[f"CALChannel{chan_num:02d}"] = cal_data + data[f"Calibration/CALChannel{chan_num:02d}"] = cal_data + nom_data = self._make_nom_data(chan_wl, chan_num, [dim_0, dim_1]) + data[f"NOMChannel{chan_num:02d}"] = nom_data + data[f"Data/NOMChannel{chan_num:02d}"] = nom_data + data["CALIBRATION_COEF(SCALE+OFFSET)"] = self._create_coeffs_array(chs) + data["Calibration/CALIBRATION_COEF(SCALE+OFFSET)"] = self._create_coeffs_array(chs) return data - def 
_get_500m_data(self, file_type): + def _get_500m_data(self): chs = [2] cwls = [0.65] - data = self._create_channel_data(chs, cwls, file_type) + return self._create_channel_data(chs, cwls) - return data - - def _get_1km_data(self, file_type): - chs = np.linspace(1, 3, 3) + def _get_1km_data(self): + chs = [1, 2, 3] cwls = [0.47, 0.65, 0.83] - data = self._create_channel_data(chs, cwls, file_type) + return self._create_channel_data(chs, cwls) - return data - - def _get_2km_data(self, file_type): - chs = np.linspace(1, 7, 7) + def _get_2km_data(self): + chs = [1, 2, 3, 4, 5, 6, 7] cwls = [0.47, 0.65, 0.83, 1.37, 1.61, 2.22, 3.72] - data = self._create_channel_data(chs, cwls, file_type) + return self._create_channel_data(chs, cwls) - return data - - def _get_4km_data(self, file_type): - chs = np.linspace(1, 14, 14) + def _get_4km_data(self): + chs = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] cwls = [0.47, 0.65, 0.83, 1.37, 1.61, 2.22, 3.72, 3.72, 6.25, 7.10, 8.50, 10.8, 12, 13.5] - data = self._create_channel_data(chs, cwls, file_type) - - return data + return self._create_channel_data(chs, cwls) - def _get_geo_data(self, file_type): + def _get_geo_data(self): dim_0 = 2 dim_1 = 5 - data = {"NOMSunAzimuth": self.make_test_data("NUL", "NUL", "GEO", - [dim_0, dim_1], file_type), - "Navigation/NOMSunAzimuth": self.make_test_data("NUL", "NUL", "GEO", - [dim_0, dim_1], file_type)} + data = {"NOMSunAzimuth": self._make_geo_data([dim_0, dim_1]), + "Navigation/NOMSunAzimuth": self._make_geo_data([dim_0, dim_1])} return data def get_test_content(self, filename, filename_info, filetype_info): @@ -210,17 +190,17 @@ def get_test_content(self, filename, filename_info, filetype_info): data = {} if self.filetype_info["file_type"] == "agri_l1_0500m": - data = self._get_500m_data("500") + data = self._get_500m_data() elif self.filetype_info["file_type"] == "agri_l1_1000m": - data = self._get_1km_data("1000") + data = self._get_1km_data() elif self.filetype_info["file_type"] == 
"agri_l1_2000m": - data = self._get_2km_data("2000") + data = self._get_2km_data() global_attrs["/attr/Observing Beginning Time"] = "00:30:01" global_attrs["/attr/Observing Ending Time"] = "00:34:07" elif self.filetype_info["file_type"] == "agri_l1_4000m": - data = self._get_4km_data("4000") + data = self._get_4km_data() elif self.filetype_info["file_type"] == "agri_l1_4000m_geo": - data = self._get_geo_data("4000") + data = self._get_geo_data() test_content = {} test_content.update(global_attrs) @@ -263,7 +243,7 @@ def setup_method(self): 4: np.array([[8.07, 8.14, 8.21, 8.28, 8.35], [8.42, 8.49, 8.56, 8.63, 8.7]]), 5: np.array([[10.09, 10.18, 10.27, 10.36, 10.45], [10.54, 10.63, 10.72, 10.81, 10.9]]), 6: np.array([[12.11, 12.22, 12.33, 12.44, 12.55], [12.66, 12.77, 12.88, 12.99, 13.1]]), - 7: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), + 7: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, np.nan, np.nan]]), 8: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), 9: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), 10: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), @@ -398,10 +378,11 @@ def test_agri_for_one_resolution(self, resolution_to_test, satname): AREA_EXTENTS_BY_RESOLUTION[satname][resolution_to_test]) def _check_calibration_and_units(self, band_names, result): - for index, band_name in enumerate(band_names): + for band_name in band_names: + band_number = int(band_name[-2:]) assert result[band_name].attrs["sensor"].islower() assert result[band_name].shape == (2, 5) - np.testing.assert_allclose(result[band_name].values, self.expected[index + 1], equal_nan=True) + np.testing.assert_allclose(result[band_name].values, self.expected[band_number], equal_nan=True) self._check_units(band_name, result) @staticmethod diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 7c88c9e5ac..fbb0857734 100644 --- 
a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -15,13 +15,15 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """The ahi_hsd reader tests package.""" + from __future__ import annotations import contextlib +import datetime as dt import unittest import warnings -from datetime import datetime from typing import Any, Dict from unittest import mock @@ -29,7 +31,7 @@ import numpy as np import pytest -from satpy.readers.ahi_hsd import AHIHSDFileHandler +from satpy.readers.ahi_hsd import AHIHSDFileHandler, _NominalTimeCalculator from satpy.readers.utils import get_geostationary_mask from satpy.tests.utils import make_dataid @@ -40,7 +42,7 @@ "satellite": "Himawari-8", "observation_area": "FLDK", "observation_start_time": 58413.12523839, - "observation_end_time": 58413.12562439, + "observation_end_time": 58413.132182834444, "observation_timeline": "0300", } FAKE_DATA_INFO: InfoDict = { @@ -340,10 +342,10 @@ def test_read_band(self, calibrate, *mocks): np.testing.assert_allclose(value, actual_obs_params[key]) time_params_exp = { - "nominal_start_time": datetime(2018, 10, 22, 3, 0, 0, 0), - "nominal_end_time": datetime(2018, 10, 22, 3, 0, 0, 0), - "observation_start_time": datetime(2018, 10, 22, 3, 0, 20, 596896), - "observation_end_time": datetime(2018, 10, 22, 3, 0, 53, 947296), + "nominal_start_time": dt.datetime(2018, 10, 22, 3, 0, 0, 0), + "nominal_end_time": dt.datetime(2018, 10, 22, 3, 10, 0, 0), + "observation_start_time": dt.datetime(2018, 10, 22, 3, 0, 20, 596896), + "observation_end_time": dt.datetime(2018, 10, 22, 3, 10, 20, 596896), } actual_time_params = im.attrs["time_parameters"] for key, value in time_params_exp.items(): @@ -416,31 +418,12 @@ def test_scene_loading(self, calibrate, *mocks): def test_time_properties(self): """Test start/end/scheduled time properties.""" with _fake_hsd_handler() as fh: - assert fh.start_time == datetime(2018, 10, 22, 3, 0) 
- assert fh.end_time == datetime(2018, 10, 22, 3, 0) - assert fh.observation_start_time == datetime(2018, 10, 22, 3, 0, 20, 596896) - assert fh.observation_end_time == datetime(2018, 10, 22, 3, 0, 53, 947296) - assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 0, 0, 0) - assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 0, 0, 0) - - def test_scanning_frequencies(self): - """Test scanning frequencies.""" - with _fake_hsd_handler() as fh: - fh.observation_area = "JP04" - assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 7, 30, 0) - assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 7, 30, 0) - fh.observation_area = "R304" - assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 7, 30, 0) - assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 7, 30, 0) - fh.observation_area = "R420" - assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 9, 30, 0) - assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 9, 30, 0) - fh.observation_area = "R520" - assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 9, 30, 0) - assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 9, 30, 0) - fh.observation_area = "FLDK" - assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 0, 0, 0) - assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 0, 0, 0) + assert fh.start_time == dt.datetime(2018, 10, 22, 3, 0) + assert fh.end_time == dt.datetime(2018, 10, 22, 3, 10) + assert fh.observation_start_time == dt.datetime(2018, 10, 22, 3, 0, 20, 596896) + assert fh.observation_end_time == dt.datetime(2018, 10, 22, 3, 10, 20, 596896) + assert fh.nominal_start_time == dt.datetime(2018, 10, 22, 3, 0, 0, 0) + assert fh.nominal_end_time == dt.datetime(2018, 10, 22, 3, 10, 0, 0) def test_blocklen_error(self, *mocks): """Test erraneous blocklength.""" @@ -460,25 +443,6 @@ def test_blocklen_error(self, *mocks): with pytest.warns(UserWarning, match=r"Actual .* header size does not match expected"): fh._check_fpos(fp_, fpos, 0, "header 1") - def 
test_is_valid_time(self): - """Test that valid times are correctly identified.""" - assert AHIHSDFileHandler._is_valid_timeline(FAKE_BASIC_INFO["observation_timeline"]) - assert not AHIHSDFileHandler._is_valid_timeline("65526") - - def test_time_rounding(self): - """Test rounding of the nominal time.""" - mocker = mock.MagicMock() - in_date = datetime(2020, 1, 1, 12, 0, 0) - - with mock.patch("satpy.readers.ahi_hsd.AHIHSDFileHandler._is_valid_timeline", mocker): - with _fake_hsd_handler() as fh: - mocker.return_value = True - assert fh._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 3, 0, 0) - mocker.return_value = False - with pytest.warns(UserWarning, - match=r"Observation timeline is fill value, not rounding observation time"): - assert fh._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 12, 0, 0) - class TestAHICalibration(unittest.TestCase): """Test case for various AHI calibration types.""" @@ -669,3 +633,111 @@ def _create_fake_file_handler(in_fname, filename_info=None, filetype_info=None, assert in_fname != fh.filename assert str(filename_info["segment"]).zfill(2) == fh.filename[0:2] return fh + + +class TestNominalTimeCalculator: + """Test case for nominal timestamp computation.""" + + @pytest.mark.parametrize( + ("timeline", "expected"), + [ + ("0300", dt.datetime(2020, 1, 1, 3, 0, 0)), + ("65526", dt.datetime(2020, 1, 1, 12, 0, 0)) + ] + ) + def test_invalid_timeline(self, timeline, expected): + """Test handling of invalid timeline.""" + calc = _NominalTimeCalculator(timeline, "FLDK") + res = calc.get_nominal_start_time(dt.datetime(2020, 1, 1, 12, 0, 0)) + assert res == expected + + @pytest.mark.parametrize( + ("area", "expected"), + [ + ( + "JP01", + {"tstart": dt.datetime(2018, 10, 22, 3, 0, 0), + "tend": dt.datetime(2018, 10, 22, 3, 2, 30)} + ), + ( + "JP04", + {"tstart": dt.datetime(2018, 10, 22, 3, 7, 30, 0), + "tend": dt.datetime(2018, 10, 22, 3, 10, 0, 0)} + ), + ( + "R301", + {"tstart": dt.datetime(2018, 
10, 22, 3, 0, 0), + "tend": dt.datetime(2018, 10, 22, 3, 2, 30)} + ), + ( + "R304", + {"tstart": dt.datetime(2018, 10, 22, 3, 7, 30, 0), + "tend": dt.datetime(2018, 10, 22, 3, 10, 0, 0)} + ), + ( + "R401", + {"tstart": dt.datetime(2018, 10, 22, 3, 0, 0), + "tend": dt.datetime(2018, 10, 22, 3, 0, 30)} + ), + ( + "R420", + {"tstart": dt.datetime(2018, 10, 22, 3, 9, 30, 0), + "tend": dt.datetime(2018, 10, 22, 3, 10, 0, 0)} + ), + ( + "R501", + {"tstart": dt.datetime(2018, 10, 22, 3, 0, 0), + "tend": dt.datetime(2018, 10, 22, 3, 0, 30)} + ), + ( + "R520", + {"tstart": dt.datetime(2018, 10, 22, 3, 9, 30, 0), + "tend": dt.datetime(2018, 10, 22, 3, 10, 0, 0)} + ), + ] + ) + def test_areas(self, area, expected): + """Test nominal timestamps for multiple areas.""" + obs_start_time = dt.datetime(2018, 10, 22, 3, 0, 20, 596896) + calc = _NominalTimeCalculator("0300", area) + nom_start_time = calc.get_nominal_start_time(obs_start_time) + nom_end_time = calc.get_nominal_end_time(nom_start_time) + assert nom_start_time == expected["tstart"] + assert nom_end_time == expected["tend"] + + @pytest.mark.parametrize( + ("timeline", "obs_start_time", "expected"), + [ + ( + "2350", + dt.datetime(2022, 12, 31, 23, 50, 1), + {"tstart": dt.datetime(2022, 12, 31, 23, 50, 0), + "tend": dt.datetime(2023, 1, 1, 0, 0, 0)} + ), + ( + "2350", + dt.datetime(2022, 12, 31, 23, 49, 59), + {"tstart": dt.datetime(2022, 12, 31, 23, 50, 0), + "tend": dt.datetime(2023, 1, 1, 0, 0, 0)} + ), + ( + "0000", + dt.datetime(2023, 1, 1, 0, 0, 1), + {"tstart": dt.datetime(2023, 1, 1, 0, 0, 0), + "tend": dt.datetime(2023, 1, 1, 0, 10, 0)} + ), + ( + "0000", + dt.datetime(2022, 12, 31, 23, 59, 59), + {"tstart": dt.datetime(2023, 1, 1, 0, 0, 0), + "tend": dt.datetime(2023, 1, 1, 0, 10, 0)} + ), + ] + ) + def test_timelines(self, timeline, obs_start_time, expected): + """Test nominal timestamps for multiple timelines.""" + calc = _NominalTimeCalculator(timeline, "FLDK") + nom_start_time = 
calc.get_nominal_start_time(obs_start_time) + nom_end_time = calc.get_nominal_end_time(nom_start_time) + assert nom_start_time == expected["tstart"] + assert nom_end_time == expected["tend"] diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py index 817738bb82..fcb1c34658 100644 --- a/satpy/tests/reader_tests/test_ahi_l2_nc.py +++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py @@ -1,6 +1,6 @@ """Tests for the Himawari L2 netCDF reader.""" -from datetime import datetime +import datetime as dt import numpy as np import pytest @@ -15,8 +15,8 @@ lat_data = rng.uniform(-90, 90, (5500, 5500)) lon_data = rng.uniform(-180, 180, (5500, 5500)) -start_time = datetime(2023, 8, 24, 5, 40, 21) -end_time = datetime(2023, 8, 24, 5, 49, 40) +start_time = dt.datetime(2023, 8, 24, 5, 40, 21) +end_time = dt.datetime(2023, 8, 24, 5, 49, 40) dimensions = {"Columns": 5500, "Rows": 5500} diff --git a/satpy/tests/reader_tests/test_ami_l1b.py b/satpy/tests/reader_tests/test_ami_l1b.py index 7dd2cfcb33..6af6c1099f 100644 --- a/satpy/tests/reader_tests/test_ami_l1b.py +++ b/satpy/tests/reader_tests/test_ami_l1b.py @@ -173,9 +173,9 @@ def test_filename_grouping(self): def test_basic_attributes(self): """Test getting basic file attributes.""" - from datetime import datetime - assert self.reader.start_time == datetime(2019, 9, 30, 3, 0, 31, 957882) - assert self.reader.end_time == datetime(2019, 9, 30, 3, 9, 35, 606133) + import datetime as dt + assert self.reader.start_time == dt.datetime(2019, 9, 30, 3, 0, 31, 957882) + assert self.reader.end_time == dt.datetime(2019, 9, 30, 3, 9, 35, 606133) def test_get_dataset(self): """Test gettting radiance data.""" diff --git a/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py b/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py index 2f1b3ad7b0..d6e6597d69 100644 --- a/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py +++ b/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py @@ -17,8 +17,8 @@ # satpy. If not, see . 
"""Tests for the 'amsr2_l2_gaasp' reader.""" +import datetime as dt import os -from datetime import datetime from unittest import mock import dask.array as da @@ -259,8 +259,8 @@ def _check_attrs(data_arr): assert "add_offset" not in attrs assert attrs["platform_name"] == "GCOM-W1" assert attrs["sensor"] == "amsr2" - assert attrs["start_time"] == datetime(2020, 8, 12, 5, 58, 31) - assert attrs["end_time"] == datetime(2020, 8, 12, 6, 7, 1) + assert attrs["start_time"] == dt.datetime(2020, 8, 12, 5, 58, 31) + assert attrs["end_time"] == dt.datetime(2020, 8, 12, 6, 7, 1) @pytest.mark.parametrize( ("filenames", "loadable_ids"), diff --git a/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py b/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py index 07ed218e72..dc3e371b46 100644 --- a/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py +++ b/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py @@ -15,12 +15,13 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """Unittesting the ASCAT SCATTEROMETER SOIL MOISTURE BUFR reader.""" +import datetime as dt import os import sys import unittest -from datetime import datetime import numpy as np @@ -152,8 +153,8 @@ def test_scene(self): fname = os.path.join(self.base_dir, FILENAME) scn = Scene(reader="ascat_l2_soilmoisture_bufr", filenames=[fname]) assert "scatterometer" in scn.sensor_names - assert datetime(2020, 12, 21, 9, 33, 0) == scn.start_time - assert datetime(2020, 12, 21, 9, 33, 59) == scn.end_time + assert dt.datetime(2020, 12, 21, 9, 33, 0) == scn.start_time + assert dt.datetime(2020, 12, 21, 9, 33, 59) == scn.end_time @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene_load_available_datasets(self): diff --git a/satpy/tests/reader_tests/test_atms_l1b_nc.py b/satpy/tests/reader_tests/test_atms_l1b_nc.py index 6b27081ed9..f1f729311a 100644 --- a/satpy/tests/reader_tests/test_atms_l1b_nc.py +++ b/satpy/tests/reader_tests/test_atms_l1b_nc.py @@ -12,9 +12,10 @@ # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . 
+ """The atms_l1b_nc reader tests package.""" -from datetime import datetime +import datetime as dt import numpy as np import pytest @@ -32,7 +33,7 @@ def reader(l1b_file): """Return reader of ATMS level1b data.""" return AtmsL1bNCFileHandler( filename=l1b_file, - filename_info={"creation_time": datetime(2020, 1, 2, 3, 4, 5)}, + filename_info={"creation_time": dt.datetime(2020, 1, 2, 3, 4, 5)}, filetype_info={"antenna_temperature": "antenna_temp"}, ) @@ -78,11 +79,11 @@ class TestAtsmsL1bNCFileHandler: def test_start_time(self, reader): """Test start time.""" - assert reader.start_time == datetime(2000, 1, 2, 3, 4, 5) + assert reader.start_time == dt.datetime(2000, 1, 2, 3, 4, 5) def test_end_time(self, reader): """Test end time.""" - assert reader.end_time == datetime(2000, 1, 2, 4, 5, 6) + assert reader.end_time == dt.datetime(2000, 1, 2, 4, 5, 6) def test_sensor(self, reader): """Test sensor.""" @@ -100,8 +101,8 @@ def test_antenna_temperature(self, reader, atms_fake_dataset): ) @pytest.mark.parametrize(("param", "expect"), [ - ("start_time", datetime(2000, 1, 2, 3, 4, 5)), - ("end_time", datetime(2000, 1, 2, 4, 5, 6)), + ("start_time", dt.datetime(2000, 1, 2, 3, 4, 5)), + ("end_time", dt.datetime(2000, 1, 2, 4, 5, 6)), ("platform_name", "JPSS-1"), ("sensor", "ATMS"), ]) @@ -135,11 +136,11 @@ def test_drop_coords(self, reader): assert coords not in data.coords @pytest.mark.parametrize(("param", "expect"), [ - ("start_time", datetime(2000, 1, 2, 3, 4, 5)), - ("end_time", datetime(2000, 1, 2, 4, 5, 6)), + ("start_time", dt.datetime(2000, 1, 2, 3, 4, 5)), + ("end_time", dt.datetime(2000, 1, 2, 4, 5, 6)), ("platform_name", "JPSS-1"), ("sensor", "ATMS"), - ("creation_time", datetime(2020, 1, 2, 3, 4, 5)), + ("creation_time", dt.datetime(2020, 1, 2, 3, 4, 5)), ("type", "test_data"), ("name", "test"), ]) diff --git a/satpy/tests/reader_tests/test_atms_sdr_hdf5.py b/satpy/tests/reader_tests/test_atms_sdr_hdf5.py index 8971c2d933..4fe6c120a1 100644 --- 
a/satpy/tests/reader_tests/test_atms_sdr_hdf5.py +++ b/satpy/tests/reader_tests/test_atms_sdr_hdf5.py @@ -18,8 +18,8 @@ """Module for testing the ATMS SDR HDF5 reader.""" +import datetime as dt import os -from datetime import datetime from unittest import mock import numpy as np @@ -288,8 +288,8 @@ def test_init_start_end_time(self): """Test basic init with start and end times around the start/end times of the provided file.""" r = load_reader(self.reader_configs, filter_parameters={ - "start_time": datetime(2022, 12, 19), - "end_time": datetime(2022, 12, 21) + "start_time": dt.datetime(2022, 12, 19), + "end_time": dt.datetime(2022, 12, 21) }) loadables = r.select_files_from_pathnames([ "SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5", diff --git a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py index dfcaff4514..3040a46750 100644 --- a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py +++ b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py @@ -15,9 +15,10 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """Pygac interface.""" -from datetime import datetime +import datetime as dt from unittest import TestCase, mock import dask.array as da @@ -26,6 +27,7 @@ import xarray as xr GAC_PATTERN = '{creation_site:3s}.{transfer_mode:4s}.{platform_id:2s}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{orbit_number:05d}{end_orbit_last_digits:02d}.{station:2s}' # noqa +EOSIP_PATTERN = '{platform_id:3s}_RPRO_AVH_L1B_1P_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit_number:06d}/image.l1b' # noqa GAC_POD_FILENAMES = ["NSS.GHRR.NA.D79184.S1150.E1337.B0008384.WI", "NSS.GHRR.NA.D79184.S2350.E0137.B0008384.WI", @@ -68,6 +70,8 @@ "NSS.FRAC.M2.D12153.S1729.E1910.B2915354.SV", "NSS.LHRR.NP.D16306.S1803.E1814.B3985555.WI"] +LAC_EOSIP_FILENAMES = ["N06_RPRO_AVH_L1B_1P_20061206T010808_20061206T012223_007961/image.l1b"] + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__", return_value=None) def _get_fh_mocked(init_mock, **attrs): @@ -138,6 +142,12 @@ def _get_fh(self, filename="NSS.GHRR.NG.D88002.S0614.E0807.B0670506.WI", filename_info = parse(GAC_PATTERN, filename) return self.GACLACFile(filename, filename_info, {}, **kwargs) + def _get_eosip_fh(self, filename, **kwargs): + """Create a file handler.""" + from trollsift import parse + filename_info = parse(EOSIP_PATTERN, filename) + return self.GACLACFile(filename, filename_info, {}, **kwargs) + def test_init(self): """Test GACLACFile initialization.""" from pygac.gac_klm import GACKLMReader @@ -161,6 +171,28 @@ def test_init(self): assert fh.start_time < fh.end_time assert fh.reader_class is reader_cls + + def test_init_eosip(self): + """Test GACLACFile initialization.""" + from pygac.lac_pod import LACPODReader + + kwargs = {"start_line": 1, + "end_line": 2, + "strip_invalid_coords": True, + "interpolate_coords": True, + "adjust_clock_drift": True, + "tle_dir": "tle_dir", + "tle_name": "tle_name", + "tle_thresh": 123, + "calibration": "calibration"} + for filenames, reader_cls in zip([LAC_EOSIP_FILENAMES], + 
[LACPODReader]): + for filename in filenames: + fh = self._get_eosip_fh(filename, **kwargs) + assert fh.start_time < fh.end_time + assert fh.reader_class is reader_cls + assert fh.reader_kwargs["header_date"] > dt.date(1994, 11, 15) + def test_read_raw_data(self): """Test raw data reading.""" fh = _get_fh_mocked(reader=None, @@ -425,8 +457,8 @@ def _slice_patched(data): data_slc, times_slc = fh.slice(data, times) np.testing.assert_array_equal(data_slc, data[1:3]) np.testing.assert_array_equal(times_slc, times[1:3]) - assert fh.start_time == datetime(1970, 1, 1, 0, 0, 0, 2) - assert fh.end_time == datetime(1970, 1, 1, 0, 0, 0, 3) + assert fh.start_time == dt.datetime(1970, 1, 1, 0, 0, 0, 2) + assert fh.end_time == dt.datetime(1970, 1, 1, 0, 0, 0, 3) @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_qual_flags") @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._strip_invalid_lat") diff --git a/satpy/tests/reader_tests/test_clavrx/__init__.py b/satpy/tests/reader_tests/test_clavrx/__init__.py new file mode 100644 index 0000000000..6f62e3a26b --- /dev/null +++ b/satpy/tests/reader_tests/test_clavrx/__init__.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2018 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . 
+"""The clavrx reader tests package.""" diff --git a/satpy/tests/reader_tests/test_clavrx/test_clavrx_geohdf.py b/satpy/tests/reader_tests/test_clavrx/test_clavrx_geohdf.py new file mode 100644 index 0000000000..85a7f6faa3 --- /dev/null +++ b/satpy/tests/reader_tests/test_clavrx/test_clavrx_geohdf.py @@ -0,0 +1,246 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2018 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . 
+"""Module for testing the satpy.readers.clavrx module.""" + +import os +import unittest +from unittest import mock + +import numpy as np +import pytest +import xarray as xr +from pyresample.geometry import AreaDefinition + +from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler + +DEFAULT_FILE_DTYPE = np.uint16 +DEFAULT_FILE_SHAPE = (10, 300) +DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], + dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) +DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) +DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) +DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) +DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) +DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) + +class FakeHDF4FileHandlerGeo(FakeHDF4FileHandler): + """Swap-in HDF4 File Handler.""" + + def get_test_content(self, filename, filename_info, filetype_info): + """Mimic reader input file content.""" + file_content = { + "/attr/platform": "HIM8", + "/attr/sensor": "AHI", + # this is a Level 2 file that came from a L1B file + "/attr/L1B": "clavrx_H08_20180806_1800", + } + + file_content["longitude"] = xr.DataArray( + DEFAULT_LON_DATA, + dims=("y", "x"), + attrs={ + "_FillValue": np.nan, + "scale_factor": 1., + "add_offset": 0., + "standard_name": "longitude", + }) + file_content["longitude/shape"] = DEFAULT_FILE_SHAPE + + file_content["latitude"] = xr.DataArray( + DEFAULT_LAT_DATA, + dims=("y", "x"), + attrs={ + "_FillValue": np.nan, + "scale_factor": 1., + "add_offset": 0., + "standard_name": "latitude", + }) + file_content["latitude/shape"] = DEFAULT_FILE_SHAPE + + file_content["refl_1_38um_nom"] = xr.DataArray( + DEFAULT_FILE_DATA.astype(np.float32), + dims=("y", "x"), + attrs={ + "SCALED": 1, + "add_offset": 59.0, + "scale_factor": 0.0018616290763020515, + "units": "%", + 
"_FillValue": -32768, + "valid_range": [-32767, 32767], + "actual_range": [-2., 120.], + "actual_missing": -999.0 + }) + file_content["refl_1_38um_nom/shape"] = DEFAULT_FILE_SHAPE + + # data with fill values + file_content["variable2"] = xr.DataArray( + DEFAULT_FILE_DATA.astype(np.float32), + dims=("y", "x"), + attrs={ + "_FillValue": -1, + "scale_factor": 1., + "add_offset": 0., + "units": "1", + }) + file_content["variable2/shape"] = DEFAULT_FILE_SHAPE + file_content["variable2"] = file_content["variable2"].where( + file_content["variable2"] % 2 != 0) + + # category + file_content["variable3"] = xr.DataArray( + DEFAULT_FILE_DATA.astype(np.byte), + dims=("y", "x"), + attrs={ + "SCALED": 0, + "_FillValue": -128, + "flag_meanings": "clear water supercooled mixed ice unknown", + "flag_values": [0, 1, 2, 3, 4, 5], + "units": "1", + }) + file_content["variable3/shape"] = DEFAULT_FILE_SHAPE + + return file_content + + +class TestCLAVRXReaderGeo(unittest.TestCase): + """Test CLAVR-X Reader with Geo files.""" + + yaml_file = "clavrx.yaml" + + def setUp(self): + """Wrap HDF4 file handler with our own fake handler.""" + from satpy._config import config_search_paths + from satpy.readers.clavrx import CLAVRXHDF4FileHandler + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) + # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library + self.p = mock.patch.object(CLAVRXHDF4FileHandler, "__bases__", (FakeHDF4FileHandlerGeo,)) + self.fake_handler = self.p.start() + self.p.is_local = True + + def tearDown(self): + """Stop wrapping the NetCDF4 file handler.""" + self.p.stop() + + def test_init(self): + """Test basic init with no extra parameters.""" + from satpy.readers import load_reader + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames([ + "clavrx_H08_20180806_1800.level2.hdf", + ]) + assert len(loadables) == 1 + r.create_filehandlers(loadables) + # make sure we have some files 
+ assert r.file_handlers + + def test_no_nav_donor(self): + """Test exception raised when no donor file is available.""" + import xarray as xr + + from satpy.readers import load_reader + r = load_reader(self.reader_configs) + fake_fn = "clavrx_H08_20180806_1800.level2.hdf" + with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): + loadables = r.select_files_from_pathnames([fake_fn]) + r.create_filehandlers(loadables) + l1b_base = fake_fn.split(".")[0] + msg = f"Missing navigation donor {l1b_base}" + with pytest.raises(IOError, match=msg): + r.load(["refl_1_38um_nom", "variable2", "variable3"]) + + def test_load_all_old_donor(self): + """Test loading all test datasets with old donor.""" + import xarray as xr + + from satpy.readers import load_reader + r = load_reader(self.reader_configs) + with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): + loadables = r.select_files_from_pathnames([ + "clavrx_H08_20180806_1800.level2.hdf", + ]) + r.create_filehandlers(loadables) + with mock.patch("satpy.readers.clavrx.glob") as g, mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: + g.return_value = ["fake_donor.nc"] + x = np.linspace(-0.1518, 0.1518, 300) + y = np.linspace(0.1518, -0.1518, 10) + proj = mock.Mock( + semi_major_axis=6378.137, + semi_minor_axis=6356.7523142, + perspective_point_height=35791, + longitude_of_projection_origin=140.7, + sweep_angle_axis="y", + ) + d.return_value = fake_donor = mock.MagicMock( + variables={"Projection": proj, "x": x, "y": y}, + ) + fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] + datasets = r.load(["refl_1_38um_nom", "variable2", "variable3"]) + assert len(datasets) == 3 + for v in datasets.values(): + assert "calibration" not in v.attrs + assert v.attrs["units"] in ["1", "%"] + assert isinstance(v.attrs["area"], AreaDefinition) + if v.attrs.get("flag_values"): + assert "_FillValue" in v.attrs + else: + assert "_FillValue" not in v.attrs + if v.attrs["name"] == "refl_1_38um_nom": + assert 
"valid_range" in v.attrs + assert isinstance(v.attrs["valid_range"], list) + else: + assert "valid_range" not in v.attrs + if "flag_values" in v.attrs: + assert np.issubdtype(v.dtype, np.integer) + assert v.attrs.get("flag_meanings") is not None + + def test_load_all_new_donor(self): + """Test loading all test datasets with new donor.""" + import xarray as xr + + from satpy.readers import load_reader + r = load_reader(self.reader_configs) + with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): + loadables = r.select_files_from_pathnames([ + "clavrx_H08_20180806_1800.level2.hdf", + ]) + r.create_filehandlers(loadables) + with mock.patch("satpy.readers.clavrx.glob") as g, mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: + g.return_value = ["fake_donor.nc"] + x = np.linspace(-0.1518, 0.1518, 300) + y = np.linspace(0.1518, -0.1518, 10) + proj = mock.Mock( + semi_major_axis=6378137, + semi_minor_axis=6356752.3142, + perspective_point_height=35791000, + longitude_of_projection_origin=140.7, + sweep_angle_axis="y", + ) + d.return_value = fake_donor = mock.MagicMock( + variables={"goes_imager_projection": proj, "x": x, "y": y}, + ) + fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] + datasets = r.load(["refl_1_38um_nom", "variable2", "variable3"]) + assert len(datasets) == 3 + for v in datasets.values(): + assert "calibration" not in v.attrs + assert v.attrs["units"] in ["1", "%"] + assert isinstance(v.attrs["area"], AreaDefinition) + assert v.attrs["area"].is_geostationary is True + assert v.attrs["platform_name"] == "himawari8" + assert v.attrs["sensor"] == "ahi" + assert datasets["variable3"].attrs.get("flag_meanings") is not None diff --git a/satpy/tests/reader_tests/test_clavrx/test_clavrx_nc.py b/satpy/tests/reader_tests/test_clavrx/test_clavrx_nc.py new file mode 100644 index 0000000000..3cb188d76c --- /dev/null +++ b/satpy/tests/reader_tests/test_clavrx/test_clavrx_nc.py @@ -0,0 +1,326 @@ +#!/usr/bin/env python +# -*- coding: 
utf-8 -*- +# Copyright (c) 2018 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""Module for testing the satpy.readers.clavrx module.""" +import os +from unittest import mock + +import numpy as np +import pytest +import xarray as xr +from pyresample.geometry import AreaDefinition + +from satpy.readers import load_reader + +ABI_FILE = "clavrx_OR_ABI-L1b-RadC-M6C01_G16_s20231021601173.level2.nc" +DEFAULT_FILE_DTYPE = np.uint16 +DEFAULT_FILE_SHAPE = (5, 5) +DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], + dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) +DEFAULT_FILE_FLAGS = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], + dtype=np.byte).reshape(DEFAULT_FILE_SHAPE) +DEFAULT_FILE_FLAGS_BEYOND_FILL = DEFAULT_FILE_FLAGS +DEFAULT_FILE_FLAGS_BEYOND_FILL[-1][:-2] = [-127, -127, -128] +DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) +DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) +DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) +DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) +DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) +L1B_FILE = "clavrx_OR_ABI-L1b-RadC-M6C01_G16_s20231021601173" +ABI_FILE = f"{L1B_FILE}.level2.nc" +FILL_VALUE = -32768 + + +def fake_test_content(filename, 
**kwargs): + """Mimic reader input file content.""" + attrs = { + "platform": "G16", + "sensor": "ABI", + # this is a Level 2 file that came from a L1B file + "L1B": L1B_FILE, + } + + longitude = xr.DataArray(DEFAULT_LON_DATA, + dims=("scan_lines_along_track_direction", + "pixel_elements_along_scan_direction"), + attrs={"_FillValue": -999., + "SCALED": 0, + "scale_factor": 1., + "add_offset": 0., + "standard_name": "longitude", + "units": "degrees_east" + }) + + latitude = xr.DataArray(DEFAULT_LAT_DATA, + dims=("scan_lines_along_track_direction", + "pixel_elements_along_scan_direction"), + attrs={"_FillValue": -999., + "SCALED": 0, + "scale_factor": 1., + "add_offset": 0., + "standard_name": "latitude", + "units": "degrees_south" + }) + + variable1 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.int8), + dims=("scan_lines_along_track_direction", + "pixel_elements_along_scan_direction"), + attrs={"_FillValue": -127, + "SCALED": 0, + "units": "1", + }) + + # data with fill values and a file_type alias + variable2 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.int16), + dims=("scan_lines_along_track_direction", + "pixel_elements_along_scan_direction"), + attrs={"_FillValue": FILL_VALUE, + "SCALED": 1, + "scale_factor": 0.001861629, + "add_offset": 59., + "units": "%", + "valid_range": [-32767, 32767], + # this is a Level 2 file that came from a L1B file + "L1B": "clavrx_H08_20210603_1500_B01_FLDK_R", + } + ) + variable2 = variable2.where(variable2 % 2 != 0, FILL_VALUE) + + # category + var_flags = xr.DataArray(DEFAULT_FILE_FLAGS.astype(np.int8), + dims=("scan_lines_along_track_direction", + "pixel_elements_along_scan_direction"), + attrs={"SCALED": 0, + "_FillValue": -127, + "units": "1", + "flag_values": [0, 1, 2, 3]}) + + out_of_range_flags = xr.DataArray(DEFAULT_FILE_FLAGS_BEYOND_FILL.astype(np.int8), + dims=("scan_lines_along_track_direction", + "pixel_elements_along_scan_direction"), + attrs={"SCALED": 0, + "_FillValue": -127, + "units": "1", + "flag_values": [0, 1, 2, 
3]}) + + ds_vars = { + "longitude": longitude, + "latitude": latitude, + "variable1": variable1, + "refl_0_65um_nom": variable2, + "var_flags": var_flags, + "out_of_range_flags": out_of_range_flags, + } + ds = xr.Dataset(ds_vars, attrs=attrs) + ds = ds.assign_coords({"latitude": latitude, "longitude": longitude}) + + return ds + + +class TestCLAVRXReaderGeo: + """Test CLAVR-X Reader with Geo files.""" + + yaml_file = "clavrx.yaml" + + def setup_method(self): + """Read fake data.""" + from satpy._config import config_search_paths + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) + + @pytest.mark.parametrize( + ("filenames", "expected_loadables"), + [([ABI_FILE], 1)] + ) + def test_reader_creation(self, filenames, expected_loadables): + """Test basic initialization.""" + with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od: + od.side_effect = fake_test_content + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames(filenames) + assert len(loadables) == expected_loadables + r.create_filehandlers(loadables) + # make sure we have some files + assert r.file_handlers + + @pytest.mark.parametrize( + ("filenames", "expected_datasets"), + [([ABI_FILE], ["variable1", "refl_0_65um_nom", "C02", "var_flags", + "out_of_range_flags", "longitude", "latitude"]), ] + ) + def test_available_datasets(self, filenames, expected_datasets): + """Test that variables are dynamically discovered.""" + from satpy.readers import load_reader + with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od: + od.side_effect = fake_test_content + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames(filenames) + r.create_filehandlers(loadables) + avails = list(r.available_dataset_names) + for var_name in expected_datasets: + assert var_name in avails + + @pytest.mark.parametrize( + ("filenames", "loadable_ids"), + [([ABI_FILE], ["variable1", "refl_0_65um_nom", "var_flags", "out_of_range_flags"]), ] + ) 
+ def test_load_all_new_donor(self, filenames, loadable_ids): + """Test loading all test datasets with new donor.""" + with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od: + od.side_effect = fake_test_content + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames(filenames) + r.create_filehandlers(loadables) + + with mock.patch("satpy.readers.clavrx.glob") as g, \ + mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: + g.return_value = ["fake_donor.nc"] + x = np.linspace(-0.1518, 0.1518, DEFAULT_FILE_SHAPE[1]) + y = np.linspace(0.1518, -0.1518, DEFAULT_FILE_SHAPE[0]) + proj = mock.Mock( + semi_major_axis=6378137, + semi_minor_axis=6356752.3142, + perspective_point_height=35791000, + longitude_of_projection_origin=140.7, + sweep_angle_axis="y", + ) + d.return_value = fake_donor = mock.MagicMock( + variables={"goes_imager_projection": proj, "x": x, "y": y}, + ) + fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] + + datasets = r.load(loadable_ids + ["C02"]) + assert len(datasets) == len(loadable_ids)+1 + + # should have file variable and one alias for reflectance + assert "valid_range" not in datasets["variable1"].attrs + assert "_FillValue" not in datasets["variable1"].attrs + assert np.float32 == datasets["variable1"].dtype + assert "valid_range" not in datasets["variable1"].attrs + + assert np.issubdtype(datasets["var_flags"].dtype, np.integer) + assert datasets["var_flags"].attrs.get("flag_meanings") is not None + assert "" == datasets["var_flags"].attrs.get("flag_meanings") + assert np.issubdtype(datasets["out_of_range_flags"].dtype, np.integer) + assert "valid_range" not in datasets["out_of_range_flags"].attrs + + assert isinstance(datasets["refl_0_65um_nom"].valid_range, list) + assert np.float32 == datasets["refl_0_65um_nom"].dtype + assert "_FillValue" not in datasets["refl_0_65um_nom"].attrs + assert "valid_range" in datasets["refl_0_65um_nom"].attrs + + assert "refl_0_65um_nom" == 
datasets["C02"].file_key + assert "_FillValue" not in datasets["C02"].attrs + + for v in datasets.values(): + assert isinstance(v.area, AreaDefinition) + assert v.platform_name == "GOES-16" + assert v.sensor == "abi" + + assert "calibration" not in v.attrs + assert "rows_per_scan" not in v.coords.get("longitude").attrs + assert "units" in v.attrs + + @pytest.mark.parametrize( + ("filenames", "expected_loadables"), + [([ABI_FILE], 1)] + ) + def test_yaml_datasets(self, filenames, expected_loadables): + """Test available_datasets with fake variables from YAML.""" + with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od: + od.side_effect = fake_test_content + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames(filenames) + r.create_filehandlers(loadables) + + with mock.patch("satpy.readers.clavrx.glob") as g, \ + mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: + g.return_value = ["fake_donor.nc"] + x = np.linspace(-0.1518, 0.1518, 5) + y = np.linspace(0.1518, -0.1518, 5) + proj = mock.Mock( + semi_major_axis=6378137, + semi_minor_axis=6356752.3142, + perspective_point_height=35791000, + longitude_of_projection_origin=-137.2, + sweep_angle_axis="x", + ) + d.return_value = fake_donor = mock.MagicMock( + variables={"goes_imager_projection": proj, "x": x, "y": y}, + ) + fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] + # mimic the YAML file being configured for more datasets + fake_dataset_info = [ + (None, {"name": "yaml1", "resolution": None, "file_type": ["clavrx_nc"]}), + (True, {"name": "yaml2", "resolution": 0.5, "file_type": ["clavrx_nc"]}), + ] + new_ds_infos = list(r.file_handlers["clavrx_nc"][0].available_datasets( + fake_dataset_info)) + assert len(new_ds_infos) == 10 + + # we have this and can provide the resolution + assert (new_ds_infos[0][0]) + assert new_ds_infos[0][1]["resolution"] == 2004 # hardcoded + + # we have this, but previous file handler said it knew about it + # and it is 
producing the same resolution as what we have + assert (new_ds_infos[1][0]) + assert new_ds_infos[1][1]["resolution"] == 0.5 + + # we have this, but don"t want to change the resolution + # because a previous handler said it has it + assert (new_ds_infos[2][0]) + assert new_ds_infos[2][1]["resolution"] == 2004 + + @pytest.mark.parametrize( + ("filenames", "loadable_ids"), + [([ABI_FILE], ["variable1", "refl_0_65um_nom", "var_flags", "out_of_range_flags"]), ] + ) + def test_scale_data(self, filenames, loadable_ids): + """Test that data is scaled when necessary and not scaled data are flags.""" + from satpy.readers.clavrx import _scale_data + with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od: + od.side_effect = fake_test_content + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames(filenames) + r.create_filehandlers(loadables) + with mock.patch("satpy.readers.clavrx.glob") as g, \ + mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: + g.return_value = ["fake_donor.nc"] + x = np.linspace(-0.1518, 0.1518, 5) + y = np.linspace(0.1518, -0.1518, 5) + proj = mock.Mock( + semi_major_axis=6378137, + semi_minor_axis=6356752.3142, + perspective_point_height=35791000, + longitude_of_projection_origin=-137.2, + sweep_angle_axis="x", + ) + d.return_value = fake_donor = mock.MagicMock( + variables={"goes_imager_projection": proj, "x": x, "y": y}, + ) + fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] + + ds_scale = ["variable1", "refl_0_65um_nom"] + ds_no_scale = ["var_flags", "out_of_range_flags"] + + with mock.patch("satpy.readers.clavrx._scale_data", wraps=_scale_data) as scale_data: + r.load(ds_scale) + scale_data.assert_called() + + with mock.patch("satpy.readers.clavrx._scale_data", wraps=_scale_data) as scale_data2: + r.load(ds_no_scale) + scale_data2.assert_not_called() diff --git a/satpy/tests/reader_tests/test_clavrx.py b/satpy/tests/reader_tests/test_clavrx/test_clavrx_polarhdf.py similarity index 
53% rename from satpy/tests/reader_tests/test_clavrx.py rename to satpy/tests/reader_tests/test_clavrx/test_clavrx_polarhdf.py index b4b1aef1a5..f8ae93c38b 100644 --- a/satpy/tests/reader_tests/test_clavrx.py +++ b/satpy/tests/reader_tests/test_clavrx/test_clavrx_polarhdf.py @@ -23,9 +23,8 @@ import dask.array as da import numpy as np -import pytest import xarray as xr -from pyresample.geometry import AreaDefinition, SwathDefinition +from pyresample.geometry import SwathDefinition from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler @@ -49,7 +48,6 @@ def get_test_content(self, filename, filename_info, filetype_info): "/attr/platform": "SNPP", "/attr/sensor": "VIIRS", } - file_content["longitude"] = xr.DataArray( da.from_array(DEFAULT_LON_DATA, chunks=4096), attrs={ @@ -105,6 +103,20 @@ def get_test_content(self, filename, filename_info, filetype_info): }) file_content["variable3/shape"] = DEFAULT_FILE_SHAPE + file_content["refl_1_38um_nom"] = xr.DataArray( + da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.float32), + attrs={ + "SCALED": 1, + "add_offset": 59.0, + "scale_factor": 0.0018616290763020515, + "units": "%", + "_FillValue": -32768, + "valid_range": [-32767, 32767], + "actual_range": [-2., 120.], + "actual_missing": -999.0 + }) + file_content["refl_1_38um_nom/shape"] = DEFAULT_FILE_SHAPE + return file_content @@ -189,7 +201,7 @@ def test_available_datasets(self): assert new_ds_infos[4][0] assert new_ds_infos[4][1]["resolution"] == 742 - # we don't have this variable, don't change it + # we don"t have this variable, don't change it assert not new_ds_infos[5][0] assert new_ds_infos[5][1].get("resolution") is None @@ -205,8 +217,8 @@ def test_available_datasets(self): assert new_ds_infos[8][0] assert new_ds_infos[8][1]["resolution"] == 742 - def test_load_all(self): - """Test loading all test datasets.""" + def test_available_datasets_with_alias(self): + """Test availability of aliased dataset.""" import xarray as xr from 
satpy.readers import load_reader @@ -216,218 +228,32 @@ def test_load_all(self): "clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf", ]) r.create_filehandlers(loadables) + available_ds = list(r.file_handlers["clavrx_hdf4"][0].available_datasets()) - var_list = ["variable1", "variable2", "variable3"] - datasets = r.load(var_list) - assert len(datasets) == len(var_list) - for v in datasets.values(): - assert v.attrs["units"] == "1" - assert v.attrs["platform_name"] == "npp" - assert v.attrs["sensor"] == "viirs" - assert isinstance(v.attrs["area"], SwathDefinition) - assert v.attrs["area"].lons.attrs["rows_per_scan"] == 16 - assert v.attrs["area"].lats.attrs["rows_per_scan"] == 16 - assert isinstance(datasets["variable3"].attrs.get("flag_meanings"), list) + assert available_ds[5][1]["name"] == "refl_1_38um_nom" + assert available_ds[6][1]["name"] == "M09" + assert available_ds[6][1]["file_key"] == "refl_1_38um_nom" - -class FakeHDF4FileHandlerGeo(FakeHDF4FileHandler): - """Swap-in HDF4 File Handler.""" - - def get_test_content(self, filename, filename_info, filetype_info): - """Mimic reader input file content.""" - file_content = { - "/attr/platform": "HIM8", - "/attr/sensor": "AHI", - # this is a Level 2 file that came from a L1B file - "/attr/L1B": "clavrx_H08_20180806_1800", - } - - file_content["longitude"] = xr.DataArray( - DEFAULT_LON_DATA, - dims=("y", "x"), - attrs={ - "_FillValue": np.nan, - "scale_factor": 1., - "add_offset": 0., - "standard_name": "longitude", - }) - file_content["longitude/shape"] = DEFAULT_FILE_SHAPE - - file_content["latitude"] = xr.DataArray( - DEFAULT_LAT_DATA, - dims=("y", "x"), - attrs={ - "_FillValue": np.nan, - "scale_factor": 1., - "add_offset": 0., - "standard_name": "latitude", - }) - file_content["latitude/shape"] = DEFAULT_FILE_SHAPE - - file_content["variable1"] = xr.DataArray( - DEFAULT_FILE_DATA.astype(np.float32), - dims=("y", "x"), - attrs={ - "_FillValue": -1, - "scale_factor": 1., - "add_offset": 0., - "units": 
"1", - "valid_range": (-32767, 32767), - }) - file_content["variable1/shape"] = DEFAULT_FILE_SHAPE - - # data with fill values - file_content["variable2"] = xr.DataArray( - DEFAULT_FILE_DATA.astype(np.float32), - dims=("y", "x"), - attrs={ - "_FillValue": -1, - "scale_factor": 1., - "add_offset": 0., - "units": "1", - }) - file_content["variable2/shape"] = DEFAULT_FILE_SHAPE - file_content["variable2"] = file_content["variable2"].where( - file_content["variable2"] % 2 != 0) - - # category - file_content["variable3"] = xr.DataArray( - DEFAULT_FILE_DATA.astype(np.byte), - dims=("y", "x"), - attrs={ - "SCALED": 0, - "_FillValue": -128, - "flag_meanings": "clear water supercooled mixed ice unknown", - "flag_values": [0, 1, 2, 3, 4, 5], - "units": "1", - }) - file_content["variable3/shape"] = DEFAULT_FILE_SHAPE - - return file_content - - -class TestCLAVRXReaderGeo(unittest.TestCase): - """Test CLAVR-X Reader with Geo files.""" - - yaml_file = "clavrx.yaml" - - def setUp(self): - """Wrap HDF4 file handler with our own fake handler.""" - from satpy._config import config_search_paths - from satpy.readers.clavrx import CLAVRXHDF4FileHandler - self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) - # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(CLAVRXHDF4FileHandler, "__bases__", (FakeHDF4FileHandlerGeo,)) - self.fake_handler = self.p.start() - self.p.is_local = True - - def tearDown(self): - """Stop wrapping the NetCDF4 file handler.""" - self.p.stop() - - def test_init(self): - """Test basic init with no extra parameters.""" - from satpy.readers import load_reader - r = load_reader(self.reader_configs) - loadables = r.select_files_from_pathnames([ - "clavrx_H08_20180806_1800.level2.hdf", - ]) - assert len(loadables) == 1 - r.create_filehandlers(loadables) - # make sure we have some files - assert r.file_handlers - - def test_no_nav_donor(self): - """Test exception 
raised when no donor file is available.""" - import xarray as xr - - from satpy.readers import load_reader - r = load_reader(self.reader_configs) - with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): - loadables = r.select_files_from_pathnames([ - "clavrx_H08_20180806_1800.level2.hdf", - ]) - r.create_filehandlers(loadables) - with pytest.raises(IOError, match="Could not find navigation donor for"): - r.load(["variable1", "variable2", "variable3"]) - - def test_load_all_old_donor(self): - """Test loading all test datasets with old donor.""" + def test_load_all(self): + """Test loading all test datasets.""" import xarray as xr from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): loadables = r.select_files_from_pathnames([ - "clavrx_H08_20180806_1800.level2.hdf", + "clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf", ]) r.create_filehandlers(loadables) - with mock.patch("satpy.readers.clavrx.glob") as g, mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: - g.return_value = ["fake_donor.nc"] - x = np.linspace(-0.1518, 0.1518, 300) - y = np.linspace(0.1518, -0.1518, 10) - proj = mock.Mock( - semi_major_axis=6378.137, - semi_minor_axis=6356.7523142, - perspective_point_height=35791, - longitude_of_projection_origin=140.7, - sweep_angle_axis="y", - ) - d.return_value = fake_donor = mock.MagicMock( - variables={"Projection": proj, "x": x, "y": y}, - ) - fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] - datasets = r.load(["variable1", "variable2", "variable3"]) - assert len(datasets) == 3 - for v in datasets.values(): - assert "calibration" not in v.attrs - assert v.attrs["units"] == "1" - assert isinstance(v.attrs["area"], AreaDefinition) - if v.attrs.get("flag_values"): - assert "_FillValue" in v.attrs - else: - assert "_FillValue" not in v.attrs - if v.attrs["name"] == "variable1": - assert isinstance(v.attrs["valid_range"], list) - else: - 
assert "valid_range" not in v.attrs - if "flag_values" in v.attrs: - assert np.issubdtype(v.dtype, np.integer) - assert v.attrs.get("flag_meanings") is not None - - def test_load_all_new_donor(self): - """Test loading all test datasets with new donor.""" - import xarray as xr - from satpy.readers import load_reader - r = load_reader(self.reader_configs) - with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): - loadables = r.select_files_from_pathnames([ - "clavrx_H08_20180806_1800.level2.hdf", - ]) - r.create_filehandlers(loadables) - with mock.patch("satpy.readers.clavrx.glob") as g, mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: - g.return_value = ["fake_donor.nc"] - x = np.linspace(-0.1518, 0.1518, 300) - y = np.linspace(0.1518, -0.1518, 10) - proj = mock.Mock( - semi_major_axis=6378137, - semi_minor_axis=6356752.3142, - perspective_point_height=35791000, - longitude_of_projection_origin=140.7, - sweep_angle_axis="y", - ) - d.return_value = fake_donor = mock.MagicMock( - variables={"goes_imager_projection": proj, "x": x, "y": y}, - ) - fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] - datasets = r.load(["variable1", "variable2", "variable3"]) - assert len(datasets) == 3 + var_list = ["M09", "variable2", "variable3"] + datasets = r.load(var_list) + assert len(datasets) == len(var_list) for v in datasets.values(): - assert "calibration" not in v.attrs - assert v.attrs["units"] == "1" - assert isinstance(v.attrs["area"], AreaDefinition) - assert v.attrs["area"].is_geostationary - assert v.attrs["platform_name"] == "himawari8" - assert v.attrs["sensor"] == "ahi" - assert datasets["variable3"].attrs.get("flag_meanings") is not None + assert v.attrs["units"] in ["1", "%"] + assert v.attrs["platform_name"] == "npp" + assert v.attrs["sensor"] == "viirs" + assert isinstance(v.attrs["area"], SwathDefinition) + assert v.attrs["area"].lons.attrs["rows_per_scan"] == 16 + assert v.attrs["area"].lats.attrs["rows_per_scan"] == 16 + 
assert isinstance(datasets["variable3"].attrs.get("flag_meanings"), list) diff --git a/satpy/tests/reader_tests/test_clavrx_nc.py b/satpy/tests/reader_tests/test_clavrx_nc.py deleted file mode 100644 index 33be29078a..0000000000 --- a/satpy/tests/reader_tests/test_clavrx_nc.py +++ /dev/null @@ -1,202 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# Copyright (c) 2021 Satpy developers -# -# This file is part of satpy. -# -# satpy is free software: you can redistribute it and/or modify it under the -# terms of the GNU General Public License as published by the Free Software -# Foundation, either version 3 of the License, or (at your option) any later -# version. -# -# satpy is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# satpy. If not, see . 
-"""Module for testing the satpy.readers.clavrx module.""" - -import os -from unittest import mock - -import numpy as np -import pytest -import xarray as xr -from pyresample.geometry import AreaDefinition - -DEFAULT_FILE_DTYPE = np.uint16 -DEFAULT_FILE_SHAPE = (10, 300) -DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], - dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) -DEFAULT_FILE_FLAGS = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], - dtype=np.byte).reshape(DEFAULT_FILE_SHAPE) -DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) -DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) -DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) -DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) -DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) -AHI_FILE = "clavrx_H08_20210603_1500_B01_FLDK_R.level2.nc" - - -def fake_test_content(filename, **kwargs): - """Mimic reader input file content.""" - attrs = { - "platform": "HIM8", - "sensor": "AHI", - # this is a Level 2 file that came from a L1B file - "L1B": "clavrx_H08_20210603_1500_B01_FLDK_R", - } - - longitude = xr.DataArray(DEFAULT_LON_DATA, - dims=("scan_lines_along_track_direction", - "pixel_elements_along_scan_direction"), - attrs={"_FillValue": np.nan, - "scale_factor": 1., - "add_offset": 0., - "standard_name": "longitude", - "units": "degrees_east" - }) - - latitude = xr.DataArray(DEFAULT_LAT_DATA, - dims=("scan_lines_along_track_direction", - "pixel_elements_along_scan_direction"), - attrs={"_FillValue": np.nan, - "scale_factor": 1., - "add_offset": 0., - "standard_name": "latitude", - "units": "degrees_south" - }) - - variable1 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.float32), - dims=("scan_lines_along_track_direction", - "pixel_elements_along_scan_direction"), - attrs={"_FillValue": np.nan, - "scale_factor": 1., - "add_offset": 
0., - "units": "1", - "valid_range": [-32767, 32767], - }) - - # data with fill values - variable2 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.float32), - dims=("scan_lines_along_track_direction", - "pixel_elements_along_scan_direction"), - attrs={"_FillValue": np.nan, - "scale_factor": 1., - "add_offset": 0., - "units": "1", - "valid_range": [-32767, 32767], - }) - variable2 = variable2.where(variable2 % 2 != 0) - - # category - variable3 = xr.DataArray(DEFAULT_FILE_FLAGS, - dims=("scan_lines_along_track_direction", - "pixel_elements_along_scan_direction"), - attrs={"SCALED": 0, - "_FillValue": -127, - "units": "1", - "flag_values": [0, 1, 2, 3]}) - - ds_vars = { - "longitude": longitude, - "latitude": latitude, - "variable1": variable1, - "variable2": variable2, - "variable3": variable3 - } - - ds = xr.Dataset(ds_vars, attrs=attrs) - ds = ds.assign_coords({"latitude": latitude, "longitude": longitude}) - - return ds - - -class TestCLAVRXReaderGeo: - """Test CLAVR-X Reader with Geo files.""" - - yaml_file = "clavrx.yaml" - - def setup_method(self): - """Read fake data.""" - from satpy._config import config_search_paths - self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) - - @pytest.mark.parametrize( - ("filenames", "expected_loadables"), - [([AHI_FILE], 1)] - ) - def test_reader_creation(self, filenames, expected_loadables): - """Test basic initialization.""" - from satpy.readers import load_reader - with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od: - od.side_effect = fake_test_content - r = load_reader(self.reader_configs) - loadables = r.select_files_from_pathnames(filenames) - assert len(loadables) == expected_loadables - r.create_filehandlers(loadables) - # make sure we have some files - assert r.file_handlers - - @pytest.mark.parametrize( - ("filenames", "expected_datasets"), - [([AHI_FILE], ["variable1", "variable2", "variable3"]), ] - ) - def test_available_datasets(self, filenames, expected_datasets): - """Test 
that variables are dynamically discovered.""" - from satpy.readers import load_reader - with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od: - od.side_effect = fake_test_content - r = load_reader(self.reader_configs) - loadables = r.select_files_from_pathnames(filenames) - r.create_filehandlers(loadables) - avails = list(r.available_dataset_names) - for var_name in expected_datasets: - assert var_name in avails - - @pytest.mark.parametrize( - ("filenames", "loadable_ids"), - [([AHI_FILE], ["variable1", "variable2", "variable3"]), ] - ) - def test_load_all_new_donor(self, filenames, loadable_ids): - """Test loading all test datasets with new donor.""" - from satpy.readers import load_reader - with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od: - od.side_effect = fake_test_content - r = load_reader(self.reader_configs) - loadables = r.select_files_from_pathnames(filenames) - r.create_filehandlers(loadables) - with mock.patch("satpy.readers.clavrx.glob") as g, \ - mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: - g.return_value = ["fake_donor.nc"] - x = np.linspace(-0.1518, 0.1518, 300) - y = np.linspace(0.1518, -0.1518, 10) - proj = mock.Mock( - semi_major_axis=6378137, - semi_minor_axis=6356752.3142, - perspective_point_height=35791000, - longitude_of_projection_origin=140.7, - sweep_angle_axis="y", - ) - d.return_value = fake_donor = mock.MagicMock( - variables={"goes_imager_projection": proj, "x": x, "y": y}, - ) - fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] - datasets = r.load(loadable_ids) - assert len(datasets) == 3 - for v in datasets.values(): - assert "calibration" not in v.attrs - assert v.attrs["units"] == "1" - assert isinstance(v.attrs["area"], AreaDefinition) - assert v.attrs["platform_name"] == "himawari8" - assert v.attrs["sensor"] == "ahi" - assert "rows_per_scan" not in v.coords.get("longitude").attrs - if v.attrs["name"] in ["variable1", "variable2"]: - assert 
isinstance(v.attrs["valid_range"], list) - assert v.dtype == np.float32 - assert "_FillValue" not in v.attrs.keys() - else: - assert (datasets["variable3"].attrs.get("flag_meanings")) is not None - assert (datasets["variable3"].attrs.get("flag_meanings") == "") - assert np.issubdtype(v.dtype, np.integer) diff --git a/satpy/tests/reader_tests/test_epic_l1b_h5.py b/satpy/tests/reader_tests/test_epic_l1b_h5.py index 472cda7f2d..9861535b3f 100644 --- a/satpy/tests/reader_tests/test_epic_l1b_h5.py +++ b/satpy/tests/reader_tests/test_epic_l1b_h5.py @@ -25,14 +25,15 @@ import pytest from satpy.readers.epic_l1b_h5 import CALIB_COEFS +from satpy.tests.utils import RANDOM_GEN -b317_data = np.random.uniform(low=0, high=5200, size=(100, 100)) -b688_data = np.random.uniform(low=0, high=5200, size=(100, 100)) -sza_data = np.random.uniform(low=0, high=100, size=(100, 100)) -vaa_data = np.random.uniform(low=-180, high=180, size=(100, 100)) -lon_data = np.random.uniform(low=-90, high=90, size=(100, 100)) -lat_data = np.random.uniform(low=-180, high=180, size=(100, 100)) -mas_data = np.random.choice([0, 1], size=(100, 100)) +b317_data = RANDOM_GEN.uniform(low=0, high=5200, size=(100, 100)) +b688_data = RANDOM_GEN.uniform(low=0, high=5200, size=(100, 100)) +sza_data = RANDOM_GEN.uniform(low=0, high=100, size=(100, 100)) +vaa_data = RANDOM_GEN.uniform(low=-180, high=180, size=(100, 100)) +lon_data = RANDOM_GEN.uniform(low=-90, high=90, size=(100, 100)) +lat_data = RANDOM_GEN.uniform(low=-180, high=180, size=(100, 100)) +mas_data = RANDOM_GEN.choice([0, 1], size=(100, 100)) @pytest.fixture() @@ -89,11 +90,11 @@ def setup_method(self): def test_times(self, setup_hdf5_file): """Test start and end times load properly.""" - from datetime import datetime + import datetime as dt test_reader = self._setup_h5(setup_hdf5_file) - assert test_reader.start_time == datetime(2015, 6, 13, 12, 0, 37) - assert test_reader.end_time == datetime(2015, 6, 13, 12, 5, 1) + assert test_reader.start_time == 
dt.datetime(2015, 6, 13, 12, 0, 37) + assert test_reader.end_time == dt.datetime(2015, 6, 13, 12, 5, 1) def test_counts_calibration(self, setup_hdf5_file): """Test that data is correctly calibrated.""" diff --git a/satpy/tests/reader_tests/test_eum_base.py b/satpy/tests/reader_tests/test_eum_base.py index 55ac977b59..35b29aa79c 100644 --- a/satpy/tests/reader_tests/test_eum_base.py +++ b/satpy/tests/reader_tests/test_eum_base.py @@ -15,10 +15,11 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """EUMETSAT base reader tests package.""" +import datetime as dt import unittest -from datetime import datetime import numpy as np @@ -40,18 +41,18 @@ def test_fun(self): """Test function for TestMakeTimeCdsDictionary.""" # time_cds_short tcds = {"Days": np.array(1), "Milliseconds": np.array(2)} - expected = datetime(1958, 1, 2, 0, 0, 0, 2000) + expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2000) assert timecds2datetime(tcds) == expected # time_cds tcds = {"Days": np.array(1), "Milliseconds": np.array(2), "Microseconds": np.array(3)} - expected = datetime(1958, 1, 2, 0, 0, 0, 2003) + expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2003) assert timecds2datetime(tcds) == expected # time_cds_expanded tcds = {"Days": np.array(1), "Milliseconds": np.array(2), "Microseconds": np.array(3), "Nanoseconds": np.array(4)} - expected = datetime(1958, 1, 2, 0, 0, 0, 2003) + expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2003) assert timecds2datetime(tcds) == expected @@ -62,17 +63,17 @@ def test_fun(self): """Test function for TestMakeTimeCdsRecarray.""" # time_cds_short tcds = np.array([(1, 2)], dtype=np.dtype(time_cds_short)) - expected = datetime(1958, 1, 2, 0, 0, 0, 2000) + expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2000) assert timecds2datetime(tcds) == expected # time_cds tcds = np.array([(1, 2, 3)], dtype=np.dtype(time_cds)) - expected = datetime(1958, 1, 2, 0, 0, 0, 2003) + expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2003) assert 
timecds2datetime(tcds) == expected # time_cds_expanded tcds = np.array([(1, 2, 3, 4)], dtype=np.dtype(time_cds_expanded)) - expected = datetime(1958, 1, 2, 0, 0, 0, 2003) + expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2003) assert timecds2datetime(tcds) == expected @@ -97,9 +98,9 @@ def test_timestamps(self): (21916, 42309417, 918, 443))]]], dtype=pat_dt) expected = { - "TrueRepeatCycleStart": datetime(2018, 1, 2, 11, 30, 9, 544305), - "PlanForwardScanEnd": datetime(2018, 1, 2, 11, 42, 40, 340660), - "PlannedRepeatCycleEnd": datetime(2018, 1, 2, 11, 45, 9, 417918) + "TrueRepeatCycleStart": dt.datetime(2018, 1, 2, 11, 30, 9, 544305), + "PlanForwardScanEnd": dt.datetime(2018, 1, 2, 11, 42, 40, 340660), + "PlannedRepeatCycleEnd": dt.datetime(2018, 1, 2, 11, 45, 9, 417918) } assert recarray2dict(pat) == expected diff --git a/satpy/tests/reader_tests/test_eum_l2_grib.py b/satpy/tests/reader_tests/test_eum_l2_grib.py index a7846be706..593eb2f5af 100644 --- a/satpy/tests/reader_tests/test_eum_l2_grib.py +++ b/satpy/tests/reader_tests/test_eum_l2_grib.py @@ -60,7 +60,7 @@ FAKE_GID = [0, 1, 2, 3, None] -class Test_EUML2GribFileHandler(unittest.TestCase): +class TestEUML2GribFileHandler(unittest.TestCase): """Test the EUML2GribFileHandler reader.""" @mock.patch("satpy.readers.eum_l2_grib.ec") @@ -72,7 +72,7 @@ def setUp(self, ec_): def common_checks(self, mock_file, dataset_id): """Commmon checks for fci and seviri data.""" - # Checks that the codes_grib_multi_support_on function has been called + # Checks that the codes_grib_multi_support_on function has been called self.ec_.codes_grib_multi_support_on.assert_called() # Restarts the id generator and clears the call history @@ -110,9 +110,9 @@ def common_checks(self, mock_file, dataset_id): @mock.patch("satpy.readers.eum_l2_grib.da") def test_seviri_data_reading(self, da_, xr_): """Test the reading of data from the product.""" - from satpy.readers.eum_l2_grib import REPEAT_CYCLE_DURATION, EUML2GribFileHandler + from 
satpy.readers.eum_l2_grib import EUML2GribFileHandler from satpy.utils import get_legacy_chunk_size - CHUNK_SIZE = get_legacy_chunk_size() + chunk_size = get_legacy_chunk_size() with mock.patch("builtins.open", mock.mock_open()) as mock_file: with mock.patch("satpy.readers.eum_l2_grib.ec", self.ec_): @@ -126,7 +126,7 @@ def test_seviri_data_reading(self, da_, xr_): hour=19, minute=45, second=0) }, filetype_info={ - "file_type" : "seviri" + "file_type": "seviri" } ) @@ -134,8 +134,9 @@ def test_seviri_data_reading(self, da_, xr_): self.common_checks(mock_file, dataset_id) - # Checks the basic data reading - assert REPEAT_CYCLE_DURATION == 15 + # Check end_time + assert self.reader.end_time == datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=50, second=0) # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions attributes = self.reader._get_attributes() @@ -154,7 +155,7 @@ def test_seviri_data_reading(self, da_, xr_): # Checks that dask.array has been called with the correct arguments name, args, kwargs = da_.mock_calls[0] assert np.all(args[0] == np.ones((1200, 1000))) - assert args[1] == CHUNK_SIZE + assert args[1] == chunk_size # Checks that xarray.DataArray has been called with the correct arguments name, args, kwargs = xr_.mock_calls[0] @@ -208,7 +209,7 @@ def test_fci_data_reading(self, da_, xr_): """Test the reading of fci data from the product.""" from satpy.readers.eum_l2_grib import EUML2GribFileHandler from satpy.utils import get_legacy_chunk_size - CHUNK_SIZE = get_legacy_chunk_size() + chunk_size = get_legacy_chunk_size() with mock.patch("builtins.open", mock.mock_open()) as mock_file: with mock.patch("satpy.readers.eum_l2_grib.ec", self.ec_): @@ -219,10 +220,12 @@ def test_fci_data_reading(self, da_, xr_): filename_info={ "spacecraft_id": "1", "start_time": datetime.datetime(year=2020, month=10, day=20, - hour=19, minute=45, second=0) + hour=19, minute=40, second=0), + "end_time": 
datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=50, second=0) }, filetype_info={ - "file_type" : "fci" + "file_type": "fci" } ) @@ -230,6 +233,10 @@ def test_fci_data_reading(self, da_, xr_): self.common_checks(mock_file, dataset_id) + # Check end_time + assert self.reader.end_time == datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=50, second=0) + # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions attributes = self.reader._get_attributes() expected_attributes = { @@ -247,7 +254,7 @@ def test_fci_data_reading(self, da_, xr_): # Checks that dask.array has been called with the correct arguments name, args, kwargs = da_.mock_calls[0] assert np.all(args[0] == np.ones((5568, 5568))) - assert args[1] == CHUNK_SIZE + assert args[1] == chunk_size # Checks that xarray.DataArray has been called with the correct arguments name, args, kwargs = xr_.mock_calls[0] diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index 792de90462..04f2d48930 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -28,7 +28,7 @@ import pytest import xarray as xr from netCDF4 import default_fillvals -from pytest_lazyfixture import lazy_fixture +from pytest_lazy_fixtures import lf as lazy_fixture from satpy.readers.fci_l1c_nc import FCIL1cNCFileHandler from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler @@ -57,9 +57,135 @@ "scale_factor": 5.58871526031607e-05, "add_offset": 1.55617776423501e-01, }, + "3km": { + "nrows": 67, + "ncols": 3712, + "scale_factor": 8.38307287956433e-05, + "add_offset": 0.155631748009112, + }, } +LIST_CHANNEL_SOLAR = ["vis_04", "vis_05", "vis_06", "vis_08", "vis_09", + "nir_13", "nir_16", "nir_22"] +LIST_CHANNEL_TERRAN = ["ir_38", "wv_63", "wv_73", "ir_87", "ir_97", "ir_105", + "ir_123", "ir_133"] +LIST_TOTAL_CHANNEL = LIST_CHANNEL_SOLAR + LIST_CHANNEL_TERRAN 
+LIST_RESOLUTION_V06 = ["1km","3km"] +LIST_RESOLUTION = ["3km"] +EXPECTED_POS_INFO_FOR_FILETYPE = { + "fdhsi": {"1km": {"start_position_row": 1, + "end_position_row": 200, + "segment_height": 200, + "grid_width": 11136}, + "2km": {"start_position_row": 1, + "end_position_row": 100, + "segment_height": 100, + "grid_width": 5568}}, + "hrfi": {"500m": {"start_position_row": 1, + "end_position_row": 400, + "segment_height": 400, + "grid_width": 22272}, + "1km": {"start_position_row": 1, + "end_position_row": 200, + "grid_width": 11136, + "segment_height": 200}}, + "fci_af" : {"3km": {"start_position_row": 1, + "end_position_row": 67, + "segment_height": 67, + "grid_width": 3712 + }, + }, + "fci_af_vis_06" : {"3km": {"start_position_row": 1, + "end_position_row": 67, + "segment_height": 67, + "grid_width": 3712 + }, + "1km": {"start_position_row": 1, + "end_position_row": 200, + "grid_width": 11136, + "segment_height": 200} + } + } + +CHANS_FHDSI = {"solar": LIST_CHANNEL_SOLAR, + "solar_grid_type": ["1km"] * 8, + "terran": LIST_CHANNEL_TERRAN, + "terran_grid_type": ["2km"] * 8} + +CHANS_HRFI = {"solar": ["vis_06", "nir_22"], + "solar_grid_type": ["500m"] * 2, + "terran": ["ir_38", "ir_105"], + "terran_grid_type": ["1km"] * 2} + +DICT_CALIBRATION = { "radiance" : {"dtype": np.float32, + "value_1": 15, + "value_0":9700, + "attrs_dict":{"calibration":"radiance", + "units":"mW m-2 sr-1 (cm-1)-1", + "radiance_unit_conversion_coefficient": np.float32(1234.56) + }, + }, + + "reflectance" : {"dtype": np.float32, + "attrs_dict":{"calibration":"reflectance", + "units":"%" + }, + }, + + "counts" : {"dtype": np.uint16, + "value_1": 1, + "value_0": 5000, + "attrs_dict":{"calibration":"counts", + "units":"count", + }, + }, + + "brightness_temperature" : {"dtype": np.float32, + "value_1": np.float32(209.68275), + "value_0": np.float32(1888.8513), + "attrs_dict":{"calibration":"brightness_temperature", + "units":"K", + }, + }, +} +TEST_FILENAMES = {"fdhsi": [ + 
"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" + "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" + "20170410113925_20170410113934_N__C_0070_0067.nc" +], + "hrfi": [ + "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-HRFI-FD--" + "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" + "20170410113925_20170410113934_N__C_0070_0067.nc" + ] +} +def resolutions(channel): + """Get the resolutions.""" + if channel == "vis_06": + return LIST_RESOLUTION_V06 + else: + return LIST_RESOLUTION + +def fill_chans_af(): + """Fill the dict CHANS_AF and the list TEST_FILENAMES with the right channel and resolution.""" + CHANS_AF = {} + for channel in LIST_TOTAL_CHANNEL: + list_resol = resolutions(channel) + for resol in list_resol: + chann_upp = channel.replace("_","").upper() + TEST_FILENAMES[f"af_{channel}_{resol}"] = [f"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1-FCI-1C-RRAD" + f"-{resol.upper()}-AF-{chann_upp}-x-x---NC4E_C_EUMT_20240125144655_DT_OPE" + f"_20240109080007_20240109080924_N_JLS_T_0049_0000.nc"] + if channel.split("_")[0] in ["vis","nir"]: + CHANS_AF[f"{channel}_{resol}"] = {"solar":[channel], + "solar_grid_type": [resol]} + elif channel.split("_")[0] in ["ir","wv"]: + CHANS_AF[f"{channel}_{resol}"] = {"terran":[channel], + "terran_grid_type": [resol]} + return CHANS_AF,TEST_FILENAMES + +CHANS_AF,TEST_FILENAMES = fill_chans_af() # ---------------------------------------------------- # Filehandlers preparation --------------------------- # ---------------------------------------------------- @@ -366,6 +492,10 @@ class FakeFCIFileHandlerHRFI(FakeFCIFileHandlerBase): } +class FakeFCIFileHandlerAF(FakeFCIFileHandlerBase): + """Mock AF data.""" + chan_patterns = {} + # ---------------------------------------------------- # Fixtures preparation ------------------------------- # ---------------------------------------------------- @@ -386,7 +516,6 @@ def _get_reader_with_filehandlers(filenames, reader_configs): clear_cache(reader) return reader - def 
clear_cache(reader): """Clear the cache for file handlres in reader.""" for key in reader.file_handlers: @@ -394,31 +523,20 @@ def clear_cache(reader): for fh in fhs: fh.cached_file_content = {} +def get_list_channel_calibration(calibration): + """Get the channel's list according the calibration.""" + if calibration == "reflectance": + return LIST_CHANNEL_SOLAR + elif calibration == "brightness_temperature": + return LIST_CHANNEL_TERRAN + else: + return LIST_TOTAL_CHANNEL -_chans_fdhsi = {"solar": ["vis_04", "vis_05", "vis_06", "vis_08", "vis_09", - "nir_13", "nir_16", "nir_22"], - "solar_grid_type": ["1km"] * 8, - "terran": ["ir_38", "wv_63", "wv_73", "ir_87", "ir_97", "ir_105", - "ir_123", "ir_133"], - "terran_grid_type": ["2km"] * 8} - -_chans_hrfi = {"solar": ["vis_06", "nir_22"], - "solar_grid_type": ["500m"] * 2, - "terran": ["ir_38", "ir_105"], - "terran_grid_type": ["1km"] * 2} - -_test_filenames = {"fdhsi": [ - "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" - "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" - "20170410113925_20170410113934_N__C_0070_0067.nc" -], - "hrfi": [ - "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-HRFI-FD--" - "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" - "20170410113925_20170410113934_N__C_0070_0067.nc" - ] -} - +def generate_parameters(calibration): + """Generate dinamicaly the parameters.""" + for channel in get_list_channel_calibration(calibration): + for resolution in resolutions(channel): + yield (channel, resolution) @contextlib.contextmanager def mocked_basefilehandler(filehandler): @@ -435,8 +553,8 @@ def FakeFCIFileHandlerFDHSI_fixture(): with mocked_basefilehandler(FakeFCIFileHandlerFDHSI): param_dict = { "filetype": "fci_l1c_fdhsi", - "channels": _chans_fdhsi, - "filenames": _test_filenames["fdhsi"] + "channels": CHANS_FHDSI, + "filenames": TEST_FILENAMES["fdhsi"] } yield param_dict @@ -447,11 +565,24 @@ def FakeFCIFileHandlerHRFI_fixture(): with 
mocked_basefilehandler(FakeFCIFileHandlerHRFI): param_dict = { "filetype": "fci_l1c_hrfi", - "channels": _chans_hrfi, - "filenames": _test_filenames["hrfi"] + "channels": CHANS_HRFI, + "filenames": TEST_FILENAMES["hrfi"] } yield param_dict +@pytest.fixture() +def FakeFCIFileHandlerAF_fixture(channel,resolution): + """Get a fixture for the fake AF filehandler, it contains only one channel and one resolution.""" + chan_patterns = {channel.split("_")[0]+"_{:>02d}": {"channels": [int(channel.split("_")[1])], + "grid_type": f"{resolution}"},} + FakeFCIFileHandlerAF.chan_patterns = chan_patterns + with mocked_basefilehandler(FakeFCIFileHandlerAF): + param_dict = { + "filetype": "fci_l1c_af", + "channels": CHANS_AF[f"{channel}_{resolution}"], + "filenames": TEST_FILENAMES[f"af_{channel}_{resolution}"], + } + yield param_dict # ---------------------------------------------------- # Tests ---------------------------------------------- @@ -461,12 +592,47 @@ def FakeFCIFileHandlerHRFI_fixture(): class TestFCIL1cNCReader: """Test FCI L1c NetCDF reader with nominal data.""" - fh_param_for_filetype = {"hrfi": {"channels": _chans_hrfi, - "filenames": _test_filenames["hrfi"]}, - "fdhsi": {"channels": _chans_fdhsi, - "filenames": _test_filenames["fdhsi"]}} + fh_param_for_filetype = {"hrfi": {"channels": CHANS_HRFI, + "filenames": TEST_FILENAMES["hrfi"]}, + "fdhsi": {"channels": CHANS_FHDSI, + "filenames": TEST_FILENAMES["fdhsi"]}} + + def _get_type_ter_AF(self,channel): + """Get the type_ter.""" + if channel.split("_")[0] in ["vis","nir"]: + return "solar" + elif channel.split("_")[0] in ["wv","ir"]: + return "terran" + + def _get_assert_attrs(self,res,ch,attrs_dict): + """Test the differents attributes values.""" + for key,item in attrs_dict.items(): + assert res[ch].attrs[key] == item + + def _get_assert_load(self,res,ch,grid_type,dict_arg): + """Test the value for differents channels.""" + assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + 
GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) + assert res[ch].dtype == dict_arg["dtype"] + self._get_assert_attrs(res,ch,dict_arg["attrs_dict"]) + if dict_arg["attrs_dict"]["calibration"] == "reflectance": + numpy.testing.assert_array_almost_equal(res[ch], 100 * 15 * 1 * np.pi / 50) + else : + if ch == "ir_38": + numpy.testing.assert_array_equal(res[ch][-1], dict_arg["value_1"]) + numpy.testing.assert_array_equal(res[ch][0], dict_arg["value_0"]) + else: + numpy.testing.assert_array_equal(res[ch], dict_arg["value_1"]) + + def _get_res_AF(self,channel,fh_param,calibration,reader_configs): + """Load the reader for AF data.""" + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) + type_ter = self._get_type_ter_AF(channel) + res = reader.load([make_dataid(name=name, calibration=calibration) + for name in fh_param["channels"][type_ter]], pad_data=False) + return res - @pytest.mark.parametrize("filenames", [_test_filenames["fdhsi"], _test_filenames["hrfi"]]) + @pytest.mark.parametrize("filenames", [TEST_FILENAMES[filename] for filename in TEST_FILENAMES.keys()]) def test_file_pattern(self, reader_configs, filenames): """Test file pattern matching.""" from satpy.readers import load_reader @@ -475,8 +641,8 @@ def test_file_pattern(self, reader_configs, filenames): files = reader.select_files_from_pathnames(filenames) assert len(files) == 1 - @pytest.mark.parametrize("filenames", [_test_filenames["fdhsi"][0].replace("BODY", "TRAIL"), - _test_filenames["hrfi"][0].replace("BODY", "TRAIL")]) + @pytest.mark.parametrize("filenames", [TEST_FILENAMES["fdhsi"][0].replace("BODY", "TRAIL"), + TEST_FILENAMES["hrfi"][0].replace("BODY", "TRAIL")]) def test_file_pattern_for_TRAIL_file(self, reader_configs, filenames): """Test file pattern matching for TRAIL files, which should not be picked up.""" from satpy.readers import load_reader @@ -485,97 +651,57 @@ def test_file_pattern_for_TRAIL_file(self, reader_configs, filenames): files = 
reader.select_files_from_pathnames(filenames) assert len(files) == 0 - @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), - (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) - def test_load_counts(self, reader_configs, fh_param, - expected_res_n): - """Test loading with counts.""" - reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - res = reader.load( - [make_dataid(name=name, calibration="counts") for name in - fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) - assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], - fh_param["channels"]["solar_grid_type"] + - fh_param["channels"]["terran_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - assert res[ch].dtype == np.uint16 - assert res[ch].attrs["calibration"] == "counts" - assert res[ch].attrs["units"] == "count" - if ch == "ir_38": - numpy.testing.assert_array_equal(res[ch][-1], 1) - numpy.testing.assert_array_equal(res[ch][0], 5000) - else: - numpy.testing.assert_array_equal(res[ch], 1) - - @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), - (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) - def test_load_radiance(self, reader_configs, fh_param, - expected_res_n): - """Test loading with radiance.""" - reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - res = reader.load( - [make_dataid(name=name, calibration="radiance") for name in - fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) - assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], - fh_param["channels"]["solar_grid_type"] + - fh_param["channels"]["terran_grid_type"]): - 
assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - assert res[ch].dtype == np.float32 - assert res[ch].attrs["calibration"] == "radiance" - assert res[ch].attrs["units"] == "mW m-2 sr-1 (cm-1)-1" - assert res[ch].attrs["radiance_unit_conversion_coefficient"].values == np.float32(1234.56) - if ch == "ir_38": - numpy.testing.assert_array_equal(res[ch][-1], 15) - numpy.testing.assert_array_equal(res[ch][0], 9700) - else: - numpy.testing.assert_array_equal(res[ch], 15) - - @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 8), - (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 2)]) - def test_load_reflectance(self, reader_configs, fh_param, - expected_res_n): - """Test loading with reflectance.""" - reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - res = reader.load( - [make_dataid(name=name, calibration="reflectance") for name in - fh_param["channels"]["solar"]], pad_data=False) - assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param["channels"]["solar"], fh_param["channels"]["solar_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - assert res[ch].dtype == np.float32 - assert res[ch].attrs["calibration"] == "reflectance" - assert res[ch].attrs["units"] == "%" - numpy.testing.assert_array_almost_equal(res[ch], 100 * 15 * 1 * np.pi / 50) - - @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 8), - (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 2)]) - def test_load_bt(self, reader_configs, caplog, fh_param, - expected_res_n): - """Test loading with bt.""" + @pytest.mark.parametrize("calibration", ["counts","radiance","brightness_temperature","reflectance"]) + @pytest.mark.parametrize(("fh_param","res_type"), 
[(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"),"hdfi"), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"),"hrfi")]) + def test_load_calibration(self, reader_configs, fh_param, + caplog,calibration,res_type): + """Test loading with counts,radiance,reflectance and bt.""" + expected_res_n = {} + if calibration == "reflectance": + list_chan = fh_param["channels"]["solar"] + list_grid = fh_param["channels"]["solar_grid_type"] + expected_res_n["hdfi"] = 8 + expected_res_n["hrfi"] = 2 + elif calibration == "brightness_temperature": + list_chan = fh_param["channels"]["terran"] + list_grid = fh_param["channels"]["terran_grid_type"] + expected_res_n["hdfi"] = 8 + expected_res_n["hrfi"] = 2 + else: + list_chan = fh_param["channels"]["solar"] + fh_param["channels"]["terran"] + list_grid = fh_param["channels"]["solar_grid_type"] + fh_param["channels"]["terran_grid_type"] + expected_res_n["hdfi"] = 16 + expected_res_n["hrfi"] = 4 reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) with caplog.at_level(logging.WARNING): res = reader.load( - [make_dataid(name=name, calibration="brightness_temperature") for - name in fh_param["channels"]["terran"]], pad_data=False) + [make_dataid(name=name, calibration=calibration) for name in + list_chan], pad_data=False) + assert caplog.text == "" + assert expected_res_n[res_type] == len(res) + for ch, grid_type in zip(list_chan, + list_grid): + self._get_assert_load(res, ch, grid_type, DICT_CALIBRATION[calibration]) + + @pytest.mark.parametrize(("calibration", "channel", "resolution"), [ + (calibration, channel, resolution) + for calibration in ["counts", "radiance", "brightness_temperature", "reflectance"] + for channel, resolution in generate_parameters(calibration) + ]) + def test_load_calibration_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,calibration,caplog): + """Test loading with counts,radiance,reflectance and bt for AF files.""" + expected_res_n = 1 + fh_param = FakeFCIFileHandlerAF_fixture + 
type_ter = self._get_type_ter_AF(channel) + with caplog.at_level(logging.WARNING): + res = self._get_res_AF(channel,fh_param,calibration,reader_configs) assert caplog.text == "" assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param["channels"]["terran"], fh_param["channels"]["terran_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - assert res[ch].dtype == np.float32 - assert res[ch].attrs["calibration"] == "brightness_temperature" - assert res[ch].attrs["units"] == "K" + for ch, grid_type in zip(fh_param["channels"][type_ter], + fh_param["channels"][f"{type_ter}_grid_type"]): + self._get_assert_load(res,ch,grid_type,DICT_CALIBRATION[calibration]) - if ch == "ir_38": - numpy.testing.assert_array_almost_equal(res[ch][-1], np.float32(209.68275)) - numpy.testing.assert_array_almost_equal(res[ch][0], np.float32(1888.8513)) - else: - numpy.testing.assert_array_almost_equal(res[ch], np.float32(209.68275)) @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) @@ -599,28 +725,9 @@ def test_orbital_parameters_attr(self, reader_configs, fh_param): "projection_altitude": 35786400.0, } - expected_pos_info_for_filetype = { - "fdhsi": {"1km": {"start_position_row": 1, - "end_position_row": 200, - "segment_height": 200, - "grid_width": 11136}, - "2km": {"start_position_row": 1, - "end_position_row": 100, - "segment_height": 100, - "grid_width": 5568}}, - "hrfi": {"500m": {"start_position_row": 1, - "end_position_row": 400, - "segment_height": 400, - "grid_width": 22272}, - "1km": {"start_position_row": 1, - "end_position_row": 200, - "grid_width": 11136, - "segment_height": 200}} - } - @pytest.mark.parametrize(("fh_param", "expected_pos_info"), [ - (lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), expected_pos_info_for_filetype["fdhsi"]), - 
(lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), expected_pos_info_for_filetype["hrfi"]) + (lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), EXPECTED_POS_INFO_FOR_FILETYPE["fdhsi"]), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), EXPECTED_POS_INFO_FOR_FILETYPE["hrfi"]) ]) def test_get_segment_position_info(self, reader_configs, fh_param, expected_pos_info): """Test the segment position info method.""" @@ -629,21 +736,61 @@ def test_get_segment_position_info(self, reader_configs, fh_param, expected_pos_ segpos_info = filetype_handler.get_segment_position_info() assert segpos_info == expected_pos_info + @mock.patch("satpy.readers.yaml_reader.GEOVariableSegmentYAMLReader") + @pytest.mark.parametrize(("channel", "resolution"), generate_parameters("radiance")) + def test_not_get_segment_info_called_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution): + """Test that checks that the get_segment_position_info has not been called for AF data.""" + with mock.patch("satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler.get_segment_position_info") as gspi: + fh_param = FakeFCIFileHandlerAF_fixture + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) + reader.load([channel]) + gspi.assert_not_called() + + @pytest.mark.parametrize("calibration", ["index_map","pixel_quality"]) @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) - def test_load_index_map(self, reader_configs, fh_param, expected_res_n): - """Test loading of index_map.""" + def test_load_map_and_pixel(self, reader_configs, fh_param, expected_res_n,calibration): + """Test loading of index_map and pixel_quality.""" reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( - [name + "_index_map" for name in + [f"{name}_{calibration}" for name in fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) 
assert expected_res_n == len(res) for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], fh_param["channels"]["solar_grid_type"] + fh_param["channels"]["terran_grid_type"]): - assert res[ch + "_index_map"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + assert res[f"{ch}_{calibration}"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - numpy.testing.assert_array_equal(res[ch + "_index_map"][1, 1], 110) + if calibration == "index_map": + numpy.testing.assert_array_equal(res[f"{ch}_{calibration}"][1, 1], 110) + elif calibration == "pixel_quality": + numpy.testing.assert_array_equal(res[f"{ch}_{calibration}"][1, 1], 3) + assert res[f"{ch}_{calibration}"].attrs["name"] == ch + "_pixel_quality" + + @pytest.mark.parametrize(("calibration", "channel", "resolution"), [ + (calibration, channel, resolution) + for calibration in ["index_map","pixel_quality"] + for channel, resolution in generate_parameters(calibration) + ]) + def test_load_map_and_pixel_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,calibration): + """Test loading with of index_map and pixel_quality for AF files.""" + expected_res_n = 1 + fh_param = FakeFCIFileHandlerAF_fixture + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) + type_ter = self._get_type_ter_AF(channel) + res = reader.load([f"{name}_{calibration}" + for name in fh_param["channels"][type_ter]], pad_data=False) + assert expected_res_n == len(res) + for ch, grid_type in zip(fh_param["channels"][type_ter], + fh_param["channels"][f"{type_ter}_grid_type"]): + assert res[f"{ch}_{calibration}"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) + if calibration == "index_map": + numpy.testing.assert_array_equal(res[f"{ch}_{calibration}"][1, 1], 110) + elif calibration == "pixel_quality": + 
numpy.testing.assert_array_equal(res[f"{ch}_{calibration}"][1, 1], 3) + assert res[f"{ch}_{calibration}"].attrs["name"] == ch + "_pixel_quality" + @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) @@ -662,23 +809,6 @@ def test_load_aux_data(self, reader_configs, fh_param): else: numpy.testing.assert_array_equal(res[aux][1, 1], 10) - @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), - (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) - def test_load_quality_only(self, reader_configs, fh_param, expected_res_n): - """Test that loading quality only works.""" - reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - res = reader.load( - [name + "_pixel_quality" for name in - fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) - assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], - fh_param["channels"]["solar_grid_type"] + - fh_param["channels"]["terran_grid_type"]): - assert res[ch + "_pixel_quality"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - numpy.testing.assert_array_equal(res[ch + "_pixel_quality"][1, 1], 3) - assert res[ch + "_pixel_quality"].attrs["name"] == ch + "_pixel_quality" - @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) def test_platform_name(self, reader_configs, fh_param): @@ -750,7 +880,7 @@ class TestFCIL1cNCReaderBadData: def test_handling_bad_data_ir(self, reader_configs, caplog): """Test handling of bad IR data.""" with mocked_basefilehandler(FakeFCIFileHandlerWithBadData): - reader = _get_reader_with_filehandlers(_test_filenames["fdhsi"], reader_configs) + reader = 
_get_reader_with_filehandlers(TEST_FILENAMES["fdhsi"], reader_configs) with caplog.at_level(logging.ERROR): reader.load([make_dataid( name="ir_105", @@ -760,7 +890,7 @@ def test_handling_bad_data_ir(self, reader_configs, caplog): def test_handling_bad_data_vis(self, reader_configs, caplog): """Test handling of bad VIS data.""" with mocked_basefilehandler(FakeFCIFileHandlerWithBadData): - reader = _get_reader_with_filehandlers(_test_filenames["fdhsi"], reader_configs) + reader = _get_reader_with_filehandlers(TEST_FILENAMES["fdhsi"], reader_configs) with caplog.at_level(logging.ERROR): reader.load([make_dataid( name="vis_06", @@ -774,7 +904,7 @@ class TestFCIL1cNCReaderBadDataFromIDPF: def test_handling_bad_earthsun_distance(self, reader_configs): """Test handling of bad earth-sun distance data.""" with mocked_basefilehandler(FakeFCIFileHandlerWithBadIDPFData): - reader = _get_reader_with_filehandlers(_test_filenames["fdhsi"], reader_configs) + reader = _get_reader_with_filehandlers(TEST_FILENAMES["fdhsi"], reader_configs) res = reader.load([make_dataid(name=["vis_06"], calibration="reflectance")], pad_data=False) numpy.testing.assert_array_almost_equal(res["vis_06"], 100 * 15 * 1 * np.pi / 50) @@ -782,7 +912,7 @@ def test_handling_bad_earthsun_distance(self, reader_configs): def test_bad_xy_coords(self, reader_configs): """Test that the geolocation computation is correct.""" with mocked_basefilehandler(FakeFCIFileHandlerWithBadIDPFData): - reader = _get_reader_with_filehandlers(_test_filenames["fdhsi"], reader_configs) + reader = _get_reader_with_filehandlers(TEST_FILENAMES["fdhsi"], reader_configs) res = reader.load(["vis_06"], pad_data=False) area_def = res["vis_06"].attrs["area"] diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py index 84681b0f02..830f793d00 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -95,6 +95,7 @@ def setUp(self): 
"number_of_columns")) two_layers_dataset[0, :, :] = np.ones((100, 10)) two_layers_dataset[1, :, :] = 2 * np.ones((100, 10)) + two_layers_dataset.unit = "test_unit" mtg_geos_projection = nc.createVariable("mtg_geos_projection", int, dimensions=()) mtg_geos_projection.longitude_of_projection_origin = 0.0 @@ -102,6 +103,12 @@ def setUp(self): mtg_geos_projection.inverse_flattening = 298.257223563 mtg_geos_projection.perspective_point_height = 35786400. + # Add enumerated type + enum_dict = {"False": 0, "True": 1} + bool_type = nc.createEnumType(np.uint8,"bool_t",enum_dict) + nc.createVariable("quality_flag", bool_type, + dimensions=("number_of_rows", "number_of_columns")) + self.fh = FciL2NCFileHandler(filename=self.test_file, filename_info={}, filetype_info={}) def tearDown(self): @@ -158,34 +165,32 @@ def test_area_definition(self, me_, gad_): assert args[5] == 100 def test_dataset(self): - """Test the correct execution of the get_dataset function with a valid file_key.""" + """Test the correct execution of the get_dataset function with a valid nc_key.""" dataset = self.fh.get_dataset(make_dataid(name="test_one_layer", resolution=2000), {"name": "test_one_layer", - "file_key": "test_one_layer", + "nc_key": "test_one_layer", "fill_value": -999, "file_type": "test_file_type"}) np.testing.assert_allclose(dataset.values, np.ones((100, 10))) assert dataset.attrs["test_attr"] == "attr" - assert dataset.attrs["units"] == "test_units" assert dataset.attrs["fill_value"] == -999 def test_dataset_with_layer(self): - """Check the correct execution of the get_dataset function with a valid file_key & layer.""" + """Check the correct execution of the get_dataset function with a valid nc_key & layer.""" dataset = self.fh.get_dataset(make_dataid(name="test_two_layers", resolution=2000), {"name": "test_two_layers", - "file_key": "test_two_layers", "layer": 1, + "nc_key": "test_two_layers", "layer": 1, "fill_value": -999, "file_type": "test_file_type"}) 
np.testing.assert_allclose(dataset.values, 2 * np.ones((100, 10))) - assert dataset.attrs["units"] is None assert dataset.attrs["spacecraft_name"] == "test_platform" def test_dataset_with_invalid_filekey(self): - """Test the correct execution of the get_dataset function with an invalid file_key.""" + """Test the correct execution of the get_dataset function with an invalid nc_key.""" invalid_dataset = self.fh.get_dataset(make_dataid(name="test_invalid", resolution=2000), {"name": "test_invalid", - "file_key": "test_invalid", + "nc_key": "test_invalid", "fill_value": -999, "file_type": "test_file_type"}) assert invalid_dataset is None @@ -194,7 +199,7 @@ def test_dataset_with_total_cot(self): """Test the correct execution of the get_dataset function for total COT (add contributions from two layers).""" dataset = self.fh.get_dataset(make_dataid(name="retrieved_cloud_optical_thickness", resolution=2000), {"name": "retrieved_cloud_optical_thickness", - "file_key": "test_two_layers", + "nc_key": "test_two_layers", "fill_value": -999, "file_type": "test_file_type"}) # Checks that the function returns None @@ -207,7 +212,7 @@ def test_dataset_with_scalar(self): # Checks returned scalar value dataset = self.fh.get_dataset(make_dataid(name="test_scalar"), {"name": "product_quality", - "file_key": "product_quality", + "nc_key": "product_quality", "file_type": "test_file_type"}) assert dataset.values == 99.0 @@ -215,6 +220,53 @@ def test_dataset_with_scalar(self): with pytest.raises(NotImplementedError): self.fh.get_area_def(None) + def test_emumerations(self): + """Test the conversion of enumerated type information into flag_values and flag_meanings.""" + dataset = self.fh.get_dataset(make_dataid(name="test_enum", resolution=2000), + {"name": "quality_flag", + "nc_key": "quality_flag", + "file_type": "test_file_type", + "import_enum_information": True}) + attributes = dataset.attrs + assert "flag_values" in attributes + assert attributes["flag_values"] == [0,1] + assert 
"flag_meanings" in attributes + assert attributes["flag_meanings"] == ["False","True"] + + def test_units_from_file(self): + """Test units extraction from NetCDF file.""" + dataset = self.fh.get_dataset(make_dataid(name="test_units_from_file", resolution=2000), + {"name": "test_one_layer", + "nc_key": "test_one_layer", + "file_type": "test_file_type"}) + assert dataset.attrs["units"] == "test_units" + + def test_unit_from_file(self): + """Test that a unit stored with attribute `unit` in the file is assigned to the `units` attribute.""" + dataset = self.fh.get_dataset(make_dataid(name="test_unit_from_file", resolution=2000), + {"name": "test_two_layers", + "nc_key": "test_two_layers", "layer": 1, + "file_type": "test_file_type"}) + assert dataset.attrs["units"] == "test_unit" + + def test_units_from_yaml(self): + """Test units extraction from yaml file.""" + dataset = self.fh.get_dataset(make_dataid(name="test_units_from_yaml", resolution=2000), + {"name": "test_one_layer", + "units": "test_unit_from_yaml", + "nc_key": "test_one_layer", + "file_type": "test_file_type"}) + assert dataset.attrs["units"] == "test_unit_from_yaml" + + def test_units_none_conversion(self): + """Test that a units stored as 'none' is converted to None.""" + dataset = self.fh.get_dataset(make_dataid(name="test_units_none_conversion", resolution=2000), + {"name": "test_one_layer", + "units": "none", + "nc_key": "test_one_layer", + "file_type": "test_file_type"}) + assert dataset.attrs["units"] is None + class TestFciL2NCSegmentFileHandler(unittest.TestCase): """Test the FciL2NCSegmentFileHandler reader.""" @@ -290,13 +342,13 @@ def test_all_basic(self): assert global_attributes == expected_global_attributes def test_dataset(self): - """Test the correct execution of the get_dataset function with valid file_key.""" + """Test the correct execution of the get_dataset function with valid nc_key.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) 
- # Checks the correct execution of the get_dataset function with a valid file_key + # Checks the correct execution of the get_dataset function with a valid nc_key dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), {"name": "test_values", - "file_key": "test_values", + "nc_key": "test_values", "fill_value": -999, }) expected_dataset = self._get_unique_array(range(8), range(6)) np.testing.assert_allclose(dataset.values, expected_dataset) @@ -309,13 +361,13 @@ def test_dataset(self): self.fh.get_area_def(None) def test_dataset_with_invalid_filekey(self): - """Test the correct execution of the get_dataset function with an invalid file_key.""" + """Test the correct execution of the get_dataset function with an invalid nc_key.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) - # Checks the correct execution of the get_dataset function with an invalid file_key + # Checks the correct execution of the get_dataset function with an invalid nc_key invalid_dataset = self.fh.get_dataset(make_dataid(name="test_invalid", resolution=32000), {"name": "test_invalid", - "file_key": "test_invalid", + "nc_key": "test_invalid", "fill_value": -999, }) # Checks that the function returns None assert invalid_dataset is None @@ -325,10 +377,10 @@ def test_dataset_with_adef(self): self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}, with_area_definition=True) - # Checks the correct execution of the get_dataset function with a valid file_key + # Checks the correct execution of the get_dataset function with a valid nc_key dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), {"name": "test_values", - "file_key": "test_values", + "nc_key": "test_values", "fill_value": -999, "coordinates": ("test_lon", "test_lat"), }) expected_dataset = self._get_unique_array(range(8), range(6)) @@ -347,7 +399,7 @@ def 
test_dataset_with_adef_and_wrongs_dims(self): with_area_definition=True) with pytest.raises(NotImplementedError): self.fh.get_dataset(make_dataid(name="test_wrong_dims", resolution=6000), - {"name": "test_wrong_dims", "file_key": "test_values", "fill_value": -999} + {"name": "test_wrong_dims", "nc_key": "test_values", "fill_value": -999} ) def test_dataset_with_scalar(self): @@ -356,7 +408,7 @@ def test_dataset_with_scalar(self): # Checks returned scalar value dataset = self.fh.get_dataset(make_dataid(name="test_scalar"), {"name": "product_quality", - "file_key": "product_quality", + "nc_key": "product_quality", "file_type": "test_file_type"}) assert dataset.values == 99.0 @@ -370,7 +422,7 @@ def test_dataset_slicing_catid(self): dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), {"name": "test_values", - "file_key": "test_values", + "nc_key": "test_values", "fill_value": -999, "category_id": 5}) expected_dataset = self._get_unique_array(range(8), 5) @@ -382,7 +434,7 @@ def test_dataset_slicing_chid_catid(self): dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), {"name": "test_values", - "file_key": "test_values", + "nc_key": "test_values", "fill_value": -999, "channel_id": 0, "category_id": 1}) expected_dataset = self._get_unique_array(0, 1) @@ -395,7 +447,7 @@ def test_dataset_slicing_visid_catid(self): self.fh.nc = self.fh.nc.rename_dims({"number_of_channels": "number_of_vis_channels"}) dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), {"name": "test_values", - "file_key": "test_values", + "nc_key": "test_values", "fill_value": -999, "vis_channel_id": 3, "category_id": 3}) expected_dataset = self._get_unique_array(3, 3) @@ -408,7 +460,7 @@ def test_dataset_slicing_irid(self): self.fh.nc = self.fh.nc.rename_dims({"number_of_channels": "number_of_ir_channels"}) dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), {"name": "test_values", - 
"file_key": "test_values", + "nc_key": "test_values", "fill_value": -999, "ir_channel_id": 4}) expected_dataset = self._get_unique_array(4, range(6)) @@ -489,7 +541,7 @@ def test_byte_extraction(self): # Value of 1 is expected to be returned for this test dataset = self.byte_reader.get_dataset(make_dataid(name="cloud_mask_test_flag", resolution=2000), {"name": "cloud_mask_test_flag", - "file_key": "cloud_mask_test_flag", + "nc_key": "cloud_mask_test_flag", "fill_value": -999, "file_type": "nc_fci_test_clm", "extract_byte": 1, @@ -500,7 +552,7 @@ def test_byte_extraction(self): # Value of 0 is expected fto be returned or this test dataset = self.byte_reader.get_dataset(make_dataid(name="cloud_mask_test_flag", resolution=2000), {"name": "cloud_mask_test_flag", - "file_key": "cloud_mask_test_flag", + "nc_key": "cloud_mask_test_flag", "fill_value": -999, "mask_value": 0., "file_type": "nc_fci_test_clm", "extract_byte": 23, @@ -575,10 +627,10 @@ def test_all_basic(self, amv_filehandler, amv_file): assert global_attributes == expected_global_attributes def test_dataset(self, amv_filehandler): - """Test the correct execution of the get_dataset function with a valid file_key.""" + """Test the correct execution of the get_dataset function with a valid nc_key.""" dataset = amv_filehandler.get_dataset(make_dataid(name="test_dataset", resolution=2000), {"name": "test_dataset", - "file_key": "test_dataset", + "nc_key": "test_dataset", "fill_value": -999, "file_type": "test_file_type"}) np.testing.assert_allclose(dataset.values, np.ones(50000)) @@ -587,10 +639,10 @@ def test_dataset(self, amv_filehandler): assert dataset.attrs["fill_value"] == -999 def test_dataset_with_invalid_filekey(self, amv_filehandler): - """Test the correct execution of the get_dataset function with an invalid file_key.""" + """Test the correct execution of the get_dataset function with an invalid nc_key.""" invalid_dataset = amv_filehandler.get_dataset(make_dataid(name="test_invalid", resolution=2000), 
{"name": "test_invalid", - "file_key": "test_invalid", + "nc_key": "test_invalid", "fill_value": -999, "file_type": "test_file_type"}) assert invalid_dataset is None diff --git a/satpy/tests/reader_tests/test_generic_image.py b/satpy/tests/reader_tests/test_generic_image.py index cd347ce07e..40d7611eb4 100644 --- a/satpy/tests/reader_tests/test_generic_image.py +++ b/satpy/tests/reader_tests/test_generic_image.py @@ -24,7 +24,7 @@ import pytest import xarray as xr -from satpy.tests.utils import make_dataid +from satpy.tests.utils import RANDOM_GEN, make_dataid class TestGenericImage(unittest.TestCase): @@ -32,14 +32,14 @@ class TestGenericImage(unittest.TestCase): def setUp(self): """Create temporary images to test on.""" + import datetime as dt import tempfile - from datetime import datetime from pyresample.geometry import AreaDefinition from satpy.scene import Scene - self.date = datetime(2018, 1, 1) + self.date = dt.datetime(2018, 1, 1) # Create area definition pcs_id = "ETRS89 / LAEA Europe" @@ -62,7 +62,7 @@ def setUp(self): a__[:10, :10] = 0 a__ = da.from_array(a__, chunks=(50, 50)) - r_nan__ = np.random.uniform(0., 1., size=(self.y_size, self.x_size)) + r_nan__ = RANDOM_GEN.uniform(0., 1., size=(self.y_size, self.x_size)) r_nan__[:10, :10] = np.nan r_nan__ = da.from_array(r_nan__, chunks=(50, 50)) diff --git a/satpy/tests/reader_tests/test_ghrsst_l2.py b/satpy/tests/reader_tests/test_ghrsst_l2.py index 66c030e91d..b4cabccfa4 100644 --- a/satpy/tests/reader_tests/test_ghrsst_l2.py +++ b/satpy/tests/reader_tests/test_ghrsst_l2.py @@ -17,9 +17,9 @@ # satpy. If not, see . 
"""Module for testing the satpy.readers.ghrsst_l2 module.""" +import datetime as dt import os import tarfile -from datetime import datetime from pathlib import Path import numpy as np @@ -124,7 +124,7 @@ def test_get_dataset(self, tmp_path): def test_get_sensor(self, tmp_path): """Test retrieval of the sensor name from the netCDF file.""" - dt_valid = datetime(2022, 3, 21, 11, 26, 40) # 202203211200Z + dt_valid = dt.datetime(2022, 3, 21, 11, 26, 40) # 202203211200Z filename_info = {"field_type": "NARSST", "generating_centre": "FRA_", "satid": "NOAA20_", "valid_time": dt_valid} @@ -136,9 +136,9 @@ def test_get_sensor(self, tmp_path): def test_get_start_and_end_times(self, tmp_path): """Test retrieval of the sensor name from the netCDF file.""" - dt_valid = datetime(2022, 3, 21, 11, 26, 40) # 202203211200Z - good_start_time = datetime(2022, 3, 21, 11, 26, 40) # 20220321T112640Z - good_stop_time = datetime(2022, 3, 21, 14, 57, 11) # 20220321T145711Z + dt_valid = dt.datetime(2022, 3, 21, 11, 26, 40) # 202203211200Z + good_start_time = dt.datetime(2022, 3, 21, 11, 26, 40) # 20220321T112640Z + good_stop_time = dt.datetime(2022, 3, 21, 14, 57, 11) # 20220321T145711Z filename_info = {"field_type": "NARSST", "generating_centre": "FRA_", "satid": "NOAA20_", "valid_time": dt_valid} diff --git a/satpy/tests/reader_tests/test_glm_l2.py b/satpy/tests/reader_tests/test_glm_l2.py index 81636ba630..8ee53e29a2 100644 --- a/satpy/tests/reader_tests/test_glm_l2.py +++ b/satpy/tests/reader_tests/test_glm_l2.py @@ -128,9 +128,9 @@ def setUp(self, xr_): def test_basic_attributes(self): """Test getting basic file attributes.""" - from datetime import datetime - assert self.reader.start_time == datetime(2017, 9, 20, 17, 30, 40) - assert self.reader.end_time == datetime(2017, 9, 20, 17, 41, 17) + import datetime as dt + assert self.reader.start_time == dt.datetime(2017, 9, 20, 17, 30, 40) + assert self.reader.end_time == dt.datetime(2017, 9, 20, 17, 41, 17) def test_get_dataset(self): 
"""Test the get_dataset method.""" diff --git a/satpy/tests/reader_tests/test_goci2_l2_nc.py b/satpy/tests/reader_tests/test_goci2_l2_nc.py new file mode 100644 index 0000000000..178d0c5209 --- /dev/null +++ b/satpy/tests/reader_tests/test_goci2_l2_nc.py @@ -0,0 +1,213 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2016-2018 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . + +"""Module for testing the satpy.readers.goci2_l2_nc module.""" + +import datetime as dt + +import numpy as np +import pytest +import xarray as xr +from pytest_lazy_fixtures import lf as lazy_fixture + +from satpy import Scene +from satpy.tests.utils import RANDOM_GEN + +# NOTE: +# The following fixtures are not defined in this file, but are used and injected by Pytest: +# - tmp_path_factory + + +start_time = dt.datetime(2024, 2, 14, 2, 32, 27) +end_time = dt.datetime(2024, 2, 14, 2, 33, 31) + +global_attrs = { + "observation_start_time": start_time.strftime("%Y%m%d_%H%M%S"), + "observation_end_time": end_time.strftime("%Y%m%d_%H%M%S"), + "instrument": "GOCI-II", + "platform": "GK-2B", +} + +badarea_attrs = global_attrs.copy() +badarea_attrs["cdm_data_type"] = "bad_area" + + +def _create_lonlat(): + """Create a fake navigation dataset with lon/lat.""" + lon, lat = np.meshgrid(np.linspace(120, 130, 10), np.linspace(30, 40, 10)) + lon = xr.DataArray( + lon, + dims=("number_of_lines", 
"pixels_per_line"), + attrs={"standard_name": "longitude", "units": "degrees_east"}, + ) + lat = xr.DataArray( + lat, + dims=("number_of_lines", "pixels_per_line"), + attrs={"standard_name": "latitude", "units": "degrees_north"}, + ) + ds = xr.Dataset() + ds["longitude"] = lon + ds["latitude"] = lat + return ds + + +def _create_bad_lon_lat(): + """Create a fake navigation dataset with lon/lat base name missing.""" + lon, lat = np.meshgrid(np.linspace(120, 130, 10), np.linspace(30, 40, 10)) + ds = xr.Dataset( + { + "longitude": (["number_of_lines", "pixels_per_line"], lon), + "latitude": (["number_of_lines", "pixels_per_line"], lat), + } + ) + return ds + + +@pytest.fixture(scope="session") +def ac_file(tmp_path_factory): + """Create a fake atmospheric correction product.""" + data = RANDOM_GEN.random((10, 10)) + RhoC = xr.Dataset( + {"RhoC_555": (["number_of_lines", "pixels_per_line"], data)}, + coords={"number_of_lines": np.arange(10), "pixels_per_line": np.arange(10)}, + ) + Rrs = xr.Dataset( + {"Rrs_555": (["number_of_lines", "pixels_per_line"], data)}, + coords={"number_of_lines": np.arange(10), "pixels_per_line": np.arange(10)}, + ) + navigation = _create_lonlat() + ds = xr.Dataset(attrs=global_attrs) + fname = ( + f'{tmp_path_factory.mktemp("data")}/GK2B_GOCI2_L2_20240214_021530_LA_S010_AC.nc' + ) + ds.to_netcdf(fname) + navigation.to_netcdf(fname, group="navigation_data", mode="a") + RhoC.to_netcdf(fname, group="geophysical_data/RhoC", mode="a") + Rrs.to_netcdf(fname, group="geophysical_data/Rrs", mode="a") + return fname + + +@pytest.fixture(scope="module") +def iop_file(tmp_path_factory): + """Create a fake IOP product.""" + data = RANDOM_GEN.random((10, 10)) + a = xr.Dataset( + {"a_total_555": (["number_of_lines", "pixels_per_line"], data)}, + coords={"number_of_lines": np.arange(10), "pixels_per_line": np.arange(10)}, + ) + bb = xr.Dataset( + {"bb_total_555": (["number_of_lines", "pixels_per_line"], data)}, + coords={"number_of_lines": np.arange(10), 
"pixels_per_line": np.arange(10)}, + ) + navigation = _create_lonlat() + ds = xr.Dataset(attrs=global_attrs) + fname = f'{tmp_path_factory.mktemp("data")}/GK2B_GOCI2_L2_20240214_021530_LA_S010_IOP.nc' + ds.to_netcdf(fname) + navigation.to_netcdf(fname, group="navigation_data", mode="a") + a.to_netcdf(fname, group="geophysical_data/a_total", mode="a") + bb.to_netcdf(fname, group="geophysical_data/bb_total", mode="a") + return fname + + +@pytest.fixture(scope="module") +def generic_file(tmp_path_factory): + """Create a fake ouput product like Chl, Zsd etc.""" + data = RANDOM_GEN.random((10, 10)) + geophysical_data = xr.Dataset( + {"Chl": (["number_of_lines", "pixels_per_line"], data)}, + coords={"number_of_lines": np.arange(10), "pixels_per_line": np.arange(10)}, + ) + navigation = _create_lonlat() + ds = xr.Dataset(attrs=global_attrs) + fname = f'{tmp_path_factory.mktemp("data")}/GK2B_GOCI2_L2_20240214_021530_LA_S010_Chl.nc' + ds.to_netcdf(fname) + navigation.to_netcdf(fname, group="navigation_data", mode="a") + geophysical_data.to_netcdf(fname, group="geophysical_data", mode="a") + return fname + + +@pytest.fixture(scope="module") +def generic_bad_file(tmp_path_factory): + """Create a PP product with lon/lat base name missing.""" + data = RANDOM_GEN.random((10, 10)) + geophysical_data = xr.Dataset( + {"PP": (["number_of_lines", "pixels_per_line"], data)}, + coords={"number_of_lines": np.arange(10), "pixels_per_line": np.arange(10)}, + ) + navigation = _create_bad_lon_lat() + ds = xr.Dataset(attrs=global_attrs) + fname = ( + f'{tmp_path_factory.mktemp("data")}/GK2B_GOCI2_L2_20240214_021530_LA_S010_PP.nc' + ) + ds.to_netcdf(fname) + navigation.to_netcdf(fname, group="navigation_data", mode="a") + geophysical_data.to_netcdf(fname, group="geophysical_data", mode="a") + return fname + + +class TestGOCI2Reader: + """Test the GOCI-II L2 netcdf file reader.""" + + @pytest.mark.parametrize( + "test_files", + [ + lazy_fixture("ac_file"), + lazy_fixture("iop_file"), + 
lazy_fixture("generic_file"), + lazy_fixture("generic_bad_file"), + ], + ) + def test_scene_available_datasets(self, test_files): + """Test that datasets are available.""" + scene = Scene(filenames=[test_files], reader="goci2_l2_nc") + available_datasets = scene.all_dataset_names() + assert len(available_datasets) > 0 + assert "longitude" in available_datasets + assert "latitude" in available_datasets + + @pytest.mark.parametrize( + "test_files", + [ + lazy_fixture("ac_file"), + lazy_fixture("iop_file"), + lazy_fixture("generic_file"), + lazy_fixture("generic_bad_file"), + ], + ) + def test_start_end_time(self, test_files): + """Test dataset start_time and end_time.""" + scene = Scene(filenames=[test_files], reader="goci2_l2_nc") + assert scene.start_time == start_time + assert scene.end_time == end_time + + @pytest.mark.parametrize( + ("test_files", "datasets"), + [ + (lazy_fixture("ac_file"), ["RhoC_555", "Rrs_555"]), + (lazy_fixture("iop_file"), ["a_total_555", "bb_total_555"]), + (lazy_fixture("generic_file"), ["Chl"]), + (lazy_fixture("generic_bad_file"), ["PP"]), + ], + ) + def test_load_dataset(self, test_files, datasets): + """Test dataset loading.""" + scene = Scene(filenames=[test_files], reader="goci2_l2_nc") + scene.load(datasets) + for dataset in datasets: + data_arr = scene[dataset] + assert data_arr.dims == ("y", "x") diff --git a/satpy/tests/reader_tests/test_gpm_imerg.py b/satpy/tests/reader_tests/test_gpm_imerg.py index a75e59863f..d038d0f0d7 100644 --- a/satpy/tests/reader_tests/test_gpm_imerg.py +++ b/satpy/tests/reader_tests/test_gpm_imerg.py @@ -14,12 +14,12 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
-"""Unittests for GPM IMERG reader.""" +"""Unittests for GPM IMERG reader.""" +import datetime as dt import os import unittest -from datetime import datetime from unittest import mock import dask.array as da @@ -127,8 +127,8 @@ def test_load_data(self): assert reader.file_handlers res = reader.load(["IRprecipitation"]) assert 1 == len(res) - assert res["IRprecipitation"].start_time == datetime(2020, 1, 31, 23, 30, 0) - assert res["IRprecipitation"].end_time == datetime(2020, 1, 31, 23, 59, 59) + assert res["IRprecipitation"].start_time == dt.datetime(2020, 1, 31, 23, 30, 0) + assert res["IRprecipitation"].end_time == dt.datetime(2020, 1, 31, 23, 59, 59) assert res["IRprecipitation"].resolution == 0.1 assert res["IRprecipitation"].area.width == 3600 assert res["IRprecipitation"].area.height == 1800 diff --git a/satpy/tests/reader_tests/test_hrit_base.py b/satpy/tests/reader_tests/test_hrit_base.py index 12317f11f1..cae8f771a6 100644 --- a/satpy/tests/reader_tests/test_hrit_base.py +++ b/satpy/tests/reader_tests/test_hrit_base.py @@ -18,10 +18,10 @@ """The HRIT base reader tests package.""" import bz2 +import datetime as dt import gzip import os import unittest -from datetime import datetime, timedelta from tempfile import NamedTemporaryFile, gettempdir from unittest import mock @@ -30,6 +30,7 @@ from satpy.readers import FSFile from satpy.readers.hrit_base import HRITFileHandler, decompress, get_xritdecompress_cmd, get_xritdecompress_outfile +from satpy.tests.utils import RANDOM_GEN # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: @@ -145,9 +146,9 @@ def create_stub_hrit(filename, open_fun=open, meta=mda): lines = meta["number_of_lines"] cols = meta["number_of_columns"] total_bits = lines * cols * nbits - arr = np.random.randint(0, 256, - size=int(total_bits / 8), - dtype=np.uint8) + arr = RANDOM_GEN.integers(0, 256, + size=int(total_bits / 8), + dtype=np.uint8) with open_fun(filename, mode="wb") as fd: fd.write(b" 
" * meta["total_header_length"]) bytes_data = arr.tobytes() @@ -189,7 +190,7 @@ def setup_method(self, method): with mock.patch.object(HRITFileHandler, "_get_hd", new=new_get_hd): self.reader = HRITFileHandler("filename", {"platform_shortname": "MSG3", - "start_time": datetime(2016, 3, 3, 0, 0)}, + "start_time": dt.datetime(2016, 3, 3, 0, 0)}, {"filetype": "info"}, [mock.MagicMock(), mock.MagicMock(), mock.MagicMock()]) @@ -269,9 +270,9 @@ def test_read_band_gzip_stream(self, stub_gzipped_hrit_file): def test_start_end_time(self): """Test reading and converting start/end time.""" - assert self.reader.start_time == datetime(2016, 3, 3, 0, 0) + assert self.reader.start_time == dt.datetime(2016, 3, 3, 0, 0) assert self.reader.start_time == self.reader.observation_start_time - assert self.reader.end_time == datetime(2016, 3, 3, 0, 0) + timedelta(minutes=15) + assert self.reader.end_time == dt.datetime(2016, 3, 3, 0, 0) + dt.timedelta(minutes=15) assert self.reader.end_time == self.reader.observation_end_time @@ -292,7 +293,7 @@ def test_read_band_filepath(self, stub_compressed_hrit_file): with mock.patch.object(HRITFileHandler, "_get_hd", side_effect=new_get_hd, autospec=True) as get_hd: self.reader = HRITFileHandler(filename, {"platform_shortname": "MSG3", - "start_time": datetime(2016, 3, 3, 0, 0)}, + "start_time": dt.datetime(2016, 3, 3, 0, 0)}, {"filetype": "info"}, [mock.MagicMock(), mock.MagicMock(), mock.MagicMock()]) diff --git a/satpy/tests/reader_tests/test_hsaf_grib.py b/satpy/tests/reader_tests/test_hsaf_grib.py index da0f6dd86b..296bb921c4 100644 --- a/satpy/tests/reader_tests/test_hsaf_grib.py +++ b/satpy/tests/reader_tests/test_hsaf_grib.py @@ -17,9 +17,9 @@ # satpy. If not, see . 
"""Module for testing the satpy.readers.grib module.""" +import datetime as dt import sys import unittest -from datetime import datetime from unittest import mock import numpy as np @@ -132,7 +132,7 @@ def tearDown(self): def test_init(self, pg): """Test the init function, ensure that the correct dates and metadata are returned.""" pg.open.return_value = FakeGRIB() - correct_dt = datetime(2019, 6, 3, 16, 45, 0) + correct_dt = dt.datetime(2019, 6, 3, 16, 45, 0) from satpy.readers.hsaf_grib import HSAFFileHandler fh = HSAFFileHandler("filename", mock.MagicMock(), mock.MagicMock()) assert fh._analysis_time == correct_dt diff --git a/satpy/tests/reader_tests/test_hsaf_h5.py b/satpy/tests/reader_tests/test_hsaf_h5.py index 49658e6727..bdd523ad0d 100644 --- a/satpy/tests/reader_tests/test_hsaf_h5.py +++ b/satpy/tests/reader_tests/test_hsaf_h5.py @@ -1,6 +1,7 @@ """Tests for the H-SAF H5 reader.""" + +import datetime as dt import os -from datetime import datetime import h5py import numpy as np @@ -50,7 +51,7 @@ def test_hsaf_sc_datetime(sc_h5_file): loaded_scene = _get_scene_with_loaded_sc_datasets(sc_h5_file) fname = os.path.basename(sc_h5_file) dtstr = fname.split("_")[1] - obs_time = datetime.strptime(dtstr, "%Y%m%d") + obs_time = dt.datetime.strptime(dtstr, "%Y%m%d") assert loaded_scene["SC"].attrs["data_time"] == obs_time diff --git a/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py b/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py index 9bf5f5f093..999fb50045 100644 --- a/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py +++ b/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py @@ -330,7 +330,7 @@ def _get_global_attrs(self, num_rows, num_cols): "/attr/Platform_LongName": "Haiyang 2B Ocean Observing Satellite", "/attr/Platform_ShortName": "HY-2B", "/attr/Platform_Type": "spacecraft", - "/attr/Producer_Agency": "Ministry of Natural Resources of the People\'s Republic of China", + "/attr/Producer_Agency": "Ministry of Natural Resources of the People's Republic of China", 
"/attr/Producer_Institution": "NSOAS", "/attr/Production_Date_Time": "20200326T06:23:10", "/attr/Range_Beginning_Time": "20200326T01:11:07", @@ -509,7 +509,7 @@ def test_reading_attrs_nsoas(self): def test_properties(self): """Test platform_name.""" - from datetime import datetime + import datetime as dt from satpy.readers import load_reader filenames = [ @@ -521,5 +521,5 @@ def test_properties(self): # Make sure we have some files res = reader.load(["wvc_lon"]) assert res["wvc_lon"].platform_name == "HY-2B" - assert res["wvc_lon"].start_time == datetime(2020, 3, 26, 1, 11, 7) - assert res["wvc_lon"].end_time == datetime(2020, 3, 26, 2, 55, 40) + assert res["wvc_lon"].start_time == dt.datetime(2020, 3, 26, 1, 11, 7) + assert res["wvc_lon"].end_time == dt.datetime(2020, 3, 26, 2, 55, 40) diff --git a/satpy/tests/reader_tests/test_ici_l1b_nc.py b/satpy/tests/reader_tests/test_ici_l1b_nc.py index 498ca88705..ab8bad2527 100644 --- a/satpy/tests/reader_tests/test_ici_l1b_nc.py +++ b/satpy/tests/reader_tests/test_ici_l1b_nc.py @@ -15,13 +15,14 @@ # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . + """The ici_l1b_nc reader tests package. This version tests the reader for ICI test data as per PFS V3A. 
""" -from datetime import datetime +import datetime as dt from unittest.mock import patch import numpy as np @@ -50,13 +51,13 @@ def reader(fake_file): filename=fake_file, filename_info={ "sensing_start_time": ( - datetime.fromisoformat("2000-01-01T01:00:00") + dt.datetime.fromisoformat("2000-01-01T01:00:00") ), "sensing_end_time": ( - datetime.fromisoformat("2000-01-01T02:00:00") + dt.datetime.fromisoformat("2000-01-01T02:00:00") ), "creation_time": ( - datetime.fromisoformat("2000-01-01T03:00:00") + dt.datetime.fromisoformat("2000-01-01T03:00:00") ), }, filetype_info={ @@ -217,11 +218,11 @@ class TestIciL1bNCFileHandler: def test_start_time(self, reader): """Test start time.""" - assert reader.start_time == datetime(2000, 1, 2, 3, 4, 5) + assert reader.start_time == dt.datetime(2000, 1, 2, 3, 4, 5) def test_end_time(self, reader): """Test end time.""" - assert reader.end_time == datetime(2000, 1, 2, 4, 5, 6) + assert reader.end_time == dt.datetime(2000, 1, 2, 4, 5, 6) def test_sensor(self, reader): """Test sensor.""" @@ -445,8 +446,14 @@ def test_interpolate_geo(self, reader): """Test interpolate geographic coordinates.""" shape = (N_SCAN, N_SUBS, N_HORNS) dims = ("n_scan", "n_subs", "n_horns") + sub_pos = np.append( + np.arange(0, N_SAMPLES, np.ceil(N_SAMPLES / N_SUBS)), + N_SAMPLES - 1 + ) longitude = xr.DataArray( - 2. * np.ones(shape), + np.tile( # longitudes between 0 and 10 + 10 * sub_pos / sub_pos[-1], (N_SCAN, N_HORNS, 1) + ).swapaxes(1, 2), dims=dims, coords={ "n_horns": np.arange(N_HORNS), @@ -462,7 +469,9 @@ def test_interpolate_geo(self, reader): expect_shape = (N_SCAN, N_SAMPLES, N_HORNS) assert lon.shape == expect_shape assert lat.shape == expect_shape - np.testing.assert_allclose(lon, 2.0) + np.testing.assert_allclose(lon[:, 0, :], 0.) + np.testing.assert_allclose(lon[:, -1, :], 10.) 
+ np.testing.assert_allclose(np.diff(lon[0, :, 0]), 10 / (N_SAMPLES - 1)) np.testing.assert_allclose(lat, 1.0) def test_interpolate_viewing_angle(self, reader): @@ -509,13 +518,13 @@ def test_get_global_attributes(self, reader): attributes = reader._get_global_attributes() assert attributes == { "filename": reader.filename, - "start_time": datetime(2000, 1, 2, 3, 4, 5), - "end_time": datetime(2000, 1, 2, 4, 5, 6), + "start_time": dt.datetime(2000, 1, 2, 3, 4, 5), + "end_time": dt.datetime(2000, 1, 2, 4, 5, 6), "spacecraft_name": "SGB", "ssp_lon": None, "sensor": "ICI", - "filename_start_time": datetime(2000, 1, 1, 1, 0), - "filename_end_time": datetime(2000, 1, 1, 2, 0), + "filename_start_time": dt.datetime(2000, 1, 1, 1, 0), + "filename_end_time": dt.datetime(2000, 1, 1, 2, 0), "platform_name": "SGB", "quality_group": { "duration_of_product": np.array(1000., dtype=np.float32), diff --git a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py index 9fa7af224d..7378078c2a 100644 --- a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py +++ b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py @@ -1,6 +1,7 @@ """Tests for the Insat3D reader.""" + +import datetime as dt import os -from datetime import datetime import dask.array as da import h5netcdf @@ -16,7 +17,7 @@ open_dataset, open_datatree, ) -from satpy.tests.utils import make_dataid +from satpy.tests.utils import RANDOM_GEN, make_dataid # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: @@ -30,10 +31,10 @@ alb_units = "%" temp_units = "K" chunks_1km = (1, 46, 1126) -values_1km = np.random.randint(0, 1000, shape_1km, dtype=np.uint16) +values_1km = RANDOM_GEN.integers(0, 1000, shape_1km, dtype=np.uint16) values_1km[0, 0, 0] = 0 -values_4km = np.random.randint(0, 1000, shape_4km, dtype=np.uint16) -values_8km = np.random.randint(0, 1000, shape_8km, dtype=np.uint16) +values_4km = RANDOM_GEN.integers(0, 1000, 
shape_4km, dtype=np.uint16) +values_8km = RANDOM_GEN.integers(0, 1000, shape_8km, dtype=np.uint16) values_by_resolution = {1000: values_1km, 4000: values_4km, @@ -72,8 +73,8 @@ "ALBEDO": "%", "TEMP": "K"} -start_time = datetime(2009, 6, 9, 9, 0) -end_time = datetime(2009, 6, 9, 9, 30) +start_time = dt.datetime(2009, 6, 9, 9, 0) +end_time = dt.datetime(2009, 6, 9, 9, 30) subsatellite_longitude = 82 time_pattern = "%d-%b-%YT%H:%M:%S" diff --git a/satpy/tests/reader_tests/test_li_l2_nc.py b/satpy/tests/reader_tests/test_li_l2_nc.py index 5e9d0ff563..cf4858c7ed 100644 --- a/satpy/tests/reader_tests/test_li_l2_nc.py +++ b/satpy/tests/reader_tests/test_li_l2_nc.py @@ -12,9 +12,11 @@ # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . + """Unit tests on the LI L2 reader using the conventional mock constructed context.""" + +import datetime as dt import os -from datetime import datetime from unittest import mock import numpy as np @@ -174,7 +176,8 @@ def test_dataset_loading(self, filetype_infos): "end_time": "1000" } - handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, ftype)) + handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, ftype), + with_area_definition=False) ds_desc = handler.ds_desc # retrieve the schema that what used to generate the content for that product: @@ -405,7 +408,7 @@ def test_report_datetimes(self, filetype_infos): assert dset.values.dtype == np.dtype("datetime64[ns]") # The default epoch_time should be 1.234 seconds after epoch: - ref_time = np.datetime64(datetime(2000, 1, 1, 0, 0, 1, 234000)) + ref_time = np.datetime64(dt.datetime(2000, 1, 1, 0, 0, 1, 234000)) assert np.all(dset.values == ref_time) # Check time_offset: @@ -480,7 +483,8 @@ def test_combine_info(self, filetype_infos): def test_coordinates_projection(self, filetype_infos): """Should automatically generate lat/lon coords from projection data.""" - 
handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc")) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), + with_area_definition=False) dsid = make_dataid(name="flash_accumulation") dset = handler.get_dataset(dsid) @@ -492,7 +496,8 @@ def test_coordinates_projection(self, filetype_infos): with pytest.raises(NotImplementedError): handler.get_area_def(dsid) - handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_afr_nc")) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_afr_nc"), + with_area_definition=False) dsid = make_dataid(name="flash_radiance") dset = handler.get_dataset(dsid) @@ -501,7 +506,8 @@ def test_coordinates_projection(self, filetype_infos): assert dset.attrs["coordinates"][0] == "longitude" assert dset.attrs["coordinates"][1] == "latitude" - handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_afa_nc")) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_afa_nc"), + with_area_definition=False) dsid = make_dataid(name="accumulated_flash_area") dset = handler.get_dataset(dsid) @@ -592,7 +598,6 @@ def test_generate_coords_called_once(Self, filetype_infos): def test_coords_generation(self, filetype_infos): """Compare daskified coords generation results with non-daskified.""" - # Prepare dummy (but somewhat realistic) arrays of azimuth/elevation values. 
products = ["li_l2_af_nc", "li_l2_afr_nc", "li_l2_afa_nc"] @@ -627,6 +632,7 @@ def test_coords_generation(self, filetype_infos): projection = Proj(proj_dict) azimuth_vals = azimuth.values * point_height elevation_vals = elevation.values * point_height + azimuth_vals *= -1 lon_ref, lat_ref = projection(azimuth_vals, elevation_vals, inverse=True) # Convert to float32: lon_ref = lon_ref.astype(np.float32) @@ -640,6 +646,30 @@ def test_coords_generation(self, filetype_infos): np.testing.assert_equal(lon, lon_ref) np.testing.assert_equal(lat, lat_ref) + def test_coords_and_grid_consistency(self, filetype_infos): + """Compare computed latlon coords for 1-d version with latlon from areadef as for the gridded version.""" + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), + with_area_definition=True) + + # Get cols/rows arrays from handler + x = handler.get_measured_variable(handler.swath_coordinates["azimuth"]) + y = handler.get_measured_variable(handler.swath_coordinates["elevation"]) + cols = x.astype(int) - 1 + rows = (LI_GRID_SHAPE[0] - y.astype(int)) + + # compute lonlat from 1-d coords generation (called when with_area_definition==False) + handler.generate_coords_from_scan_angles() + lon = handler.internal_variables["longitude"].values + lat = handler.internal_variables["latitude"].values + + # compute lonlat from 2-d areadef + dsid = make_dataid(name="flash_accumulation") + area_def = handler.get_area_def(dsid) + lon_areadef, lat_areadef = area_def.get_lonlat_from_array_coordinates(cols, rows) + + np.testing.assert_allclose(lon, lon_areadef, rtol=1e-3) + np.testing.assert_allclose(lat, lat_areadef, rtol=1e-3) + def test_get_area_def_acc_products(self, filetype_infos): """Test retrieval of area def for accumulated products.""" handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py 
b/satpy/tests/reader_tests/test_mersi_l1b.py index 1df0d41f12..e9b8c45ae6 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -27,13 +27,23 @@ from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler -def _get_calibration(num_scans): +def _get_calibration(num_scans, ftype): calibration = { - "Calibration/VIS_Cal_Coeff": + f"Calibration/{ftype}_Cal_Coeff": xr.DataArray( da.ones((19, 3), chunks=1024), attrs={"Slope": np.array([1.] * 19), "Intercept": np.array([0.] * 19)}, dims=("_bands", "_coeffs")), + "Calibration/Solar_Irradiance": + xr.DataArray( + da.ones((19, ), chunks=1024), + attrs={"Slope": np.array([1.] * 19), "Intercept": np.array([0.] * 19)}, + dims=("_bands")), + "Calibration/Solar_Irradiance_LL": + xr.DataArray( + da.ones((1, ), chunks=1024), + attrs={"Slope": np.array([1.]), "Intercept": np.array([0.])}, + dims=("_bands")), "Calibration/IR_Cal_Coeff": xr.DataArray( da.ones((6, 4, num_scans), chunks=1024), @@ -43,119 +53,161 @@ def _get_calibration(num_scans): return calibration -def _get_250m_data(num_scans, rows_per_scan, num_cols): +def _get_250m_data(num_scans, rows_per_scan, num_cols, filetype_info): # Set some default attributes - def_attrs = {"FillValue": 65535, + is_fy3ab_mersi1 = filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1")) + + fill_value_name = "_FillValue" if is_fy3ab_mersi1 else "FillValue" + key_prefix = "" if is_fy3ab_mersi1 else "Data/" + + def_attrs = {fill_value_name: 65535, "valid_range": [0, 4095], "Slope": np.array([1.] * 1), "Intercept": np.array([0.] 
* 1) } nounits_attrs = {**def_attrs, **{"units": "NO"}} radunits_attrs = {**def_attrs, **{"units": "mW/ (m2 cm-1 sr)"}} + valid_range_none_attrs = radunits_attrs.copy() + valid_range_none_attrs["valid_range"] = None data = { - "Data/EV_250_RefSB_b1": + f"{key_prefix}EV_250_RefSB_b1": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, dims=("_rows", "_cols")), - "Data/EV_250_RefSB_b2": + f"{key_prefix}EV_250_RefSB_b2": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, dims=("_rows", "_cols")), - "Data/EV_250_RefSB_b3": + f"{key_prefix}EV_250_RefSB_b3": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, dims=("_rows", "_cols")), - "Data/EV_250_RefSB_b4": + f"{key_prefix}EV_250_RefSB_b4": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, dims=("_rows", "_cols")), - "Data/EV_250_Emissive_b24": + f"{key_prefix}EV_250_Emissive_b24": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), - attrs=radunits_attrs, + attrs=valid_range_none_attrs, dims=("_rows", "_cols")), - "Data/EV_250_Emissive_b25": + f"{key_prefix}EV_250_Emissive_b25": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=radunits_attrs, dims=("_rows", "_cols")), + f"{key_prefix}EV_250_Emissive": + xr.DataArray( + da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), + attrs=nounits_attrs, + dims=("_rows", "_cols")), } return data -def _get_1km_data(num_scans, rows_per_scan, num_cols): +def _get_500m_data(num_scans, rows_per_scan, num_cols): data = { - "Data/EV_1KM_LL": - xr.DataArray( - da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, - dtype=np.uint16), - attrs={ - "Slope": np.array([1.]), "Intercept": np.array([0.]), - "FillValue": 65535, - 
"units": "NO", - "valid_range": [0, 4095], - "long_name": b"1km Earth View Science Data", - }, - dims=("_rows", "_cols")), - "Data/EV_1KM_RefSB": + "Data/EV_Reflectance": xr.DataArray( - da.ones((15, num_scans * rows_per_scan, num_cols), chunks=1024, + da.ones((5, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ - "Slope": np.array([1.] * 15), "Intercept": np.array([0.] * 15), + "Slope": np.array([1.] * 5), "Intercept": np.array([0.] * 5), "FillValue": 65535, "units": "NO", "valid_range": [0, 4095], - "long_name": b"1km Earth View Science Data", + "long_name": b"500m Earth View Science Data", }, dims=("_ref_bands", "_rows", "_cols")), - "Data/EV_1KM_Emissive": + "Data/EV_Emissive": xr.DataArray( - da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, + da.ones((3, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ - "Slope": np.array([1.] * 4), "Intercept": np.array([0.] * 4), + "Slope": np.array([1.] * 3), "Intercept": np.array([0.] * 3), "FillValue": 65535, "units": "mW/ (m2 cm-1 sr)", "valid_range": [0, 25000], - "long_name": b"1km Emissive Bands Earth View " + "long_name": b"500m Emissive Bands Earth View " b"Science Data", }, dims=("_ir_bands", "_rows", "_cols")), - "Data/EV_250_Aggr.1KM_RefSB": - xr.DataArray( - da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, - dtype=np.uint16), - attrs={ - "Slope": np.array([1.] * 4), "Intercept": np.array([0.] * 4), - "FillValue": 65535, - "units": "NO", - "valid_range": [0, 4095], - "long_name": b"250m Reflective Bands Earth View " - b"Science Data Aggregated to 1 km" - }, - dims=("_ref250_bands", "_rows", "_cols")), - "Data/EV_250_Aggr.1KM_Emissive": - xr.DataArray( - da.ones((2, num_scans * rows_per_scan, num_cols), chunks=1024, - dtype=np.uint16), - attrs={ - "Slope": np.array([1.] * 2), "Intercept": np.array([0.] 
* 2), - "FillValue": 65535, - "units": "mW/ (m2 cm-1 sr)", - "valid_range": [0, 4095], - "long_name": b"250m Emissive Bands Earth View " - b"Science Data Aggregated to 1 km" - }, - dims=("_ir250_bands", "_rows", "_cols")), } return data +def _get_1km_data(num_scans, rows_per_scan, num_cols, filetype_info): + is_mersi1 = filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1", "fy3c_mersi1")) + is_fy3ab_mersi1 = filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1")) + + fill_value_name = "_FillValue" if is_fy3ab_mersi1 else "FillValue" + key_prefix = "" if is_fy3ab_mersi1 else "Data/" + radunits = "NO" if is_mersi1 else "mW/ (m2 cm-1 sr)" + + data = {"Data/EV_1KM_LL": + xr.DataArray(da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), + attrs={"Slope": np.array([1.]), "Intercept": np.array([0.]), + "FillValue": 65535, + "units": "NO", + "valid_range": [0, 4095], + "long_name": b"1km Earth View Science Data"}, + dims=("_rows", "_cols")), + f"{key_prefix}EV_1KM_RefSB": + xr.DataArray(da.ones((15, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), + attrs={"Slope": np.array([1.] * 15), "Intercept": np.array([0.] * 15), + fill_value_name: 65535, + "units": "NO", + "valid_range": [0, 4095], + "long_name": b"1km Earth View Science Data"}, + dims=("_ref_bands", "_rows", "_cols")), + "Data/EV_1KM_Emissive": + xr.DataArray(da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), + attrs={"Slope": np.array([1.] * 4), "Intercept": np.array([0.] * 4), + "FillValue": 65535, + "units": "mW/ (m2 cm-1 sr)", + "valid_range": [0, 25000], + "long_name": b"1km Emissive Bands Earth View Science Data"}, + dims=("_ir_bands", "_rows", "_cols")), + f"{key_prefix}EV_250_Aggr.1KM_RefSB": + xr.DataArray(da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), + attrs={"Slope": np.array([1.] * 4), "Intercept": np.array([0.] 
* 4), + fill_value_name: 65535, + "units": "NO", + "valid_range": [0, 4095], + "long_name": b"250m Reflective Bands Earth View Science Data Aggregated to 1 km"}, + dims=("_ref250_bands", "_rows", "_cols")), + f"{key_prefix}EV_250_Aggr.1KM_Emissive": + xr.DataArray(da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), + attrs={"Slope": np.array([1.]), "Intercept": np.array([0.]), + fill_value_name: 65535, + "units": radunits, + "valid_range": [0, 4095], + "long_name": b"250m Emissive Bands Earth View Science Data Aggregated to 1 km"}, + dims=("_rows", "_cols")) if is_mersi1 else + xr.DataArray(da.ones((2, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), + attrs={"Slope": np.array([1.] * 2), "Intercept": np.array([0.] * 2), + "FillValue": 65535, + "units": "mW/ (m2 cm-1 sr)", + "valid_range": [0, 4095], + "long_name": b"250m Emissive Bands Earth View Science Data Aggregated to 1 km"}, + dims=("_ir250_bands", "_rows", "_cols")), + f"{key_prefix}SensorZenith": + xr.DataArray( + da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), + attrs={ + "Slope": np.array([.01] * 1), "Intercept": np.array([0.] 
* 1), + "units": "degree", + "valid_range": [0, 28000], + }, + dims=("_rows", "_cols")), + } + return data + + def _get_250m_ll_data(num_scans, rows_per_scan, num_cols): # Set some default attributes def_attrs = {"FillValue": 65535, @@ -235,30 +287,72 @@ def get_test_content(self, filename, filename_info, filetype_info): "/attr/Observing Beginning Time": "18:27:39.720", "/attr/Observing Ending Time": "18:38:36.728", } + fy3a_attrs = { + "/attr/VIR_Cal_Coeff": np.array([0.0, 1.0, 0.0] * 19), + } + fy3b_attrs = { + "/attr/VIS_Cal_Coeff": np.array([0.0, 1.0, 0.0] * 19), + } + fy3d_attrs = { + "/attr/Solar_Irradiance": np.array([1.0] * 19), + } - global_attrs = self._set_sensor_attrs(global_attrs) + global_attrs, ftype = self._set_sensor_attrs(global_attrs) self._add_tbb_coefficients(global_attrs) data = self._get_data_file_content() test_content = {} test_content.update(global_attrs) - test_content.update(data) - test_content.update(_get_calibration(self.num_scans)) + if "fy3a_mersi1" in self.filetype_info["file_type"]: + test_content.update(data[0]) + test_content.update(data[1]) + else: + test_content.update(data) + if "fy3a_mersi1" in self.filetype_info["file_type"]: + test_content.update(fy3a_attrs) + elif "fy3b_mersi1" in self.filetype_info["file_type"]: + test_content.update(fy3b_attrs) + elif "mersi2" in self.filetype_info["file_type"]: + test_content.update(fy3d_attrs) + if not self.filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1")): + test_content.update(_get_calibration(self.num_scans, ftype)) return test_content def _set_sensor_attrs(self, global_attrs): - if "mersi2_l1b" in self.filetype_info["file_type"]: + if "fy3a_mersi1" in self.filetype_info["file_type"]: + global_attrs["/attr/Satellite Name"] = "FY-3A" + global_attrs["/attr/Sensor Identification Code"] = "MERSI" + ftype = "VIS" + elif "fy3b_mersi1" in self.filetype_info["file_type"]: + global_attrs["/attr/Satellite Name"] = "FY-3B" + global_attrs["/attr/Sensor Identification Code"] 
= "MERSI" + ftype = "VIS" + elif "fy3c_mersi1" in self.filetype_info["file_type"]: + global_attrs["/attr/Satellite Name"] = "FY-3C" + global_attrs["/attr/Sensor Identification Code"] = "MERSI" + ftype = "VIS" + elif "mersi2_l1b" in self.filetype_info["file_type"]: global_attrs["/attr/Satellite Name"] = "FY-3D" global_attrs["/attr/Sensor Identification Code"] = "MERSI" + ftype = "VIS" elif "mersi_ll" in self.filetype_info["file_type"]: global_attrs["/attr/Satellite Name"] = "FY-3E" global_attrs["/attr/Sensor Identification Code"] = "MERSI LL" - return global_attrs + ftype = "LL" + elif "mersi_rm" in self.filetype_info["file_type"]: + global_attrs["/attr/Satellite Name"] = "FY-3G" + global_attrs["/attr/Sensor Identification Code"] = "MERSI RM" + ftype = "RSB" + return global_attrs, ftype def _get_data_file_content(self): - if "_geo" in self.filetype_info["file_type"]: - return self._add_geo_data_file_content() - return self._add_band_data_file_content() + if "fy3a_mersi1" in self.filetype_info["file_type"]: + return self._add_band_data_file_content(), self._add_geo_data_file_content() + else: + if "_geo" in self.filetype_info["file_type"]: + return self._add_geo_data_file_content() + else: + return self._add_band_data_file_content() def _add_geo_data_file_content(self): num_scans = self.num_scans @@ -271,10 +365,19 @@ def _add_band_data_file_content(self): num_cols = self._num_cols_for_file_type num_scans = self.num_scans rows_per_scan = self._rows_per_scan - is_mersi2 = self.filetype_info["file_type"].startswith("mersi2_") + is_mersill = self.filetype_info["file_type"].startswith("mersi_ll") is_1km = "_1000" in self.filetype_info["file_type"] - data_func = _get_1km_data if is_1km else (_get_250m_data if is_mersi2 else _get_250m_ll_data) - return data_func(num_scans, rows_per_scan, num_cols) + is_250m = "_250" in self.filetype_info["file_type"] + + if is_1km: + return _get_1km_data(num_scans, rows_per_scan, num_cols, self.filetype_info) + elif is_250m: + if 
is_mersill: + return _get_250m_ll_data(num_scans, rows_per_scan, num_cols) + else: + return _get_250m_data(num_scans, rows_per_scan, num_cols, self.filetype_info) + else: + return _get_500m_data(num_scans, rows_per_scan, num_cols) def _add_tbb_coefficients(self, global_attrs): if not self.filetype_info["file_type"].startswith("mersi2_"): @@ -293,27 +396,66 @@ def _num_cols_for_file_type(self): @property def _geo_prefix_for_file_type(self): - return "Geolocation/" if "1000" in self.filetype_info["file_type"] else "" + if self.filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1")): + return "" + else: + if "1000" in self.filetype_info["file_type"]: + return "Geolocation/" + elif "500" in self.filetype_info["file_type"]: + return "Geolocation/" + else: + return "" -def _test_helper(res): +def _assert_bands_mda_as_exp(res, band_list, exp_result): """Remove test code duplication.""" - assert (2 * 40, 2048 * 2) == res["1"].shape - assert "reflectance" == res["1"].attrs["calibration"] - assert "%" == res["1"].attrs["units"] - assert (2 * 40, 2048 * 2) == res["2"].shape - assert "reflectance" == res["2"].attrs["calibration"] - assert "%" == res["2"].attrs["units"] - assert (2 * 40, 2048 * 2) == res["3"].shape - assert "reflectance" == res["3"].attrs["calibration"] - assert "%" == res["3"].attrs["units"] - assert (2 * 40, 2048 * 2) == res["4"].shape - assert "reflectance" == res["4"].attrs["calibration"] - assert "%" == res["4"].attrs["units"] + exp_cal = exp_result[0] + exp_unit = exp_result[1] + exp_shape = exp_result[2] + for band in band_list: + assert res[band].attrs["calibration"] == exp_cal + assert res[band].attrs["units"] == exp_unit + assert res[band].shape == exp_shape + + +def _test_find_files_and_readers(reader_config, filenames): + """Test file and reader search.""" + from satpy.readers import load_reader + reader = load_reader(reader_config) + files = reader.select_files_from_pathnames(filenames) + # Make sure we have some files + 
reader.create_filehandlers(files) + assert len(files) == len(filenames) + assert reader.file_handlers + return reader + + +def _test_multi_resolutions(available_datasets, band_list, test_resolution, cal_results_number): + """Test some bands have multiple resolutions.""" + for band_name in band_list: + from satpy.dataset.data_dict import get_key + from satpy.tests.utils import make_dataid + ds_id = make_dataid(name=band_name, resolution=250) + if test_resolution == "1000": + with pytest.raises(KeyError): + get_key(ds_id, available_datasets, num_results=cal_results_number, best=False) + else: + + res = get_key(ds_id, available_datasets, num_results=cal_results_number, best=False) + assert len(res) == cal_results_number + + ds_id = make_dataid(name=band_name, resolution=1000) + if test_resolution == "250": + with pytest.raises(KeyError): + get_key(ds_id, available_datasets, num_results=cal_results_number, best=False) + else: + + res = get_key(ds_id, available_datasets, num_results=cal_results_number, best=False) + assert len(res) == cal_results_number class MERSIL1BTester: - """Test MERSI2 L1B Reader.""" + """Test MERSI1/2/LL/RM L1B Reader.""" def setup_method(self): """Wrap HDF5 file handler with our own fake handler.""" @@ -330,294 +472,207 @@ def teardown_method(self): self.p.stop() -class TestMERSI2L1B(MERSIL1BTester): - """Test the FY3D MERSI2 L1B reader.""" +class MERSI12llL1BTester(MERSIL1BTester): + """Test MERSI1/2/LL L1B Reader.""" - yaml_file = "mersi2_l1b.yaml" - filenames_1000m = ["tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF", "tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF"] - filenames_250m = ["tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF", "tf2019071182739.FY3D-X_MERSI_GEOQK_L1B.HDF"] - filenames_all = filenames_1000m + filenames_250m + yaml_file: str = "" + filenames_1000m: list= [] + filenames_250m: list = [] + filenames_all: list = [] + vis_250_bands: list = [] + ir_250_bands: list = [] + vis_1000_bands: list = [] + ir_1000_bands: list = [] + bands_1000: 
list = [] + bands_250: list = [] def test_all_resolutions(self): - """Test loading data when all resolutions are available.""" - from satpy.dataset.data_dict import get_key - from satpy.readers import load_reader - from satpy.tests.utils import make_dataid - filenames = self.filenames_all - reader = load_reader(self.reader_configs) - files = reader.select_files_from_pathnames(filenames) - assert 4 == len(files) - reader.create_filehandlers(files) - # Make sure we have some files - assert reader.file_handlers - - # Verify that we have multiple resolutions for: - # - Bands 1-4 (visible) - # - Bands 24-25 (IR) - available_datasets = reader.available_dataset_ids - for band_name in ("1", "2", "3", "4", "24", "25"): - if band_name in ("24", "25"): - # don't know how to get radiance for IR bands - num_results = 2 + """Test loading data when all resolutions or specific one are available.""" + resolution_list = ["all", "250", "1000"] + file_list = [self.filenames_all, self.filenames_250m, self.filenames_1000m] + + for resolution in resolution_list: + filenames = file_list[resolution_list.index(resolution)] + reader = _test_find_files_and_readers(self.reader_configs, filenames) + + # Verify that we have multiple resolutions for: + # ---------MERSI-1--------- + # - Bands 1-4 (visible) + # - Bands 5 (IR) + # ---------MERSI-2--------- + # - Bands 1-4 (visible) + # - Bands 24-25 (IR) + # ---------MERSI-LL--------- + # - Bands 6-7 (IR) + available_datasets = reader.available_dataset_ids + # Only MERSI-2/LL VIS has radiance calibration + vis_num_results = 3 if self.yaml_file in ["mersi2_l1b.yaml", "mersi_ll_l1b.yaml"] else 2 + ir_num_results = 3 + _test_multi_resolutions(available_datasets, self.vis_250_bands, resolution, vis_num_results) + _test_multi_resolutions(available_datasets, self.ir_250_bands, resolution, ir_num_results) + + res = reader.load(self.bands_1000 + self.bands_250) + if resolution != "250": + assert len(res) == len(self.bands_1000 + self.bands_250) else: - 
num_results = 3 - ds_id = make_dataid(name=band_name, resolution=250) - res = get_key(ds_id, available_datasets, - num_results=num_results, best=False) - assert num_results == len(res) - ds_id = make_dataid(name=band_name, resolution=1000) - res = get_key(ds_id, available_datasets, - num_results=num_results, best=False) - assert num_results == len(res) - - res = reader.load(["1", "2", "3", "4", "5", "20", "24", "25"]) - assert len(res) == 8 - assert res["5"].shape == (2 * 10, 2048) - assert res["5"].attrs["calibration"] == "reflectance" - assert res["5"].attrs["units"] == "%" - assert res["20"].shape == (2 * 10, 2048) - assert res["20"].attrs["calibration"] == "brightness_temperature" - assert res["20"].attrs["units"] == "K" - assert res["24"].shape == (2 * 40, 2048 * 2) - assert res["24"].attrs["calibration"] == "brightness_temperature" - assert res["24"].attrs["units"] == "K" - assert res["25"].shape == (2 * 40, 2048 * 2) - assert res["25"].attrs["calibration"] == "brightness_temperature" - assert res["25"].attrs["units"] == "K" + assert len(res) == len(self.bands_250) + for band in self.bands_1000: + with pytest.raises(KeyError): + res.__getitem__(band) + + if resolution in ["all", "250"]: + _assert_bands_mda_as_exp(res, self.vis_250_bands, ("reflectance", "%", (2 * 40, 2048 * 2))) + _assert_bands_mda_as_exp(res, self.ir_250_bands, ("brightness_temperature", "K", (2 * 40, 2048 * 2))) + + if resolution == "all": + _assert_bands_mda_as_exp(res, self.vis_1000_bands, ("reflectance", "%", (2 * 10, 2048))) + _assert_bands_mda_as_exp(res, self.ir_1000_bands, ("brightness_temperature", "K", (2 * 10, 2048))) + else: + _assert_bands_mda_as_exp(res, self.vis_250_bands, ("reflectance", "%", (2 * 10, 2048))) + _assert_bands_mda_as_exp(res, self.vis_1000_bands, ("reflectance", "%", (2 * 10, 2048))) + _assert_bands_mda_as_exp(res, self.ir_250_bands, ("brightness_temperature", "K", (2 * 10, 2048))) + _assert_bands_mda_as_exp(res, self.ir_1000_bands, ("brightness_temperature", 
"K", (2 * 10, 2048))) def test_counts_calib(self): """Test loading data at counts calibration.""" - from satpy.readers import load_reader from satpy.tests.utils import make_dataid filenames = self.filenames_all - reader = load_reader(self.reader_configs) - files = reader.select_files_from_pathnames(filenames) - assert 4 == len(files) - reader.create_filehandlers(files) - # Make sure we have some files - assert reader.file_handlers + reader = _test_find_files_and_readers(self.reader_configs, filenames) ds_ids = [] - for band_name in ["1", "2", "3", "4", "5", "20", "24", "25"]: + for band_name in self.bands_1000 + self.bands_250: ds_ids.append(make_dataid(name=band_name, calibration="counts")) ds_ids.append(make_dataid(name="satellite_zenith_angle")) res = reader.load(ds_ids) - assert len(res) == 9 - assert res["1"].shape == (2 * 40, 2048 * 2) - assert res["1"].attrs["calibration"] == "counts" - assert res["1"].dtype == np.uint16 - assert res["1"].attrs["units"] == "1" - assert res["2"].shape == (2 * 40, 2048 * 2) - assert res["2"].attrs["calibration"] == "counts" - assert res["2"].dtype == np.uint16 - assert res["2"].attrs["units"] == "1" - assert res["3"].shape == (2 * 40, 2048 * 2) - assert res["3"].attrs["calibration"] == "counts" - assert res["3"].dtype == np.uint16 - assert res["3"].attrs["units"] == "1" - assert res["4"].shape == (2 * 40, 2048 * 2) - assert res["4"].attrs["calibration"] == "counts" - assert res["4"].dtype == np.uint16 - assert res["4"].attrs["units"] == "1" - assert res["5"].shape == (2 * 10, 2048) - assert res["5"].attrs["calibration"] == "counts" - assert res["5"].dtype == np.uint16 - assert res["5"].attrs["units"] == "1" - assert res["20"].shape == (2 * 10, 2048) - assert res["20"].attrs["calibration"] == "counts" - assert res["20"].dtype == np.uint16 - assert res["20"].attrs["units"] == "1" - assert res["24"].shape == (2 * 40, 2048 * 2) - assert res["24"].attrs["calibration"] == "counts" - assert res["24"].dtype == np.uint16 - assert 
res["24"].attrs["units"] == "1" - assert res["25"].shape == (2 * 40, 2048 * 2) - assert res["25"].attrs["calibration"] == "counts" - assert res["25"].dtype == np.uint16 - assert res["25"].attrs["units"] == "1" + assert len(res) == len(self.bands_1000) + len(self.bands_250) + 1 + _assert_bands_mda_as_exp(res, self.bands_250, ("counts", "1", (2 * 40, 2048 * 2))) + _assert_bands_mda_as_exp(res, self.bands_1000, ("counts", "1", (2 * 10, 2048))) def test_rad_calib(self): - """Test loading data at radiance calibration.""" - from satpy.readers import load_reader + """Test loading data at radiance calibration. For MERSI-2/LL VIS/IR and MERSI-1 IR.""" from satpy.tests.utils import make_dataid filenames = self.filenames_all - reader = load_reader(self.reader_configs) - files = reader.select_files_from_pathnames(filenames) - assert 4 == len(files) - reader.create_filehandlers(files) - # Make sure we have some files - assert reader.file_handlers + reader = _test_find_files_and_readers(self.reader_configs, filenames) ds_ids = [] - for band_name in ["1", "2", "3", "4", "5"]: + test_bands = self.bands_1000 + self.bands_250 if self.yaml_file in ["mersi2_l1b.yaml", "mersi_ll_l1b.yaml"] \ + else self.ir_250_bands + self.ir_1000_bands + + for band_name in test_bands: ds_ids.append(make_dataid(name=band_name, calibration="radiance")) res = reader.load(ds_ids) - assert len(res) == 5 - assert res["1"].shape == (2 * 40, 2048 * 2) - assert res["1"].attrs["calibration"] == "radiance" - assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)" - assert res["2"].shape == (2 * 40, 2048 * 2) - assert res["2"].attrs["calibration"] == "radiance" - assert res["2"].attrs["units"] == "mW/ (m2 cm-1 sr)" - assert res["3"].shape == (2 * 40, 2048 * 2) - assert res["3"].attrs["calibration"] == "radiance" - assert res["3"].attrs["units"] == "mW/ (m2 cm-1 sr)" - assert res["4"].shape == (2 * 40, 2048 * 2) - assert res["4"].attrs["calibration"] == "radiance" - assert res["4"].attrs["units"] == "mW/ (m2 cm-1 
sr)" - assert res["5"].shape == (2 * 10, 2048) - assert res["5"].attrs["calibration"] == "radiance" - assert res["5"].attrs["units"] == "mW/ (m2 cm-1 sr)" - - def test_1km_resolutions(self): - """Test loading data when only 1km resolutions are available.""" - from satpy.dataset.data_dict import get_key - from satpy.readers import load_reader - from satpy.tests.utils import make_dataid - filenames = self.filenames_1000m - reader = load_reader(self.reader_configs) - files = reader.select_files_from_pathnames(filenames) - assert 2 == len(files) - reader.create_filehandlers(files) - # Make sure we have some files - assert reader.file_handlers + assert len(res) == len(test_bands) + if self.yaml_file in ["mersi2_l1b.yaml", "mersi_ll_l1b.yaml"]: + _assert_bands_mda_as_exp(res, self.bands_250, ("radiance", "mW/ (m2 cm-1 sr)", (2 * 40, 2048 * 2))) + _assert_bands_mda_as_exp(res, self.bands_1000, ("radiance", "mW/ (m2 cm-1 sr)", (2 * 10, 2048))) + else: + _assert_bands_mda_as_exp(res, self.ir_250_bands, ("radiance", "mW/ (m2 cm-1 sr)", (2 * 40, 2048 * 2))) + _assert_bands_mda_as_exp(res, self.ir_1000_bands, ("radiance", "mW/ (m2 cm-1 sr)", (2 * 10, 2048))) - # Verify that we have multiple resolutions for: - # - Bands 1-4 (visible) - # - Bands 24-25 (IR) - available_datasets = reader.available_dataset_ids - for band_name in ("1", "2", "3", "4", "24", "25"): - if band_name in ("24", "25"): - # don't know how to get radiance for IR bands - num_results = 2 - else: - num_results = 3 - ds_id = make_dataid(name=band_name, resolution=250) - with pytest.raises(KeyError): - get_key(ds_id, available_datasets, num_results=num_results, best=False) - ds_id = make_dataid(name=band_name, resolution=1000) - res = get_key(ds_id, available_datasets, - num_results=num_results, best=False) - assert num_results == len(res) - - res = reader.load(["1", "2", "3", "4", "5", "20", "24", "25"]) - assert len(res) == 8 - assert res["1"].shape == (2 * 10, 2048) - assert res["1"].attrs["calibration"] == 
"reflectance" - assert res["1"].attrs["units"] == "%" - assert res["2"].shape == (2 * 10, 2048) - assert res["2"].attrs["calibration"] == "reflectance" - assert res["2"].attrs["units"] == "%" - assert res["3"].shape == (2 * 10, 2048) - assert res["3"].attrs["calibration"] == "reflectance" - assert res["3"].attrs["units"] == "%" - assert res["4"].shape == (2 * 10, 2048) - assert res["4"].attrs["calibration"] == "reflectance" - assert res["4"].attrs["units"] == "%" - assert res["5"].shape == (2 * 10, 2048) - assert res["5"].attrs["calibration"] == "reflectance" - assert res["5"].attrs["units"] == "%" - assert res["20"].shape == (2 * 10, 2048) - assert res["20"].attrs["calibration"] == "brightness_temperature" - assert res["20"].attrs["units"] == "K" - assert res["24"].shape == (2 * 10, 2048) - assert res["24"].attrs["calibration"] == "brightness_temperature" - assert res["24"].attrs["units"] == "K" - assert res["25"].shape == (2 * 10, 2048) - assert res["25"].attrs["calibration"] == "brightness_temperature" - assert res["25"].attrs["units"] == "K" - - def test_250_resolutions(self): - """Test loading data when only 250m resolutions are available.""" - from satpy.dataset.data_dict import get_key - from satpy.readers import load_reader - from satpy.tests.utils import make_dataid - filenames = self.filenames_250m - reader = load_reader(self.reader_configs) - files = reader.select_files_from_pathnames(filenames) - assert 2 == len(files) - reader.create_filehandlers(files) - # Make sure we have some files - assert reader.file_handlers - # Verify that we have multiple resolutions for: - # - Bands 1-4 (visible) - # - Bands 24-25 (IR) - available_datasets = reader.available_dataset_ids - for band_name in ("1", "2", "3", "4", "24", "25"): - if band_name in ("24", "25"): - # don't know how to get radiance for IR bands - num_results = 2 - else: - num_results = 3 - ds_id = make_dataid(name=band_name, resolution=250) - res = get_key(ds_id, available_datasets, - 
num_results=num_results, best=False) - assert num_results == len(res) - ds_id = make_dataid(name=band_name, resolution=1000) - with pytest.raises(KeyError): - get_key(ds_id, available_datasets, num_results=num_results, best=False) - - res = reader.load(["1", "2", "3", "4", "5", "20", "24", "25"]) - assert len(res) == 6 - with pytest.raises(KeyError): - res.__getitem__("5") - with pytest.raises(KeyError): - res.__getitem__("20") - _test_helper(res) - assert res["24"].shape == (2 * 40, 2048 * 2) - assert res["24"].attrs["calibration"] == "brightness_temperature" - assert res["24"].attrs["units"] == "K" - assert res["25"].shape == (2 * 40, 2048 * 2) - assert res["25"].attrs["calibration"] == "brightness_temperature" - assert res["25"].attrs["units"] == "K" - - -class TestMERSILLL1B(MERSIL1BTester): - """Test the FY3E MERSI-LL L1B reader.""" +class TestFY3AMERSI1L1B(MERSI12llL1BTester): + """Test the FY3A MERSI1 L1B reader.""" + + yaml_file = "fy3a_mersi1_l1b.yaml" + filenames_1000m = ["FY3A_MERSI_GBAL_L1_20090601_1200_1000M_MS.hdf"] + filenames_250m = ["FY3A_MERSI_GBAL_L1_20090601_1200_0250M_MS.hdf"] + filenames_all = filenames_1000m + filenames_250m + vis_250_bands = ["1", "2", "3", "4"] + ir_250_bands = ["5"] + vis_1000_bands = ["6", "7", "8", "11", "15", "19", "20"] + ir_1000_bands = [] + bands_1000 = vis_1000_bands + ir_1000_bands + bands_250 = vis_250_bands + ir_250_bands + + +class TestFY3BMERSI1L1B(MERSI12llL1BTester): + """Test the FY3B MERSI1 L1B reader.""" + + yaml_file = "fy3b_mersi1_l1b.yaml" + filenames_1000m = ["FY3B_MERSI_GBAL_L1_20110824_1850_1000M_MS.hdf"] + filenames_250m = ["FY3B_MERSI_GBAL_L1_20110824_1850_0250M_MS.hdf", "FY3B_MERSI_GBAL_L1_20110824_1850_GEOXX_MS.hdf"] + filenames_all = filenames_1000m + filenames_250m + vis_250_bands = ["1", "2", "3", "4"] + ir_250_bands = ["5"] + vis_1000_bands = ["6", "7", "8", "11", "15", "19", "20"] + ir_1000_bands = [] + bands_1000 = vis_1000_bands + ir_1000_bands + bands_250 = vis_250_bands + ir_250_bands + 
+ +class TestFY3CMERSI1L1B(MERSI12llL1BTester): + """Test the FY3C MERSI1 L1B reader.""" + + yaml_file = "fy3c_mersi1_l1b.yaml" + filenames_1000m = ["FY3C_MERSI_GBAL_L1_20131002_1835_1000M_MS.hdf", "FY3C_MERSI_GBAL_L1_20131002_1835_GEO1K_MS.hdf"] + filenames_250m = ["FY3C_MERSI_GBAL_L1_20131002_1835_0250M_MS.hdf", "FY3C_MERSI_GBAL_L1_20131002_1835_GEOQK_MS.hdf"] + filenames_all = filenames_1000m + filenames_250m + vis_250_bands = ["1", "2", "3", "4"] + ir_250_bands = ["5"] + vis_1000_bands = ["6", "7", "8", "11", "15", "19", "20"] + ir_1000_bands = [] + bands_1000 = vis_1000_bands + ir_1000_bands + bands_250 = vis_250_bands + ir_250_bands + + +class TestFY3DMERSI2L1B(MERSI12llL1BTester): + """Test the FY3D MERSI2 L1B reader.""" + + yaml_file = "mersi2_l1b.yaml" + filenames_1000m = ["tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF", "tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF"] + filenames_250m = ["tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF", "tf2019071182739.FY3D-X_MERSI_GEOQK_L1B.HDF"] + filenames_all = filenames_1000m + filenames_250m + vis_250_bands = ["1", "2", "3", "4"] + ir_250_bands = ["24", "25"] + vis_1000_bands = ["5", "8", "9", "11", "15", "17", "19"] + ir_1000_bands = ["20", "21", "23"] + bands_1000 = vis_1000_bands + ir_1000_bands + bands_250 = vis_250_bands + ir_250_bands + + +class TestFY3EMERSIllL1B(MERSI12llL1BTester): + """Test the FY3D MERSI2 L1B reader.""" yaml_file = "mersi_ll_l1b.yaml" filenames_1000m = ["FY3E_MERSI_GRAN_L1_20230410_1910_1000M_V0.HDF", "FY3E_MERSI_GRAN_L1_20230410_1910_GEO1K_V0.HDF"] filenames_250m = ["FY3E_MERSI_GRAN_L1_20230410_1910_0250M_V0.HDF", "FY3E_MERSI_GRAN_L1_20230410_1910_GEOQK_V0.HDF"] filenames_all = filenames_1000m + filenames_250m + vis_250_bands = [] + ir_250_bands = ["6", "7"] + vis_1000_bands = ["1"] + ir_1000_bands = ["2", "3", "5"] + bands_1000 = vis_1000_bands + ir_1000_bands + bands_250 = vis_250_bands + ir_250_bands - def test_all_resolutions(self): + +class TestMERSIRML1B(MERSIL1BTester): + """Test the FY3E 
MERSI-RM L1B reader.""" + + yaml_file = "mersi_rm_l1b.yaml" + filenames_500m = ["FY3G_MERSI_GRAN_L1_20230410_1910_0500M_V1.HDF", + "FY3G_MERSI_GRAN_L1_20230410_1910_GEOHK_V1.HDF", + ] + + def test_500m_resolution(self): """Test loading data when all resolutions are available.""" - from satpy.dataset.data_dict import get_key from satpy.readers import load_reader - from satpy.tests.utils import make_dataid - filenames = self.filenames_all + filenames = self.filenames_500m reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) - assert 4 == len(files) + assert 2 == len(files) reader.create_filehandlers(files) # Make sure we have some files assert reader.file_handlers - # Verify that we have multiple resolutions for: - # - Bands 1-4 (visible) - # - Bands 24-25 (IR) - available_datasets = reader.available_dataset_ids - for band_name in ("6", "7"): - num_results = 2 - ds_id = make_dataid(name=band_name, resolution=250) - res = get_key(ds_id, available_datasets, - num_results=num_results, best=False) - assert num_results == len(res) - ds_id = make_dataid(name=band_name, resolution=1000) - res = get_key(ds_id, available_datasets, - num_results=num_results, best=False) - assert num_results == len(res) - res = reader.load(["1", "2", "4", "7"]) assert len(res) == 4 - assert res["4"].shape == (2 * 10, 2048) - assert res["1"].attrs["calibration"] == "radiance" - assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)" - assert res["2"].shape == (2 * 10, 2048) - assert res["2"].attrs["calibration"] == "brightness_temperature" - assert res["2"].attrs["units"] == "K" - assert res["7"].shape == (2 * 40, 2048 * 2) + assert res["4"].shape == (2 * 10, 4096) + assert res["1"].attrs["calibration"] == "reflectance" + assert res["1"].attrs["units"] == "%" + assert res["2"].shape == (2 * 10, 4096) + assert res["2"].attrs["calibration"] == "reflectance" + assert res["2"].attrs["units"] == "%" + assert res["7"].shape == (20, 2048 * 2) assert 
res["7"].attrs["calibration"] == "brightness_temperature" assert res["7"].attrs["units"] == "K" @@ -625,123 +680,21 @@ def test_rad_calib(self): """Test loading data at radiance calibration.""" from satpy.readers import load_reader from satpy.tests.utils import make_dataid - filenames = self.filenames_all + filenames = self.filenames_500m reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) - assert 4 == len(files) + assert 2 == len(files) reader.create_filehandlers(files) # Make sure we have some files assert reader.file_handlers + band_names = ["1", "3", "4", "6", "7"] ds_ids = [] - for band_name in ["1", "3", "4", "6", "7"]: + for band_name in band_names: ds_ids.append(make_dataid(name=band_name, calibration="radiance")) res = reader.load(ds_ids) assert len(res) == 5 - assert res["1"].shape == (2 * 10, 2048) - assert res["1"].attrs["calibration"] == "radiance" - assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)" - assert res["3"].shape == (2 * 10, 2048) - assert res["3"].attrs["calibration"] == "radiance" - assert res["3"].attrs["units"] == "mW/ (m2 cm-1 sr)" - assert res["4"].shape == (2 * 10, 2048) - assert res["4"].attrs["calibration"] == "radiance" - assert res["4"].attrs["units"] == "mW/ (m2 cm-1 sr)" - assert res["6"].shape == (2 * 40, 2048 * 2) - assert res["6"].attrs["calibration"] == "radiance" - assert res["6"].attrs["units"] == "mW/ (m2 cm-1 sr)" - assert res["7"].shape == (2 * 40, 2048 * 2) - assert res["7"].attrs["calibration"] == "radiance" - assert res["7"].attrs["units"] == "mW/ (m2 cm-1 sr)" - - def test_1km_resolutions(self): - """Test loading data when only 1km resolutions are available.""" - from satpy.dataset.data_dict import get_key - from satpy.readers import load_reader - from satpy.tests.utils import make_dataid - filenames = self.filenames_1000m - reader = load_reader(self.reader_configs) - files = reader.select_files_from_pathnames(filenames) - assert 2 == len(files) - 
reader.create_filehandlers(files) - # Make sure we have some files - assert reader.file_handlers - - # Verify that we have multiple resolutions for: - # - Band 6-7 (IR) - # - Bands 24-25 (IR) - available_datasets = reader.available_dataset_ids - for band_name in ("1", "2", "3", "4", "6", "7"): - if band_name == "1": - # don't know how to get anything apart from radiance for LL band - num_results = 1 - else: - num_results = 2 - ds_id = make_dataid(name=band_name, resolution=250) - with pytest.raises(KeyError): - get_key(ds_id, available_datasets, num_results=num_results, best=False) - ds_id = make_dataid(name=band_name, resolution=1000) - res = get_key(ds_id, available_datasets, - num_results=num_results, best=False) - if band_name == "1": - assert num_results == len([res]) - else: - assert num_results == len(res) - - res = reader.load(["1", "2", "3", "5", "6", "7"]) - assert len(res) == 6 - assert res["1"].shape == (2 * 10, 2048) - assert "radiance" == res["1"].attrs["calibration"] - assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)" - assert res["2"].shape == (2 * 10, 2048) - assert "brightness_temperature" == res["2"].attrs["calibration"] - assert res["2"].attrs["units"] == "K" - assert res["3"].shape == (2 * 10, 2048) - assert "brightness_temperature" == res["3"].attrs["calibration"] - assert res["3"].attrs["units"] == "K" - assert res["5"].shape == (2 * 10, 2048) - assert "brightness_temperature" == res["5"].attrs["calibration"] - assert res["5"].attrs["units"] == "K" - assert res["6"].shape == (2 * 10, 2048) - assert "brightness_temperature" == res["6"].attrs["calibration"] - assert res["6"].attrs["units"] == "K" - assert res["7"].shape == (2 * 10, 2048) - assert "brightness_temperature" == res["7"].attrs["calibration"] - assert res["7"].attrs["units"] == "K" - - def test_250_resolutions(self): - """Test loading data when only 250m resolutions are available.""" - from satpy.dataset.data_dict import get_key - from satpy.readers import load_reader - from 
satpy.tests.utils import make_dataid - filenames = self.filenames_250m - reader = load_reader(self.reader_configs) - files = reader.select_files_from_pathnames(filenames) - assert 2 == len(files) - reader.create_filehandlers(files) - # Make sure we have some files - assert reader.file_handlers - - # Verify that we have multiple resolutions for: - # - Bands 6-7 - available_datasets = reader.available_dataset_ids - for band_name in ("6", "7"): - num_results = 2 - ds_id = make_dataid(name=band_name, resolution=250) - res = get_key(ds_id, available_datasets, - num_results=num_results, best=False) - assert num_results == len(res) - ds_id = make_dataid(name=band_name, resolution=1000) - with pytest.raises(KeyError): - get_key(ds_id, available_datasets, num_results=num_results, best=False) - - res = reader.load(["1", "6", "7"]) - assert 2 == len(res) - with pytest.raises(KeyError): - res.__getitem__("1") - assert (2 * 40, 2048 * 2) == res["6"].shape - assert "brightness_temperature" == res["6"].attrs["calibration"] - assert "K" == res["6"].attrs["units"] - assert (2 * 40, 2048 * 2) == res["7"].shape - assert "brightness_temperature" == res["7"].attrs["calibration"] - assert "K" == res["7"].attrs["units"] + for band_name in band_names: + assert res[band_name].shape == (20, 4096) + assert res[band_name].attrs["calibration"] == "radiance" + assert res[band_name].attrs["units"] == "mW/ (m2 cm-1 sr)" diff --git a/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py b/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py index 4083f7de00..be0bc12ee1 100644 --- a/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py +++ b/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py @@ -18,10 +18,10 @@ # Satpy. If not, see . 
"""Module for testing the satpy.readers.tropomi_l2 module.""" +import datetime as dt import itertools import os import unittest -from datetime import datetime from unittest import mock import numpy as np @@ -31,7 +31,7 @@ DEFAULT_FILE_DTYPE = np.float32 DEFAULT_FILE_SHAPE = (721, 1440) -DEFAULT_DATE = datetime(2019, 6, 19, 13, 0) +DEFAULT_DATE = dt.datetime(2019, 6, 19, 13, 0) DEFAULT_LAT = np.linspace(-90, 90, DEFAULT_FILE_SHAPE[0], dtype=DEFAULT_FILE_DTYPE) DEFAULT_LON = np.linspace(-180, 180, DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE) DEFAULT_FILE_FLOAT_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], diff --git a/satpy/tests/reader_tests/test_mimic_TPW2_nc.py b/satpy/tests/reader_tests/test_mimic_TPW2_nc.py index 63214b0477..29857afbed 100644 --- a/satpy/tests/reader_tests/test_mimic_TPW2_nc.py +++ b/satpy/tests/reader_tests/test_mimic_TPW2_nc.py @@ -16,11 +16,12 @@ # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . 
+ """Module for testing the satpy.readers.tropomi_l2 module.""" +import datetime as dt import os import unittest -from datetime import datetime from unittest import mock import numpy as np @@ -43,8 +44,8 @@ class FakeNetCDF4FileHandlerMimic(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" from xarray import DataArray - dt_s = filename_info.get("start_time", datetime(2019, 6, 19, 13, 0)) - dt_e = filename_info.get("end_time", datetime(2019, 6, 19, 13, 0)) + dt_s = filename_info.get("start_time", dt.datetime(2019, 6, 19, 13, 0)) + dt_e = filename_info.get("end_time", dt.datetime(2019, 6, 19, 13, 0)) if filetype_info["file_type"] == "mimicTPW2_comp": file_content = { diff --git a/satpy/tests/reader_tests/test_mirs.py b/satpy/tests/reader_tests/test_mirs.py index b857147e47..ecd078438a 100644 --- a/satpy/tests/reader_tests/test_mirs.py +++ b/satpy/tests/reader_tests/test_mirs.py @@ -16,11 +16,13 @@ # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . 
+ """Module for testing the satpy.readers.mirs module.""" + from __future__ import annotations +import datetime as dt import os -from datetime import datetime from unittest import mock import numpy as np @@ -31,6 +33,7 @@ from satpy.dataset import DataID from satpy.readers import load_reader from satpy.readers.yaml_reader import FileYAMLReader +from satpy.tests.utils import RANDOM_GEN METOP_FILE = "IMG_SX.M2.D17037.S1601.E1607.B0000001.WE.HR.ORB.nc" NPP_MIRS_L2_SWATH = "NPR-MIRS-IMG_v11r6_npp_s201702061601000_e201702061607000_c202012201658410.nc" @@ -45,7 +48,7 @@ N_SCANLINE = 100 DEFAULT_FILE_DTYPE = np.float32 DEFAULT_2D_SHAPE = (N_SCANLINE, N_FOV) -DEFAULT_DATE = datetime(2019, 6, 19, 13, 0) +DEFAULT_DATE = dt.datetime(2019, 6, 19, 13, 0) DEFAULT_LAT = np.linspace(23.09356, 36.42844, N_SCANLINE * N_FOV, dtype=DEFAULT_FILE_DTYPE) DEFAULT_LON = np.linspace(127.6879, 144.5284, N_SCANLINE * N_FOV, @@ -71,13 +74,13 @@ PLATFORM = {"M2": "metop-a", "NPP": "npp", "GPM": "gpm"} SENSOR = {"m2": "amsu-mhs", "npp": "atms", "gpm": "GPI"} -START_TIME = datetime(2017, 2, 6, 16, 1, 0) -END_TIME = datetime(2017, 2, 6, 16, 7, 0) +START_TIME = dt.datetime(2017, 2, 6, 16, 1, 0) +END_TIME = dt.datetime(2017, 2, 6, 16, 7, 0) def fake_coeff_from_fn(fn): """Create Fake Coefficients.""" - ameans = np.random.uniform(261, 267, N_CHANNEL) + ameans = RANDOM_GEN.uniform(261, 267, N_CHANNEL) locations = [ [1, 2], [1, 2], @@ -117,7 +120,7 @@ def fake_coeff_from_fn(fn): str_coeff = " ".join([str(x) for x in random_coeff]) random_means = np.zeros(all_nchx[nx]) str_means = " ".join([str(x) for x in random_means]) - error_val = np.random.uniform(0, 4) + error_val = RANDOM_GEN.uniform(0, 4) coeffs_line = " {:>2} {:>2} {} {} {}\n".format(idx, fov, str_coeff, str_means, @@ -138,7 +141,7 @@ def _get_datasets_with_attributes(**kwargs): "_FillValue": -999, "valid_range": [0, 50000]}, dims=("Scanline", "Field_of_view", "Channel")) - rr = xr.DataArray(np.random.randint(100, 500, size=(N_SCANLINE, N_FOV), 
dtype=np.int16), + rr = xr.DataArray(RANDOM_GEN.integers(100, 500, size=(N_SCANLINE, N_FOV), dtype=np.int16), attrs={"long_name": "Rain Rate (mm/hr)", "units": "mm/hr", "coordinates": "Longitude Latitude", @@ -146,7 +149,7 @@ def _get_datasets_with_attributes(**kwargs): "_FillValue": -999, "valid_range": [0, 1000]}, dims=("Scanline", "Field_of_view")) - sfc_type = xr.DataArray(np.random.randint(0, 4, size=(N_SCANLINE, N_FOV), dtype=np.int16), + sfc_type = xr.DataArray(RANDOM_GEN.integers(0, 4, size=(N_SCANLINE, N_FOV), dtype=np.int16), attrs={"description": "type of surface:0-ocean," + "1-sea ice,2-land,3-snow", "units": "1", @@ -187,12 +190,12 @@ def _get_datasets_with_less_attributes(): attrs={"long_name": "Channel Temperature (K)", "scale_factor": 0.01}, dims=("Scanline", "Field_of_view", "Channel")) - rr = xr.DataArray(np.random.randint(100, 500, size=(N_SCANLINE, N_FOV), dtype=np.int16), + rr = xr.DataArray(RANDOM_GEN.integers(100, 500, size=(N_SCANLINE, N_FOV), dtype=np.int16), attrs={"long_name": "Rain Rate (mm/hr)", "scale_factor": 0.1}, dims=("Scanline", "Field_of_view")) - sfc_type = xr.DataArray(np.random.randint(0, 4, size=(N_SCANLINE, N_FOV), dtype=np.int16), + sfc_type = xr.DataArray(RANDOM_GEN.integers(0, 4, size=(N_SCANLINE, N_FOV), dtype=np.int16), attrs={"description": "type of surface:0-ocean," + "1-sea ice,2-land,3-snow"}, dims=("Scanline", "Field_of_view")) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index bcee32ddbb..84828f4ecf 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -17,15 +17,18 @@ # satpy. If not, see . """Module for testing the satpy.readers.msi_safe module.""" import unittest.mock as mock +from datetime import datetime from io import BytesIO, StringIO import numpy as np import pytest import xarray as xr -from satpy.tests.utils import make_dataid +# Datetimes used for checking start time is correctly set. 
+fname_dt = datetime(2020, 10, 1, 18, 35, 41) +tilemd_dt = datetime(2020, 10, 1, 16, 34, 23, 153611) -mtd_tile_xml = b""" +mtd_l1c_tile_xml = b""" @@ -575,7 +578,6 @@ """ # noqa - mtd_l1c_old_xml = """ @@ -861,108 +863,775 @@ """ # noqa +mtd_l2a_xml = """ + + + + 2024-04-11T03:05:21.024Z + 2024-04-11T03:05:21.024Z + S2A_MSIL2A_20240411T030521_N0510_R075_T50TMK_20240411T080950.SAFE + Level-2A + S2MSI2A + 05.10 + https://doi.org/10.5270/S2_-znk9xsj + 2024-04-11T08:09:50.000000Z + Not applicable + Not applicable + + Sentinel-2A + INS-NOBS + 2024-04-11T03:05:21.024Z + 75 + DESCENDING + + +SAFE_COMPACT + + + + + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_B02_10m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_B03_10m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_B04_10m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_B08_10m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_TCI_10m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_AOT_10m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_WVP_10m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B01_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B02_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B03_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B04_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B05_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B06_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B07_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B8A_20m + 
GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B11_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B12_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_TCI_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_AOT_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_WVP_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_SCL_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B01_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B02_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B03_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B04_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B05_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B06_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B07_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B8A_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B09_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B11_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B12_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_TCI_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_AOT_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_WVP_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_SCL_60m + + + + + + + NODATA + 0 + + + SATURATED + 65535 + + + 3 + 2 + 1 + + + 10000 + 1000.0 + 1000.0 + + + -1000 + 
-1000 + -1000 + -1000 + -1000 + -1000 + -1000 + -1000 + -1000 + -1000 + -1000 + -1000 + -1000 + + + 0.998279632507911 + + 1884.69 + 1959.66 + 1823.24 + 1512.06 + 1424.64 + 1287.61 + 1162.08 + 1041.63 + 955.32 + 812.92 + 367.15 + 245.59 + 85.25 + + + + + 60 + + 412 + 456 + 442.7 + + + 1 + 0.001775742 0.004073061 0.003626143 0.003515199 0.005729163 0.003780292 0.002636732 0.001262113 0.001987583 0.001368913 0.001250444 0.000463454 0.000814293 0.001376431 0.001485086 0.001823735 0.001626817 0.004392062 0.029008099 0.11874593 0.32387506 0.57281921 0.71472749 0.76196778 0.78929702 0.80862387 0.81089382 0.82419876 0.85415811 0.87079088 0.88731097 0.92619924 0.98228149 1 0.9752382 0.93596338 0.88997148 0.85021048 0.82569453 0.78390239 0.61417422 0.33007109 0.12410831 0.04365694 0.014749595 + + + + 10 + + 456 + 533 + 492.7 + + + 1 + 0.04255531 0.0722983 0.15374322 0.32799225 0.55336788 0.71011166 0.75285179 0.75232691 0.75668081 0.76326948 0.76239425 0.7852515 0.81546669 0.86179176 0.89282599 0.9195221 0.91900649 0.91315754 0.90035366 0.88989693 0.8823246 0.87606118 0.88429987 0.90695544 0.93232085 0.93947252 0.94383543 0.92204086 0.8860231 0.84743609 0.81251687 0.7823971 0.7731087 0.77209054 0.78742652 0.81217177 0.84605052 0.88767996 0.92793997 0.95069235 0.96573311 0.96938253 0.96570294 0.95832003 0.95405064 0.95178268 0.95699722 0.96556515 0.9770514 0.97709574 0.97436606 0.95903183 0.93506318 0.90190134 0.87165792 0.84402444 0.82280852 0.81536043 0.82057639 0.8395149 0.86992171 0.91526205 0.96067028 0.99163699 1 0.98356097 0.91130763 0.74018256 0.50395858 0.3050155 0.18004605 0.10738342 0.06593592 0.04207746 0.02662129 0.0143396 0.00265779 0.00081822 + + + + 10 + + 538 + 583 + 559.8 + + + 1 + 0.01448181 0.03422251 0.07346335 0.15444843 0.31661425 0.55322279 0.74859406 0.84890306 0.89772216 0.9215368 0.92572845 0.91122688 0.88818924 0.86523756 0.84718187 0.8387572 0.84459081 0.86219653 0.88838714 0.92443236 0.96017974 0.98685516 1 0.9986008 0.98076472 0.94522089 
0.8981778 0.85580323 0.81841734 0.78862048 0.76460653 0.74963745 0.75055111 0.76137888 0.78244479 0.79890086 0.81016957 0.81408886 0.77358596 0.62881065 0.40397555 0.21542098 0.10715281 0.04792877 0.01848693 0.00108588 + + + + 10 + + 646 + 684 + 664.6 + + + 1 + 0.00141521 0.02590238 0.11651178 0.39088616 0.74959342 0.94485805 0.98011173 0.99406309 1 0.99545475 0.99052772 0.97733476 0.94055988 0.87894956 0.81629384 0.77345952 0.75448766 0.75991531 0.7826343 0.8101689 0.83612975 0.86125424 0.88609106 0.91138767 0.93405146 0.95042063 0.9592573 0.96039555 0.95913395 0.95809013 0.95527459 0.94376465 0.89490799 0.74426308 0.476777 0.22960399 0.08009118 0.02617076 0.00415242 + + + + 20 + + 695 + 714 + 704.1 + + + 1 + 0.02835786 0.12369337 0.39378774 0.76113071 0.97108502 0.99889523 1 0.99412258 0.98321789 0.96704093 0.94847389 0.92714833 0.90372458 0.88614713 0.86723745 0.79075319 0.58840332 0.26334833 0.05675422 0.00618833 + + + + 20 + + 731 + 749 + 740.5 + + + 1 + 0.00171088 0.05467153 0.25806676 0.64722098 0.89218999 0.90232877 0.91508768 0.94115846 0.96299993 0.97510481 0.9770217 0.98736251 1 0.98880277 0.97179916 0.90126739 0.60672391 0.20520227 0.0267569 + + + + 20 + + 769 + 797 + 782.8 + + + 1 + 0.00045899 0.0117201 0.05219715 0.16561733 0.36903355 0.63685453 0.86119638 0.97002897 0.99119602 0.99897921 1 0.97725155 0.92572385 0.86605804 0.81969611 0.79407674 0.79111029 0.80431552 0.81902721 0.82571292 0.82011829 0.79222195 0.72054559 0.58767794 0.41430355 0.23088817 0.09850282 0.02736551 0.00516235 + + + + 10 + + 760 + 907 + 832.8 + + + 1 + 0.00067259 0.00388856 0 0 0 0 0 0 0 0 0 0 0 0.00028956 0.00702964 0.01752391 0.03231111 0.05328661 0.08299885 0.12748502 0.19591065 0.30246323 0.43553954 0.57141637 0.69766701 0.80303852 0.89115744 0.95284584 0.98894161 1 0.98840653 0.96389216 0.94207967 0.93694643 0.94227343 0.95395718 0.96828896 0.97966549 0.9854444 0.98592681 0.98391181 0.97793903 0.97722771 0.97810609 0.98144486 0.98764558 0.98857708 0.9862422 0.98070921 
0.97078624 0.95721089 0.93865821 0.91672388 0.89620759 0.872888 0.85160331 0.8246394 0.80078117 0.7823386 0.76360274 0.74962771 0.7387221 0.73079407 0.72271237 0.72507708 0.72563856 0.72304217 0.72229211 0.71616364 0.71159446 0.70826954 0.70157205 0.69924532 0.70093762 0.70692733 0.71824001 0.73124634 0.7484061 0.76818541 0.78394807 0.7968381 0.80260206 0.8045194 0.80240918 0.79699072 0.78920304 0.77691621 0.76518406 0.75119717 0.73700357 0.72262399 0.70412578 0.68410805 0.66474528 0.64736891 0.63005125 0.61564222 0.60249557 0.58988992 0.57993399 0.57136506 0.56094242 0.55235105 0.54568236 0.53958052 0.53510215 0.53093675 0.53016508 0.52984662 0.53036682 0.53211463 0.53271918 0.53246806 0.53331158 0.5319278 0.53051055 0.52951499 0.52996848 0.53253373 0.53705085 0.54235344 0.54912497 0.55523055 0.56011135 0.55767999 0.54821984 0.53144613 0.50763528 0.47811224 0.45092793 0.42798466 0.41051405 0.40039139 0.40087302 0.40829375 0.42086556 0.43007022 0.42456692 0.39136817 0.33009008 0.25720509 0.18189031 0.11650668 0.07031579 0.04275381 0.02593154 0.01574394 0.00394326 + + + + 20 + + 837 + 881 + 864.7 + + + 1 + 0.00030097 0 0 0 0 0 0 0 0 0 0.00157217 0.00249886 0.01332037 0.02614866 0.05260479 0.10779709 0.22160755 0.39721628 0.60986885 0.81658883 0.9322445 0.97210033 0.97545482 0.97538048 0.97328205 0.97607828 0.98034955 0.98690928 0.99087465 0.99741818 0.99984673 0.99939141 0.99587928 0.99541228 1 0.99640762 0.92359433 0.74137684 0.48965971 0.25020643 0.11221246 0.04755984 0.02297815 0.01061438 0.00108149 + + + + 60 + + 932 + 958 + 945.1 + + + 1 + 0.01662953 0.06111857 0.17407094 0.38946454 0.6645915 0.87454114 0.93695988 0.96751014 0.9893391 0.9951269 1 0.97845762 0.98069118 0.9922335 0.98798379 0.99428313 0.98348041 0.97820013 0.95023367 0.95299604 0.92240308 0.85573828 0.70970227 0.46429542 0.21538427 0.06534121 0.01625596 + + + + 60 + + 1337 + 1412 + 1373.5 + + + 1 + 0.00024052 5.404e-05 3.052e-05 2.872e-05 7.632e-05 0.00010949 8.804e-05 0.00012356 0.00017424 
0.0003317 0.00036891 0.0004467 0.00065919 0.0010913 0.00196903 0.00373668 0.00801754 0.01884719 0.04466732 0.10165546 0.20111776 0.34284841 0.50710992 0.6632068 0.78377143 0.86153862 0.91000261 0.94193255 0.96182259 0.97365119 0.98169786 0.98795826 0.99283342 0.99649788 0.99906011 1 0.99907734 0.99601604 0.9909083 0.98479854 0.97802142 0.97030114 0.96080954 0.94849765 0.93314108 0.91482336 0.8937997 0.86825426 0.83023193 0.76384193 0.65440009 0.50671604 0.35014737 0.21799972 0.12643091 0.06768988 0.0322709 0.013544 0.00544557 0.00237642 0.00111267 0.00053796 0.0003457 0.00017488 0.00021619 0.00019479 0.00010421 5.919e-05 5.109e-05 6.115e-05 5.527e-05 3.856e-05 3.147e-05 0.00012289 0.0001089 2.502e-05 + + + + 20 + + 1539 + 1682 + 1613.7 + + + 1 + 6.79e-06 6.66e-06 8e-06 2.734e-05 3.685e-05 8.851e-05 0.00014522 0.00024812 0.00047627 0.00056335 0.00065326 0.00089835 0.00114664 0.00165604 0.00241611 0.00350246 0.00524274 0.0081538 0.01237062 0.0186097 0.02721853 0.03879155 0.05379167 0.07353187 0.09932758 0.1334178 0.18029249 0.24484994 0.32834511 0.42749961 0.53576798 0.64570396 0.74245998 0.81447017 0.85866596 0.87924777 0.88665266 0.888727 0.89105732 0.89725046 0.90632982 0.91627527 0.9263751 0.93515828 0.94226446 0.94739906 0.95131987 0.95416808 0.95635128 0.95813297 0.96062738 0.96344083 0.96577764 0.96818134 0.97104025 0.97343195 0.97597444 0.97865413 0.97994672 0.98064126 0.98094979 0.98143338 0.98123856 0.98068083 0.98033995 0.98101894 0.98268503 0.98507875 0.98777658 0.9903608 0.99202087 0.9933069 0.99256744 0.99044883 0.98717314 0.98353656 0.9800432 0.97617287 0.97253451 0.96977033 0.96762556 0.9662626 0.96572411 0.96592079 0.96729798 0.96975438 0.97337748 0.97862858 0.98345358 0.98765317 0.9919238 0.99554959 0.99767411 0.99866451 0.99941783 0.99930984 0.99885298 0.99913515 0.99973164 0.99973592 1 0.9998438 0.9967639 0.99175576 0.9859206 0.97887302 0.97029262 0.96135891 0.95379752 0.94709017 0.94228614 0.93919512 0.93616637 0.92889205 0.9129921 0.88158383 
0.82602164 0.74412949 0.64281662 0.53483955 0.42772166 0.32439525 0.23488131 0.16445229 0.11056237 0.07271886 0.04634859 0.02949618 0.01941871 0.0133487 0.00934594 0.00654231 0.00487921 0.00341903 0.00249864 0.00196431 0.00142754 0.00105878 0.00049978 0.00022833 0.00015999 3.415e-05 4.517e-05 1.313e-05 + + + + 20 + + 2078 + 2320 + 2202.4 + + + 1 + 0.00063835 0.00102286 0.00288712 0.00399879 0.00658916 0.00765458 0.00799918 0.00853524 0.00929493 0.00999614 0.01096645 0.01208363 0.01335837 0.01501119 0.01711931 0.01977307 0.02332743 0.02765779 0.03320435 0.04020464 0.04886709 0.0596238 0.07315348 0.09050885 0.11143964 0.13686671 0.16776886 0.20341457 0.24281992 0.28484195 0.32711894 0.36834301 0.40794043 0.4447145 0.47647207 0.50303896 0.52524762 0.54328057 0.55717994 0.5685619 0.57895708 0.58860881 0.59881758 0.60990899 0.62128986 0.63421311 0.64847648 0.66363778 0.67997936 0.69609688 0.71189957 0.7269499 0.74124079 0.75734734 0.77201504 0.78552587 0.79818641 0.80962939 0.81965718 0.82855741 0.83668178 0.84440292 0.85106862 0.85321701 0.85471321 0.8561428 0.85778963 0.8594989 0.86142876 0.86322831 0.86511218 0.8672932 0.86967076 0.87427502 0.87856212 0.88241466 0.88590611 0.8894516 0.89320419 0.8966738 0.89987484 0.90257636 0.90481219 0.90550545 0.90564491 0.90548208 0.90513822 0.90476379 0.90406427 0.90332978 0.90274309 0.90235795 0.90196488 0.90340528 0.90429478 0.90529761 0.90642862 0.90807348 0.91010493 0.91293181 0.91556686 0.91842631 0.92128288 0.92431702 0.92719913 0.92972159 0.93190455 0.93412538 0.93588954 0.93707083 0.93762594 0.93828534 0.93763643 0.94042634 0.94250397 0.94324531 0.94301861 0.94210283 0.94061808 0.93841726 0.93665003 0.93524569 0.93301102 0.92686708 0.92104485 0.91547175 0.91100989 0.90828339 0.9072733 0.90817907 0.91115631 0.91617845 0.92284525 0.92059829 0.91947472 0.91947973 0.92126575 0.92451632 0.92772589 0.93196884 0.93676408 0.94147739 0.94679545 0.95119533 0.95443018 0.95704142 0.95972628 0.9625372 0.96485326 0.96603599 0.96664138 
0.96630455 0.96545713 0.96484036 0.96365512 0.96169531 0.95944859 0.95732078 0.95513625 0.95355574 0.95273072 0.95217795 0.95172542 0.9521403 0.95263595 0.95405248 0.95707559 0.96063594 0.96421772 0.96830187 0.97268597 0.97741944 0.98289489 0.9871429 0.99073348 0.99398244 0.99678431 0.99875181 1 0.9999284 0.9991523 0.99712951 0.99388228 0.98968273 0.98373274 0.97621057 0.96780985 0.95833495 0.94842856 0.93818752 0.9277078 0.91702104 0.90597951 0.89384371 0.88165575 0.86861704 0.85460324 0.84058628 0.82598123 0.80948042 0.79182917 0.7724052 0.74907137 0.72031195 0.68815487 0.65125598 0.6100244 0.56600904 0.52095058 0.47464344 0.42924778 0.38584718 0.34208462 0.30067509 0.26317221 0.22770037 0.19571781 0.16808736 0.14467686 0.12482737 0.10823403 0.09439655 0.08235799 0.07149445 0.0626855 0.05498009 0.04818852 0.04285814 0.03859244 0.03494044 0.03199172 0.02958044 0.02741084 0.02556884 0.02395058 0.02166741 0.0191457 0.01632139 0.0109837 0.00736032 0.00649061 0.00469736 0.00205874 + + + + 4.10137842 + 3.75605469 + 4.18741753 + 4.52205376 + 5.20680393 + 4.8729478 + 4.5356737 + 6.16247757 + 5.13772343 + 8.53898524 + 55.10485389 + 35.30373192 + 106.24732599 + + + SC_NODATA + 0 + + + SC_SATURATED_DEFECTIVE + 1 + + + SC_DARK_FEATURE_SHADOW + 2 + + + SC_CLOUD_SHADOW + 3 + + + SC_VEGETATION + 4 + + + SC_NOT_VEGETATED + 5 + + + SC_WATER + 6 + + + SC_UNCLASSIFIED + 7 + + + SC_CLOUD_MEDIUM_PROBA + 8 + + + SC_CLOUD_HIGH_PROBA + 9 + + + SC_THIN_CIRRUS + 10 + + + SC_SNOW_ICE + 11 + + + + + + + + + 40.64479480422486 115.81682739339685 40.65079881136531 117.1154430676197 39.66155122739065 117.11377991452629 39.655752572676114 115.83386830444628 40.64479480422486 115.81682739339685 + + + POINT + 1 + + + EPSG + GEOGRAPHIC + + + + + S2A_OPER_GIP_INVLOC_MPC__20171206T000000_V20150703T000000_21000101T000000_B00 + S2A_OPER_GIP_LREXTR_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_ATMIMA_MPC__20150605T094744_V20150622T000000_21000101T000000_B00 + 
S2A_OPER_GIP_ATMSAD_MPC__20160729T000005_V20150703T000000_21000101T000000_B00 + S2A_OPER_GIP_BLINDP_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_CLOINV_MPC__20210609T000005_V20210823T030000_21000101T000000_B00 + S2A_OPER_GIP_CLOPAR_MPC__20220120T000001_V20220125T022000_21000101T000000_B00 + S2A_OPER_GIP_CONVER_MPC__20150710T131444_V20150627T000000_21000101T000000_B00 + S2A_OPER_GIP_DATATI_MPC__20151117T131048_V20150703T000000_21000101T000000_B00 + S2A_OPER_GIP_DECOMP_MPC__20121031T075922_V19830101T000000_21000101T000000_B00 + S2__OPER_GIP_EARMOD_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_ECMWFP_MPC__20121031T075922_V19830101T000000_21000101T000000_B00 + S2A_OPER_GIP_G2PARA_MPC__20231208T000027_V20231213T070000_21000101T000000_B00 + S2A_OPER_GIP_G2PARE_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_GEOPAR_MPC__20150605T094741_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_INTDET_MPC__20220120T000010_V20220125T022000_21000101T000000_B00 + S2A_OPER_GIP_JP2KPA_MPC__20220120T000006_V20220125T022000_21000101T000000_B00 + S2A_OPER_GIP_MASPAR_MPC__20220120T000009_V20220125T022000_21000101T000000_B00 + S2A_OPER_GIP_OLQCPA_MPC__20220715T000042_V20220830T002500_21000101T000000_B00 + S2A_OPER_GIP_PRDLOC_MPC__20180301T130000_V20180305T005000_21000101T000000_B00 + S2A_OPER_GIP_PROBAS_MPC__20240305T000510_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_R2ABCA_MPC__20240315T121000_V20240319T003000_21000101T000000_B00 + S2A_OPER_GIP_R2BINN_MPC__20150605T094803_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_R2CRCO_MPC__20151023T224715_V20150622T224715_21000101T000000_B00 + S2A_OPER_GIP_R2DECT_MPC__20150605T094742_V20150622T000000_21000101T000000_B09 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B04 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B02 + S2A_OPER_GIP_R2DECT_MPC__20150605T094742_V20150622T000000_21000101T000000_B12 + 
S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B06 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B08 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B07 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B05 + S2A_OPER_GIP_R2DECT_MPC__20150605T094742_V20150622T000000_21000101T000000_B10 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B01 + S2A_OPER_GIP_R2DECT_MPC__20150605T094742_V20150622T000000_21000101T000000_B11 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B8A + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B03 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B09 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B05 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B8A + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B08 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B02 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B04 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B10 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B01 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B12 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B03 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B11 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B07 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B06 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B05 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B10 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B09 + 
S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B8A + S2A_OPER_GIP_R2DENT_MPC__20150605T094741_V20150622T000000_21000101T000000_B01 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B08 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B06 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B02 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B12 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B04 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B03 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B07 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B11 + S2A_OPER_GIP_R2DEPI_MPC__20230424T160000_V20230426T000000_21000101T000000_B00 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B12 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B03 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B07 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B09 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B10 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B01 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B05 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B8A + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B06 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B04 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B11 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B02 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B08 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B10 + 
S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B05 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B04 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B06 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B08 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B03 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B01 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B12 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B11 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B02 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B07 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B8A + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B09 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B05 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B08 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B8A + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B12 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B10 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B07 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B01 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094741_V20150622T000000_21000101T000000_B03 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B04 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B11 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B02 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B09 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B06 + 
S2A_OPER_GIP_R2NOMO_MPC__20150605T094803_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_R2PARA_MPC__20221206T000009_V20221206T073000_21000101T000000_B00 + S2A_OPER_GIP_R2SWIR_MPC__20180406T000021_V20180604T100000_21000101T000000_B00 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B12 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B09 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B05 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B02 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B03 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B8A + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B06 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B08 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B04 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B10 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B01 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B11 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B07 + S2A_OPER_GIP_RESPAR_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_SPAMOD_MPC__20231122T110026_V20231123T010000_21000101T000000_B00 + S2A_OPER_GIP_TILPAR_MPC__20151209T095117_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B8A + S2A_OPER_GIP_VIEDIR_MPC__20151117T131049_V20150703T000000_21000101T000000_B03 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B08 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131048_V20150703T000000_21000101T000000_B01 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B11 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B10 + 
S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B06 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131049_V20150703T000000_21000101T000000_B04 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131049_V20150703T000000_21000101T000000_B02 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B05 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131051_V20150703T000000_21000101T000000_B12 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B09 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B07 + S2__OPER_GIP_L2ACSC_MPC__20220121T000003_V20220125T022000_21000101T000000_B00 + S2__OPER_GIP_L2ACAC_MPC__20220121T000004_V20220125T022000_21000101T000000_B00 + S2__OPER_GIP_PROBA2_MPC__20231208T000510_V20231213T070000_21000101T000000_B00 + + + CopernicusDEM30 + S2__OPER_AUX_UT1UTC_PDMC_20240404T000000_V20240405T000000_20250404T000000 + + S2__OPER_AUX_ECMWFD_ADG__20240410T120000_V20240410T210000_20240412T150000 + + None + + GlobalSnowMap.tiff + ESACCI-LC-L4-WB-Map-150m-P13Y-2000-v4.0.tif + ESACCI-LC-L4-LCCS-Map-300m-P1Y-2015-v2.0.7.tif + ESACCI-LC-L4-Snow-Cond-500m-MONTHLY-2000-2012-v2.4 + + + 3.500058 + + 0.0 + 0 + + + + PASSED + PASSED + PASSED + PASSED + PASSED + PASSED + + + + + 3.354197 + 0.0 + 0.0 + 8.675177 + 0.268831 + 2.81222 + 83.179593 + 0.992827 + 0.571295 + 0.275278 + 0.038401 + 3.18638 + 0.0 + 0.0 + 0.0 + 0.0 + CAMS + 0.392921 + 1.224094 + AUX_ECMWFT + 357.927923 + + + +""" # noqa + +PROCESS_LEVELS = ["L1C", "oldL1C", "L2A"] +MTD_XMLS = [mtd_l1c_xml, mtd_l1c_old_xml, mtd_l2a_xml] +TILE_XMLS = [mtd_l1c_tile_xml, mtd_l1c_tile_xml, mtd_l1c_tile_xml] + + +def xml_builder(process_level, mask_saturated=True, band_name=None): + """Build fake SAFE MTD/Tile XML.""" + from satpy.readers.msi_safe import SAFEMSIMDXML, SAFEMSITileMDXML + filename_info = dict(observation_time=fname_dt, dtile_number=None, band_name=band_name, fmission_id="S2A", + process_level=process_level.replace("old", "")) + xml_fh = 
SAFEMSIMDXML(StringIO(MTD_XMLS[PROCESS_LEVELS.index(process_level)]), + filename_info, mock.MagicMock(), mask_saturated=mask_saturated) + xml_tile_fh = SAFEMSITileMDXML(BytesIO(TILE_XMLS[PROCESS_LEVELS.index(process_level)]), + filename_info, mock.MagicMock()) + return xml_fh, xml_tile_fh + + +def jp2_builder(process_level, band_name, mask_saturated=True): + """Build fake SAFE jp2 image file.""" + from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSITileMDXML + filename_info = dict(observation_time=fname_dt, dtile_number=None, band_name=band_name, fmission_id="S2A", + process_level=process_level.replace("old", "")) + xml_fh = xml_builder(process_level, mask_saturated, band_name)[0] + tile_xml_fh = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(TILE_XMLS[PROCESS_LEVELS.index(process_level)]), + filename_info, mock.MagicMock()) + tile_xml_fh.start_time.return_value = tilemd_dt + jp2_fh = SAFEMSIL1C("somefile", filename_info, mock.MagicMock(), xml_fh, tile_xml_fh) + return jp2_fh + +def make_alt_dataid(**items): + """Make a DataID with modified keys.""" + from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange + modified_id_keys_config = { + "name": { + "required": True, + }, + "wavelength": { + "type": WavelengthRange, + }, + "resolution": { + "transitive": False, + }, + "calibration": { + "enum": [ + "reflectance", + "brightness_temperature", + "radiance", + "radiance_wavenumber", + "counts", + "aerosol_thickness", + "water_vapor" + ], + "transitive": True, + }, + "modifiers": { + "default": ModifierTuple(), + "type": ModifierTuple, + }, + } + + return DataID(modified_id_keys_config, **items) + + +class TestTileXML: + """Test the SAFE TILE XML file handler. + + Since L1C/L2A share almost the same Tile XML structure, we only use L1C Tile here. 
+ + """ + + @pytest.mark.parametrize(("process_level", "angle_name", "angle_tag", "expected"), + [ + ("L1C", "satellite_zenith_angle", ("Viewing_Incidence_Angles_Grids", "Zenith"), + [[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504, + 7.55445611, 6.65475545, 5.66517232, 4.75893757, 4.04976844], + [11.88606009, 10.9799713, 10.07083278, 9.14571825, 8.22607131, + 7.35181457, 6.44647222, 5.46144173, 4.56625547, 3.86638233], + [11.6823579, 10.7763071, 9.86302106, 8.93879112, 8.04005637, + 7.15028077, 6.21461062, 5.25780953, 4.39876601, 3.68620793], + [11.06724679, 10.35723901, 9.63958896, 8.73072512, 7.83680864, + 6.94792574, 5.9889201, 5.05445872, 4.26089708, 3.50984272], + [6.28411038, 6.28411038, 6.28411038, 6.28411038, 6.28411038, + 5.99769643, 5.62586167, 4.85165966, 4.13238314, 3.33781401], + [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, + 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], + [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, + 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], + [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, + 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], + [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, + 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], + [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, + 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]]), + ("L2A", "solar_zenith_angle", ("Sun_Angles_Grid", "Zenith"), + [[39.8824, 39.83721367, 39.79230847, 39.74758442, 39.7030415, + 39.65867687, 39.61455566, 39.57061558, 39.52685664, 39.48331372], + [39.78150175, 39.73629896, 39.69128852, 39.64643679, 39.6018404, + 39.5574369, 39.51323286, 39.46920212, 39.4253673, 39.38179377], + [39.6806035, 39.63532838, 39.5902497, 39.54538507, 39.5007087, + 39.45621756, 39.41195347, 39.36779169, 39.3239121, 39.28027381], + [39.57980525, 39.53445664, 39.48931088, 39.44434154, 39.39957879, + 39.35503587, 39.31067408, 39.26649344, 39.22249393, 39.17876143], + [39.479007, 39.43355483, 39.38829092, 39.34328573, 39.29846167, + 39.25381983, 39.2093947, 39.16513007, 39.12109926, 39.07726878], + 
[39.37820875, 39.33268069, 39.28735495, 39.24224914, 39.19736058, + 39.15267709, 39.1081719, 39.06385068, 39.01973446, 38.97584982], + [39.2774105, 39.23184303, 39.18646737, 39.14130809, 39.09632176, + 39.05153988, 39.00696049, 38.9625713, 38.91842056, 38.87444401], + [39.17671225, 39.13104478, 39.08559031, 39.04034757, 38.99528294, + 38.95039991, 38.9057971, 38.86130793, 38.81705183, 38.77303821], + [39.076014, 39.03026112, 38.98477906, 38.93940875, 38.89425338, + 38.84936063, 38.80464763, 38.76011645, 38.7157479, 38.67164839], + [38.97531575, 38.92950771, 38.88389967, 38.83852091, 38.7933053, + 38.74831897, 38.7034912, 38.65891427, 38.61446851, 38.57030388]]), + ("L1C", "moon_zenith_angle", ("Sun_Angles_Grid", "Zenith"), None) + ]) + def test_angles(self, process_level, angle_name, angle_tag, expected): + """Test reading angles array.""" + info = dict(xml_tag=angle_tag[0], xml_item=angle_tag[1]) if "satellite" in angle_name else \ + dict(xml_tag=angle_tag[0] + "/" + angle_tag[1]) + xml_tile_fh = xml_builder(process_level)[1] + + res = xml_tile_fh.get_dataset(make_alt_dataid(name=angle_name, resolution=60), info) + if res is not None: + res = res[::200, ::200] + + if res is not None: + np.testing.assert_allclose(res, expected) + else: + assert res is expected + + def test_start_time(self): + """Ensure start time is read correctly from XML.""" + xml_tile_fh = xml_builder("L1C")[1] + assert xml_tile_fh.start_time() == tilemd_dt + + def test_navigation(self): + """Test the navigation.""" + from pyproj import CRS + crs = CRS("EPSG:32616") + + dsid = make_alt_dataid(name="B01", resolution=60) + xml_tile_fh = xml_builder("L1C")[1] + result = xml_tile_fh.get_area_def(dsid) + area_extent = (499980.0, 3590220.0, 609780.0, 3700020.0) + assert result.crs == crs + np.testing.assert_allclose(result.area_extent, area_extent) + class TestMTDXML: """Test the SAFE MTD XML file handler.""" def setup_method(self): """Set up the test case.""" - from satpy.readers.msi_safe import 
SAFEMSIMDXML, SAFEMSITileMDXML - filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A") - self.xml_tile_fh = SAFEMSITileMDXML(BytesIO(mtd_tile_xml), filename_info, mock.MagicMock()) - self.old_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_old_xml), filename_info, mock.MagicMock()) - self.xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock(), mask_saturated=True) - - def test_satellite_zenith_array(self): - """Test reading the satellite zenith array.""" - info = dict(xml_tag="Viewing_Incidence_Angles_Grids", xml_item="Zenith") - - expected_data = np.array([[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504, - 7.55445611, 6.65475545, 5.66517232, 4.75893757, 4.04976844], - [11.88606009, 10.9799713, 10.07083278, 9.14571825, 8.22607131, - 7.35181457, 6.44647222, 5.46144173, 4.56625547, 3.86638233], - [11.6823579, 10.7763071, 9.86302106, 8.93879112, 8.04005637, - 7.15028077, 6.21461062, 5.25780953, 4.39876601, 3.68620793], - [11.06724679, 10.35723901, 9.63958896, 8.73072512, 7.83680864, - 6.94792574, 5.9889201, 5.05445872, 4.26089708, 3.50984272], - [6.28411038, 6.28411038, 6.28411038, 6.28411038, 6.28411038, - 5.99769643, 5.62586167, 4.85165966, 4.13238314, 3.33781401], - [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, - 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], - [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, - 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], - [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, - 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], - [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, - 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], - [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, - 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]]) - res = self.xml_tile_fh.get_dataset(make_dataid(name="satellite_zenith_angle", - resolution=60), - info)[::200, ::200] - np.testing.assert_allclose(res, expected_data) - - def test_old_xml_calibration(self): - """Test the calibration of older data formats (no offset).""" - fake_data = xr.DataArray([[[0, 1, 2, 
3], - [4, 1000, 65534, 65535]]], - dims=["band", "x", "y"]) - result = self.old_xml_fh.calibrate_to_reflectances(fake_data, "B01") - np.testing.assert_allclose(result, [[[np.nan, 0.01, 0.02, 0.03], - [0.04, 10, 655.34, np.inf]]]) - - def test_xml_calibration(self): - """Test the calibration with radiometric offset.""" - fake_data = xr.DataArray([[[0, 1, 2, 3], - [4, 1000, 65534, 65535]]], - dims=["band", "x", "y"]) - result = self.xml_fh.calibrate_to_reflectances(fake_data, "B01") - np.testing.assert_allclose(result, [[[np.nan, 0.01 - 10, 0.02 - 10, 0.03 - 10], - [0.04 - 10, 0, 655.34 - 10, np.inf]]]) - - def test_xml_calibration_unmasked_saturated(self): - """Test the calibration with radiometric offset but unmasked saturated pixels.""" - from satpy.readers.msi_safe import SAFEMSIMDXML - filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A") - self.xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock(), mask_saturated=False) - - fake_data = xr.DataArray([[[0, 1, 2, 3], - [4, 1000, 65534, 65535]]], - dims=["band", "x", "y"]) - result = self.xml_fh.calibrate_to_reflectances(fake_data, "B01") - np.testing.assert_allclose(result, [[[np.nan, 0.01 - 10, 0.02 - 10, 0.03 - 10], - [0.04 - 10, 0, 655.34 - 10, 655.35 - 10]]]) - - def test_xml_calibration_with_different_offset(self): - """Test the calibration with a different offset.""" - fake_data = xr.DataArray([[[0, 1, 2, 3], - [4, 1000, 65534, 65535]]], - dims=["band", "x", "y"]) - result = self.xml_fh.calibrate_to_reflectances(fake_data, "B10") - np.testing.assert_allclose(result, [[[np.nan, 0.01 - 20, 0.02 - 20, 0.03 - 20], - [0.04 - 20, -10, 655.34 - 20, np.inf]]]) - - def test_xml_calibration_to_radiance(self): - """Test the calibration with a different offset.""" - fake_data = xr.DataArray([[[0, 1, 2, 3], - [4, 1000, 65534, 65535]]], - dims=["band", "x", "y"]) - result = self.xml_fh.calibrate_to_radiances(fake_data, "B01") - expected = np.array([[[np.nan, -251.584265, 
-251.332429, -251.080593], - [-250.828757, 0., 16251.99095, np.inf]]]) - np.testing.assert_allclose(result, expected) - - def test_xml_navigation(self): - """Test the navigation.""" - from pyproj import CRS - crs = CRS("EPSG:32616") + self.fake_data = xr.DataArray([[[0, 1, 2, 3], [4, 1000, 65534, 65535]]], dims=["band", "x", "y"]) - dsid = make_dataid(name="B01", resolution=60) - result = self.xml_tile_fh.get_area_def(dsid) + @pytest.mark.parametrize(("process_level", "mask_saturated", "band_name", "expected"), + [ + ("L1C", True, "B01", ([[[np.nan, -9.99, -9.98, -9.97], + [-9.96, 0, 645.34, np.inf]]], + [[[np.nan, -251.584265, -251.332429, -251.080593], + [-250.828757, 0., 16251.99095, np.inf]]], + [[[np.nan, 1, 2, 3], + [4, 1000, 65534, np.inf]]])), + ("L1C", False, "B10", ([[[np.nan, -19.99, -19.98, -19.97], + [-19.96, -10, 635.34, 635.35]]], + [[[np.nan, -35.465976, -35.448234, -35.430493], + [-35.412751, -17.741859, 1127.211275, 1127.229017]]], + [[[np.nan, 1, 2, 3], + [4, 1000, 65534, 65535]]])), + ("oldL1C", True, "B01", ([[[np.nan, 0.01, 0.02, 0.03], + [0.04, 10, 655.34, np.inf]]], + [[[np.nan, 0.251836101, 0.503672202, 0.755508303], + [1.00734440, 251.836101, 16503.8271, np.inf]]], + [[[np.nan, 1, 2, 3], + [4, 1000, 65534, np.inf]]])), + ("L2A", False, "B03", ([[[np.nan, -9.99, -9.98, -9.97], + [-9.96, 0, 645.34, 645.35]]], + [[[np.nan, -238.571863, -238.333052, -238.094241], + [-237.855431, 0, 15411.407995, 15411.646806]]], + [[[np.nan, 1, 2, 3], + [4, 1000, 65534, 65535]]])), + ]) + def test_xml_calibration(self, process_level, mask_saturated, band_name, expected): + """Test the calibration to reflectance/radiance/counts.""" + xml_fh = xml_builder(process_level, mask_saturated)[0] - area_extents = (499980.0, 3590220.0, 609780.0, 3700020.0) - assert result.crs == crs - np.testing.assert_allclose(result.area_extent, area_extents) + res1 = xml_fh.calibrate_to_reflectances(self.fake_data, band_name) + res2 = xml_fh.calibrate_to_radiances(self.fake_data, 
band_name) + res3 = xml_fh._sanitize_data(self.fake_data) + + results = (res1, res2, res3) + np.testing.assert_allclose(results, expected) + + @pytest.mark.parametrize(("process_level", "mask_saturated", "band_name", "expected"), + [ + ("L1C", True, "B01", None), + ("L2A", False, "AOT", [[[np.nan, 0.001, 0.002, 0.003], + [0.004, 1., 65.534, 65.535]]]), + ("L2A", True, "WVP", [[[np.nan, 0.001, 0.002, 0.003], + [0.004, 1., 65.534, np.inf]]]), + ("L2A", False, "CLOUD", None), + ("L2A", False, "B10", None), + ]) + def test_xml_calibration_to_atmospheric(self, process_level, mask_saturated, band_name, expected): + """Test the calibration to L2A atmospheric products.""" + xml_fh = xml_builder(process_level, mask_saturated)[0] + + result = xml_fh.calibrate_to_atmospheric(self.fake_data, band_name) + + if result is not None: + np.testing.assert_allclose(result, expected) + else: + assert result is expected class TestSAFEMSIL1C: @@ -970,24 +1639,45 @@ class TestSAFEMSIL1C: def setup_method(self): """Set up the test.""" - from satpy.readers.msi_safe import SAFEMSITileMDXML - self.filename_info = dict(observation_time=None, fmission_id="S2A", band_name="B01", dtile_number=None) self.fake_data = xr.Dataset({"band_data": xr.DataArray([[[0, 1], [65534, 65535]]], dims=["band", "x", "y"])}) - self.tile_mda = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(mtd_tile_xml), - self.filename_info, mock.MagicMock()) - - @pytest.mark.parametrize(("mask_saturated", "calibration", "expected"), - [(True, "reflectance", [[np.nan, 0.01 - 10], [645.34, np.inf]]), - (False, "reflectance", [[np.nan, 0.01 - 10], [645.34, 645.35]]), - (True, "radiance", [[np.nan, -251.58426503], [16251.99095011, np.inf]])]) - def test_calibration_and_masking(self, mask_saturated, calibration, expected): + + @pytest.mark.parametrize(("mask_saturated", "dataset_name", "calibration", "expected"), + [ + (False, "B01", "reflectance", [[np.nan, -9.99], [645.34, 645.35]]), + (True, "B02", "radiance", [[np.nan, 
-265.970568], [17181.325973, np.inf]]), + (True, "B03", "counts", [[np.nan, 1], [65534, np.inf]]), + (False, "AOT", "aerosol_thickness", [[np.nan, 0.001], [65.534, 65.535]]), + (True, "WVP", "water_vapor", [[np.nan, 0.001], [65.534, np.inf]]), + (True, "SNOW", "water_vapor", None), + ]) + def test_calibration_and_masking(self, mask_saturated, dataset_name, calibration, expected): """Test that saturated is masked with inf when requested and that calibration is performed.""" - from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSIMDXML + jp2_fh = jp2_builder("L2A", dataset_name, mask_saturated) + + with mock.patch("xarray.open_dataset", return_value=self.fake_data): + res = jp2_fh.get_dataset(make_alt_dataid(name=dataset_name, calibration=calibration), info=dict()) + if res is not None: + np.testing.assert_allclose(res, expected) + else: + assert res is expected - mda = SAFEMSIMDXML(StringIO(mtd_l1c_xml), self.filename_info, mock.MagicMock(), - mask_saturated=mask_saturated) - self.jp2_fh = SAFEMSIL1C("somefile", self.filename_info, mock.MagicMock(), mda, self.tile_mda) + @pytest.mark.parametrize(("process_level", "band_name", "dataset_name"), + [ + ("L1C", "B01", "B03"), + ("L2A", "B02", "B03"), + ]) + def test_filename_dsname_mismatch(self, process_level, band_name, dataset_name): + """Test when dataset name and file band name mismatch, the data and its area definition should both be None.""" + jp2_fh = jp2_builder(process_level, band_name) with mock.patch("xarray.open_dataset", return_value=self.fake_data): - res = self.jp2_fh.get_dataset(make_dataid(name="B01", calibration=calibration), info=dict()) - np.testing.assert_allclose(res, expected) + res1 = jp2_fh.get_dataset(make_alt_dataid(name=dataset_name), info=dict()) + res2 = jp2_fh.get_area_def(make_alt_dataid(name=dataset_name)) + + assert res1 is None + assert res2 is None + + def test_start_time(self): + """Test that the correct start time is returned.""" + jp2_fh = jp2_builder("L1C", "B01") + assert 
tilemd_dt == jp2_fh.start_time diff --git a/satpy/tests/reader_tests/test_mws_l1b_nc.py b/satpy/tests/reader_tests/test_mws_l1b_nc.py index 2d227822a4..52a894bd00 100644 --- a/satpy/tests/reader_tests/test_mws_l1b_nc.py +++ b/satpy/tests/reader_tests/test_mws_l1b_nc.py @@ -19,8 +19,8 @@ """ +import datetime as dt import logging -from datetime import datetime from unittest.mock import patch import numpy as np @@ -50,13 +50,13 @@ def reader(fake_file): filename=fake_file, filename_info={ "start_time": ( - datetime.fromisoformat("2000-01-01T01:00:00") + dt.datetime.fromisoformat("2000-01-01T01:00:00") ), "end_time": ( - datetime.fromisoformat("2000-01-01T02:00:00") + dt.datetime.fromisoformat("2000-01-01T02:00:00") ), "creation_time": ( - datetime.fromisoformat("2000-01-01T03:00:00") + dt.datetime.fromisoformat("2000-01-01T03:00:00") ), }, filetype_info={ @@ -207,11 +207,11 @@ class TestMwsL1bNCFileHandler: def test_start_time(self, reader): """Test acquiring the start time.""" - assert reader.start_time == datetime(2000, 1, 2, 3, 4, 5) + assert reader.start_time == dt.datetime(2000, 1, 2, 3, 4, 5) def test_end_time(self, reader): """Test acquiring the end time.""" - assert reader.end_time == datetime(2000, 1, 2, 4, 5, 6) + assert reader.end_time == dt.datetime(2000, 1, 2, 4, 5, 6) def test_sensor(self, reader): """Test sensor.""" @@ -356,12 +356,12 @@ def test_get_global_attributes(self, reader): attributes = reader._get_global_attributes() assert attributes == { "filename": reader.filename, - "start_time": datetime(2000, 1, 2, 3, 4, 5), - "end_time": datetime(2000, 1, 2, 4, 5, 6), + "start_time": dt.datetime(2000, 1, 2, 3, 4, 5), + "end_time": dt.datetime(2000, 1, 2, 4, 5, 6), "spacecraft_name": "Metop-SG-A1", "sensor": "MWS", - "filename_start_time": datetime(2000, 1, 1, 1, 0), - "filename_end_time": datetime(2000, 1, 1, 2, 0), + "filename_start_time": dt.datetime(2000, 1, 1, 1, 0), + "filename_end_time": dt.datetime(2000, 1, 1, 2, 0), "platform_name": 
"Metop-SG-A1", "quality_group": { "duration_of_product": np.array(5944., dtype=np.float32), diff --git a/satpy/tests/reader_tests/test_netcdf_utils.py b/satpy/tests/reader_tests/test_netcdf_utils.py index 2d29288784..5e0bcc44a1 100644 --- a/satpy/tests/reader_tests/test_netcdf_utils.py +++ b/satpy/tests/reader_tests/test_netcdf_utils.py @@ -293,3 +293,102 @@ def test_use_h5netcdf_for_file_not_accessible_locally(self): fh = NetCDF4FsspecFileHandler(fname, {}, {}) h5_file.assert_called_once() assert fh._use_h5netcdf + + +NC_ATTRS = { + "standard_name": "test_data", + "scale_factor": 0.01, + "add_offset": 0} + +def test_get_data_as_xarray_netcdf4(tmp_path): + """Test getting xr.DataArray from netcdf4 variable.""" + import numpy as np + + from satpy.readers.netcdf_utils import get_data_as_xarray + + data = np.array([1, 2, 3]) + fname = tmp_path / "test.nc" + dset = _write_test_netcdf4(fname, data) + + res = get_data_as_xarray(dset["test_data"]) + np.testing.assert_equal(res.data, data) + assert res.attrs == NC_ATTRS + + +def test_get_data_as_xarray_scalar_netcdf4(tmp_path): + """Test getting scalar xr.DataArray from netcdf4 variable.""" + import numpy as np + + from satpy.readers.netcdf_utils import get_data_as_xarray + + data = 1 + fname = tmp_path / "test.nc" + dset = _write_test_netcdf4(fname, data) + + res = get_data_as_xarray(dset["test_data"]) + np.testing.assert_equal(res.data, np.array(data)) + assert res.attrs == NC_ATTRS + + +def _write_test_netcdf4(fname, data): + import netCDF4 as nc + + dset = nc.Dataset(fname, "w") + try: + dset.createDimension("y", data.size) + dims = ("y",) + except AttributeError: + dims = () + var = dset.createVariable("test_data", "uint8", dims) + var[:] = data + var.setncatts(NC_ATTRS) + # Turn off automatic scale factor and offset handling + dset.set_auto_maskandscale(False) + + return dset + + +def test_get_data_as_xarray_h5netcdf(tmp_path): + """Test getting xr.DataArray from h5netcdf variable.""" + import numpy as np + + from 
satpy.readers.netcdf_utils import get_data_as_xarray + + data = np.array([1, 2, 3]) + fname = tmp_path / "test.nc" + fid = _write_test_h5netcdf(fname, data) + + res = get_data_as_xarray(fid["test_data"]) + np.testing.assert_equal(res.data, data) + assert res.attrs == NC_ATTRS + + +def _write_test_h5netcdf(fname, data): + import h5netcdf + + fid = h5netcdf.File(fname, "w") + try: + fid.dimensions = {"y": data.size} + dims = ("y",) + except AttributeError: + dims = () + var = fid.create_variable("test_data", dims, "uint8", data=data) + for key in NC_ATTRS: + var.attrs[key] = NC_ATTRS[key] + + return fid + + +def test_get_data_as_xarray_scalar_h5netcdf(tmp_path): + """Test getting xr.DataArray from h5netcdf variable.""" + import numpy as np + + from satpy.readers.netcdf_utils import get_data_as_xarray + + data = 1 + fname = tmp_path / "test.nc" + fid = _write_test_h5netcdf(fname, data) + + res = get_data_as_xarray(fid["test_data"]) + np.testing.assert_equal(res.data, np.array(data)) + assert res.attrs == NC_ATTRS diff --git a/satpy/tests/reader_tests/test_nwcsaf_msg.py b/satpy/tests/reader_tests/test_nwcsaf_msg.py index 1c8e0fb793..1a9b2ca3bf 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_msg.py +++ b/satpy/tests/reader_tests/test_nwcsaf_msg.py @@ -26,12 +26,13 @@ import pytest from satpy.tests.reader_tests.utils import fill_h5 +from satpy.tests.utils import RANDOM_GEN -CTYPE_TEST_ARRAY = (np.random.rand(1856, 3712) * 255).astype(np.uint8) +CTYPE_TEST_ARRAY = (RANDOM_GEN.random((1856, 3712)) * 255).astype(np.uint8) CTYPE_TEST_FRAME = (np.arange(100).reshape(10, 10) / 100. * 20).astype(np.uint8) CTYPE_TEST_ARRAY[1000:1010, 1000:1010] = CTYPE_TEST_FRAME -CTTH_HEIGHT_TEST_ARRAY = (np.random.rand(1856, 3712) * 255).astype(np.uint8) +CTTH_HEIGHT_TEST_ARRAY = (RANDOM_GEN.random((1856, 3712)) * 255).astype(np.uint8) _CTTH_HEIGHT_TEST_FRAME = (np.arange(100).reshape(10, 10) / 100. 
* 80).astype(np.uint8) CTTH_HEIGHT_TEST_ARRAY[1000:1010, 1000:1010] = _CTTH_HEIGHT_TEST_FRAME @@ -39,7 +40,7 @@ CTTH_HEIGHT_TEST_FRAME_RES[0, 0:10] = np.nan CTTH_HEIGHT_TEST_FRAME_RES[1, 0:3] = np.nan -CTTH_PRESSURE_TEST_ARRAY = (np.random.rand(1856, 3712) * 255).astype(np.uint8) +CTTH_PRESSURE_TEST_ARRAY = (RANDOM_GEN.random((1856, 3712)) * 255).astype(np.uint8) _CTTH_PRESSURE_TEST_FRAME = (np.arange(100).reshape(10, 10) / 100. * 54).astype(np.uint8) CTTH_PRESSURE_TEST_ARRAY[1000:1010, 1000:1010] = _CTTH_PRESSURE_TEST_FRAME @@ -47,7 +48,7 @@ CTTH_PRESSURE_TEST_FRAME_RES[0, 0:10] = np.nan CTTH_PRESSURE_TEST_FRAME_RES[1, 0:9] = np.nan -CTTH_TEMPERATURE_TEST_ARRAY = (np.random.rand(1856, 3712) * 255).astype(np.uint8) +CTTH_TEMPERATURE_TEST_ARRAY = (RANDOM_GEN.random((1856, 3712)) * 255).astype(np.uint8) _CTTH_TEMPERATURE_TEST_FRAME = (np.arange(100).reshape(10, 10) / 100. * 140).astype(np.uint8) _CTTH_TEMPERATURE_TEST_FRAME[8, 5] = 255 CTTH_TEMPERATURE_TEST_ARRAY[1000:1010, 1000:1010] = _CTTH_TEMPERATURE_TEST_FRAME @@ -130,7 +131,7 @@ "PRODUCT": b"CT__", "SCALING_FACTOR": 1.0, }, - "value": (np.random.rand(1856, 3712) * 255).astype(np.uint8), + "value": (RANDOM_GEN.random((1856, 3712)) * 255).astype(np.uint8), }, "CT_QUALITY": { "attrs": { @@ -145,7 +146,7 @@ "PRODUCT": b"CT__", "SCALING_FACTOR": 1.0, }, - "value": (np.random.rand(1856, 3712) * 65535).astype(np.uint16), + "value": (RANDOM_GEN.random((1856, 3712)) * 65535).astype(np.uint16), }, "attrs": { "CFAC": 13642337, @@ -255,7 +256,7 @@ "PAL_COLORMODEL": b"RGB", "PAL_TYPE": b"DIRECTINDEX", }, - "value": (np.random.rand(128, 3) * 255).astype(np.uint8), + "value": (RANDOM_GEN.random((128, 3)) * 255).astype(np.uint8), }, "03-PALETTE": { "attrs": { @@ -263,7 +264,7 @@ "PAL_COLORMODEL": b"RGB", "PAL_TYPE": b"DIRECTINDEX", }, - "value": (np.random.rand(256, 3) * 255).astype(np.uint8), + "value": (RANDOM_GEN.random((256, 3)) * 255).astype(np.uint8), }, "04-PALETTE": { "attrs": { @@ -323,7 +324,7 @@ "PRODUCT": b"CTTH", 
"SCALING_FACTOR": 5.0, }, - "value": (np.random.rand(1856, 3712) * 255).astype(np.uint8), + "value": (RANDOM_GEN.random((1856, 3712)) * 255).astype(np.uint8), }, "CTTH_HEIGHT": { "attrs": { @@ -370,7 +371,7 @@ "PRODUCT": b"CTTH", "SCALING_FACTOR": 1.0, }, - "value": (np.random.rand(1856, 3712) * 65535).astype(np.uint16), + "value": (RANDOM_GEN.random((1856, 3712)) * 65535).astype(np.uint16), }, "CTTH_TEMPER": { "attrs": { diff --git a/satpy/tests/reader_tests/test_nwcsaf_nc.py b/satpy/tests/reader_tests/test_nwcsaf_nc.py index 4f6755f390..6a509f023f 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_nc.py +++ b/satpy/tests/reader_tests/test_nwcsaf_nc.py @@ -22,6 +22,7 @@ import xarray as xr from satpy.readers.nwcsaf_nc import NcNWCSAF, read_nwcsaf_time +from satpy.tests.utils import RANDOM_GEN PROJ_KM = {"gdal_projection": "+proj=geos +a=6378.137000 +b=6356.752300 +lon_0=0.000000 +h=35785.863000", "gdal_xgeo_up_left": -5569500.0, @@ -83,9 +84,9 @@ COT_SCALE = 0.01 COT_OFFSET = 0.0 -CRE_ARRAY = np.random.randint(0, 65535, size=(928, 1530), dtype=np.uint16) -COT_ARRAY = np.random.randint(0, 65535, size=(928, 1530), dtype=np.uint16) -PAL_ARRAY = np.random.randint(0, 255, size=(250, 3), dtype=np.uint8) +CRE_ARRAY = RANDOM_GEN.integers(0, 65535, size=(928, 1530), dtype=np.uint16) +COT_ARRAY = RANDOM_GEN.integers(0, 65535, size=(928, 1530), dtype=np.uint16) +PAL_ARRAY = RANDOM_GEN.integers(0, 255, size=(250, 3), dtype=np.uint8) @pytest.fixture(scope="session") @@ -104,7 +105,7 @@ def create_nwcsaf_geo_ct_file(directory, attrs=global_attrs_geo): var = nc_file.create_variable(var_name, ("ny", "nx"), np.uint16, chunks=(256, 256)) - var[:] = np.random.randint(0, 255, size=(928, 1530), dtype=np.uint8) + var[:] = RANDOM_GEN.integers(0, 255, size=(928, 1530), dtype=np.uint8) return filename diff --git a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py index 90b9d4432f..0293a88fe3 100644 --- 
a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py +++ b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py @@ -16,10 +16,11 @@ # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . + """Module for testing the satpy.readers.oceancolorcci_l3_nc module.""" +import datetime as dt import os -from datetime import datetime import numpy as np import pytest @@ -243,12 +244,12 @@ def test_get_dataset_5d_allprods(self, fake_dataset, fake_file_dict): def test_start_time(self, fake_file_dict): """Test start time property.""" reader = self._create_reader_for_resolutions([fake_file_dict["k490_1d"]]) - assert reader.start_time == datetime(2021, 8, 1, 0, 0, 0) + assert reader.start_time == dt.datetime(2021, 8, 1, 0, 0, 0) def test_end_time(self, fake_file_dict): """Test end time property.""" reader = self._create_reader_for_resolutions([fake_file_dict["iop_8d"]]) - assert reader.end_time == datetime(2021, 8, 31, 23, 59, 0) + assert reader.end_time == dt.datetime(2021, 8, 31, 23, 59, 0) def test_correct_dimnames(self, fake_file_dict): """Check that the loaded dimension names are correct.""" diff --git a/satpy/tests/reader_tests/test_olci_nc.py b/satpy/tests/reader_tests/test_olci_nc.py index 2f37fb2098..fe384b9dc3 100644 --- a/satpy/tests/reader_tests/test_olci_nc.py +++ b/satpy/tests/reader_tests/test_olci_nc.py @@ -268,9 +268,108 @@ def test_bitflags(self): "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"] mask = reduce(np.logical_or, [bflags[item] for item in items]) - expected = np.array([True, False, True, True, True, True, False, - False, True, True, False, False, False, False, - False, False, False, True, False, True, False, - False, False, True, True, False, False, True, + expected = np.array([True, False, True, True, True, True, False, + False, True, True, False, False, False, False, + False, False, False, True, False, True, False, + False, False, True, True, False, False, True, + False]) + assert all(mask == 
expected) + + def test_bitflags_with_flags_from_array(self): + """Test reading bitflags from DataArray attributes.""" + from functools import reduce + + import numpy as np + import xarray as xr + + from satpy.readers.olci_nc import BitFlags + + flag_masks = [1, 2, 4, 8, 4194304, 8388608, 16777216, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, + 32768, 65536, 131072, 262144, 524288, 1048576, 2097152, 33554432, 67108864, 134217728, 268435456, + 536870912, 4294967296, 8589934592, 17179869184, 34359738368, 68719476736, 137438953472, + 274877906944, 549755813888, 1099511627776, 2199023255552, 4398046511104, 8796093022208, + 17592186044416, 35184372088832, 70368744177664, 140737488355328, 281474976710656, 562949953421312, + 1125899906842624, 2251799813685248, 4503599627370496, 9007199254740992, 18014398509481984, + 36028797018963968] + flag_meanings = ("INVALID WATER LAND CLOUD TURBID_ATM CLOUD_AMBIGUOUS CLOUD_MARGIN SNOW_ICE INLAND_WATER " + "COASTLINE TIDAL COSMETIC SUSPECT HISOLZEN SATURATED MEGLINT HIGHGLINT WHITECAPS ADJAC " + "WV_FAIL PAR_FAIL AC_FAIL OC4ME_FAIL OCNN_FAIL KDM_FAIL BPAC_ON WHITE_SCATT LOWRW HIGHRW " + "IOP_LSD_FAIL ANNOT_ANGSTROM ANNOT_AERO_B ANNOT_ABSO_D ANNOT_ACLIM ANNOT_ABSOA ANNOT_MIXR1 " + "ANNOT_DROUT ANNOT_TAU06 RWNEG_O1 RWNEG_O2 RWNEG_O3 RWNEG_O4 RWNEG_O5 RWNEG_O6 RWNEG_O7 " + "RWNEG_O8 RWNEG_O9 RWNEG_O10 RWNEG_O11 RWNEG_O12 RWNEG_O16 RWNEG_O17 RWNEG_O18 RWNEG_O21") + + bits = np.array([1 << x for x in range(int(np.log2(max(flag_masks))) + 1)]) + bits_array = xr.DataArray(bits, attrs=dict(flag_masks=flag_masks, flag_meanings=flag_meanings)) + bflags = BitFlags(bits_array) + + items = ["INVALID", "TURBID_ATM"] + mask = reduce(np.logical_or, [bflags[item] for item in items]) + + assert mask[0].item() is True + assert any(mask[1:22]) is False + assert mask[22].item() is True + assert any(mask[23:]) is False + + def test_bitflags_with_dataarray_without_flags(self): + """Test the BitFlags class.""" + from functools import reduce + + 
import numpy as np + import xarray as xr + + from satpy.readers.olci_nc import BitFlags + flag_list = ["INVALID", "WATER", "LAND", "CLOUD", "SNOW_ICE", + "INLAND_WATER", "TIDAL", "COSMETIC", "SUSPECT", "HISOLZEN", + "SATURATED", "MEGLINT", "HIGHGLINT", "WHITECAPS", + "ADJAC", "WV_FAIL", "PAR_FAIL", "AC_FAIL", "OC4ME_FAIL", + "OCNN_FAIL", "Extra_1", "KDM_FAIL", "Extra_2", + "CLOUD_AMBIGUOUS", "CLOUD_MARGIN", "BPAC_ON", + "WHITE_SCATT", "LOWRW", "HIGHRW"] + + bits = np.array([1 << x for x in range(len(flag_list))]) + + bflags = BitFlags(xr.DataArray(bits)) + + items = ["INVALID", "SNOW_ICE", "INLAND_WATER", "SUSPECT", + "AC_FAIL", "CLOUD", "HISOLZEN", "OCNN_FAIL", + "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"] + + mask = reduce(np.logical_or, [bflags[item] for item in items]) + expected = np.array([True, False, True, True, True, True, False, + False, True, True, False, False, False, False, + False, False, False, True, False, True, False, + False, False, True, True, False, False, True, + False]) + assert all(mask == expected) + + + def test_bitflags_with_custom_flag_list(self): + """Test the BitFlags class providing a flag list.""" + from functools import reduce + + import numpy as np + + from satpy.readers.olci_nc import BitFlags + flag_list = ["INVALID", "WATER", "LAND", "CLOUD", "SNOW_ICE", + "INLAND_WATER", "TIDAL", "COSMETIC", "SUSPECT", "HISOLZEN", + "SATURATED", "MEGLINT", "HIGHGLINT", "WHITECAPS", + "ADJAC", "WV_FAIL", "PAR_FAIL", "AC_FAIL", "OC4ME_FAIL", + "OCNN_FAIL", "Extra_1", "KDM_FAIL", "Extra_2", + "CLOUD_AMBIGUOUS", "CLOUD_MARGIN", "BPAC_ON", + "WHITE_SCATT", "LOWRW", "HIGHRW"] + + bits = np.array([1 << x for x in range(len(flag_list))]) + + bflags = BitFlags(bits, flag_list) + + items = ["INVALID", "SNOW_ICE", "INLAND_WATER", "SUSPECT", + "AC_FAIL", "CLOUD", "HISOLZEN", "OCNN_FAIL", + "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"] + + mask = reduce(np.logical_or, [bflags[item] for item in items]) + expected = np.array([True, False, True, 
True, True, True, False, + False, True, True, False, False, False, False, + False, False, False, True, False, True, False, + False, False, True, True, False, False, True, False]) assert all(mask == expected) diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index 80fb581db7..106687a509 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -15,8 +15,8 @@ # satpy. If not, see . """Module for testing the satpy.readers.osisaf_l3 module.""" +import datetime as dt import os -from datetime import datetime import numpy as np import pytest @@ -206,8 +206,8 @@ def setup_method(self): super().setup_method(tester="ice") self.filename_info = {"grid": "ease"} self.filetype_info = {"file_type": "osi_sea_ice_conc"} - self.good_start_time = datetime(2022, 12, 15, 0, 0, 0) - self.good_stop_time = datetime(2022, 12, 16, 0, 0, 0) + self.good_start_time = dt.datetime(2022, 12, 15, 0, 0, 0) + self.good_stop_time = dt.datetime(2022, 12, 16, 0, 0, 0) self.varname = "ice_conc" self.stdname = "sea_ice_area_fraction" self.fillv = -999 @@ -260,8 +260,8 @@ def setup_method(self): super().setup_method(tester="flux_stere") self.filename_info = {"grid": "polstere"} self.filetype_info = {"file_type": "osi_radflux_stere"} - self.good_start_time = datetime(2023, 10, 10, 0, 0, 0) - self.good_stop_time = datetime(2023, 10, 10, 23, 59, 59) + self.good_start_time = dt.datetime(2023, 10, 10, 0, 0, 0) + self.good_stop_time = dt.datetime(2023, 10, 10, 23, 59, 59) self.varname = "ssi" self.stdname = "surface_downwelling_shortwave_flux_in_air" self.fillv = -999.99 @@ -295,8 +295,8 @@ def setup_method(self): super().setup_method(tester="flux_geo") self.filename_info = {} self.filetype_info = {"file_type": "osi_radflux_grid"} - self.good_start_time = datetime(2022, 12, 28, 18, 30, 0) - self.good_stop_time = datetime(2022, 12, 28, 19, 30, 0) + self.good_start_time = dt.datetime(2022, 12, 28, 18, 30, 0) + 
self.good_stop_time = dt.datetime(2022, 12, 28, 19, 30, 0) self.varname = "ssi" self.stdname = "surface_downwelling_shortwave_flux_in_air" self.fillv = -32768 @@ -332,8 +332,8 @@ def setup_method(self): super().setup_method(tester="sst") self.filename_info = {} self.filetype_info = {"file_type": "osi_sst"} - self.good_start_time = datetime(2022, 12, 15, 0, 0, 0) - self.good_stop_time = datetime(2022, 12, 16, 0, 0, 0) + self.good_start_time = dt.datetime(2022, 12, 15, 0, 0, 0) + self.good_stop_time = dt.datetime(2022, 12, 16, 0, 0, 0) self.varname = "surface_temperature" self.stdname = "sea_ice_surface_temperature" self.fillv = -32768 diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index 4ac4d97cfe..9e24c00c4e 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -17,202 +17,295 @@ # satpy. If not, see . """Module for testing the satpy.readers.sar-c_safe module.""" -import unittest -import unittest.mock as mock +import os +from datetime import datetime from enum import Enum -from io import BytesIO +from pathlib import Path -import dask.array as da import numpy as np -import xarray as xr +import pytest +import yaml +from satpy._config import PACKAGE_CONFIG_PATH from satpy.dataset import DataQuery -from satpy.readers.sar_c_safe import SAFEXMLAnnotation, SAFEXMLCalibration, SAFEXMLNoise +from satpy.dataset.dataid import DataID +from satpy.readers.sar_c_safe import Calibrator, Denoiser, SAFEXMLAnnotation + +rasterio = pytest.importorskip("rasterio") + + +dirname_suffix = "20190201T024655_20190201T024720_025730_02DC2A_AE07" +filename_suffix = "20190201t024655-20190201t024720-025730-02dc2a" + +START_TIME = datetime(2019, 2, 1, 2, 46, 55) +END_TIME = datetime(2019, 2, 1, 2, 47, 20) + +@pytest.fixture(scope="module") +def granule_directory(tmp_path_factory): + """Create a granule directory.""" + data_dir = tmp_path_factory.mktemp("data") + gdir = data_dir / 
f"S1A_IW_GRDH_1SDV_{dirname_suffix}.SAFE" + os.mkdir(gdir) + return gdir + + +@pytest.fixture(scope="module") +def annotation_file(granule_directory): + """Create an annotation file.""" + ann_dir = granule_directory / "annotation" + os.makedirs(ann_dir, exist_ok=True) + annotation_file = ann_dir / f"s1a-iw-grd-vv-{filename_suffix}-001.xml" + with open(annotation_file, "wb") as fd: + fd.write(annotation_xml) + return annotation_file + + +@pytest.fixture(scope="module") +def annotation_filehandler(annotation_file): + """Create an annotation filehandler.""" + filename_info = dict(start_time=START_TIME, end_time=END_TIME, polarization="vv") + return SAFEXMLAnnotation(annotation_file, filename_info, None) + + +@pytest.fixture(scope="module") +def calibration_file(granule_directory): + """Create a calibration file.""" + cal_dir = granule_directory / "annotation" / "calibration" + os.makedirs(cal_dir, exist_ok=True) + calibration_file = cal_dir / f"calibration-s1a-iw-grd-vv-{filename_suffix}-001.xml" + with open(calibration_file, "wb") as fd: + fd.write(calibration_xml) + return Path(calibration_file) + +@pytest.fixture(scope="module") +def calibration_filehandler(calibration_file, annotation_filehandler): + """Create a calibration filehandler.""" + filename_info = dict(start_time=START_TIME, end_time=END_TIME, polarization="vv") + return Calibrator(calibration_file, + filename_info, + None, + image_shape=annotation_filehandler.image_shape) + +@pytest.fixture(scope="module") +def noise_file(granule_directory): + """Create a noise file.""" + noise_dir = granule_directory / "annotation" / "calibration" + os.makedirs(noise_dir, exist_ok=True) + noise_file = noise_dir / f"noise-s1a-iw-grd-vv-{filename_suffix}-001.xml" + with open(noise_file, "wb") as fd: + fd.write(noise_xml) + return noise_file + + +@pytest.fixture(scope="module") +def noise_filehandler(noise_file, annotation_filehandler): + """Create a noise filehandler.""" + filename_info = dict(start_time=START_TIME, 
end_time=END_TIME, polarization="vv") + return Denoiser(noise_file, filename_info, None, image_shape=annotation_filehandler.image_shape) + + +@pytest.fixture(scope="module") +def noise_with_holes_filehandler(annotation_filehandler, tmpdir_factory): + """Create a noise filehandler from data with holes.""" + filename_info = dict(start_time=START_TIME, end_time=END_TIME, polarization="vv") + noise_xml_file = tmpdir_factory.mktemp("data").join("noise_with_holes.xml") + with open(noise_xml_file, "wb") as fd: + fd.write(noise_xml_with_holes) + noise_filehandler = Denoiser(noise_xml_file, + filename_info, None, + image_shape=annotation_filehandler.image_shape) + return noise_filehandler + + + +@pytest.fixture(scope="module") +def measurement_file(granule_directory): + """Create a tiff measurement file.""" + GCP = rasterio.control.GroundControlPoint + + gcps = [GCP(0, 0, 0, 0, 0), + GCP(0, 3, 1, 0, 0), + GCP(3, 0, 0, 1, 0), + GCP(3, 3, 1, 1, 0), + GCP(0, 7, 2, 0, 0), + GCP(3, 7, 2, 1, 0), + GCP(7, 7, 2, 2, 0), + GCP(7, 3, 1, 2, 0), + GCP(7, 0, 0, 2, 0), + GCP(0, 15, 3, 0, 0), + GCP(3, 15, 3, 1, 0), + GCP(7, 15, 3, 2, 0), + GCP(15, 15, 3, 3, 0), + GCP(15, 7, 2, 3, 0), + GCP(15, 3, 1, 3, 0), + GCP(15, 0, 0, 3, 0), + ] + Z = np.linspace(0, 30000, 100, dtype=np.uint16).reshape((10, 10)) + m_dir = granule_directory / "measurement" + os.makedirs(m_dir, exist_ok=True) + filename = m_dir / f"s1a-iw-grd-vv-{filename_suffix}-001.tiff" + with rasterio.open( + filename, + "w", + driver="GTiff", + height=Z.shape[0], + width=Z.shape[1], + count=1, + dtype=Z.dtype, + crs="+proj=latlong", + gcps=gcps) as dst: + dst.write(Z, 1) + return Path(filename) + + +@pytest.fixture(scope="module") +def measurement_filehandler(measurement_file, noise_filehandler, calibration_filehandler): + """Create a measurement filehandler.""" + filename_info = {"mission_id": "S1A", "dataset_name": "foo", "start_time": START_TIME, "end_time": END_TIME, + "polarization": "vv"} + filetype_info = None + from 
satpy.readers.sar_c_safe import SAFEGRD + filehandler = SAFEGRD(measurement_file, + filename_info, + filetype_info, + calibration_filehandler, + noise_filehandler) + return filehandler + + + +expected_longitudes = np.array([[3.79492915e-16, 5.91666667e-01, 9.09722222e-01, + 1.00000000e+00, 9.08333333e-01, 6.80555556e-01, + 3.62500000e-01, 8.32667268e-17, -3.61111111e-01, + -6.75000000e-01, -8.95833333e-01, -9.77777778e-01, + -8.75000000e-01, -5.41666667e-01, 6.80555556e-02, + 1.00000000e+00], + [1.19166667e+00, 1.32437500e+00, 1.36941964e+00, + 1.34166667e+00, 1.25598214e+00, 1.12723214e+00, + 9.70282738e-01, 8.00000000e-01, 6.31250000e-01, + 4.78898810e-01, 3.57812500e-01, 2.82857143e-01, + 2.68898810e-01, 3.30803571e-01, 4.83437500e-01, + 7.41666667e-01], + [1.82638889e+00, 1.77596726e+00, 1.72667765e+00, + 1.67757937e+00, 1.62773172e+00, 1.57619402e+00, + 1.52202558e+00, 1.46428571e+00, 1.40203373e+00, + 1.33432894e+00, 1.26023065e+00, 1.17879819e+00, + 1.08909084e+00, 9.90167942e-01, 8.81088790e-01, + 7.60912698e-01], + [2.00000000e+00, 1.99166667e+00, 1.99305556e+00, + 2.00000000e+00, 2.00833333e+00, 2.01388889e+00, + 2.01250000e+00, 2.00000000e+00, 1.97222222e+00, + 1.92500000e+00, 1.85416667e+00, 1.75555556e+00, + 1.62500000e+00, 1.45833333e+00, 1.25138889e+00, + 1.00000000e+00], + [1.80833333e+00, 2.01669643e+00, 2.18011267e+00, + 2.30119048e+00, 2.38253827e+00, 2.42676446e+00, + 2.43647747e+00, 2.41428571e+00, 2.36279762e+00, + 2.28462160e+00, 2.18236607e+00, 2.05863946e+00, + 1.91605017e+00, 1.75720663e+00, 1.58471726e+00, + 1.40119048e+00], + [1.34722222e+00, 1.89627976e+00, 2.29940830e+00, + 2.57341270e+00, 2.73509779e+00, 2.80126842e+00, + 2.78872945e+00, 2.71428571e+00, 2.59474206e+00, + 2.44690334e+00, 2.28757440e+00, 2.13356009e+00, + 2.00166525e+00, 1.90869473e+00, 1.87145337e+00, + 1.90674603e+00], + [7.12500000e-01, 1.67563988e+00, 2.36250177e+00, + 2.80892857e+00, 3.05076318e+00, 3.12384850e+00, + 3.06402742e+00, 2.90714286e+00, 2.68903770e+00, 
+ 2.44555485e+00, 2.21253720e+00, 2.02582766e+00, + 1.92126913e+00, 1.93470451e+00, 2.10197669e+00, + 2.45892857e+00], + [5.55111512e-16, 1.40000000e+00, 2.38095238e+00, + 3.00000000e+00, 3.31428571e+00, 3.38095238e+00, + 3.25714286e+00, 3.00000000e+00, 2.66666667e+00, + 2.31428571e+00, 2.00000000e+00, 1.78095238e+00, + 1.71428571e+00, 1.85714286e+00, 2.26666667e+00, + 3.00000000e+00], + [-6.94444444e-01, 1.11458333e+00, 2.36631944e+00, + 3.13888889e+00, 3.51041667e+00, 3.55902778e+00, + 3.36284722e+00, 3.00000000e+00, 2.54861111e+00, + 2.08680556e+00, 1.69270833e+00, 1.44444444e+00, + 1.42013889e+00, 1.69791667e+00, 2.35590278e+00, + 3.47222222e+00], + [-1.27500000e+00, 8.64613095e-01, 2.33016227e+00, + 3.21785714e+00, 3.62390731e+00, 3.64452239e+00, + 3.37591199e+00, 2.91428571e+00, 2.35585317e+00, + 1.79682398e+00, 1.33340774e+00, 1.06181406e+00, + 1.07825255e+00, 1.47893282e+00, 2.36006448e+00, + 3.81785714e+00], + [-1.64583333e+00, 6.95312500e-01, 2.28404018e+00, + 3.22916667e+00, 3.63950893e+00, 3.62388393e+00, + 3.29110863e+00, 2.75000000e+00, 2.10937500e+00, + 1.47805060e+00, 9.64843750e-01, 6.78571429e-01, + 7.28050595e-01, 1.22209821e+00, 2.26953125e+00, + 3.97916667e+00], + [-1.71111111e+00, 6.51904762e-01, 2.23951247e+00, + 3.16507937e+00, 3.54197279e+00, 3.48356009e+00, + 3.10320862e+00, 2.51428571e+00, 1.83015873e+00, + 1.16419501e+00, 6.29761905e-01, 3.40226757e-01, + 4.08956916e-01, 9.49319728e-01, 2.07468254e+00, + 3.89841270e+00], + [-1.37500000e+00, 7.79613095e-01, 2.20813846e+00, + 3.01785714e+00, 3.31605017e+00, 3.20999858e+00, + 2.80698342e+00, 2.21428571e+00, 1.53918651e+00, + 8.88966837e-01, 3.70907738e-01, 9.22902494e-02, + 1.60395408e-01, 6.82504252e-01, 1.76589782e+00, + 3.51785714e+00], + [-5.41666667e-01, 1.12366071e+00, 2.20147747e+00, + 2.77976190e+00, 2.94649235e+00, 2.78964711e+00, + 2.39720451e+00, 1.85714286e+00, 1.25744048e+00, + 6.86075680e-01, 2.31026786e-01, -1.97278912e-02, + 2.17899660e-02, 4.43558673e-01, 1.33355655e+00, + 
2.77976190e+00], + [8.84722222e-01, 1.72927083e+00, 2.23108879e+00, + 2.44305556e+00, 2.41805060e+00, 2.20895337e+00, + 1.86864335e+00, 1.45000000e+00, 1.00590278e+00, + 5.89231151e-01, 2.52864583e-01, 4.96825397e-02, + 3.25644841e-02, 2.54389881e-01, 7.68038194e-01, + 1.62638889e+00], + [3.00000000e+00, 2.64166667e+00, 2.30853175e+00, + 2.00000000e+00, 1.71547619e+00, 1.45436508e+00, + 1.21607143e+00, 1.00000000e+00, 8.05555556e-01, + 6.32142857e-01, 4.79166667e-01, 3.46031746e-01, + 2.32142857e-01, 1.36904762e-01, 5.97222222e-02, + 0.00000000e+00]]) -class TestSAFEGRD(unittest.TestCase): +class Calibration(Enum): + """Calibration levels.""" + + gamma = 1 + sigma_nought = 2 + beta_nought = 3 + dn = 4 + + +class TestSAFEGRD: """Test the SAFE GRD file handler.""" - @mock.patch("rasterio.open") - def setUp(self, mocked_rio_open): - """Set up the test case.""" - from satpy.readers.sar_c_safe import SAFEGRD - filename_info = {"mission_id": "S1A", "dataset_name": "foo", "start_time": 0, "end_time": 0, - "polarization": "vv"} - filetype_info = "bla" - self.noisefh = mock.MagicMock() - self.noisefh.get_noise_correction.return_value = xr.DataArray(np.zeros((2, 2)), dims=["y", "x"]) - self.calfh = mock.MagicMock() - self.calfh.get_calibration_constant.return_value = 1 - self.calfh.get_calibration.return_value = xr.DataArray(np.ones((2, 2)), dims=["y", "x"]) - self.annotationfh = mock.MagicMock() - - self.test_fh = SAFEGRD("S1A_IW_GRDH_1SDV_20190201T024655_20190201T024720_025730_02DC2A_AE07.SAFE/measurement/" - "s1a-iw-grd-vv-20190201t024655-20190201t024720-025730-02dc2a-001.tiff", - filename_info, filetype_info, self.calfh, self.noisefh, self.annotationfh) - self.mocked_rio_open = mocked_rio_open - - def test_instantiate(self): - """Test initialization of file handlers.""" - assert self.test_fh._polarization == "vv" - assert self.test_fh.calibration == self.calfh - assert self.test_fh.noise == self.noisefh - self.mocked_rio_open.assert_called() - - 
@mock.patch("xarray.open_dataset") - def test_read_calibrated_natural(self, mocked_xarray_open): - """Test the calibration routines.""" - calibration = mock.MagicMock() - calibration.name = "sigma_nought" - mocked_xarray_open.return_value.__getitem__.return_value = xr.DataArray(da.from_array(np.array([[0, 1], - [2, 3]])), - dims=["y", "x"]) - xarr = self.test_fh.get_dataset(DataQuery(name="measurement", polarization="vv", - calibration=calibration, quantity="natural"), info=dict()) - np.testing.assert_allclose(xarr, [[np.nan, 2], [5, 10]]) - - @mock.patch("xarray.open_dataset") - def test_read_calibrated_dB(self, mocked_xarray_open): + def test_read_calibrated_natural(self, measurement_filehandler): """Test the calibration routines.""" - calibration = mock.MagicMock() - calibration.name = "sigma_nought" - mocked_xarray_open.return_value.__getitem__.return_value = xr.DataArray(da.from_array(np.array([[0, 1], - [2, 3]])), - dims=["y", "x"]) - xarr = self.test_fh.get_dataset(DataQuery(name="measurement", polarization="vv", - calibration=calibration, quantity="dB"), info=dict()) - np.testing.assert_allclose(xarr, [[np.nan, 3.0103], [6.9897, 10]]) - - def test_read_lon_lats(self): - """Test reading lons and lats.""" + calibration = Calibration.sigma_nought + xarr = measurement_filehandler.get_dataset(DataQuery(name="measurement", polarization="vv", + calibration=calibration, quantity="natural"), info=dict()) + expected = np.array([[np.nan, 0.02707529], [2.55858416, 3.27611055]]) + np.testing.assert_allclose(xarr.values[:2, :2], expected, rtol=2e-7) - class FakeGCP: - - def __init__(self, *args): - self.row, self.col, self.x, self.y, self.z = args - - gcps = [FakeGCP(0, 0, 0, 0, 0), - FakeGCP(0, 3, 1, 0, 0), - FakeGCP(3, 0, 0, 1, 0), - FakeGCP(3, 3, 1, 1, 0), - FakeGCP(0, 7, 2, 0, 0), - FakeGCP(3, 7, 2, 1, 0), - FakeGCP(7, 7, 2, 2, 0), - FakeGCP(7, 3, 1, 2, 0), - FakeGCP(7, 0, 0, 2, 0), - FakeGCP(0, 15, 3, 0, 0), - FakeGCP(3, 15, 3, 1, 0), - FakeGCP(7, 15, 3, 2, 0), - 
FakeGCP(15, 15, 3, 3, 0), - FakeGCP(15, 7, 2, 3, 0), - FakeGCP(15, 3, 1, 3, 0), - FakeGCP(15, 0, 0, 3, 0), - ] - - crs = dict(init="epsg:4326") - - self.mocked_rio_open.return_value.gcps = [gcps, crs] - self.mocked_rio_open.return_value.shape = [16, 16] + def test_read_calibrated_dB(self, measurement_filehandler): + """Test the calibration routines.""" + calibration = Calibration.sigma_nought + xarr = measurement_filehandler.get_dataset(DataQuery(name="measurement", polarization="vv", + calibration=calibration, quantity="dB"), info=dict()) + expected = np.array([[np.nan, -15.674268], [4.079997, 5.153585]]) + np.testing.assert_allclose(xarr.values[:2, :2], expected) + def test_read_lon_lats(self, measurement_filehandler): + """Test reading lons and lats.""" query = DataQuery(name="longitude", polarization="vv") - xarr = self.test_fh.get_dataset(query, info=dict()) - expected = np.array([[3.79492915e-16, 5.91666667e-01, 9.09722222e-01, - 1.00000000e+00, 9.08333333e-01, 6.80555556e-01, - 3.62500000e-01, 8.32667268e-17, -3.61111111e-01, - -6.75000000e-01, -8.95833333e-01, -9.77777778e-01, - -8.75000000e-01, -5.41666667e-01, 6.80555556e-02, - 1.00000000e+00], - [1.19166667e+00, 1.32437500e+00, 1.36941964e+00, - 1.34166667e+00, 1.25598214e+00, 1.12723214e+00, - 9.70282738e-01, 8.00000000e-01, 6.31250000e-01, - 4.78898810e-01, 3.57812500e-01, 2.82857143e-01, - 2.68898810e-01, 3.30803571e-01, 4.83437500e-01, - 7.41666667e-01], - [1.82638889e+00, 1.77596726e+00, 1.72667765e+00, - 1.67757937e+00, 1.62773172e+00, 1.57619402e+00, - 1.52202558e+00, 1.46428571e+00, 1.40203373e+00, - 1.33432894e+00, 1.26023065e+00, 1.17879819e+00, - 1.08909084e+00, 9.90167942e-01, 8.81088790e-01, - 7.60912698e-01], - [2.00000000e+00, 1.99166667e+00, 1.99305556e+00, - 2.00000000e+00, 2.00833333e+00, 2.01388889e+00, - 2.01250000e+00, 2.00000000e+00, 1.97222222e+00, - 1.92500000e+00, 1.85416667e+00, 1.75555556e+00, - 1.62500000e+00, 1.45833333e+00, 1.25138889e+00, - 1.00000000e+00], - 
[1.80833333e+00, 2.01669643e+00, 2.18011267e+00, - 2.30119048e+00, 2.38253827e+00, 2.42676446e+00, - 2.43647747e+00, 2.41428571e+00, 2.36279762e+00, - 2.28462160e+00, 2.18236607e+00, 2.05863946e+00, - 1.91605017e+00, 1.75720663e+00, 1.58471726e+00, - 1.40119048e+00], - [1.34722222e+00, 1.89627976e+00, 2.29940830e+00, - 2.57341270e+00, 2.73509779e+00, 2.80126842e+00, - 2.78872945e+00, 2.71428571e+00, 2.59474206e+00, - 2.44690334e+00, 2.28757440e+00, 2.13356009e+00, - 2.00166525e+00, 1.90869473e+00, 1.87145337e+00, - 1.90674603e+00], - [7.12500000e-01, 1.67563988e+00, 2.36250177e+00, - 2.80892857e+00, 3.05076318e+00, 3.12384850e+00, - 3.06402742e+00, 2.90714286e+00, 2.68903770e+00, - 2.44555485e+00, 2.21253720e+00, 2.02582766e+00, - 1.92126913e+00, 1.93470451e+00, 2.10197669e+00, - 2.45892857e+00], - [5.55111512e-16, 1.40000000e+00, 2.38095238e+00, - 3.00000000e+00, 3.31428571e+00, 3.38095238e+00, - 3.25714286e+00, 3.00000000e+00, 2.66666667e+00, - 2.31428571e+00, 2.00000000e+00, 1.78095238e+00, - 1.71428571e+00, 1.85714286e+00, 2.26666667e+00, - 3.00000000e+00], - [-6.94444444e-01, 1.11458333e+00, 2.36631944e+00, - 3.13888889e+00, 3.51041667e+00, 3.55902778e+00, - 3.36284722e+00, 3.00000000e+00, 2.54861111e+00, - 2.08680556e+00, 1.69270833e+00, 1.44444444e+00, - 1.42013889e+00, 1.69791667e+00, 2.35590278e+00, - 3.47222222e+00], - [-1.27500000e+00, 8.64613095e-01, 2.33016227e+00, - 3.21785714e+00, 3.62390731e+00, 3.64452239e+00, - 3.37591199e+00, 2.91428571e+00, 2.35585317e+00, - 1.79682398e+00, 1.33340774e+00, 1.06181406e+00, - 1.07825255e+00, 1.47893282e+00, 2.36006448e+00, - 3.81785714e+00], - [-1.64583333e+00, 6.95312500e-01, 2.28404018e+00, - 3.22916667e+00, 3.63950893e+00, 3.62388393e+00, - 3.29110863e+00, 2.75000000e+00, 2.10937500e+00, - 1.47805060e+00, 9.64843750e-01, 6.78571429e-01, - 7.28050595e-01, 1.22209821e+00, 2.26953125e+00, - 3.97916667e+00], - [-1.71111111e+00, 6.51904762e-01, 2.23951247e+00, - 3.16507937e+00, 3.54197279e+00, 3.48356009e+00, - 
3.10320862e+00, 2.51428571e+00, 1.83015873e+00, - 1.16419501e+00, 6.29761905e-01, 3.40226757e-01, - 4.08956916e-01, 9.49319728e-01, 2.07468254e+00, - 3.89841270e+00], - [-1.37500000e+00, 7.79613095e-01, 2.20813846e+00, - 3.01785714e+00, 3.31605017e+00, 3.20999858e+00, - 2.80698342e+00, 2.21428571e+00, 1.53918651e+00, - 8.88966837e-01, 3.70907738e-01, 9.22902494e-02, - 1.60395408e-01, 6.82504252e-01, 1.76589782e+00, - 3.51785714e+00], - [-5.41666667e-01, 1.12366071e+00, 2.20147747e+00, - 2.77976190e+00, 2.94649235e+00, 2.78964711e+00, - 2.39720451e+00, 1.85714286e+00, 1.25744048e+00, - 6.86075680e-01, 2.31026786e-01, -1.97278912e-02, - 2.17899660e-02, 4.43558673e-01, 1.33355655e+00, - 2.77976190e+00], - [8.84722222e-01, 1.72927083e+00, 2.23108879e+00, - 2.44305556e+00, 2.41805060e+00, 2.20895337e+00, - 1.86864335e+00, 1.45000000e+00, 1.00590278e+00, - 5.89231151e-01, 2.52864583e-01, 4.96825397e-02, - 3.25644841e-02, 2.54389881e-01, 7.68038194e-01, - 1.62638889e+00], - [3.00000000e+00, 2.64166667e+00, 2.30853175e+00, - 2.00000000e+00, 1.71547619e+00, 1.45436508e+00, - 1.21607143e+00, 1.00000000e+00, 8.05555556e-01, - 6.32142857e-01, 4.79166667e-01, 3.46031746e-01, - 2.32142857e-01, 1.36904762e-01, 5.97222222e-02, - 0.00000000e+00]]) - np.testing.assert_allclose(xarr.values, expected) + xarr = measurement_filehandler.get_dataset(query, info=dict()) + expected = expected_longitudes + np.testing.assert_allclose(xarr.values, expected[:10, :10], atol=1e-3) annotation_xml = b""" @@ -622,15 +715,11 @@ def __init__(self, *args): """ -class TestSAFEXMLNoise(unittest.TestCase): +class TestSAFEXMLNoise: """Test the SAFE XML Noise file handler.""" - def setUp(self): + def setup_method(self): """Set up the test case.""" - filename_info = dict(start_time=None, end_time=None, polarization="vv") - self.annotation_fh = SAFEXMLAnnotation(BytesIO(annotation_xml), filename_info, mock.MagicMock()) - self.noise_fh = SAFEXMLNoise(BytesIO(noise_xml), filename_info, mock.MagicMock(), 
self.annotation_fh) - self.expected_azimuth_noise = np.array([[np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan], [np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan], [2, 2, 3, 3, 3, 4, 4, 4, 4, np.nan], @@ -655,8 +744,6 @@ def setUp(self): [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], ]) - self.noise_fh_with_holes = SAFEXMLNoise(BytesIO(noise_xml_with_holes), filename_info, mock.MagicMock(), - self.annotation_fh) self.expected_azimuth_noise_with_holes = np.array( [[np.nan, np.nan, np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan], [2, 2, np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan], @@ -670,112 +757,150 @@ def setUp(self): [10, np.nan, 11, 11, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan] ]) - def test_azimuth_noise_array(self): + def test_azimuth_noise_array(self, noise_filehandler): """Test reading the azimuth-noise array.""" - res = self.noise_fh.azimuth_noise_reader.read_azimuth_noise_array() + res = noise_filehandler.azimuth_noise_reader.read_azimuth_noise_array() np.testing.assert_array_equal(res, self.expected_azimuth_noise) - def test_azimuth_noise_array_with_holes(self): + def test_azimuth_noise_array_with_holes(self, noise_with_holes_filehandler): """Test reading the azimuth-noise array.""" - res = self.noise_fh_with_holes.azimuth_noise_reader.read_azimuth_noise_array() + res = noise_with_holes_filehandler.azimuth_noise_reader.read_azimuth_noise_array() np.testing.assert_array_equal(res, self.expected_azimuth_noise_with_holes) - def test_range_noise_array(self): + def test_range_noise_array(self, noise_filehandler): """Test reading the range-noise array.""" - res = self.noise_fh.read_range_noise_array(chunks=5) + res = noise_filehandler.read_range_noise_array(chunks=5) np.testing.assert_allclose(res, self.expected_range_noise) - def test_get_noise_dataset(self): + def test_get_noise_dataset(self, noise_filehandler): """Test using get_dataset for the noise.""" query = DataQuery(name="noise", polarization="vv") - res = 
self.noise_fh.get_dataset(query, {}) + res = noise_filehandler.get_dataset(query, {}) np.testing.assert_allclose(res, self.expected_azimuth_noise * self.expected_range_noise) - def test_get_noise_dataset_has_right_chunk_size(self): + def test_get_noise_dataset_has_right_chunk_size(self, noise_filehandler): """Test using get_dataset for the noise has right chunk size in result.""" query = DataQuery(name="noise", polarization="vv") - res = self.noise_fh.get_dataset(query, {}, chunks=3) + res = noise_filehandler.get_dataset(query, {}, chunks=3) assert res.data.chunksize == (3, 3) -class Calibration(Enum): - """Calibration levels.""" - - gamma = 1 - sigma_nought = 2 - beta_nought = 3 - dn = 4 - - -class TestSAFEXMLCalibration(unittest.TestCase): +class TestSAFEXMLCalibration: """Test the SAFE XML Calibration file handler.""" - def setUp(self): - """Set up the test case.""" - filename_info = dict(start_time=None, end_time=None, polarization="vv") - self.annotation_fh = SAFEXMLAnnotation(BytesIO(annotation_xml), filename_info, mock.MagicMock()) - self.calibration_fh = SAFEXMLCalibration(BytesIO(calibration_xml), - filename_info, - mock.MagicMock(), - self.annotation_fh) + def setup_method(self): + """Set up testing.""" + self.expected_gamma = np.array([[1840.695, 1779.672, 1718.649, 1452.926, 1187.203, 1186.226, + 1185.249, 1184.276, 1183.303, 1181.365]]) * np.ones((10, 1)) - self.expected_gamma = np.array([[1840.695, 1779.672, 1718.649, 1452.926, 1187.203, 1186.226, - 1185.249, 1184.276, 1183.303, 1181.365]]) * np.ones((10, 1)) - def test_dn_calibration_array(self): + def test_dn_calibration_array(self, calibration_filehandler): """Test reading the dn calibration array.""" expected_dn = np.ones((10, 10)) * 1087 - res = self.calibration_fh.get_calibration(Calibration.dn, chunks=5) + res = calibration_filehandler.get_calibration(Calibration.dn, chunks=5) np.testing.assert_allclose(res, expected_dn) - def test_beta_calibration_array(self): + def 
test_beta_calibration_array(self, calibration_filehandler): """Test reading the beta calibration array.""" expected_beta = np.ones((10, 10)) * 1087 - res = self.calibration_fh.get_calibration(Calibration.beta_nought, chunks=5) + res = calibration_filehandler.get_calibration(Calibration.beta_nought, chunks=5) np.testing.assert_allclose(res, expected_beta) - def test_sigma_calibration_array(self): + def test_sigma_calibration_array(self, calibration_filehandler): """Test reading the sigma calibration array.""" expected_sigma = np.array([[1894.274, 1841.4335, 1788.593, 1554.4165, 1320.24, 1299.104, 1277.968, 1277.968, 1277.968, 1277.968]]) * np.ones((10, 1)) - res = self.calibration_fh.get_calibration(Calibration.sigma_nought, chunks=5) + res = calibration_filehandler.get_calibration(Calibration.sigma_nought, chunks=5) np.testing.assert_allclose(res, expected_sigma) - def test_gamma_calibration_array(self): + + def test_gamma_calibration_array(self, calibration_filehandler): """Test reading the gamma calibration array.""" - res = self.calibration_fh.get_calibration(Calibration.gamma, chunks=5) + res = calibration_filehandler.get_calibration(Calibration.gamma, chunks=5) np.testing.assert_allclose(res, self.expected_gamma) - def test_get_calibration_dataset(self): + def test_get_calibration_dataset(self, calibration_filehandler): """Test using get_dataset for the calibration.""" query = DataQuery(name="gamma", polarization="vv") - res = self.calibration_fh.get_dataset(query, {}) + res = calibration_filehandler.get_dataset(query, {}) np.testing.assert_allclose(res, self.expected_gamma) - def test_get_calibration_dataset_has_right_chunk_size(self): + def test_get_calibration_dataset_has_right_chunk_size(self, calibration_filehandler): """Test using get_dataset for the calibration yields array with right chunksize.""" query = DataQuery(name="gamma", polarization="vv") - res = self.calibration_fh.get_dataset(query, {}, chunks=3) + res = 
calibration_filehandler.get_dataset(query, {}, chunks=3) assert res.data.chunksize == (3, 3) np.testing.assert_allclose(res, self.expected_gamma) - def test_get_calibration_constant(self): + def test_get_calibration_constant(self, calibration_filehandler): """Test getting the calibration constant.""" query = DataQuery(name="calibration_constant", polarization="vv") - res = self.calibration_fh.get_dataset(query, {}) + res = calibration_filehandler.get_dataset(query, {}) assert res == 1 -class TestSAFEXMLAnnotation(unittest.TestCase): - """Test the SAFE XML Annotation file handler.""" - - def setUp(self): - """Set up the test case.""" - filename_info = dict(start_time=None, end_time=None, polarization="vv") - self.annotation_fh = SAFEXMLAnnotation(BytesIO(annotation_xml), filename_info, mock.MagicMock()) - - def test_incidence_angle(self): - """Test reading the incidence angle.""" - query = DataQuery(name="incidence_angle", polarization="vv") - res = self.annotation_fh.get_dataset(query, {}) - np.testing.assert_allclose(res, 19.18318046) +def test_incidence_angle(annotation_filehandler): + """Test reading the incidence angle in an annotation file.""" + query = DataQuery(name="incidence_angle", polarization="vv") + res = annotation_filehandler.get_dataset(query, {}) + np.testing.assert_allclose(res, 19.18318046) + + +def test_reading_from_reader(measurement_file, calibration_file, noise_file, annotation_file): + """Test reading using the reader defined in the config.""" + with open(Path(PACKAGE_CONFIG_PATH) / "readers" / "sar-c_safe.yaml") as fd: + config = yaml.load(fd, Loader=yaml.UnsafeLoader) + reader_class = config["reader"]["reader"] + reader = reader_class(config) + + files = [measurement_file, calibration_file, noise_file, annotation_file] + reader.create_storage_items(files) + query = DataQuery(name="measurement", polarization="vv", + calibration="sigma_nought", quantity="dB") + query = DataID(reader._id_keys, **query.to_dict()) + dataset_dict = 
reader.load([query]) + array = dataset_dict["measurement"] + np.testing.assert_allclose(array.attrs["area"].lons, expected_longitudes[:10, :10], atol=1e-3) + expected_db = np.array([[np.nan, -15.674268], [4.079997, 5.153585]]) + np.testing.assert_allclose(array.values[:2, :2], expected_db) + + +def test_filename_filtering_from_reader(measurement_file, calibration_file, noise_file, annotation_file, tmp_path): + """Test that filenames get filtered before filehandlers are created.""" + with open(Path(PACKAGE_CONFIG_PATH) / "readers" / "sar-c_safe.yaml") as fd: + config = yaml.load(fd, Loader=yaml.UnsafeLoader) + reader_class = config["reader"]["reader"] + filter_parameters = {"start_time": datetime(2019, 2, 1, 0, 0, 0), + "end_time": datetime(2019, 2, 1, 12, 0, 0)} + reader = reader_class(config, filter_parameters) + + spurious_file = (tmp_path / "S1A_IW_GRDH_1SDV_20190202T024655_20190202T024720_025730_02DC2A_AE07.SAFE" / + "measurement" / + "s1a-iw-grd-vv-20190202t024655-20190202t024720-025730-02dc2a-001.tiff") + + + files = [spurious_file, measurement_file, calibration_file, noise_file, annotation_file] + + files = reader.filter_selected_filenames(files) + assert spurious_file not in files + try: + reader.create_storage_items(files) + except rasterio.RasterioIOError as err: + pytest.fail(str(err)) + + +def test_swath_def_contains_gcps(measurement_file, calibration_file, noise_file, annotation_file): + """Test reading using the reader defined in the config.""" + with open(Path(PACKAGE_CONFIG_PATH) / "readers" / "sar-c_safe.yaml") as fd: + config = yaml.load(fd, Loader=yaml.UnsafeLoader) + reader_class = config["reader"]["reader"] + reader = reader_class(config) + + files = [measurement_file, calibration_file, noise_file, annotation_file] + reader.create_storage_items(files) + query = DataQuery(name="measurement", polarization="vv", + calibration="sigma_nought", quantity="dB") + query = DataID(reader._id_keys, **query.to_dict()) + dataset_dict = reader.load([query]) + 
array = dataset_dict["measurement"] + assert array.attrs["area"].attrs["gcps"] is not None diff --git a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py index ec7ac34be4..fb4fd6831b 100644 --- a/satpy/tests/reader_tests/test_satpy_cf_nc.py +++ b/satpy/tests/reader_tests/test_satpy_cf_nc.py @@ -15,9 +15,11 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Tests for the CF reader.""" + +import datetime as dt import warnings -from datetime import datetime import numpy as np import pytest @@ -66,8 +68,8 @@ def _create_test_netcdf(filename, resolution=742): "solar_zenith_angle": solar_zenith_angle_i } - tstart = datetime(2019, 4, 1, 12, 0) - tend = datetime(2019, 4, 1, 12, 15) + tstart = dt.datetime(2019, 4, 1, 12, 0) + tend = dt.datetime(2019, 4, 1, 12, 15) common_attrs = { "start_time": tstart, "end_time": tend, @@ -88,126 +90,173 @@ def _create_test_netcdf(filename, resolution=742): @pytest.fixture(scope="session") -def cf_scene(): - """Create a cf scene.""" - tstart = datetime(2019, 4, 1, 12, 0) - tend = datetime(2019, 4, 1, 12, 15) - data_visir = np.array([[1, 2], [3, 4]]) - z_visir = [1, 2, 3, 4, 5, 6, 7] - qual_data = [[1, 2, 3, 4, 5, 6, 7], - [1, 2, 3, 4, 5, 6, 7]] - time_vis006 = [1, 2] - lat = 33.0 * np.array([[1, 2], [3, 4]]) - lon = -13.0 * np.array([[1, 2], [3, 4]]) - - proj_dict = { - "a": 6378169.0, "b": 6356583.8, "h": 35785831.0, - "lon_0": 0.0, "proj": "geos", "units": "m" - } - x_size, y_size = data_visir.shape +def area(): + """Get area definition.""" area_extent = (339045.5577, 4365586.6063, 1068143.527, 4803645.4685) - area = AreaDefinition( - "test", - "test", - "test", - proj_dict, - x_size, - y_size, - area_extent, - ) + proj_dict = {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, + "lon_0": 0.0, "proj": "geos", "units": "m"} + area = AreaDefinition("test", + "test", + "test", + proj_dict, + 2, + 2, + area_extent) + return area - x, y = 
area.get_proj_coords() - y_visir = y[:, 0] - x_visir = x[0, :] - common_attrs = { - "start_time": tstart, - "end_time": tend, +@pytest.fixture(scope="session") +def common_attrs(area): + """Get common dataset attributes.""" + return { + "start_time": dt.datetime(2019, 4, 1, 12, 0, 0, 123456), + "end_time": dt.datetime(2019, 4, 1, 12, 15), "platform_name": "tirosn", "orbit_number": 99999, - "area": area + "area": area, + "my_timestamp": dt.datetime(2000, 1, 1) } - vis006 = xr.DataArray(data_visir, + +@pytest.fixture(scope="session") +def xy_coords(area): + """Get projection coordinates.""" + x, y = area.get_proj_coords() + y = y[:, 0] + x = x[0, :] + return x, y + + +@pytest.fixture(scope="session") +def vis006(xy_coords, common_attrs): + """Get VIS006 dataset.""" + x, y = xy_coords + attrs = { + "name": "image0", + "id_tag": "ch_r06", + "coordinates": "lat lon", + "resolution": 1000, + "calibration": "reflectance", + "wavelength": WavelengthRange(min=0.58, central=0.63, max=0.68, unit="µm"), + "orbital_parameters": { + "projection_longitude": 1, + "projection_latitude": 1, + "projection_altitude": 1, + "satellite_nominal_longitude": 1, + "satellite_nominal_latitude": 1, + "satellite_actual_longitude": 1, + "satellite_actual_latitude": 1, + "satellite_actual_altitude": 1, + "nadir_longitude": 1, + "nadir_latitude": 1, + "only_in_1": False + }, + "time_parameters": { + "nominal_start_time": common_attrs["start_time"], + "nominal_end_time": common_attrs["end_time"] + } + } + coords = {"y": y, "x": x, "acq_time": ("y", [1, 2])} + vis006 = xr.DataArray(np.array([[1, 2], [3, 4]]), dims=("y", "x"), - coords={"y": y_visir, "x": x_visir, "acq_time": ("y", time_vis006)}, - attrs={ - "name": "image0", "id_tag": "ch_r06", - "coordinates": "lat lon", "resolution": 1000, "calibration": "reflectance", - "wavelength": WavelengthRange(min=0.58, central=0.63, max=0.68, unit="µm"), - "orbital_parameters": { - "projection_longitude": 1, - "projection_latitude": 1, - 
"projection_altitude": 1, - "satellite_nominal_longitude": 1, - "satellite_nominal_latitude": 1, - "satellite_actual_longitude": 1, - "satellite_actual_latitude": 1, - "satellite_actual_altitude": 1, - "nadir_longitude": 1, - "nadir_latitude": 1, - "only_in_1": False - } - }) - - ir_108 = xr.DataArray(data_visir, + coords=coords, + attrs=attrs) + return vis006 + + +@pytest.fixture(scope="session") +def ir_108(xy_coords): + """Get IR_108 dataset.""" + x, y = xy_coords + coords = {"y": y, "x": x, "acq_time": ("y", [1, 2])} + attrs = {"name": "image1", "id_tag": "ch_tb11", "coordinates": "lat lon"} + ir_108 = xr.DataArray(np.array([[1, 2], [3, 4]]), dims=("y", "x"), - coords={"y": y_visir, "x": x_visir, "acq_time": ("y", time_vis006)}, - attrs={"name": "image1", "id_tag": "ch_tb11", "coordinates": "lat lon"}) + coords=coords, + attrs=attrs) + return ir_108 + + +@pytest.fixture(scope="session") +def qual_flags(xy_coords): + """Get quality flags.""" + qual_data = [[1, 2, 3, 4, 5, 6, 7], + [1, 2, 3, 4, 5, 6, 7]] + x, y = xy_coords + z = [1, 2, 3, 4, 5, 6, 7] + coords = {"y": y, "z": z, "acq_time": ("y", [1, 2])} qual_f = xr.DataArray(qual_data, dims=("y", "z"), - coords={"y": y_visir, "z": z_visir, "acq_time": ("y", time_vis006)}, - attrs={ - "name": "qual_flags", - "id_tag": "qual_flags" - }) - lat = xr.DataArray(lat, - dims=("y", "x"), - coords={"y": y_visir, "x": x_visir}, - attrs={ - "name": "lat", - "standard_name": "latitude", - "modifiers": np.array([]) - }) - lon = xr.DataArray(lon, - dims=("y", "x"), - coords={"y": y_visir, "x": x_visir}, - attrs={ - "name": "lon", - "standard_name": "longitude", - "modifiers": np.array([]) - }) - - # for prefix testing - prefix_data = xr.DataArray(data_visir, + coords=coords, + attrs={"name": "qual_flags", + "id_tag": "qual_flags"}) + return qual_f + + +@pytest.fixture(scope="session") +def lonlats(xy_coords): + """Get longitudes and latitudes.""" + x, y = xy_coords + lat = 33.0 * np.array([[1, 2], [3, 4]]) + lon = -13.0 * 
np.array([[1, 2], [3, 4]]) + attrs = {"name": "lat", + "standard_name": "latitude", + "modifiers": np.array([])} + dims = ("y", "x") + coords = {"y": y, "x": x} + lat = xr.DataArray(lat, dims=dims, coords=coords, attrs=attrs) + lon = xr.DataArray(lon, dims=dims, coords=coords, attrs=attrs) + return lon, lat + + +@pytest.fixture(scope="session") +def prefix_data(xy_coords, area): + """Get dataset whose name should be prefixed.""" + x, y = xy_coords + attrs = {"name": "1", + "id_tag": "ch_r06", + "coordinates": "lat lon", + "resolution": 1000, + "calibration": "reflectance", + "wavelength": WavelengthRange(min=0.58, central=0.63, max=0.68, unit="µm"), + "area": area} + prefix_data = xr.DataArray(np.array([[1, 2], [3, 4]]), dims=("y", "x"), - coords={"y": y_visir, "x": x_visir}, - attrs={ - "name": "1", "id_tag": "ch_r06", - "coordinates": "lat lon", "resolution": 1000, "calibration": "reflectance", - "wavelength": WavelengthRange(min=0.58, central=0.63, max=0.68, unit="µm"), - "area": area - }) - - # for swath testing + coords={"y": y, "x": x}, + attrs=attrs) + return prefix_data + + +@pytest.fixture(scope="session") +def swath_data(prefix_data, lonlats): + """Get swath data.""" + lon, lat = lonlats area = SwathDefinition(lons=lon, lats=lat) swath_data = prefix_data.copy() swath_data.attrs.update({"name": "swath_data", "area": area}) + return swath_data + + +@pytest.fixture(scope="session") +def datasets(vis006, ir_108, qual_flags, lonlats, prefix_data, swath_data): + """Get datasets belonging to the scene.""" + lon, lat = lonlats + return {"image0": vis006, + "image1": ir_108, + "swath_data": swath_data, + "1": prefix_data, + "lat": lat, + "lon": lon, + "qual_flags": qual_flags} + +@pytest.fixture(scope="session") +def cf_scene(datasets, common_attrs): + """Create a cf scene.""" scene = Scene() scene.attrs["sensor"] = ["avhrr-1", "avhrr-2", "avhrr-3"] - scene_dict = { - "image0": vis006, - "image1": ir_108, - "swath_data": swath_data, - "1": prefix_data, - "lat": 
lat, - "lon": lon, - "qual_flags": qual_f - } - - for key in scene_dict: - scene[key] = scene_dict[key] + for key in datasets: + scene[key] = datasets[key] if key != "swath_data": scene[key].attrs.update(common_attrs) return scene @@ -216,7 +265,7 @@ def cf_scene(): @pytest.fixture() def nc_filename(tmp_path): """Create an nc filename for viirs m band.""" - now = datetime.utcnow() + now = dt.datetime.utcnow() filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-mband-20201007075915-20201007080744.nc" return str(tmp_path / filename) @@ -224,7 +273,7 @@ def nc_filename(tmp_path): @pytest.fixture() def nc_filename_i(tmp_path): """Create an nc filename for viirs i band.""" - now = datetime.utcnow() + now = dt.datetime.utcnow() filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-iband-20201007075915-20201007080744.nc" return str(tmp_path / filename) @@ -394,18 +443,25 @@ def test_read_prefixed_channels_by_user_no_prefix(self, cf_scene, nc_filename): np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loded as coord - def test_orbital_parameters(self, cf_scene, nc_filename): - """Test that the orbital parameters in attributes are handled correctly.""" + def test_decoding_of_dict_type_attributes(self, cf_scene, nc_filename): + """Test decoding of dict type attributes.""" cf_scene.save_datasets(writer="cf", filename=nc_filename) scn_ = Scene(reader="satpy_cf_nc", filenames=[nc_filename]) scn_.load(["image0"]) - orig_attrs = cf_scene["image0"].attrs["orbital_parameters"] - new_attrs = scn_["image0"].attrs["orbital_parameters"] - assert isinstance(new_attrs, dict) - for key in orig_attrs: - assert orig_attrs[key] == new_attrs[key] + for attr_name in ["orbital_parameters", "time_parameters"]: + orig_attrs = cf_scene["image0"].attrs[attr_name] + new_attrs = scn_["image0"].attrs[attr_name] + assert new_attrs == orig_attrs + + def test_decoding_of_timestamps(self, cf_scene, 
nc_filename): + """Test decoding of timestamps.""" + cf_scene.save_datasets(writer="cf", filename=nc_filename) + scn = Scene(reader="satpy_cf_nc", filenames=[nc_filename]) + scn.load(["image0"]) + expected = cf_scene["image0"].attrs["my_timestamp"] + assert scn["image0"].attrs["my_timestamp"] == expected def test_write_and_read_from_two_files(self, nc_filename, nc_filename_i): """Save two datasets with different resolution and read the solar_zenith_angle again.""" diff --git a/satpy/tests/reader_tests/test_scmi.py b/satpy/tests/reader_tests/test_scmi.py index 13c74a7d5c..12fbb7dc2a 100644 --- a/satpy/tests/reader_tests/test_scmi.py +++ b/satpy/tests/reader_tests/test_scmi.py @@ -103,11 +103,11 @@ def setUp(self, xr_): def test_basic_attributes(self): """Test getting basic file attributes.""" - from datetime import datetime + import datetime as dt from satpy.tests.utils import make_dataid - assert self.reader.start_time == datetime(2017, 7, 29, 12, 0, 0, 0) - assert self.reader.end_time == datetime(2017, 7, 29, 12, 0, 0, 0) + assert self.reader.start_time == dt.datetime(2017, 7, 29, 12, 0, 0, 0) + assert self.reader.end_time == dt.datetime(2017, 7, 29, 12, 0, 0, 0) assert self.reader.get_shape(make_dataid(name="C05"), {}) == (2, 5) def test_data_load(self): diff --git a/satpy/tests/reader_tests/test_seadas_l2.py b/satpy/tests/reader_tests/test_seadas_l2.py index d3037e6b55..8343abbef2 100644 --- a/satpy/tests/reader_tests/test_seadas_l2.py +++ b/satpy/tests/reader_tests/test_seadas_l2.py @@ -20,7 +20,7 @@ import numpy as np import pytest from pyresample.geometry import SwathDefinition -from pytest_lazyfixture import lazy_fixture +from pytest_lazy_fixtures import lf as lazy_fixture from satpy import Scene, available_readers diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index a07bb799bc..f705796521 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ 
-17,8 +17,8 @@ # satpy. If not, see . """Test the MSG common (native and hrit format) functionionalities.""" +import datetime as dt import unittest -from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -117,18 +117,18 @@ def test_pad_data_vertically_bad_shape(self): def observation_start_time(self): """Get scan start timestamp for testing.""" - return datetime(2023, 3, 20, 15, 0, 10, 691000) + return dt.datetime(2023, 3, 20, 15, 0, 10, 691000) def observation_end_time(self): """Get scan end timestamp for testing.""" - return datetime(2023, 3, 20, 15, 12, 43, 843000) + return dt.datetime(2023, 3, 20, 15, 12, 43, 843000) def test_round_nom_time(self): """Test the rouding of start/end_time.""" - assert round_nom_time(dt=self.observation_start_time(), - time_delta=timedelta(minutes=15)) == datetime(2023, 3, 20, 15, 0) - assert round_nom_time(dt=self.observation_end_time(), - time_delta=timedelta(minutes=15)) == datetime(2023, 3, 20, 15, 15) + assert round_nom_time(date=self.observation_start_time(), + time_delta=dt.timedelta(minutes=15)) == dt.datetime(2023, 3, 20, 15, 0) + assert round_nom_time(date=self.observation_end_time(), + time_delta=dt.timedelta(minutes=15)) == dt.datetime(2023, 3, 20, 15, 15) @staticmethod def test_pad_data_horizontally(): @@ -177,13 +177,13 @@ def test_get_padding_area_int(): ORBIT_POLYNOMIALS = { "StartTime": np.array([ [ - datetime(2006, 1, 1, 6), datetime(2006, 1, 1, 12), - datetime(2006, 1, 1, 18), datetime(1958, 1, 1, 0)] + dt.datetime(2006, 1, 1, 6), dt.datetime(2006, 1, 1, 12), + dt.datetime(2006, 1, 1, 18), dt.datetime(1958, 1, 1, 0)] ]), "EndTime": np.array([ [ - datetime(2006, 1, 1, 12), datetime(2006, 1, 1, 18), - datetime(2006, 1, 2, 0), datetime(1958, 1, 1, 0) + dt.datetime(2006, 1, 1, 12), dt.datetime(2006, 1, 1, 18), + dt.datetime(2006, 1, 2, 0), dt.datetime(1958, 1, 1, 0) ] ]), "X": [np.zeros(8), @@ -212,18 +212,18 @@ def test_get_padding_area_int(): # 01-03: Overlap (10:00 - 13:00) 
"StartTime": np.array([ [ - datetime(2005, 12, 31, 10), datetime(2005, 12, 31, 12), - datetime(2006, 1, 1, 10), datetime(2006, 1, 1, 13), - datetime(2006, 1, 2, 0), datetime(2006, 1, 2, 18), - datetime(2006, 1, 3, 6), datetime(2006, 1, 3, 10), + dt.datetime(2005, 12, 31, 10), dt.datetime(2005, 12, 31, 12), + dt.datetime(2006, 1, 1, 10), dt.datetime(2006, 1, 1, 13), + dt.datetime(2006, 1, 2, 0), dt.datetime(2006, 1, 2, 18), + dt.datetime(2006, 1, 3, 6), dt.datetime(2006, 1, 3, 10), ] ]), "EndTime": np.array([ [ - datetime(2005, 12, 31, 12), datetime(2005, 12, 31, 18), - datetime(2006, 1, 1, 12), datetime(2006, 1, 1, 18), - datetime(2006, 1, 2, 4), datetime(2006, 1, 2, 22), - datetime(2006, 1, 3, 13), datetime(2006, 1, 3, 18), + dt.datetime(2005, 12, 31, 12), dt.datetime(2005, 12, 31, 18), + dt.datetime(2006, 1, 1, 12), dt.datetime(2006, 1, 1, 18), + dt.datetime(2006, 1, 2, 4), dt.datetime(2006, 1, 2, 22), + dt.datetime(2006, 1, 3, 13), dt.datetime(2006, 1, 3, 18), ] ]), "X": [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0], @@ -233,12 +233,12 @@ def test_get_padding_area_int(): ORBIT_POLYNOMIALS_INVALID = { "StartTime": np.array([ [ - datetime(1958, 1, 1), datetime(1958, 1, 1) + dt.datetime(1958, 1, 1), dt.datetime(1958, 1, 1) ] ]), "EndTime": np.array([ [ - datetime(1958, 1, 1), datetime(1958, 1, 1) + dt.datetime(1958, 1, 1), dt.datetime(1958, 1, 1) ] ]), "X": [1, 2], @@ -254,8 +254,8 @@ class TestSatellitePosition: def orbit_polynomial(self): """Get an orbit polynomial for testing.""" return OrbitPolynomial( - start_time=datetime(2006, 1, 1, 12), - end_time=datetime(2006, 1, 1, 18), + start_time=dt.datetime(2006, 1, 1, 12), + end_time=dt.datetime(2006, 1, 1, 18), coefs=( np.array([8.41607082e+04, 2.94319260e+00, 9.86748617e-01, -2.70135453e-01, -3.84364650e-02, 8.48718433e-03, @@ -272,7 +272,7 @@ def orbit_polynomial(self): @pytest.fixture() def time(self): """Get scan timestamp for testing.""" - return datetime(2006, 1, 1, 12, 15, 9, 304888) + return dt.datetime(2006, 1, 
1, 12, 15, 9, 304888) def test_eval_polynomial(self, orbit_polynomial, time): """Test getting the position in cartesian coordinates.""" @@ -305,7 +305,7 @@ class TestOrbitPolynomialFinder: # Contiguous validity intervals (that's the norm) ( ORBIT_POLYNOMIALS_SYNTH, - datetime(2005, 12, 31, 12, 15), + dt.datetime(2005, 12, 31, 12, 15), OrbitPolynomial( coefs=(2.0, 2.1, 2.2), start_time=np.datetime64("2005-12-31 12:00"), @@ -316,7 +316,7 @@ class TestOrbitPolynomialFinder: # not too far away ( ORBIT_POLYNOMIALS_SYNTH, - datetime(2006, 1, 1, 12, 15), + dt.datetime(2006, 1, 1, 12, 15), OrbitPolynomial( coefs=(3.0, 3.1, 3.2), start_time=np.datetime64("2006-01-01 10:00"), @@ -326,7 +326,7 @@ class TestOrbitPolynomialFinder: # Overlapping intervals ( ORBIT_POLYNOMIALS_SYNTH, - datetime(2006, 1, 3, 12, 15), + dt.datetime(2006, 1, 3, 12, 15), OrbitPolynomial( coefs=(8.0, 8.1, 8.2), start_time=np.datetime64("2006-01-03 10:00"), @@ -351,9 +351,9 @@ def test_get_orbit_polynomial(self, orbit_polynomials, time, [ # No interval enclosing the given timestamp and closest interval # too far away - (ORBIT_POLYNOMIALS_SYNTH, datetime(2006, 1, 2, 12, 15)), + (ORBIT_POLYNOMIALS_SYNTH, dt.datetime(2006, 1, 2, 12, 15)), # No valid polynomials at all - (ORBIT_POLYNOMIALS_INVALID, datetime(2006, 1, 1, 12, 15)) + (ORBIT_POLYNOMIALS_INVALID, dt.datetime(2006, 1, 1, 12, 15)) ] ) def test_get_orbit_polynomial_exceptions(self, orbit_polynomials, time): @@ -378,14 +378,14 @@ def test_get_meirink_slope_epoch(self, platform_id, channel_name): assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS["2023"][platform_id][channel_name][0]/1000. 
@pytest.mark.parametrize(("platform_id", "time", "expected"), [ - (321, datetime(2005, 1, 18, 0, 0), [0.0250354716, 0.0315626684, 0.022880986]), - (321, datetime(2010, 12, 31, 0, 0), [0.0258479563, 0.0322386887, 0.022895110500000003]), - (322, datetime(2010, 1, 18, 0, 0), [0.021964051999999998, 0.027548445, 0.021576766]), - (322, datetime(2015, 6, 1, 0, 0), [0.022465028, 0.027908105, 0.021674373999999996]), - (323, datetime(2005, 1, 18, 0, 0), [0.0209088464, 0.0265355228, 0.0230132616]), - (323, datetime(2010, 12, 31, 0, 0), [0.022181355200000002, 0.0280103379, 0.0229511138]), - (324, datetime(2010, 1, 18, 0, 0), [0.0218362, 0.027580748, 0.022285370999999998]), - (324, datetime(2015, 6, 1, 0, 0), [0.0225418, 0.028530172, 0.022248718999999997]), + (321, dt.datetime(2005, 1, 18, 0, 0), [0.0250354716, 0.0315626684, 0.022880986]), + (321, dt.datetime(2010, 12, 31, 0, 0), [0.0258479563, 0.0322386887, 0.022895110500000003]), + (322, dt.datetime(2010, 1, 18, 0, 0), [0.021964051999999998, 0.027548445, 0.021576766]), + (322, dt.datetime(2015, 6, 1, 0, 0), [0.022465028, 0.027908105, 0.021674373999999996]), + (323, dt.datetime(2005, 1, 18, 0, 0), [0.0209088464, 0.0265355228, 0.0230132616]), + (323, dt.datetime(2010, 12, 31, 0, 0), [0.022181355200000002, 0.0280103379, 0.0229511138]), + (324, dt.datetime(2010, 1, 18, 0, 0), [0.0218362, 0.027580748, 0.022285370999999998]), + (324, dt.datetime(2015, 6, 1, 0, 0), [0.0225418, 0.028530172, 0.022248718999999997]), ]) def test_get_meirink_slope_2020(self, platform_id, time, expected): """Test the value of the slope of the Meirink calibration.""" diff --git a/satpy/tests/reader_tests/test_seviri_l1b_calibration.py b/satpy/tests/reader_tests/test_seviri_l1b_calibration.py index e6c2cdcf16..8eaf2b83da 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_calibration.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_calibration.py @@ -17,8 +17,8 @@ # satpy. If not, see . 
"""Unittesting the native msg reader.""" +import datetime as dt import unittest -from datetime import datetime import numpy as np import pytest @@ -110,7 +110,7 @@ def setUp(self): """Set up the SEVIRI Calibration algorithm for testing.""" self.algo = SEVIRICalibrationAlgorithm( platform_id=PLATFORM_ID, - scan_time=datetime(2020, 8, 15, 13, 0, 40) + scan_time=dt.datetime(2020, 8, 15, 13, 0, 40) ) def test_convert_to_radiance(self): @@ -212,7 +212,7 @@ class TestFileHandlerCalibrationBase: gains_gsics = [0, 0, 0, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 0] offsets_gsics = [0, 0, 0, -0.4, -0.5, -0.6, -0.7, -0.8, -0.9, -1.0, -1.1, 0] radiance_types = 2 * np.ones(12) - scan_time = datetime(2020, 1, 1) + scan_time = dt.datetime(2020, 1, 1) external_coefs = { "VIS006": {"gain": 10, "offset": -10}, "IR_108": {"gain": 20, "offset": -20}, diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py index 3fe00edc80..66dc4ed0fa 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py @@ -15,10 +15,11 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """The HRIT msg reader tests package.""" +import datetime as dt import unittest -from datetime import datetime from unittest import mock import numpy as np @@ -31,7 +32,7 @@ from satpy.readers.seviri_l1b_hrit import HRITMSGEpilogueFileHandler, HRITMSGFileHandler, HRITMSGPrologueFileHandler from satpy.tests.reader_tests.test_seviri_base import ORBIT_POLYNOMIALS_INVALID from satpy.tests.reader_tests.test_seviri_l1b_calibration import TestFileHandlerCalibrationBase -from satpy.tests.utils import assert_attrs_equal, make_dataid +from satpy.tests.utils import RANDOM_GEN, assert_attrs_equal, make_dataid class TestHRITMSGBase(unittest.TestCase): @@ -47,7 +48,7 @@ class TestHRITMSGFileHandlerHRV(TestHRITMSGBase): def setUp(self): """Set up the hrit file handler for testing HRV.""" - self.observation_start_time = datetime(2006, 1, 1, 12, 15, 9, 304888) + self.observation_start_time = dt.datetime(2006, 1, 1, 12, 15, 9, 304888) self.nlines = 464 self.reader = setup.get_fake_file_handler( observation_start_time=self.observation_start_time, @@ -64,9 +65,9 @@ def setUp(self): def test_read_hrv_band(self, memmap): """Test reading the hrv band.""" nbits = self.reader.mda["number_of_bits_per_pixel"] - memmap.return_value = np.random.randint(0, 256, - size=int((464 * 5568 * nbits) / 8), - dtype=np.uint8) + memmap.return_value = RANDOM_GEN.integers(0, 256, + size=int((464 * 5568 * nbits) / 8), + dtype=np.uint8) res = self.reader.read_band("HRV", None) assert res.shape == (464, 5568) @@ -139,7 +140,7 @@ class TestHRITMSGFileHandler(TestHRITMSGBase): def setUp(self): """Set up the hrit file handler for testing.""" - self.observation_start_time = datetime(2006, 1, 1, 12, 15, 9, 304888) + self.observation_start_time = dt.datetime(2006, 1, 1, 12, 15, 9, 304888) self.nlines = 464 self.ncols = 3712 self.projection_longitude = 9.5 @@ -181,9 +182,9 @@ def test_get_area_def(self): def test_read_band(self, memmap): """Test reading a band.""" nbits = self.reader.mda["number_of_bits_per_pixel"] 
- memmap.return_value = np.random.randint(0, 256, - size=int((464 * 3712 * nbits) / 8), - dtype=np.uint8) + memmap.return_value = RANDOM_GEN.integers(0, 256, + size=int((464 * 3712 * nbits) / 8), + dtype=np.uint8) res = self.reader.read_band("VIS006", None) assert res.shape == (464, 3712) @@ -214,13 +215,13 @@ def test_get_dataset(self, calibrate, parent_get_dataset): setup.get_attrs_exp(self.projection_longitude) ) # testing start/end time - assert datetime(2006, 1, 1, 12, 15, 9, 304888) == self.reader.observation_start_time - assert datetime(2006, 1, 1, 12, 15) == self.reader.start_time + assert dt.datetime(2006, 1, 1, 12, 15, 9, 304888) == self.reader.observation_start_time + assert dt.datetime(2006, 1, 1, 12, 15) == self.reader.start_time assert self.reader.start_time == self.reader.nominal_start_time - assert datetime(2006, 1, 1, 12, 27, 39) == self.reader.observation_end_time + assert dt.datetime(2006, 1, 1, 12, 27, 39) == self.reader.observation_end_time assert self.reader.end_time == self.reader.nominal_end_time - assert datetime(2006, 1, 1, 12, 30) == self.reader.end_time + assert dt.datetime(2006, 1, 1, 12, 30) == self.reader.end_time # test repeat cycle duration assert 15 == self.reader._repeat_cycle_duration # Change the reducescan scenario to test the repeat cycle duration handling @@ -292,7 +293,7 @@ class TestHRITMSGPrologueFileHandler(unittest.TestCase): def setUp(self, *mocks): """Set up the test case.""" fh = setup.get_fake_file_handler( - observation_start_time=datetime(2016, 3, 3, 0, 0), + observation_start_time=dt.datetime(2016, 3, 3, 0, 0), nlines=464, ncols=3712, ) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py index a885a5becc..21c42c0281 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py @@ -15,9 +15,10 @@ # # You should have received a copy of the GNU General Public License along with # 
satpy. If not, see . + """Setup for SEVIRI HRIT reader tests.""" -from datetime import datetime +import datetime as dt from unittest import mock import numpy as np @@ -126,8 +127,8 @@ def get_fake_prologue(projection_longitude, orbit_polynomials): }, "ImageAcquisition": { "PlannedAcquisitionTime": { - "TrueRepeatCycleStart": datetime(2006, 1, 1, 12, 15, 9, 304888), - "PlannedRepeatCycleEnd": datetime(2006, 1, 1, 12, 30, 0, 0) + "TrueRepeatCycleStart": dt.datetime(2006, 1, 1, 12, 15, 9, 304888), + "PlannedRepeatCycleEnd": dt.datetime(2006, 1, 1, 12, 30, 0, 0) } } } @@ -149,8 +150,8 @@ def get_fake_epilogue(): }, "ActualScanningSummary": { "ReducedScan": 0, - "ForwardScanStart": datetime(2006, 1, 1, 12, 15, 9, 304888), - "ForwardScanEnd": datetime(2006, 1, 1, 12, 27, 39, 0) + "ForwardScanStart": dt.datetime(2006, 1, 1, 12, 15, 9, 304888), + "ForwardScanEnd": dt.datetime(2006, 1, 1, 12, 27, 39, 0) } } } @@ -198,7 +199,7 @@ def get_fake_dataset_info(): def get_acq_time_cds(start_time, nlines): """Get fake scanline acquisition times.""" - days_since_1958 = (start_time - datetime(1958, 1, 1)).days + days_since_1958 = (start_time - dt.datetime(1958, 1, 1)).days tline = np.zeros( nlines, dtype=[("days", ">u2"), ("milliseconds", ">u4")] @@ -238,12 +239,12 @@ def get_attrs_exp(projection_longitude=0.0): "satellite_actual_latitude": -0.5711243456528018, "satellite_actual_altitude": 35783296.150123544}, "georef_offset_corrected": True, - "nominal_start_time": (datetime(2006, 1, 1, 12, 15),), - "nominal_end_time": (datetime(2006, 1, 1, 12, 30),), + "nominal_start_time": dt.datetime(2006, 1, 1, 12, 15), + "nominal_end_time": dt.datetime(2006, 1, 1, 12, 30), "time_parameters": { - "nominal_start_time": datetime(2006, 1, 1, 12, 15), - "nominal_end_time": datetime(2006, 1, 1, 12, 30), - "observation_start_time": datetime(2006, 1, 1, 12, 15, 9, 304888), - "observation_end_time": datetime(2006, 1, 1, 12, 27, 39, 0) + "nominal_start_time": dt.datetime(2006, 1, 1, 12, 15), + 
"nominal_end_time": dt.datetime(2006, 1, 1, 12, 30), + "observation_start_time": dt.datetime(2006, 1, 1, 12, 15, 9, 304888), + "observation_end_time": dt.datetime(2006, 1, 1, 12, 27, 39, 0) } } diff --git a/satpy/tests/reader_tests/test_seviri_l1b_icare.py b/satpy/tests/reader_tests/test_seviri_l1b_icare.py index 7c32001168..f75d22d385 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_icare.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_icare.py @@ -124,7 +124,7 @@ def test_init(self): def test_load_dataset_vis(self): """Test loading all datasets from a full swath file.""" - from datetime import datetime + import datetime as dt r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf" @@ -133,8 +133,8 @@ def test_load_dataset_vis(self): datasets = r.load(["VIS008"]) assert len(datasets) == 1 for v in datasets.values(): - dt = datetime(2004, 12, 29, 12, 27, 44) - assert v.attrs["end_time"] == dt + date = dt.datetime(2004, 12, 29, 12, 27, 44) + assert v.attrs["end_time"] == date assert v.attrs["calibration"] == "reflectance" def test_load_dataset_ir(self): diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py index 6382517b55..8f4e46e2fb 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_native.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py @@ -19,10 +19,10 @@ from __future__ import annotations +import datetime as dt import os import unittest import warnings -from datetime import datetime from unittest import mock import dask.array as da @@ -889,8 +889,8 @@ def file_handler(self): "15TRAILER": { "ImageProductionStats": { "ActualScanningSummary": { - "ForwardScanStart": datetime(2006, 1, 1, 12, 15, 9, 304888), - "ForwardScanEnd": datetime(2006, 1, 1, 12, 27, 9, 304888), + "ForwardScanStart": dt.datetime(2006, 1, 1, 12, 15, 9, 304888), + "ForwardScanEnd": dt.datetime(2006, 1, 1, 12, 27, 9, 304888), 
"ReducedScan": 0 } } @@ -941,8 +941,8 @@ def _fake_header(): }, "ImageAcquisition": { "PlannedAcquisitionTime": { - "TrueRepeatCycleStart": datetime(2006, 1, 1, 12, 15, 0, 0), - "PlannedRepeatCycleEnd": datetime(2006, 1, 1, 12, 30, 0, 0), + "TrueRepeatCycleStart": dt.datetime(2006, 1, 1, 12, 15, 0, 0), + "PlannedRepeatCycleEnd": dt.datetime(2006, 1, 1, 12, 30, 0, 0), } } }, @@ -993,19 +993,19 @@ def test_get_dataset(self, file_handler): expected = self._exp_data_array() xr.testing.assert_equal(xarr, expected) assert "raw_metadata" not in xarr.attrs - assert file_handler.start_time == datetime(2006, 1, 1, 12, 15, 0) - assert file_handler.end_time == datetime(2006, 1, 1, 12, 30, 0) + assert file_handler.start_time == dt.datetime(2006, 1, 1, 12, 15, 0) + assert file_handler.end_time == dt.datetime(2006, 1, 1, 12, 30, 0) assert_attrs_equal(xarr.attrs, expected.attrs, tolerance=1e-4) def test_time(self, file_handler): """Test start/end nominal/observation time handling.""" - assert datetime(2006, 1, 1, 12, 15, 9, 304888) == file_handler.observation_start_time - assert datetime(2006, 1, 1, 12, 15,) == file_handler.start_time + assert dt.datetime(2006, 1, 1, 12, 15, 9, 304888) == file_handler.observation_start_time + assert dt.datetime(2006, 1, 1, 12, 15,) == file_handler.start_time assert file_handler.start_time == file_handler.nominal_start_time - assert datetime(2006, 1, 1, 12, 27, 9, 304888) == file_handler.observation_end_time + assert dt.datetime(2006, 1, 1, 12, 27, 9, 304888) == file_handler.observation_end_time assert file_handler.end_time == file_handler.nominal_end_time - assert datetime(2006, 1, 1, 12, 30,) == file_handler.end_time + assert dt.datetime(2006, 1, 1, 12, 30,) == file_handler.end_time def test_repeat_cycle_duration(self, file_handler): """Test repeat cycle handling for FD or ReduscedScan.""" @@ -1035,10 +1035,10 @@ def _exp_data_array(): "projection_altitude": 35785831.0 }, "time_parameters": { - "nominal_start_time": datetime(2006, 1, 1, 12, 15, 
0), - "nominal_end_time": datetime(2006, 1, 1, 12, 30, 0), - "observation_start_time": datetime(2006, 1, 1, 12, 15, 9, 304888), - "observation_end_time": datetime(2006, 1, 1, 12, 27, 9, 304888), + "nominal_start_time": dt.datetime(2006, 1, 1, 12, 15, 0), + "nominal_end_time": dt.datetime(2006, 1, 1, 12, 30, 0), + "observation_start_time": dt.datetime(2006, 1, 1, 12, 15, 9, 304888), + "observation_end_time": dt.datetime(2006, 1, 1, 12, 27, 9, 304888), }, "georef_offset_corrected": True, "platform_name": "MSG-3", diff --git a/satpy/tests/reader_tests/test_seviri_l1b_nc.py b/satpy/tests/reader_tests/test_seviri_l1b_nc.py index cd5e2c713f..d77933b9a0 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_nc.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_nc.py @@ -17,7 +17,7 @@ # satpy. If not, see . """The HRIT msg reader tests package.""" -from datetime import datetime +import datetime as dt from unittest import mock import numpy as np @@ -34,7 +34,7 @@ def to_cds_time(time): """Convert datetime to (days, msecs) since 1958-01-01.""" - if isinstance(time, datetime): + if isinstance(time, dt.datetime): time = np.datetime64(time) t0 = np.datetime64("1958-01-01 00:00") delta = time - t0 @@ -62,13 +62,13 @@ def _get_fake_dataset(self, counts, h5netcdf): line_validity = np.repeat([3, 3], 11).reshape(2, 11) line_geom_radio_quality = np.repeat([4, 4], 11).reshape(2, 11) orbit_poly_start_day, orbit_poly_start_msec = to_cds_time( - np.array([datetime(2019, 12, 31, 18), - datetime(2019, 12, 31, 22)], + np.array([dt.datetime(2019, 12, 31, 18), + dt.datetime(2019, 12, 31, 22)], dtype="datetime64") ) orbit_poly_end_day, orbit_poly_end_msec = to_cds_time( - np.array([datetime(2019, 12, 31, 22), - datetime(2020, 1, 1, 2)], + np.array([dt.datetime(2019, 12, 31, 22), + dt.datetime(2020, 1, 1, 2)], dtype="datetime64") ) counts = counts.rename({ @@ -325,10 +325,10 @@ def test_get_dataset(self, file_handler, channel, calibration, mask_bad_quality_ "projection_altitude": 35785831.0 }, 
"time_parameters": { - "nominal_start_time": datetime(2020, 1, 1, 0, 0), - "nominal_end_time": datetime(2020, 1, 1, 0, 0), - "observation_start_time": datetime(2020, 1, 1, 0, 0), - "observation_end_time": datetime(2020, 1, 1, 0, 0), + "nominal_start_time": dt.datetime(2020, 1, 1, 0, 0), + "nominal_end_time": dt.datetime(2020, 1, 1, 0, 0), + "observation_start_time": dt.datetime(2020, 1, 1, 0, 0), + "observation_end_time": dt.datetime(2020, 1, 1, 0, 0), }, "georef_offset_corrected": True, "platform_name": "Meteosat-11", @@ -352,13 +352,13 @@ def test_get_dataset(self, file_handler, channel, calibration, mask_bad_quality_ def test_time(self, file_handler): """Test start/end nominal/observation time handling.""" - assert datetime(2020, 1, 1, 0, 0) == file_handler.observation_start_time - assert datetime(2020, 1, 1, 0, 0) == file_handler.start_time + assert dt.datetime(2020, 1, 1, 0, 0) == file_handler.observation_start_time + assert dt.datetime(2020, 1, 1, 0, 0) == file_handler.start_time assert file_handler.start_time == file_handler.nominal_start_time - assert datetime(2020, 1, 1, 0, 0) == file_handler.observation_end_time + assert dt.datetime(2020, 1, 1, 0, 0) == file_handler.observation_end_time assert file_handler.end_time == file_handler.nominal_end_time - assert datetime(2020, 1, 1, 0, 0) == file_handler.end_time + assert dt.datetime(2020, 1, 1, 0, 0) == file_handler.end_time def test_repeat_cycle_duration(self, file_handler): """Test repeat cycle handling for FD or ReduscedScan.""" diff --git a/satpy/tests/reader_tests/test_seviri_l2_bufr.py b/satpy/tests/reader_tests/test_seviri_l2_bufr.py index ec3fdf7b56..5696cbbef0 100644 --- a/satpy/tests/reader_tests/test_seviri_l2_bufr.py +++ b/satpy/tests/reader_tests/test_seviri_l2_bufr.py @@ -17,9 +17,9 @@ # satpy. If not, see . 
"""Unittesting the SEVIRI L2 BUFR reader.""" +import datetime as dt import sys import unittest -from datetime import datetime from unittest import mock import dask.array as da @@ -27,7 +27,7 @@ import pytest from pyresample import geometry -from satpy.tests.utils import make_dataid +from satpy.tests.utils import RANDOM_GEN, make_dataid FILETYPE_INFO = {"file_type": "seviri_l2_bufr_asr"} @@ -37,7 +37,7 @@ "spacecraft": "MSG2", "server": "TESTSERVER"} MPEF_PRODUCT_HEADER = { - "NominalTime": datetime(2019, 11, 6, 18, 0), + "NominalTime": dt.datetime(2019, 11, 6, 18, 0), "SpacecraftName": "09", "RectificationLongitude": "E0455" } @@ -109,9 +109,9 @@ ] # Test data -DATA = np.random.uniform(low=250, high=350, size=(128,)) -LAT = np.random.uniform(low=-80, high=80, size=(128,)) -LON = np.random.uniform(low=-38.5, high=121.5, size=(128,)) +DATA = RANDOM_GEN.uniform(low=250, high=350, size=(128,)) +LAT = RANDOM_GEN.uniform(low=-80, high=80, size=(128,)) +LON = RANDOM_GEN.uniform(low=-38.5, high=121.5, size=(128,)) class SeviriL2BufrData: diff --git a/satpy/tests/reader_tests/test_sgli_l1b.py b/satpy/tests/reader_tests/test_sgli_l1b.py index 7f5fffa70c..d959f810a7 100644 --- a/satpy/tests/reader_tests/test_sgli_l1b.py +++ b/satpy/tests/reader_tests/test_sgli_l1b.py @@ -1,6 +1,7 @@ """Tests for the SGLI L1B backend.""" + +import datetime as dt import sys -from datetime import datetime, timedelta import dask import h5py @@ -8,14 +9,15 @@ import pytest from satpy.readers.sgli_l1b import HDF5SGLI +from satpy.tests.utils import RANDOM_GEN -START_TIME = datetime.now() -END_TIME = START_TIME + timedelta(minutes=5) +START_TIME = dt.datetime.now() +END_TIME = START_TIME + dt.timedelta(minutes=5) FULL_KM_ARRAY = np.arange(1955 * 1250, dtype=np.uint16).reshape((1955, 1250)) MASK = 16383 LON_LAT_ARRAY = np.arange(197 * 126, dtype=np.float32).reshape((197, 126)) -AZI_ARRAY = np.random.randint(-180 * 100, 180 * 100, size=(197, 126), dtype=np.int16) -ZEN_ARRAY = np.random.randint(0, 180 * 
100, size=(197, 126), dtype=np.int16) +AZI_ARRAY = RANDOM_GEN.integers(-180 * 100, 180 * 100, size=(197, 126), dtype=np.int16) +ZEN_ARRAY = RANDOM_GEN.integers(0, 180 * 100, size=(197, 126), dtype=np.int16) @pytest.fixture(scope="module") @@ -168,14 +170,14 @@ def test_start_time(sgli_vn_file): """Test that the start time is extracted.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) microseconds = START_TIME.microsecond % 1000 - assert handler.start_time == START_TIME - timedelta(microseconds=microseconds) + assert handler.start_time == START_TIME - dt.timedelta(microseconds=microseconds) def test_end_time(sgli_vn_file): """Test that the end time is extracted.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) microseconds = END_TIME.microsecond % 1000 - assert handler.end_time == END_TIME - timedelta(microseconds=microseconds) + assert handler.end_time == END_TIME - dt.timedelta(microseconds=microseconds) def test_get_dataset_counts(sgli_vn_file): """Test that counts can be extracted from a file.""" diff --git a/satpy/tests/reader_tests/test_slstr_l1b.py b/satpy/tests/reader_tests/test_slstr_l1b.py index b6784d4e2b..becc1455b2 100644 --- a/satpy/tests/reader_tests/test_slstr_l1b.py +++ b/satpy/tests/reader_tests/test_slstr_l1b.py @@ -15,10 +15,12 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """Module for testing the satpy.readers.nc_slstr module.""" + +import datetime as dt import unittest import unittest.mock as mock -from datetime import datetime import numpy as np import pytest @@ -136,10 +138,10 @@ def test_instantiate(self, bvs_, xr_): bvs_.return_value = self.FakeSpl xr_.open_dataset.return_value = self.fake_dataset - good_start = datetime.strptime(self.start_time, - "%Y-%m-%dT%H:%M:%S.%fZ") - good_end = datetime.strptime(self.end_time, - "%Y-%m-%dT%H:%M:%S.%fZ") + good_start = dt.datetime.strptime(self.start_time, + "%Y-%m-%dT%H:%M:%S.%fZ") + good_end = dt.datetime.strptime(self.end_time, + "%Y-%m-%dT%H:%M:%S.%fZ") ds_id = make_dataid(name="foo", calibration="radiance", stripe="a", view="nadir") diff --git a/satpy/tests/reader_tests/test_smos_l2_wind.py b/satpy/tests/reader_tests/test_smos_l2_wind.py index 519030447b..409feb62ad 100644 --- a/satpy/tests/reader_tests/test_smos_l2_wind.py +++ b/satpy/tests/reader_tests/test_smos_l2_wind.py @@ -18,9 +18,9 @@ # Satpy. If not, see . 
"""Module for testing the satpy.readers.smos_l2_wind module.""" +import datetime as dt import os import unittest -from datetime import datetime from unittest import mock import numpy as np @@ -35,8 +35,8 @@ class FakeNetCDF4FileHandlerSMOSL2WIND(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" from xarray import DataArray - dt_s = filename_info.get("start_time", datetime(2020, 4, 22, 12, 0, 0)) - dt_e = filename_info.get("end_time", datetime(2020, 4, 22, 12, 0, 0)) + dt_s = filename_info.get("start_time", dt.datetime(2020, 4, 22, 12, 0, 0)) + dt_e = filename_info.get("end_time", dt.datetime(2020, 4, 22, 12, 0, 0)) if filetype_info["file_type"] == "smos_l2_wind": file_content = { diff --git a/satpy/tests/reader_tests/test_tropomi_l2.py b/satpy/tests/reader_tests/test_tropomi_l2.py index 7305bf365c..4bdf3f67d2 100644 --- a/satpy/tests/reader_tests/test_tropomi_l2.py +++ b/satpy/tests/reader_tests/test_tropomi_l2.py @@ -16,11 +16,12 @@ # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . 
+ """Module for testing the satpy.readers.tropomi_l2 module.""" +import datetime as dt import os import unittest -from datetime import datetime, timedelta from unittest import mock import numpy as np @@ -41,13 +42,13 @@ class FakeNetCDF4FileHandlerTL2(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" - dt_s = filename_info.get("start_time", datetime(2016, 1, 1, 12, 0, 0)) - dt_e = filename_info.get("end_time", datetime(2016, 1, 1, 12, 0, 0)) + dt_s = filename_info.get("start_time", dt.datetime(2016, 1, 1, 12, 0, 0)) + dt_e = filename_info.get("end_time", dt.datetime(2016, 1, 1, 12, 0, 0)) if filetype_info["file_type"] == "tropomi_l2": file_content = { - "/attr/time_coverage_start": (dt_s+timedelta(minutes=22)).strftime("%Y-%m-%dT%H:%M:%SZ"), - "/attr/time_coverage_end": (dt_e-timedelta(minutes=22)).strftime("%Y-%m-%dT%H:%M:%SZ"), + "/attr/time_coverage_start": (dt_s+dt.timedelta(minutes=22)).strftime("%Y-%m-%dT%H:%M:%SZ"), + "/attr/time_coverage_end": (dt_e-dt.timedelta(minutes=22)).strftime("%Y-%m-%dT%H:%M:%SZ"), "/attr/platform_shortname": "S5P", "/attr/sensor": "TROPOMI", } @@ -141,8 +142,8 @@ def test_load_no2(self): for d in ds.values(): assert d.attrs["platform_shortname"] == "S5P" assert d.attrs["sensor"] == "tropomi" - assert d.attrs["time_coverage_start"] == datetime(2018, 7, 9, 17, 25, 34) - assert d.attrs["time_coverage_end"] == datetime(2018, 7, 9, 18, 23, 4) + assert d.attrs["time_coverage_start"] == dt.datetime(2018, 7, 9, 17, 25, 34) + assert d.attrs["time_coverage_end"] == dt.datetime(2018, 7, 9, 18, 23, 4) assert "area" in d.attrs assert d.attrs["area"] is not None assert "y" in d.dims diff --git a/satpy/tests/reader_tests/test_utils.py b/satpy/tests/reader_tests/test_utils.py index 67bdb41374..ba43688b76 100644 --- a/satpy/tests/reader_tests/test_utils.py +++ b/satpy/tests/reader_tests/test_utils.py @@ -15,11 +15,12 @@ # # You should have received a copy of the GNU 
General Public License along with # satpy. If not, see . + """Testing of helper functions.""" +import datetime as dt import os import unittest -from datetime import datetime from unittest import mock import dask.array as da @@ -430,7 +431,7 @@ class TestSunEarthDistanceCorrection: def setup_method(self): """Create input / output arrays for the tests.""" - self.test_date = datetime(2020, 8, 15, 13, 0, 40) + self.test_date = dt.datetime(2020, 8, 15, 13, 0, 40) raw_refl = xr.DataArray(da.from_array([10., 20., 40., 1., 98., 50.]), attrs={"start_time": self.test_date, @@ -462,7 +463,7 @@ def test_get_utc_time(self): # Now check correct time is returned with utc_date passed tmp_array = self.raw_refl.copy() - new_test_date = datetime(2019, 2, 1, 15, 2, 12) + new_test_date = dt.datetime(2019, 2, 1, 15, 2, 12) utc_time = hf.get_array_date(tmp_array, new_test_date) assert utc_time == new_test_date diff --git a/satpy/tests/reader_tests/test_vii_l1b_nc.py b/satpy/tests/reader_tests/test_vii_l1b_nc.py index 22ab14e0a3..c302973a5a 100644 --- a/satpy/tests/reader_tests/test_vii_l1b_nc.py +++ b/satpy/tests/reader_tests/test_vii_l1b_nc.py @@ -68,7 +68,7 @@ def setUp(self): bt_b[:] = np.arange(9) cw = g1_1.createVariable("channel_cw_thermal", np.float32, dimensions=("num_chan_thermal",)) cw[:] = np.arange(9) - isi = g1_1.createVariable("Band_averaged_solar_irradiance", np.float32, dimensions=("num_chan_solar",)) + isi = g1_1.createVariable("band_averaged_solar_irradiance", np.float32, dimensions=("num_chan_solar",)) isi[:] = np.arange(11) # Create measurement_data group diff --git a/satpy/tests/reader_tests/test_viirs_compact.py b/satpy/tests/reader_tests/test_viirs_compact.py index ba8fa6f312..f27d9d6f32 100644 --- a/satpy/tests/reader_tests/test_viirs_compact.py +++ b/satpy/tests/reader_tests/test_viirs_compact.py @@ -24,6 +24,7 @@ import pytest from satpy.tests.reader_tests.utils import fill_h5 +from satpy.tests.utils import RANDOM_GEN # NOTE: # The following fixtures are not 
defined in this file, but are used and injected by Pytest: @@ -647,13 +648,13 @@ def fake_dnb(): dtype=np.float32, ) }, - "Latitude": {"value": np.random.rand(96, 332).astype(np.float32)}, - "Longitude": {"value": np.random.rand(96, 332).astype(np.float32)}, + "Latitude": {"value": RANDOM_GEN.random((96, 332)).astype(np.float32)}, + "Longitude": {"value": RANDOM_GEN.random((96, 332)).astype(np.float32)}, "LunarAzimuthAngle": { - "value": np.random.rand(96, 332).astype(np.float32) + "value": RANDOM_GEN.random((96, 332)).astype(np.float32) }, "LunarZenithAngle": { - "value": np.random.rand(96, 332).astype(np.float32) + "value": RANDOM_GEN.random((96, 332)).astype(np.float32) }, "MidTime": { "value": np.array( @@ -1170,16 +1171,16 @@ def fake_dnb(): ) }, "SatelliteAzimuthAngle": { - "value": np.random.rand(96, 332).astype(np.float32) + "value": RANDOM_GEN.random((96, 332)).astype(np.float32) }, "SatelliteZenithAngle": { - "value": np.random.rand(96, 332).astype(np.float32) + "value": RANDOM_GEN.random((96, 332)).astype(np.float32) }, "SolarAzimuthAngle": { - "value": np.random.rand(96, 332).astype(np.float32) + "value": RANDOM_GEN.random((96, 332)).astype(np.float32) }, "SolarZenithAngle": { - "value": np.random.rand(96, 332).astype(np.float32) + "value": RANDOM_GEN.random((96, 332)).astype(np.float32) }, "StartTime": { "value": np.array( @@ -1484,7 +1485,7 @@ def fake_dnb(): }, "PadByte1": {"value": np.array([0, 0, 0], dtype=np.uint8)}, "QF1_VIIRSDNBSDR": { - "value": (np.random.rand(768, 4064) * 255).astype(np.uint8) + "value": (RANDOM_GEN.random((768, 4064)) * 255).astype(np.uint8) }, "QF2_SCAN_SDR": { "value": np.array( @@ -1596,7 +1597,7 @@ def fake_dnb(): dtype=np.uint8, ) }, - "Radiance": {"value": np.random.rand(768, 4064).astype(np.float32)}, + "Radiance": {"value": RANDOM_GEN.random((768, 4064)).astype(np.float32)}, "attrs": { "OriginalFilename": np.array( [ diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py 
index e61718e9db..2983891054 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -21,8 +21,8 @@ """ from __future__ import annotations +import datetime as dt import shutil -from datetime import datetime, timedelta from pathlib import Path from typing import Iterable @@ -34,14 +34,16 @@ import xarray as xr from pyresample import SwathDefinition from pytest import TempPathFactory # noqa: PT013 -from pytest_lazyfixture import lazy_fixture +from pytest_lazy_fixtures import lf as lazy_fixture + +from satpy.tests.utils import RANDOM_GEN I_COLS = 6400 I_ROWS = 32 # one scan M_COLS = 3200 M_ROWS = 16 # one scan -START_TIME = datetime(2023, 5, 30, 17, 55, 41, 0) -END_TIME = datetime(2023, 5, 30, 17, 57, 5, 0) +START_TIME = dt.datetime(2023, 5, 30, 17, 55, 41, 0) +END_TIME = dt.datetime(2023, 5, 30, 17, 57, 5, 0) QF1_FLAG_MEANINGS = """ \tBits are listed from the MSB (bit 7) to the LSB (bit 0): \tBit Description @@ -78,7 +80,7 @@ def surface_reflectance_file(tmp_path_factory: TempPathFactory) -> Path: @pytest.fixture(scope="module") def surface_reflectance_file2(tmp_path_factory: TempPathFactory) -> Path: """Generate fake surface reflectance EDR file.""" - return _create_surface_reflectance_file(tmp_path_factory, START_TIME + timedelta(minutes=5), + return _create_surface_reflectance_file(tmp_path_factory, START_TIME + dt.timedelta(minutes=5), include_veg_indices=False) @@ -97,7 +99,7 @@ def surface_reflectance_with_veg_indices_file(tmp_path_factory: TempPathFactory) @pytest.fixture(scope="module") def surface_reflectance_with_veg_indices_file2(tmp_path_factory: TempPathFactory) -> Path: """Generate fake surface reflectance EDR file with vegetation indexes included.""" - return _create_surface_reflectance_file(tmp_path_factory, START_TIME + timedelta(minutes=5), + return _create_surface_reflectance_file(tmp_path_factory, START_TIME + dt.timedelta(minutes=5), include_veg_indices=True) @@ -110,7 +112,7 @@ def 
multiple_surface_reflectance_files_with_veg_indices(surface_reflectance_with def _create_surface_reflectance_file( tmp_path_factory: TempPathFactory, - start_time: datetime, + start_time: dt.datetime, include_veg_indices: bool = False, ) -> Path: fn = f"SurfRefl_v1r2_npp_s{start_time:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc" @@ -132,10 +134,11 @@ def _create_surf_refl_variables() -> dict[str, xr.DataArray]: "valid_min": -180.0, "valid_max": 180.0} lat_attrs = {"standard_name": "latitude", "units": "degrees_north", "_FillValue": -999.9, "valid_min": -90.0, "valid_max": 90.0} - sr_attrs = {"units": "unitless", "_FillValue": -9999, "scale_factor": 0.0001, "add_offset": 0.0} + sr_attrs = {"units": "unitless", "_FillValue": -9999, + "scale_factor": np.float32(0.0001), "add_offset": np.float32(0.0)} - i_data = np.random.random_sample((I_ROWS, I_COLS)).astype(np.float32) - m_data = np.random.random_sample((M_ROWS, M_COLS)).astype(np.float32) + i_data = RANDOM_GEN.random((I_ROWS, I_COLS)).astype(np.float32) + m_data = RANDOM_GEN.random((M_ROWS, M_COLS)).astype(np.float32) lon_i_data = (i_data * 360) - 180.0 lon_m_data = (m_data * 360) - 180.0 lat_i_data = (i_data * 180) - 90.0 @@ -257,9 +260,10 @@ def _create_continuous_variables(var_names: Iterable[str]) -> dict[str, xr.DataA lon_attrs = {"standard_name": "longitude", "units": "degrees_east", "_FillValue": -999.9} lat_attrs = {"standard_name": "latitude", "units": "degrees_north", "_FillValue": -999.9} - cont_attrs = {"units": "Kelvin", "_FillValue": -9999, "scale_factor": 0.0001, "add_offset": 0.0} + cont_attrs = {"units": "Kelvin", "_FillValue": -9999, + "scale_factor": np.float32(0.0001), "add_offset": np.float32(0.0)} - m_data = np.random.random_sample((M_ROWS, M_COLS)).astype(np.float32) + m_data = RANDOM_GEN.random((M_ROWS, M_COLS)).astype(np.float32) data_arrs = { "Longitude": xr.DataArray(m_data, dims=dims, attrs=lon_attrs), "Latitude": xr.DataArray(m_data, dims=dims, attrs=lat_attrs), diff 
--git a/satpy/tests/reader_tests/test_viirs_l1b.py b/satpy/tests/reader_tests/test_viirs_l1b.py index e60f83cfd0..b2a5c4b476 100644 --- a/satpy/tests/reader_tests/test_viirs_l1b.py +++ b/satpy/tests/reader_tests/test_viirs_l1b.py @@ -17,8 +17,8 @@ # satpy. If not, see . """Module for testing the satpy.readers.viirs_l1b module.""" +import datetime as dt import os -from datetime import datetime, timedelta from unittest import mock import numpy as np @@ -49,7 +49,7 @@ class FakeNetCDF4FileHandlerDay(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" - dt = filename_info.get("start_time", datetime(2016, 1, 1, 12, 0, 0)) + date = filename_info.get("start_time", dt.datetime(2016, 1, 1, 12, 0, 0)) file_type = filename[:5].lower() num_lines = DEFAULT_FILE_SHAPE[0] num_pixels = DEFAULT_FILE_SHAPE[1] @@ -60,8 +60,8 @@ def get_test_content(self, filename, filename_info, filetype_info): "/dimension/number_of_lines": num_lines, "/dimension/number_of_pixels": num_pixels, "/dimension/number_of_LUT_values": num_luts, - "/attr/time_coverage_start": dt.strftime("%Y-%m-%dT%H:%M:%S.000Z"), - "/attr/time_coverage_end": (dt + timedelta(minutes=6)).strftime("%Y-%m-%dT%H:%M:%S.000Z"), + "/attr/time_coverage_start": date.strftime("%Y-%m-%dT%H:%M:%S.000Z"), + "/attr/time_coverage_end": (date + dt.timedelta(minutes=6)).strftime("%Y-%m-%dT%H:%M:%S.000Z"), "/attr/orbit_number": 26384, "/attr/instrument": "VIIRS", "/attr/platform": "Suomi-NPP", diff --git a/satpy/tests/reader_tests/test_viirs_l2.py b/satpy/tests/reader_tests/test_viirs_l2.py new file mode 100644 index 0000000000..01801535ed --- /dev/null +++ b/satpy/tests/reader_tests/test_viirs_l2.py @@ -0,0 +1,136 @@ +"""Module for testing the satpy.readers.viirs_l2 module.""" + +import datetime as dt +import os +from unittest import mock + +import numpy as np +import pytest + +from satpy.readers import load_reader +from satpy.tests.reader_tests.test_netcdf_utils 
import FakeNetCDF4FileHandler +from satpy.tests.utils import convert_file_content_to_data_array + +DEFAULT_FILE_DTYPE = np.float32 +DEFAULT_FILE_SHAPE = (10, 300) +DEFAULT_FILE_DATA = np.arange( + DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE +).reshape(DEFAULT_FILE_SHAPE) +DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) +DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) +DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) +DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) +DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) + + +class FakeNetCDF4FileHandlerVIIRSL2(FakeNetCDF4FileHandler): + """Swap-in NetCDF4 File Handler.""" + + def get_test_content(self, filename, filename_info, filetype_info): + """Mimic reader input file content.""" + date = filename_info.get("start_time", dt.datetime(2023, 12, 30, 22, 30, 0)) + file_type = filename[:6] + num_lines = DEFAULT_FILE_SHAPE[0] + num_pixels = DEFAULT_FILE_SHAPE[1] + num_scans = 5 + file_content = { + "/dimension/number_of_scans": num_scans, + "/dimension/number_of_lines": num_lines, + "/dimension/number_of_pixels": num_pixels, + "/attr/time_coverage_start": date.strftime("%Y-%m-%dT%H:%M:%S.000Z"), + "/attr/time_coverage_end": (date + dt.timedelta(minutes=6)).strftime( + "%Y-%m-%dT%H:%M:%S.000Z" + ), + "/attr/orbit_number": 26384, + "/attr/instrument": "VIIRS", + "/attr/platform": "Suomi-NPP", + } + self._fill_contents_with_default_data(file_content, file_type) + convert_file_content_to_data_array(file_content) + return file_content + + def _fill_contents_with_default_data(self, file_content, file_type): + """Fill file contents with default data.""" + if file_type.startswith("CLD"): + file_content["geolocation_data/latitude"] = DEFAULT_LAT_DATA + file_content["geolocation_data/longitude"] = DEFAULT_LON_DATA + if file_type == "CLDPRO": + 
file_content["geophysical_data/Cloud_Top_Height"] = DEFAULT_FILE_DATA + elif file_type == "CLDMSK": + file_content[ + "geophysical_data/Clear_Sky_Confidence" + ] = DEFAULT_FILE_DATA + elif file_type == "AERDB_": + file_content["Latitude"] = DEFAULT_LAT_DATA + file_content["Longitude"] = DEFAULT_LON_DATA + file_content["Angstrom_Exponent_Land_Ocean_Best_Estimate"] = DEFAULT_FILE_DATA + file_content["Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate"] = DEFAULT_FILE_DATA + + +class TestVIIRSL2FileHandler: + """Test VIIRS_L2 Reader.""" + yaml_file = "viirs_l2.yaml" + + def setup_method(self): + """Wrap NetCDF4 file handler with our own fake handler.""" + from satpy._config import config_search_paths + from satpy.readers.viirs_l2 import VIIRSL2FileHandler + + self.reader_configs = config_search_paths( + os.path.join("readers", self.yaml_file) + ) + self.p = mock.patch.object( + VIIRSL2FileHandler, "__bases__", (FakeNetCDF4FileHandlerVIIRSL2,) + ) + self.fake_handler = self.p.start() + self.p.is_local = True + + def teardown_method(self): + """Stop wrapping the NetCDF4 file handler.""" + self.p.stop() + + @pytest.mark.parametrize( + "filename", + [ + ("CLDPROP_L2_VIIRS_SNPP.A2023364.2230.011.2023365115856.nc"), + ("CLDMSK_L2_VIIRS_SNPP.A2023364.2230.001.2023365105952.nc"), + ("AERDB_L2_VIIRS_SNPP.A2023364.2230.011.2023365113427.nc"), + ], + ) + def test_init(self, filename): + """Test basic init with no extra parameters.""" + from satpy.readers import load_reader + + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames([filename]) + assert len(loadables) == 1 + r.create_filehandlers(loadables) + # make sure we have some files + assert r.file_handlers + + @pytest.mark.parametrize( + ("filename", "datasets"), + [ + pytest.param("CLDPROP_L2_VIIRS_SNPP.A2023364.2230.011.2023365115856.nc", + ["Cloud_Top_Height"], id="CLDPROP"), + pytest.param("CLDMSK_L2_VIIRS_SNPP.A2023364.2230.001.2023365105952.nc", + ["Clear_Sky_Confidence"], id="CLDMSK"), 
+ pytest.param("AERDB_L2_VIIRS_SNPP.A2023364.2230.011.2023365113427.nc", + ["Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate", + "Angstrom_Exponent_Land_Ocean_Best_Estimate"], id="AERDB"), + ], + ) + def test_load_l2_files(self, filename, datasets): + """Test L2 File Loading.""" + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames([filename]) + r.create_filehandlers(loadables) + loaded_datasets = r.load(datasets) + assert len(loaded_datasets) == len(datasets) + for d in loaded_datasets.values(): + assert d.shape == DEFAULT_FILE_SHAPE + assert d.dims == ("y", "x") + assert d.attrs["sensor"] == "viirs" + d_np = d.compute() + assert d.dtype == d_np.dtype + assert d.dtype == np.float32 diff --git a/satpy/tests/reader_tests/test_viirs_sdr.py b/satpy/tests/reader_tests/test_viirs_sdr.py index 952224daaf..2758ceb81c 100644 --- a/satpy/tests/reader_tests/test_viirs_sdr.py +++ b/satpy/tests/reader_tests/test_viirs_sdr.py @@ -354,12 +354,12 @@ def test_init_start_time_is_nodate(self): def test_init_start_time_beyond(self): """Test basic init with start_time after the provided files.""" - from datetime import datetime + import datetime as dt from satpy.readers import load_reader r = load_reader(self.reader_configs, filter_parameters={ - "start_time": datetime(2012, 2, 26) + "start_time": dt.datetime(2012, 2, 26) }) fhs = r.create_filehandlers([ "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", @@ -368,12 +368,12 @@ def test_init_start_time_beyond(self): def test_init_end_time_beyond(self): """Test basic init with end_time before the provided files.""" - from datetime import datetime + import datetime as dt from satpy.readers import load_reader r = load_reader(self.reader_configs, filter_parameters={ - "end_time": datetime(2012, 2, 24) + "end_time": dt.datetime(2012, 2, 24) }) fhs = r.create_filehandlers([ "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", @@ -382,14 +382,14 @@ 
def test_init_end_time_beyond(self): def test_init_start_end_time(self): """Test basic init with end_time before the provided files.""" - from datetime import datetime + import datetime as dt from satpy.readers import load_reader r = load_reader(self.reader_configs, filter_parameters={ - "start_time": datetime(2012, 2, 24), - "end_time": datetime(2012, 2, 26) + "start_time": dt.datetime(2012, 2, 24), + "end_time": dt.datetime(2012, 2, 26) }) loadables = r.select_files_from_pathnames([ "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", diff --git a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py index 7ec34fd9bf..b03052ea30 100644 --- a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py +++ b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . + """The viirs_vgac_l1b_nc reader tests package. This version tests the readers for VIIIRS VGAC data preliminary version. 
@@ -22,17 +23,18 @@ """ -import datetime +import datetime as dt import numpy as np import pytest +import xarray as xr from netCDF4 import Dataset @pytest.fixture() def nc_filename(tmp_path): """Create an nc test data file and return its filename.""" - now = datetime.datetime.utcnow() + now = dt.datetime.utcnow() filename = f"VGAC_VJ10XMOD_A{now:%Y%j_%H%M}_n004946_K005.nc" filename_str = str(tmp_path / filename) # Create test data @@ -40,11 +42,14 @@ def nc_filename(tmp_path): nscn = 7 npix = 800 n_lut = 12000 + start_time_srting = "2023-03-28T09:08:07" + end_time_string = "2023-03-28T10:11:12" nc.createDimension("npix", npix) nc.createDimension("nscn", nscn) nc.createDimension("n_lut", n_lut) - nc.StartTime = "2023-03-28T09:08:07" - nc.EndTime = "2023-03-28T10:11:12" + nc.createDimension("one", 1) + nc.StartTime = start_time_srting + nc.EndTime = end_time_string for ind in range(1, 11, 1): ch_name = "M{:02d}".format(ind) r_a = nc.createVariable(ch_name, np.int16, dimensions=("nscn", "npix")) @@ -61,6 +66,23 @@ def nc_filename(tmp_path): setattr(tb_b, attr, attrs[attr]) tb_lut = nc.createVariable(ch_name + "_LUT", np.float32, dimensions=("n_lut")) tb_lut[:] = np.array(range(0, n_lut)) * 0.5 + tb_lut.units = "Kelvin" + reference_time = np.datetime64("2010-01-01T00:00:00") + start_time = np.datetime64("2023-03-28T09:08:07") + np.timedelta64(123000, "us") + delta_days = start_time - reference_time + delta_full_days = delta_days.astype("timedelta64[D]") + hidden_reference_time = reference_time + delta_full_days + delta_part_of_days = start_time - hidden_reference_time + proj_time0 = nc.createVariable("proj_time0", np.float64) + proj_time0[:] = (delta_full_days.astype(np.int64) + + 0.000001 * delta_part_of_days.astype("timedelta64[us]").astype(np.int64) / (60 * 60 * 24)) + proj_time0.units = "days since 01/01/2010T00:00:00" + time_v = nc.createVariable("time", np.float64, ("nscn",)) + delta_h = np.datetime64(end_time_string) - start_time + delta_hours = 0.000001 * 
delta_h.astype("timedelta64[us]").astype(np.int64) / (60 * 60) + time_v[:] = np.linspace(0, delta_hours, num=nscn).astype(np.float64) + time_v.units = "hours since proj_time0" + return filename_str @@ -75,10 +97,43 @@ def test_read_vgac(self, nc_filename): scn_ = Scene( reader="viirs_vgac_l1c_nc", filenames=[nc_filename]) - scn_.load(["M05", "M15"]) + scn_.load(["M05", "M15", "scanline_timestamps"]) + diff_s = (scn_["scanline_timestamps"][0].values.astype("datetime64[us]") - + np.datetime64("2023-03-28T09:08:07.123000").astype("datetime64[us]")) + diff_e = (np.datetime64("2023-03-28T10:11:12.000000").astype("datetime64[us]") - + scn_["scanline_timestamps"][-1].values.astype("datetime64[us]")) + assert (diff_s < np.timedelta64(5, "us")) + assert (diff_s > np.timedelta64(-5, "us")) + assert (diff_e < np.timedelta64(5, "us")) + assert (diff_e > np.timedelta64(-5, "us")) assert (scn_["M05"][0, 0] == 100) assert (scn_["M15"][0, 0] == 400) - assert scn_.start_time == datetime.datetime(year=2023, month=3, day=28, - hour=9, minute=8, second=7) - assert scn_.end_time == datetime.datetime(year=2023, month=3, day=28, - hour=10, minute=11, second=12) + assert scn_.start_time == dt.datetime(year=2023, month=3, day=28, + hour=9, minute=8, second=7) + assert scn_.end_time == dt.datetime(year=2023, month=3, day=28, + hour=10, minute=11, second=12) + + def test_dt64_to_datetime(self): + """Test datetime conversion branch.""" + from satpy.readers.viirs_vgac_l1c_nc import VGACFileHandler + fh = VGACFileHandler(filename="", + filename_info={"start_time": "2023-03-28T09:08:07"}, + filetype_info="") + in_dt = dt.datetime(year=2023, month=3, day=28, + hour=9, minute=8, second=7) + out_dt = fh.dt64_to_datetime(in_dt) + assert out_dt == in_dt + + def test_decode_time_variable(self): + """Test decode time variable branch.""" + from satpy.readers.viirs_vgac_l1c_nc import VGACFileHandler + fh = VGACFileHandler(filename="", + filename_info={"start_time": "2023-03-28T09:08:07"}, + 
filetype_info="") + data = xr.DataArray( + [[1, 2], + [3, 4]], + dims=("y", "x"), + attrs={"units": "something not expected"}) + with pytest.raises(AttributeError): + fh.decode_time_variable(data, "time", None) diff --git a/satpy/tests/scene_tests/test_conversions.py b/satpy/tests/scene_tests/test_conversions.py index 9b0dd9098e..4490903880 100644 --- a/satpy/tests/scene_tests/test_conversions.py +++ b/satpy/tests/scene_tests/test_conversions.py @@ -13,8 +13,10 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Unit tests for Scene conversion functionality.""" -from datetime import datetime + +import datetime as dt import pytest import xarray as xr @@ -61,7 +63,7 @@ def test_geoviews_basic_with_area(self): {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 2, 2, [-200, -200, 200, 200]) scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1), + attrs={"start_time": dt.datetime(2018, 1, 1), "area": area}) gv_obj = scn.to_geoviews() # we assume that if we got something back, geoviews can use it @@ -75,7 +77,7 @@ def test_geoviews_basic_with_swath(self): lats = xr.DataArray(da.zeros((2, 2))) area = SwathDefinition(lons, lats) scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1), + attrs={"start_time": dt.datetime(2018, 1, 1), "area": area}) gv_obj = scn.to_geoviews() # we assume that if we got something back, geoviews can use it @@ -89,7 +91,7 @@ def test_hvplot_basic_with_area(self): {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 2, 2, [-200, -200, 200, 200]) scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1), + attrs={"start_time": dt.datetime(2018, 1, 1), "area": area, "units": "m"}) hv_obj = scn.to_hvplot() # we assume that if we got something back, hvplot can use it @@ -103,13 +105,13 @@ def 
test_hvplot_rgb_with_area(self): {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 2, 2, [-200, -200, 200, 200]) scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1), + attrs={"start_time": dt.datetime(2018, 1, 1), "area": area, "units": "m"}) scn["ds2"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1), + attrs={"start_time": dt.datetime(2018, 1, 1), "area": area, "units": "m"}) scn["ds3"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1), + attrs={"start_time": dt.datetime(2018, 1, 1), "area": area, "units": "m"}) hv_obj = scn.to_hvplot() # we assume that if we got something back, hvplot can use it @@ -123,7 +125,7 @@ def test_hvplot_basic_with_swath(self): latitude = xr.DataArray(da.zeros((2, 2))) area = SwathDefinition(longitude, latitude) scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1), + attrs={"start_time": dt.datetime(2018, 1, 1), "area": area, "units": "m"}) hv_obj = scn.to_hvplot() # we assume that if we got something back, hvplot can use it @@ -150,7 +152,7 @@ def single_area_scn(self): 2, 2, [-200, -200, 200, 200]) data_array = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1), "area": area}) + attrs={"start_time": dt.datetime(2018, 1, 1), "area": area}) scn = Scene() scn["var1"] = data_array return scn @@ -169,10 +171,10 @@ def multi_area_scn(self): data_array1 = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1), "area": area1}) + attrs={"start_time": dt.datetime(2018, 1, 1), "area": area1}) data_array2 = xr.DataArray(da.zeros((4, 4), chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1), "area": area2}) + attrs={"start_time": dt.datetime(2018, 1, 1), "area": area2}) scn = Scene() scn["var1"] 
= data_array1 scn["var2"] = data_array2 diff --git a/satpy/tests/scene_tests/test_saving.py b/satpy/tests/scene_tests/test_saving.py index 32c6ff61c2..b67f41cc2e 100644 --- a/satpy/tests/scene_tests/test_saving.py +++ b/satpy/tests/scene_tests/test_saving.py @@ -13,9 +13,11 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Unit tests for saving-related functionality in scene.py.""" + +import datetime as dt import os -from datetime import datetime from unittest import mock import pytest @@ -39,7 +41,7 @@ def test_save_datasets_default(self, tmp_path): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", - "start_time": datetime(2018, 1, 1, 0, 0, 0)} + "start_time": dt.datetime(2018, 1, 1, 0, 0, 0)} ) scn = Scene() scn["test"] = ds1 @@ -52,7 +54,7 @@ def test_save_datasets_by_ext(self, tmp_path): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", - "start_time": datetime(2018, 1, 1, 0, 0, 0)} + "start_time": dt.datetime(2018, 1, 1, 0, 0, 0)} ) scn = Scene() scn["test"] = ds1 @@ -70,7 +72,7 @@ def test_save_datasets_bad_writer(self, tmp_path): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", - "start_time": datetime.utcnow()} + "start_time": dt.datetime.utcnow()} ) scn = Scene() scn["test"] = ds1 @@ -98,7 +100,7 @@ def test_save_dataset_default(self, tmp_path): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", - "start_time": datetime(2018, 1, 1, 0, 0, 0)} + "start_time": dt.datetime(2018, 1, 1, 0, 0, 0)} ) scn = Scene() scn["test"] = ds1 diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index b5d5a54b96..8e21b34615 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -15,11 +15,12 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """Tests for compositors in composites/__init__.py.""" +import datetime as dt import os import unittest -from datetime import datetime from unittest import mock import dask @@ -30,7 +31,7 @@ from pyresample import AreaDefinition import satpy -from satpy.tests.utils import CustomScheduler +from satpy.tests.utils import RANDOM_GEN, CustomScheduler # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: @@ -175,7 +176,7 @@ def setup_method(self): {"proj": "merc"}, 2, 2, (-2000, -2000, 2000, 2000)) attrs = {"area": area, - "start_time": datetime(2018, 1, 1, 18), + "start_time": dt.datetime(2018, 1, 1, 18), "modifiers": tuple(), "resolution": 1000, "calibration": "reflectance", @@ -347,7 +348,7 @@ def setUp(self): {"proj": "merc"}, 2, 2, (-2000, -2000, 2000, 2000)) attrs = {"area": area, - "start_time": datetime(2018, 1, 1, 18), + "start_time": dt.datetime(2018, 1, 1, 18), "modifiers": tuple(), "resolution": 1000, "name": "test_vis"} @@ -430,7 +431,7 @@ class TestDayNightCompositor(unittest.TestCase): def setUp(self): """Create test data.""" bands = ["R", "G", "B"] - start_time = datetime(2018, 1, 1, 18, 0, 0) + start_time = dt.datetime(2018, 1, 1, 18, 0, 0) # RGB a = np.zeros((3, 2, 2), dtype=np.float32) @@ -701,10 +702,10 @@ def test_compositor(self, e2d, input_shape, bands): """Test luminance sharpening compositor.""" from satpy.composites import SandwichCompositor - rgb_arr = da.from_array(np.random.random(input_shape), chunks=2) + rgb_arr = da.from_array(RANDOM_GEN.random(input_shape), chunks=2) rgb = xr.DataArray(rgb_arr, dims=["bands", "y", "x"], coords={"bands": bands}) - lum_arr = da.from_array(100 * np.random.random((2, 2)), chunks=2) + lum_arr = da.from_array(100 * RANDOM_GEN.random((2, 2)), chunks=2) lum = xr.DataArray(lum_arr, dims=["y", "x"]) # Make enhance2dataset return unmodified dataset @@ -1420,8 +1421,6 @@ def load(self, arg): filenames=["/foo.tif"]) register.assert_not_called() 
retrieve.assert_not_called() - assert "start_time" in res.attrs - assert "end_time" in res.attrs assert res.attrs["sensor"] is None assert "modifiers" not in res.attrs assert "calibration" not in res.attrs @@ -1434,8 +1433,6 @@ def load(self, arg): res = comp() Scene.assert_called_once_with(reader="generic_image", filenames=["data_dir/foo.tif"]) - assert "start_time" in res.attrs - assert "end_time" in res.attrs assert res.attrs["sensor"] is None assert "modifiers" not in res.attrs assert "calibration" not in res.attrs @@ -1488,10 +1485,10 @@ def setup_class(cls): [[1., 0.5], [0., np.nan]], [[1., 0.5], [0., np.nan]]]), "RGBA": np.array([ - [[1.0, 0.5], [0.0, np.nan]], - [[1.0, 0.5], [0.0, np.nan]], - [[1.0, 0.5], [0.0, np.nan]], - [[0.5, 0.5], [0.5, 0.5]]]), + [[1., 0.5], [0., np.nan]], + [[1., 0.5], [0., np.nan]], + [[1., 0.5], [0., np.nan]], + [[0.5, 0.5], [0., 0.5]]]), } cls.foreground_data = foreground_data @@ -1499,20 +1496,41 @@ def setup_class(cls): @pytest.mark.parametrize( ("foreground_bands", "background_bands", "exp_bands", "exp_result"), [ - ("L", "L", "L", np.array([[1.0, 0.5], [0.0, 1.0]])), - ("LA", "LA", "L", np.array([[1.0, 0.75], [0.5, 1.0]])), - ("RGB", "RGB", "RGB", np.array([ + ("L", "L", "L", np.array([[1., 0.5], [0., 1.]])), + ("L", "RGB", "RGB", np.array([ [[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]]])), - ("RGBA", "RGBA", "RGB", np.array([ + ("LA", "LA", "LA", np.array([ [[1., 0.75], [0.5, 1.]], - [[1., 0.75], [0.5, 1.]], - [[1., 0.75], [0.5, 1.]]])), - ("RGBA", "RGB", "RGB", np.array([ + [[1., 1.], [1., 1.]]])), + ("LA", "RGB", "RGB", np.array([ [[1., 0.75], [0.5, 1.]], [[1., 0.75], [0.5, 1.]], [[1., 0.75], [0.5, 1.]]])), + ("RGB", "RGB", "RGB", np.array([ + [[1., 0.5], [0., 1.]], + [[1., 0.5], [0., 1.]], + [[1., 0.5], [0., 1.]]])), + ("RGB", "LA", "RGBA", np.array([ + [[1., 0.5], [0., 1.]], + [[1., 0.5], [0., 1.]], + [[1., 0.5], [0., 1.]], + [[1., 1.], [1., 1.]]])), + ("RGB", "RGBA", "RGBA", np.array([ + [[1., 0.5], 
[0., 1.]], + [[1., 0.5], [0., 1.]], + [[1., 0.5], [0., 1.]], + [[1., 1.], [1., 1.]]])), + ("RGBA", "RGBA", "RGBA", np.array([ + [[1., 0.75], [1., 1.]], + [[1., 0.75], [1., 1.]], + [[1., 0.75], [1., 1.]], + [[1., 1.], [1., 1.]]])), + ("RGBA", "RGB", "RGB", np.array([ + [[1., 0.75], [1., 1.]], + [[1., 0.75], [1., 1.]], + [[1., 0.75], [1., 1.]]])), ] ) def test_call(self, foreground_bands, background_bands, exp_bands, exp_result): @@ -1522,6 +1540,7 @@ def test_call(self, foreground_bands, background_bands, exp_bands, exp_result): # L mode images foreground_data = self.foreground_data[foreground_bands] + attrs = {"mode": foreground_bands, "area": "foo"} foreground = xr.DataArray(da.from_array(foreground_data), dims=("bands", "y", "x"), @@ -1531,7 +1550,9 @@ def test_call(self, foreground_bands, background_bands, exp_bands, exp_result): background = xr.DataArray(da.ones((len(background_bands), 2, 2)), dims=("bands", "y", "x"), coords={"bands": [c for c in attrs["mode"]]}, attrs=attrs) + res = comp([foreground, background]) + assert res.attrs["area"] == "foo" np.testing.assert_allclose(res, exp_result) assert res.attrs["mode"] == exp_bands diff --git a/satpy/tests/test_dataset.py b/satpy/tests/test_dataset.py index 1b827b8dcf..6ca3b25d72 100644 --- a/satpy/tests/test_dataset.py +++ b/satpy/tests/test_dataset.py @@ -13,10 +13,11 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """Test objects and functions in the dataset module.""" +import datetime as dt import unittest -from datetime import datetime import numpy as np import pytest @@ -101,39 +102,85 @@ class TestCombineMetadata(unittest.TestCase): def setUp(self): """Set up the test case.""" - self.datetime_dts = ( - {"start_time": datetime(2018, 2, 1, 11, 58, 0)}, - {"start_time": datetime(2018, 2, 1, 11, 59, 0)}, - {"start_time": datetime(2018, 2, 1, 12, 0, 0)}, - {"start_time": datetime(2018, 2, 1, 12, 1, 0)}, - {"start_time": datetime(2018, 2, 1, 12, 2, 0)}, + # The times need to be in ascending order (oldest first) + self.start_time_dts = ( + {"start_time": dt.datetime(2018, 2, 1, 11, 58, 0)}, + {"start_time": dt.datetime(2018, 2, 1, 11, 59, 0)}, + {"start_time": dt.datetime(2018, 2, 1, 12, 0, 0)}, + {"start_time": dt.datetime(2018, 2, 1, 12, 1, 0)}, + {"start_time": dt.datetime(2018, 2, 1, 12, 2, 0)}, + ) + self.end_time_dts = ( + {"end_time": dt.datetime(2018, 2, 1, 11, 58, 0)}, + {"end_time": dt.datetime(2018, 2, 1, 11, 59, 0)}, + {"end_time": dt.datetime(2018, 2, 1, 12, 0, 0)}, + {"end_time": dt.datetime(2018, 2, 1, 12, 1, 0)}, + {"end_time": dt.datetime(2018, 2, 1, 12, 2, 0)}, + ) + self.other_time_dts = ( + {"other_time": dt.datetime(2018, 2, 1, 11, 58, 0)}, + {"other_time": dt.datetime(2018, 2, 1, 11, 59, 0)}, + {"other_time": dt.datetime(2018, 2, 1, 12, 0, 0)}, + {"other_time": dt.datetime(2018, 2, 1, 12, 1, 0)}, + {"other_time": dt.datetime(2018, 2, 1, 12, 2, 0)}, + ) + self.start_time_dts_with_none = ( + {"start_time": None}, + {"start_time": dt.datetime(2018, 2, 1, 11, 59, 0)}, + {"start_time": dt.datetime(2018, 2, 1, 12, 0, 0)}, + {"start_time": dt.datetime(2018, 2, 1, 12, 1, 0)}, + {"start_time": dt.datetime(2018, 2, 1, 12, 2, 0)}, + ) + self.end_time_dts_with_none = ( + {"end_time": dt.datetime(2018, 2, 1, 11, 58, 0)}, + {"end_time": dt.datetime(2018, 2, 1, 11, 59, 0)}, + {"end_time": dt.datetime(2018, 2, 1, 12, 0, 0)}, + {"end_time": dt.datetime(2018, 2, 1, 12, 1, 
0)}, + {"end_time": None}, ) def test_average_datetimes(self): """Test the average_datetimes helper function.""" from satpy.dataset.metadata import average_datetimes dts = ( - datetime(2018, 2, 1, 11, 58, 0), - datetime(2018, 2, 1, 11, 59, 0), - datetime(2018, 2, 1, 12, 0, 0), - datetime(2018, 2, 1, 12, 1, 0), - datetime(2018, 2, 1, 12, 2, 0), + dt.datetime(2018, 2, 1, 11, 58, 0), + dt.datetime(2018, 2, 1, 11, 59, 0), + dt.datetime(2018, 2, 1, 12, 0, 0), + dt.datetime(2018, 2, 1, 12, 1, 0), + dt.datetime(2018, 2, 1, 12, 2, 0), ) ret = average_datetimes(dts) assert dts[2] == ret - def test_combine_times_with_averaging(self): - """Test the combine_metadata with times with averaging.""" + def test_combine_start_times(self): + """Test the combine_metadata with start times.""" + from satpy.dataset.metadata import combine_metadata + ret = combine_metadata(*self.start_time_dts) + assert ret["start_time"] == self.start_time_dts[0]["start_time"] + + def test_combine_end_times(self): + """Test the combine_metadata with end times.""" + from satpy.dataset.metadata import combine_metadata + ret = combine_metadata(*self.end_time_dts) + assert ret["end_time"] == self.end_time_dts[-1]["end_time"] + + def test_combine_start_times_with_none(self): + """Test the combine_metadata with start times when there's a None included.""" + from satpy.dataset.metadata import combine_metadata + ret = combine_metadata(*self.start_time_dts_with_none) + assert ret["start_time"] == self.start_time_dts_with_none[1]["start_time"] + + def test_combine_end_times_with_none(self): + """Test the combine_metadata with end times when there's a None included.""" from satpy.dataset.metadata import combine_metadata - ret = combine_metadata(*self.datetime_dts) - assert self.datetime_dts[2]["start_time"] == ret["start_time"] + ret = combine_metadata(*self.end_time_dts_with_none) + assert ret["end_time"] == self.end_time_dts_with_none[-2]["end_time"] - def test_combine_times_without_averaging(self): - """Test the 
combine_metadata with times without averaging.""" + def test_combine_other_times(self): + """Test the combine_metadata with other time values than start or end times.""" from satpy.dataset.metadata import combine_metadata - ret = combine_metadata(*self.datetime_dts, average_times=False) - # times are not equal so don't include it in the final result - assert "start_time" not in ret + ret = combine_metadata(*self.other_time_dts) + assert ret["other_time"] == self.other_time_dts[2]["other_time"] def test_combine_arrays(self): """Test the combine_metadata with arrays.""" @@ -327,10 +374,10 @@ def test_combine_dicts_close(): "c": [1, 2, 3], "d": { "e": np.str_("bar"), - "f": datetime(2020, 1, 1, 12, 15, 30), + "f": dt.datetime(2020, 1, 1, 12, 15, 30), "g": np.array([1, 2, 3]), }, - "h": np.array([datetime(2020, 1, 1), datetime(2020, 1, 1)]) + "h": np.array([dt.datetime(2020, 1, 1), dt.datetime(2020, 1, 1)]) } } attrs_close = { @@ -340,10 +387,10 @@ def test_combine_dicts_close(): "c": np.array([1, 2, 3]) + 1E-12, "d": { "e": np.str_("bar"), - "f": datetime(2020, 1, 1, 12, 15, 30), + "f": dt.datetime(2020, 1, 1, 12, 15, 30), "g": np.array([1, 2, 3]) + 1E-12 }, - "h": np.array([datetime(2020, 1, 1), datetime(2020, 1, 1)]) + "h": np.array([dt.datetime(2020, 1, 1), dt.datetime(2020, 1, 1)]) } } test_metadata = [attrs, attrs_close] diff --git a/satpy/tests/test_file_handlers.py b/satpy/tests/test_file_handlers.py index 403e686204..7e1424414e 100644 --- a/satpy/tests/test_file_handlers.py +++ b/satpy/tests/test_file_handlers.py @@ -15,10 +15,11 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """test file handler baseclass.""" +import datetime as dt import unittest -from datetime import datetime, timedelta from unittest import mock import numpy as np @@ -49,25 +50,27 @@ def setUp(self): """Set up the test.""" self.fh = BaseFileHandler( "filename", {"filename_info": "bla"}, "filetype_info") + self.early_time = dt.datetime(2024, 2, 12, 11, 00) + self.late_time = dt.datetime(2024, 2, 12, 12, 00) def test_combine_times(self): """Combine times.""" - info1 = {"start_time": 1} - info2 = {"start_time": 2} + info1 = {"start_time": self.early_time} + info2 = {"start_time": self.late_time} res = self.fh.combine_info([info1, info2]) - exp = {"start_time": 1} + exp = {"start_time": self.early_time} assert res == exp res = self.fh.combine_info([info2, info1]) - exp = {"start_time": 1} + exp = {"start_time": self.early_time} assert res == exp - info1 = {"end_time": 1} - info2 = {"end_time": 2} + info1 = {"end_time": self.early_time} + info2 = {"end_time": self.late_time} res = self.fh.combine_info([info1, info2]) - exp = {"end_time": 2} + exp = {"end_time": self.late_time} assert res == exp res = self.fh.combine_info([info2, info1]) - exp = {"end_time": 2} + exp = {"end_time": self.late_time} assert res == exp def test_combine_orbits(self): @@ -159,13 +162,13 @@ def test_combine_orbital_parameters(self): def test_combine_time_parameters(self): """Combine times in 'time_parameters.""" time_params1 = { - "nominal_start_time": datetime(2020, 1, 1, 12, 0, 0), - "nominal_end_time": datetime(2020, 1, 1, 12, 2, 30), - "observation_start_time": datetime(2020, 1, 1, 12, 0, 2, 23821), - "observation_end_time": datetime(2020, 1, 1, 12, 2, 23, 12348), + "nominal_start_time": dt.datetime(2020, 1, 1, 12, 0, 0), + "nominal_end_time": dt.datetime(2020, 1, 1, 12, 2, 30), + "observation_start_time": dt.datetime(2020, 1, 1, 12, 0, 2, 23821), + "observation_end_time": dt.datetime(2020, 1, 1, 12, 2, 23, 12348), } time_params2 = {} - time_shift = timedelta(seconds=1.5) + time_shift = 
dt.timedelta(seconds=1.5) for key, value in time_params1.items(): time_params2[key] = value + time_shift res = self.fh.combine_info([ @@ -173,10 +176,10 @@ def test_combine_time_parameters(self): {"time_parameters": time_params2} ]) res_time_params = res["time_parameters"] - assert res_time_params["nominal_start_time"] == datetime(2020, 1, 1, 12, 0, 0) - assert res_time_params["nominal_end_time"] == datetime(2020, 1, 1, 12, 2, 31, 500000) - assert res_time_params["observation_start_time"] == datetime(2020, 1, 1, 12, 0, 2, 23821) - assert res_time_params["observation_end_time"] == datetime(2020, 1, 1, 12, 2, 24, 512348) + assert res_time_params["nominal_start_time"] == dt.datetime(2020, 1, 1, 12, 0, 0) + assert res_time_params["nominal_end_time"] == dt.datetime(2020, 1, 1, 12, 2, 31, 500000) + assert res_time_params["observation_start_time"] == dt.datetime(2020, 1, 1, 12, 0, 2, 23821) + assert res_time_params["observation_end_time"] == dt.datetime(2020, 1, 1, 12, 2, 24, 512348) def test_file_is_kept_intact(self): """Test that the file object passed (string, path, or other) is kept intact.""" diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index 0c8eb51b3f..7e28a7456b 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -15,9 +15,11 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """Tests for modifiers in modifiers/__init__.py.""" + +import datetime as dt import unittest -from datetime import datetime from unittest import mock import dask.array as da @@ -25,7 +27,9 @@ import pytest import xarray as xr from pyresample.geometry import AreaDefinition, StackedAreaDefinition -from pytest_lazyfixture import lazy_fixture +from pytest_lazy_fixtures import lf as lazy_fixture + +from satpy.tests.utils import RANDOM_GEN def _sunz_area_def(): @@ -57,7 +61,7 @@ def _sunz_stacked_area_def(): def _shared_sunz_attrs(area_def): attrs = {"area": area_def, - "start_time": datetime(2018, 1, 1, 18), + "start_time": dt.datetime(2018, 1, 1, 18), "modifiers": tuple(), "name": "test_vis"} return attrs @@ -213,23 +217,23 @@ def setUp(self): "area": area, "start_time": self.start_time} - nir_arr = np.random.random((2, 2)) + nir_arr = RANDOM_GEN.random((2, 2)) self.nir = xr.DataArray(da.from_array(nir_arr), dims=["y", "x"]) self.nir.attrs.update(self.metadata) - ir_arr = 100 * np.random.random((2, 2)) + ir_arr = 100 * RANDOM_GEN.random((2, 2)) self.ir_ = xr.DataArray(da.from_array(ir_arr), dims=["y", "x"]) self.ir_.attrs["area"] = area - self.sunz_arr = 100 * np.random.random((2, 2)) + self.sunz_arr = 100 * RANDOM_GEN.random((2, 2)) self.sunz = xr.DataArray(da.from_array(self.sunz_arr), dims=["y", "x"]) self.sunz.attrs["standard_name"] = "solar_zenith_angle" self.sunz.attrs["area"] = area self.da_sunz = da.from_array(self.sunz_arr) - refl_arr = np.random.random((2, 2)) + refl_arr = RANDOM_GEN.random((2, 2)) self.refl = da.from_array(refl_arr) - self.refl_with_co2 = da.from_array(np.random.random((2, 2))) + self.refl_with_co2 = da.from_array(RANDOM_GEN.random((2, 2))) self.refl_from_tbs = mock.MagicMock() self.refl_from_tbs.side_effect = self.fake_refl_from_tbs @@ -292,7 +296,7 @@ def test_no_sunz_with_co2(self, calculator, apply_modifier_info, sza): comp = NIRReflectance(name="test") info = {"modifiers": None} - co2_arr = np.random.random((2, 2)) + co2_arr = 
RANDOM_GEN.random((2, 2)) co2 = xr.DataArray(da.from_array(co2_arr), dims=["y", "x"]) co2.attrs["wavelength"] = [12.0, 13.0, 14.0] co2.attrs["units"] = "K" @@ -378,14 +382,14 @@ def test_compositor(self, calculator, apply_modifier_info, sza): """Test the NIR emissive part from reflectance compositor.""" from satpy.modifiers.spectral import NIRReflectance - refl_arr = np.random.random((2, 2)) + refl_arr = RANDOM_GEN.random((2, 2)) refl = da.from_array(refl_arr) refl_from_tbs = mock.MagicMock() refl_from_tbs.return_value = refl calculator.return_value = mock.MagicMock(reflectance_from_tbs=refl_from_tbs) - emissive_arr = np.random.random((2, 2)) + emissive_arr = RANDOM_GEN.random((2, 2)) emissive = da.from_array(emissive_arr) emissive_part = mock.MagicMock() emissive_part.return_value = emissive @@ -405,17 +409,17 @@ def test_compositor(self, calculator, apply_modifier_info, sza): get_lonlats.return_value = (lons, lats) area = mock.MagicMock(get_lonlats=get_lonlats) - nir_arr = np.random.random((2, 2)) + nir_arr = RANDOM_GEN.random((2, 2)) nir = xr.DataArray(da.from_array(nir_arr), dims=["y", "x"]) nir.attrs["platform_name"] = platform nir.attrs["sensor"] = sensor nir.attrs["name"] = chan_name nir.attrs["area"] = area - ir_arr = np.random.random((2, 2)) + ir_arr = RANDOM_GEN.random((2, 2)) ir_ = xr.DataArray(da.from_array(ir_arr), dims=["y", "x"]) ir_.attrs["area"] = area - sunz_arr = 100 * np.random.random((2, 2)) + sunz_arr = 100 * RANDOM_GEN.random((2, 2)) sunz = xr.DataArray(da.from_array(sunz_arr), dims=["y", "x"]) sunz.attrs["standard_name"] = "solar_zenith_angle" sunz.attrs["area"] = area @@ -591,7 +595,7 @@ def test_call(self): lats[1, 1] = np.inf lats = da.from_array(lats, chunks=5) area = SwathDefinition(lons, lats) - stime = datetime(2020, 1, 1, 12, 0, 0) + stime = dt.datetime(2020, 1, 1, 12, 0, 0) orb_params = { "satellite_actual_altitude": 12345678, "nadir_longitude": 0.0, diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index 
db3d1ccb1d..fc641a32a4 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -15,10 +15,12 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Test classes and functions in the readers/__init__.py module.""" import builtins import contextlib +import datetime as dt import os import sys import unittest @@ -30,11 +32,11 @@ import numpy as np import pytest import xarray as xr -from pytest_lazyfixture import lazy_fixture +from pytest_lazy_fixtures import lf as lazy_fixture from satpy.dataset.data_dict import get_key from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange -from satpy.readers import find_files_and_readers +from satpy.readers import FSFile, find_files_and_readers, open_file_or_filename # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: @@ -399,43 +401,37 @@ def test_missing_requirements(self, *mocks): def test_all_filtered(self): """Test behaviour if no file matches the filter parameters.""" - import datetime - from satpy.readers import load_readers filenames = { "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], } - filter_params = {"start_time": datetime.datetime(1970, 1, 1), - "end_time": datetime.datetime(1970, 1, 2), + filter_params = {"start_time": dt.datetime(1970, 1, 1), + "end_time": dt.datetime(1970, 1, 2), "area": None} with pytest.raises(ValueError, match="No dataset could be loaded.*"): load_readers(filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) def test_all_filtered_multiple(self): """Test behaviour if no file matches the filter parameters.""" - import datetime - from satpy.readers import load_readers filenames = { "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], "abi_l1b": ["OR_ABI-L1b-RadF-M3C01_G16_s20120561730408_e20120561741175_c20172631741218.nc"], } - filter_params = 
{"start_time": datetime.datetime(1970, 1, 1), - "end_time": datetime.datetime(1970, 1, 2)} + filter_params = {"start_time": dt.datetime(1970, 1, 1), + "end_time": dt.datetime(1970, 1, 2)} with pytest.raises(ValueError, match="No dataset could be loaded."): load_readers(filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) def test_almost_all_filtered(self): """Test behaviour if only one reader has datasets.""" - import datetime - from satpy.readers import load_readers filenames = { "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], "abi_l1b": ["OR_ABI-L1b-RadF-M3C01_G16_s20172631730408_e20172631741175_c20172631741218.nc"], } - filter_params = {"start_time": datetime.datetime(2012, 2, 25), - "end_time": datetime.datetime(2012, 2, 26)} + filter_params = {"start_time": dt.datetime(2012, 2, 25), + "end_time": dt.datetime(2012, 2, 26)} # viirs has data that matches the request, abi doesn't readers = load_readers(filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) assert "viirs_sdr" in readers @@ -480,11 +476,9 @@ def test_reader_other_name(self, monkeypatch, tmp_path): def test_reader_name_matched_start_end_time(self, viirs_file): """Test with start and end time matching the filename.""" - from datetime import datetime - ri = find_files_and_readers(reader="viirs_sdr", - start_time=datetime(2012, 2, 25, 18, 0, 0), - end_time=datetime(2012, 2, 25, 19, 0, 0), + start_time=dt.datetime(2012, 2, 25, 18, 0, 0), + end_time=dt.datetime(2012, 2, 25, 19, 0, 0), ) assert list(ri.keys()) == ["viirs_sdr"] assert ri["viirs_sdr"] == [viirs_file] @@ -494,9 +488,7 @@ def test_reader_name_matched_start_time(self, viirs_file): Start time in the middle of the file time should still match the file. 
""" - from datetime import datetime - - ri = find_files_and_readers(reader="viirs_sdr", start_time=datetime(2012, 2, 25, 18, 1, 30)) + ri = find_files_and_readers(reader="viirs_sdr", start_time=dt.datetime(2012, 2, 25, 18, 1, 30)) assert list(ri.keys()) == ["viirs_sdr"] assert ri["viirs_sdr"] == [viirs_file] @@ -506,20 +498,16 @@ def test_reader_name_matched_end_time(self, viirs_file): End time in the middle of the file time should still match the file. """ - from datetime import datetime - - ri = find_files_and_readers(reader="viirs_sdr", end_time=datetime(2012, 2, 25, 18, 1, 30)) + ri = find_files_and_readers(reader="viirs_sdr", end_time=dt.datetime(2012, 2, 25, 18, 1, 30)) assert list(ri.keys()) == ["viirs_sdr"] assert ri["viirs_sdr"] == [viirs_file] def test_reader_name_unmatched_start_end_time(self, viirs_file): """Test with start and end time matching the filename.""" - from datetime import datetime - with pytest.raises(ValueError, match="No supported files found"): find_files_and_readers(reader="viirs_sdr", - start_time=datetime(2012, 2, 26, 18, 0, 0), - end_time=datetime(2012, 2, 26, 19, 0, 0)) + start_time=dt.datetime(2012, 2, 26, 18, 0, 0), + end_time=dt.datetime(2012, 2, 26, 19, 0, 0)) def test_no_parameters(self, viirs_file): """Test with no limiting parameters.""" @@ -1115,6 +1103,16 @@ def test_hash(self, local_filename, local_filename2, local_zip_file): for fs in [None, lfs, zfs, cfs]}) == 2 * 4 +def test_open_file_or_filename_uses_mode(tmp_path): + """Test that open_file_or_filename uses provided mode.""" + filename = tmp_path / "hej" + with open(filename, mode="wb") as fd: + fd.write(b"hej") + fileobj = FSFile(os.fspath(filename)) + res = open_file_or_filename(fileobj, mode="rb").read() + assert isinstance(res, bytes) + + @pytest.fixture(scope="module") def local_netcdf_filename(tmp_path_factory): """Create a simple local NetCDF file.""" diff --git a/satpy/tests/test_writers.py b/satpy/tests/test_writers.py index bc68d767c1..701347cdbe 100644 --- 
a/satpy/tests/test_writers.py +++ b/satpy/tests/test_writers.py @@ -18,7 +18,7 @@ from __future__ import annotations -import datetime +import datetime as dt import os import shutil import unittest @@ -546,7 +546,6 @@ class TestComputeWriterResults(unittest.TestCase): def setUp(self): """Create temporary directory to save files to and a mock scene.""" import tempfile - from datetime import datetime from pyresample.geometry import AreaDefinition @@ -560,7 +559,7 @@ def setUp(self): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", - "start_time": datetime(2018, 1, 1, 0, 0, 0), + "start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": adef} ) self.scn = Scene() @@ -655,7 +654,6 @@ class TestBaseWriter: def setup_method(self): """Set up tests.""" import tempfile - from datetime import datetime from pyresample.geometry import AreaDefinition @@ -670,7 +668,7 @@ def setup_method(self): dims=("y", "x"), attrs={ "name": "test", - "start_time": datetime(2018, 1, 1, 0, 0, 0), + "start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "sensor": "fake_sensor", "area": adef, } @@ -881,7 +879,7 @@ def test_group_results_by_output_file(tmp_path): "kraken_depth": dat}, daskify=True, area=fake_area, - common_attrs={"start_time": datetime.datetime(2022, 11, 16, 13, 27)}) + common_attrs={"start_time": dt.datetime(2022, 11, 16, 13, 27)}) # NB: even if compute=False, ``save_datasets`` creates (empty) files (sources, targets) = fake_scene.save_datasets( filename=os.fspath(tmp_path / "test-{name}.tif"), diff --git a/satpy/tests/test_yaml_reader.py b/satpy/tests/test_yaml_reader.py index 0b0293e453..699f6619b6 100644 --- a/satpy/tests/test_yaml_reader.py +++ b/satpy/tests/test_yaml_reader.py @@ -15,12 +15,13 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """Testing the yaml_reader module.""" +import datetime as dt import os import random import unittest -from datetime import datetime from tempfile import mkdtemp from unittest.mock import MagicMock, call, patch @@ -182,8 +183,8 @@ def __init__(self, filename, filename_info, filetype_info): """Initialize the dummy reader.""" super(DummyReader, self).__init__( filename, filename_info, filetype_info) - self._start_time = datetime(2000, 1, 1, 12, 1) - self._end_time = datetime(2000, 1, 1, 12, 2) + self._start_time = dt.datetime(2000, 1, 1, 12, 1) + self._end_time = dt.datetime(2000, 1, 1, 12, 2) self.metadata = {} @property @@ -227,8 +228,8 @@ def setUp(self): self.config = res_dict self.reader = yr.FileYAMLReader(self.config, filter_parameters={ - "start_time": datetime(2000, 1, 1), - "end_time": datetime(2000, 1, 2)}) + "start_time": dt.datetime(2000, 1, 1), + "end_time": dt.datetime(2000, 1, 2)}) def test_select_from_pathnames(self): """Check select_files_from_pathnames.""" @@ -280,8 +281,8 @@ def setUp(self): self.config = MHS_YAML_READER_DICT self.reader = yr.FileYAMLReader(MHS_YAML_READER_DICT, filter_parameters={ - "start_time": datetime(2000, 1, 1), - "end_time": datetime(2000, 1, 2), + "start_time": dt.datetime(2000, 1, 1), + "end_time": dt.datetime(2000, 1, 2), }) def test_custom_type_with_dict_contents_gets_parsed_correctly(self): @@ -321,8 +322,8 @@ def setUp(self): self.config = res_dict self.reader = yr.FileYAMLReader(res_dict, filter_parameters={ - "start_time": datetime(2000, 1, 1), - "end_time": datetime(2000, 1, 2), + "start_time": dt.datetime(2000, 1, 1), + "end_time": dt.datetime(2000, 1, 2), }) def test_deprecated_passing_config_files(self): @@ -362,17 +363,18 @@ def test_available_dataset_names(self): def test_filter_fh_by_time(self): """Check filtering filehandlers by time.""" - fh0 = FakeFH(datetime(1999, 12, 30), datetime(1999, 12, 31)) - fh1 = FakeFH(datetime(1999, 12, 31, 10, 0), - datetime(2000, 1, 1, 12, 30)) - fh2 = FakeFH(datetime(2000, 
1, 1, 10, 0), - datetime(2000, 1, 1, 12, 30)) - fh3 = FakeFH(datetime(2000, 1, 1, 12, 30), - datetime(2000, 1, 2, 12, 30)) - fh4 = FakeFH(datetime(2000, 1, 2, 12, 30), - datetime(2000, 1, 3, 12, 30)) - fh5 = FakeFH(datetime(1999, 12, 31, 10, 0), - datetime(2000, 1, 3, 12, 30)) + fh0 = FakeFH(dt.datetime(1999, 12, 30), + dt.datetime(1999, 12, 31)) + fh1 = FakeFH(dt.datetime(1999, 12, 31, 10, 0), + dt.datetime(2000, 1, 1, 12, 30)) + fh2 = FakeFH(dt.datetime(2000, 1, 1, 10, 0), + dt.datetime(2000, 1, 1, 12, 30)) + fh3 = FakeFH(dt.datetime(2000, 1, 1, 12, 30), + dt.datetime(2000, 1, 2, 12, 30)) + fh4 = FakeFH(dt.datetime(2000, 1, 2, 12, 30), + dt.datetime(2000, 1, 3, 12, 30)) + fh5 = FakeFH(dt.datetime(1999, 12, 31, 10, 0), + dt.datetime(2000, 1, 3, 12, 30)) for idx, fh in enumerate([fh0, fh1, fh2, fh3, fh4, fh5]): res = self.reader.time_matches(fh.start_time, fh.end_time) @@ -388,8 +390,8 @@ def test_filter_fh_by_time(self): @patch("satpy.readers.yaml_reader.Boundary") def test_file_covers_area(self, bnd, adb, gad): """Test that area coverage is checked properly.""" - file_handler = FakeFH(datetime(1999, 12, 31, 10, 0), - datetime(2000, 1, 3, 12, 30)) + file_handler = FakeFH(dt.datetime(1999, 12, 31, 10, 0), + dt.datetime(2000, 1, 3, 12, 30)) self.reader.filter_parameters["area"] = True bnd.return_value.contour_poly.intersection.return_value = True @@ -417,18 +419,18 @@ def test_start_end_time(self): with pytest.raises(RuntimeError): self.reader.end_time - fh0 = FakeFH(datetime(1999, 12, 30, 0, 0), - datetime(1999, 12, 31, 0, 0)) - fh1 = FakeFH(datetime(1999, 12, 31, 10, 0), - datetime(2000, 1, 1, 12, 30)) - fh2 = FakeFH(datetime(2000, 1, 1, 10, 0), - datetime(2000, 1, 1, 12, 30)) - fh3 = FakeFH(datetime(2000, 1, 1, 12, 30), - datetime(2000, 1, 2, 12, 30)) - fh4 = FakeFH(datetime(2000, 1, 2, 12, 30), - datetime(2000, 1, 3, 12, 30)) - fh5 = FakeFH(datetime(1999, 12, 31, 10, 0), - datetime(2000, 1, 3, 12, 30)) + fh0 = FakeFH(dt.datetime(1999, 12, 30, 0, 0), + 
dt.datetime(1999, 12, 31, 0, 0)) + fh1 = FakeFH(dt.datetime(1999, 12, 31, 10, 0), + dt.datetime(2000, 1, 1, 12, 30)) + fh2 = FakeFH(dt.datetime(2000, 1, 1, 10, 0), + dt.datetime(2000, 1, 1, 12, 30)) + fh3 = FakeFH(dt.datetime(2000, 1, 1, 12, 30), + dt.datetime(2000, 1, 2, 12, 30)) + fh4 = FakeFH(dt.datetime(2000, 1, 2, 12, 30), + dt.datetime(2000, 1, 3, 12, 30)) + fh5 = FakeFH(dt.datetime(1999, 12, 31, 10, 0), + dt.datetime(2000, 1, 3, 12, 30)) self.reader.file_handlers = { "0": [fh1, fh2, fh3, fh4, fh5], @@ -436,8 +438,8 @@ def test_start_end_time(self): "2": [fh2, fh3], } - assert self.reader.start_time == datetime(1999, 12, 30, 0, 0) - assert self.reader.end_time == datetime(2000, 1, 3, 12, 30) + assert self.reader.start_time == dt.datetime(1999, 12, 30, 0, 0) + assert self.reader.end_time == dt.datetime(2000, 1, 3, 12, 30) def test_select_from_pathnames(self): """Check select_files_from_pathnames.""" @@ -572,8 +574,8 @@ def setUp(self): self.config = res_dict self.reader = yr.FileYAMLReader(res_dict, filter_parameters={ - "start_time": datetime(2000, 1, 1), - "end_time": datetime(2000, 1, 2), + "start_time": dt.datetime(2000, 1, 1), + "end_time": dt.datetime(2000, 1, 2), }) fake_fh = FakeFH(None, None) self.lons = xr.DataArray(np.ones((2, 2)) * 2, diff --git a/satpy/tests/utils.py b/satpy/tests/utils.py index a6ebf8753e..57cbdc84d0 100644 --- a/satpy/tests/utils.py +++ b/satpy/tests/utils.py @@ -16,8 +16,8 @@ # along with this program. If not, see . 
"""Utilities for various satpy tests.""" +import datetime as dt from contextlib import contextmanager -from datetime import datetime from typing import Any from unittest import mock @@ -34,8 +34,10 @@ from satpy.modifiers import ModifierBase from satpy.readers.file_handlers import BaseFileHandler -FAKE_FILEHANDLER_START = datetime(2020, 1, 1, 0, 0, 0) -FAKE_FILEHANDLER_END = datetime(2020, 1, 1, 1, 0, 0) +FAKE_FILEHANDLER_START = dt.datetime(2020, 1, 1, 0, 0, 0) +FAKE_FILEHANDLER_END = dt.datetime(2020, 1, 1, 1, 0, 0) + +RANDOM_GEN = np.random.default_rng() def make_dataid(**items): diff --git a/satpy/tests/writer_tests/test_awips_tiled.py b/satpy/tests/writer_tests/test_awips_tiled.py index dbc1bc82d7..364d0c6b8e 100644 --- a/satpy/tests/writer_tests/test_awips_tiled.py +++ b/satpy/tests/writer_tests/test_awips_tiled.py @@ -17,10 +17,10 @@ # satpy. If not, see . """Tests for the AWIPS Tiled writer.""" +import datetime as dt import logging import os import shutil -from datetime import datetime, timedelta from glob import glob import dask @@ -32,8 +32,8 @@ from satpy.resample import update_resampled_coords -START_TIME = datetime(2018, 1, 1, 12, 0, 0) -END_TIME = START_TIME + timedelta(minutes=20) +START_TIME = dt.datetime(2018, 1, 1, 12, 0, 0) +END_TIME = START_TIME + dt.timedelta(minutes=20) # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: @@ -378,7 +378,7 @@ def test_lettered_tiles_sector_ref(self, tmp_path): unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) - expected_start = (START_TIME + timedelta(minutes=20)).strftime("%Y-%m-%dT%H:%M:%S") + expected_start = (START_TIME + dt.timedelta(minutes=20)).strftime("%Y-%m-%dT%H:%M:%S") assert masked_ds.attrs["start_date_time"] == expected_start def test_lettered_tiles_no_fit(self, tmp_path): diff --git a/satpy/tests/writer_tests/test_cf.py 
b/satpy/tests/writer_tests/test_cf.py index 6d1d15527b..18a3682fc7 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -17,10 +17,10 @@ # satpy. If not, see . """Tests for the CF writer.""" +import datetime as dt import os import tempfile import warnings -from datetime import datetime import numpy as np import pytest @@ -74,8 +74,8 @@ def test_init(self): def test_save_array(self): """Test saving an array to netcdf/cf.""" scn = Scene() - start_time = datetime(2018, 5, 30, 10, 0) - end_time = datetime(2018, 5, 30, 10, 15) + start_time = dt.datetime(2018, 5, 30, 10, 0) + end_time = dt.datetime(2018, 5, 30, 10, 15) scn["test-array"] = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time, @@ -90,8 +90,8 @@ def test_save_array(self): def test_save_array_coords(self): """Test saving array with coordinates.""" scn = Scene() - start_time = datetime(2018, 5, 30, 10, 0) - end_time = datetime(2018, 5, 30, 10, 15) + start_time = dt.datetime(2018, 5, 30, 10, 0) + end_time = dt.datetime(2018, 5, 30, 10, 15) coords = { "x": np.arange(3), "y": np.arange(1), @@ -162,8 +162,8 @@ def test_ancillary_variables(self): """Test ancillary_variables cited each other.""" from satpy.tests.utils import make_dataid scn = Scene() - start_time = datetime(2018, 5, 30, 10, 0) - end_time = datetime(2018, 5, 30, 10, 15) + start_time = dt.datetime(2018, 5, 30, 10, 0) + end_time = dt.datetime(2018, 5, 30, 10, 15) da = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time, @@ -180,8 +180,8 @@ def test_ancillary_variables(self): def test_groups(self): """Test creating a file with groups.""" - tstart = datetime(2019, 4, 1, 12, 0) - tend = datetime(2019, 4, 1, 12, 15) + tstart = dt.datetime(2019, 4, 1, 12, 0) + tend = dt.datetime(2019, 4, 1, 12, 15) data_visir = [[1, 2], [3, 4]] y_visir = [1, 2] @@ -238,8 +238,8 @@ def test_groups(self): def test_single_time_value(self): """Test setting a single time value.""" scn = 
Scene() - start_time = datetime(2018, 5, 30, 10, 0) - end_time = datetime(2018, 5, 30, 10, 15) + start_time = dt.datetime(2018, 5, 30, 10, 0) + end_time = dt.datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]) scn["test-array"] = xr.DataArray(test_array, dims=["x", "y"], @@ -272,8 +272,8 @@ def test_time_coordinate_on_a_swath(self): def test_bounds(self): """Test setting time bounds.""" scn = Scene() - start_time = datetime(2018, 5, 30, 10, 0) - end_time = datetime(2018, 5, 30, 10, 15) + start_time = dt.datetime(2018, 5, 30, 10, 0) + end_time = dt.datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) scn["test-array"] = xr.DataArray(test_array, dims=["x", "y", "time"], @@ -307,10 +307,10 @@ def test_bounds(self): def test_bounds_minimum(self): """Test minimum bounds.""" scn = Scene() - start_timeA = datetime(2018, 5, 30, 10, 0) # expected to be used - end_timeA = datetime(2018, 5, 30, 10, 20) - start_timeB = datetime(2018, 5, 30, 10, 3) - end_timeB = datetime(2018, 5, 30, 10, 15) # expected to be used + start_timeA = dt.datetime(2018, 5, 30, 10, 0) # expected to be used + end_timeA = dt.datetime(2018, 5, 30, 10, 20) + start_timeB = dt.datetime(2018, 5, 30, 10, 3) + end_timeB = dt.datetime(2018, 5, 30, 10, 15) # expected to be used test_arrayA = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1) scn["test-arrayA"] = xr.DataArray(test_arrayA, @@ -333,8 +333,8 @@ def test_bounds_minimum(self): def test_bounds_missing_time_info(self): """Test time bounds generation in case of missing time.""" scn = Scene() - start_timeA = datetime(2018, 5, 30, 10, 0) - end_timeA = datetime(2018, 5, 30, 10, 15) + start_timeA = dt.datetime(2018, 5, 30, 10, 0) + end_timeA = dt.datetime(2018, 5, 30, 10, 15) test_arrayA = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1) scn["test-arrayA"] = xr.DataArray(test_arrayA, @@ -355,8 +355,8 @@ 
def test_bounds_missing_time_info(self): def test_unlimited_dims_kwarg(self): """Test specification of unlimited dimensions.""" scn = Scene() - start_time = datetime(2018, 5, 30, 10, 0) - end_time = datetime(2018, 5, 30, 10, 15) + start_time = dt.datetime(2018, 5, 30, 10, 0) + end_time = dt.datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]) scn["test-array"] = xr.DataArray(test_array, dims=["x", "y"], @@ -372,8 +372,8 @@ def test_unlimited_dims_kwarg(self): def test_header_attrs(self): """Check global attributes are set.""" scn = Scene() - start_time = datetime(2018, 5, 30, 10, 0) - end_time = datetime(2018, 5, 30, 10, 15) + start_time = dt.datetime(2018, 5, 30, 10, 0) + end_time = dt.datetime(2018, 5, 30, 10, 15) scn["test-array"] = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time)) @@ -423,8 +423,8 @@ def test_load_module_with_old_pyproj(self): def test_global_attr_default_history_and_Conventions(self): """Test saving global attributes history and Conventions.""" scn = Scene() - start_time = datetime(2018, 5, 30, 10, 0) - end_time = datetime(2018, 5, 30, 10, 15) + start_time = dt.datetime(2018, 5, 30, 10, 0) + end_time = dt.datetime(2018, 5, 30, 10, 15) scn["test-array"] = xr.DataArray([[1, 2, 3]], dims=("y", "x"), attrs=dict(start_time=start_time, @@ -439,8 +439,8 @@ def test_global_attr_default_history_and_Conventions(self): def test_global_attr_history_and_Conventions(self): """Test saving global attributes history and Conventions.""" scn = Scene() - start_time = datetime(2018, 5, 30, 10, 0) - end_time = datetime(2018, 5, 30, 10, 15) + start_time = dt.datetime(2018, 5, 30, 10, 0) + end_time = dt.datetime(2018, 5, 30, 10, 15) scn["test-array"] = xr.DataArray([[1, 2, 3]], dims=("y", "x"), attrs=dict(start_time=start_time, @@ -465,8 +465,8 @@ def scene(self): """Create a fake scene.""" scn = Scene() attrs = { - "start_time": datetime(2018, 5, 30, 10, 0), - "end_time": datetime(2018, 5, 30, 10, 15) + "start_time": 
dt.datetime(2018, 5, 30, 10, 0), + "end_time": dt.datetime(2018, 5, 30, 10, 15) } scn["test-array"] = xr.DataArray([1., 2, 3], attrs=attrs) return scn diff --git a/satpy/tests/writer_tests/test_geotiff.py b/satpy/tests/writer_tests/test_geotiff.py index 8925857637..d0e879c4b2 100644 --- a/satpy/tests/writer_tests/test_geotiff.py +++ b/satpy/tests/writer_tests/test_geotiff.py @@ -17,7 +17,7 @@ # satpy. If not, see . """Tests for the geotiff writer.""" -from datetime import datetime +import datetime as dt from unittest import mock import dask.array as da @@ -42,7 +42,7 @@ def _get_test_datasets_2d(): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "units": "K", "area": adef} ) @@ -72,7 +72,7 @@ def _get_test_datasets_3d(): dims=("bands", "y", "x"), coords={"bands": ["R", "G", "B"]}, attrs={"name": "test", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "area": adef} ) return [ds1] diff --git a/satpy/tests/writer_tests/test_mitiff.py b/satpy/tests/writer_tests/test_mitiff.py index 2dafdd5896..1642510583 100644 --- a/satpy/tests/writer_tests/test_mitiff.py +++ b/satpy/tests/writer_tests/test_mitiff.py @@ -20,6 +20,8 @@ Based on the test for geotiff writer """ + +import datetime as dt import logging import os import unittest @@ -48,8 +50,6 @@ def tearDown(self): def _get_test_datasets(self): """Create a datasets list.""" - from datetime import datetime - import dask.array as da import xarray as xr from pyproj import CRS @@ -68,7 +68,7 @@ def _get_test_datasets(self): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "1", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "TEST_SENSOR_NAME", "area": area_def, @@ -91,7 +91,7 @@ def _get_test_datasets(self): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "4", - "start_time": datetime.utcnow(), + 
"start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "TEST_SENSOR_NAME", "area": area_def, @@ -114,8 +114,6 @@ def _get_test_datasets(self): def _get_test_datasets_sensor_set(self): """Create a datasets list.""" - from datetime import datetime - import dask.array as da import xarray as xr from pyproj import CRS @@ -134,7 +132,7 @@ def _get_test_datasets_sensor_set(self): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "1", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": {"TEST_SENSOR_NAME"}, "area": area_def, @@ -157,7 +155,7 @@ def _get_test_datasets_sensor_set(self): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "4", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": {"TEST_SENSOR_NAME"}, "area": area_def, @@ -180,8 +178,6 @@ def _get_test_datasets_sensor_set(self): def _get_test_dataset(self, bands=3): """Create a single test dataset.""" - from datetime import datetime - import dask.array as da import xarray as xr from pyproj import CRS @@ -201,7 +197,7 @@ def _get_test_dataset(self, bands=3): da.zeros((bands, 100, 200), chunks=50), dims=("bands", "y", "x"), attrs={"name": "test", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "TEST_SENSOR_NAME", "area": area_def, @@ -211,8 +207,6 @@ def _get_test_dataset(self, bands=3): def _get_test_one_dataset(self): """Create a single test dataset.""" - from datetime import datetime - import dask.array as da import xarray as xr from pyproj import CRS @@ -232,7 +226,7 @@ def _get_test_one_dataset(self): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "avhrr", "area": area_def, @@ -242,8 +236,6 @@ 
def _get_test_one_dataset(self): def _get_test_one_dataset_sensor_set(self): """Create a single test dataset.""" - from datetime import datetime - import dask.array as da import xarray as xr from pyproj import CRS @@ -263,7 +255,7 @@ def _get_test_one_dataset_sensor_set(self): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": {"avhrr"}, "area": area_def, @@ -273,8 +265,6 @@ def _get_test_one_dataset_sensor_set(self): def _get_test_dataset_with_bad_values(self, bands=3): """Create a single test dataset.""" - from datetime import datetime - import xarray as xr from pyproj import CRS from pyresample.geometry import AreaDefinition @@ -298,7 +288,7 @@ def _get_test_dataset_with_bad_values(self, bands=3): ds1 = xr.DataArray(rgb_data, dims=("bands", "y", "x"), attrs={"name": "test", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "TEST_SENSOR_NAME", "area": area_def, @@ -307,8 +297,6 @@ def _get_test_dataset_with_bad_values(self, bands=3): def _get_test_dataset_calibration(self, bands=6): """Create a single test dataset.""" - from datetime import datetime - import dask.array as da import xarray as xr from pyproj import CRS @@ -362,7 +350,7 @@ def _get_test_dataset_calibration(self, bands=6): bands.append(p.attrs["name"]) data["bands"] = list(bands) new_attrs = {"name": "datasets", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "test-sensor", "area": area_def, @@ -411,8 +399,6 @@ def _get_test_dataset_calibration(self, bands=6): def _get_test_dataset_calibration_one_dataset(self, bands=1): """Create a single test dataset.""" - from datetime import datetime - import dask.array as da import xarray as xr from pyproj import CRS @@ -441,7 +427,7 @@ def 
_get_test_dataset_calibration_one_dataset(self, bands=1): for p in scene: calibration.append(p.attrs["calibration"]) new_attrs = {"name": "datasets", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "test-sensor", "area": area_def, @@ -465,8 +451,6 @@ def _get_test_dataset_calibration_one_dataset(self, bands=1): def _get_test_dataset_three_bands_two_prereq(self, bands=3): """Create a single test dataset.""" - from datetime import datetime - import dask.array as da import xarray as xr from pyproj import CRS @@ -488,7 +472,7 @@ def _get_test_dataset_three_bands_two_prereq(self, bands=3): coords=[["R", "G", "B"], list(range(100)), list(range(200))], dims=("bands", "y", "x"), attrs={"name": "test", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "TEST_SENSOR_NAME", "area": area_def, @@ -499,8 +483,6 @@ def _get_test_dataset_three_bands_two_prereq(self, bands=3): def _get_test_dataset_three_bands_prereq(self, bands=3): """Create a single test dataset.""" - from datetime import datetime - import dask.array as da import xarray as xr from pyproj import CRS @@ -522,7 +504,7 @@ def _get_test_dataset_three_bands_prereq(self, bands=3): coords=[["R", "G", "B"], list(range(100)), list(range(200))], dims=("bands", "y", "x"), attrs={"name": "test", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "TEST_SENSOR_NAME", "area": area_def, diff --git a/satpy/tests/writer_tests/test_simple_image.py b/satpy/tests/writer_tests/test_simple_image.py index 01d89a22ad..6a4eba95e3 100644 --- a/satpy/tests/writer_tests/test_simple_image.py +++ b/satpy/tests/writer_tests/test_simple_image.py @@ -38,7 +38,7 @@ def tearDown(self): @staticmethod def _get_test_datasets(): """Create DataArray for testing.""" - from datetime import datetime + import datetime as dt import dask.array as 
da import xarray as xr @@ -46,7 +46,7 @@ def _get_test_datasets(): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", - "start_time": datetime.utcnow()} + "start_time": dt.datetime.utcnow()} ) return [ds1] diff --git a/satpy/writers/awips_tiled.py b/satpy/writers/awips_tiled.py index 03ce3e9d68..1652a3786e 100644 --- a/satpy/writers/awips_tiled.py +++ b/satpy/writers/awips_tiled.py @@ -213,13 +213,14 @@ lettered tile locations. """ + +import datetime as dt import logging import os import string import sys import warnings from collections import namedtuple -from datetime import datetime, timedelta import dask import dask.array as da @@ -1101,7 +1102,7 @@ def apply_misc_metadata(self, new_ds, sector_id=None, creator=None, creation_tim if creator is None: creator = "Satpy Version {} - AWIPS Tiled Writer".format(__version__) if creation_time is None: - creation_time = datetime.utcnow() + creation_time = dt.datetime.utcnow() self._add_sector_id_global(new_ds, sector_id) new_ds.attrs["Conventions"] = "CF-1.7" @@ -1493,8 +1494,8 @@ def _save_nonempty_mfdatasets(self, datasets_to_save, output_filenames, **kwargs def _adjust_metadata_times(self, ds_info): debug_shift_time = int(os.environ.get("DEBUG_TIME_SHIFT", 0)) if debug_shift_time: - ds_info["start_time"] += timedelta(minutes=debug_shift_time) - ds_info["end_time"] += timedelta(minutes=debug_shift_time) + ds_info["start_time"] += dt.timedelta(minutes=debug_shift_time) + ds_info["end_time"] += dt.timedelta(minutes=debug_shift_time) def _get_tile_data_info(self, data_arrs, creation_time, source_name): # use the first data array as a "representative" for the group @@ -1597,7 +1598,7 @@ def save_datasets(self, datasets, sector_id=None, # noqa: D417 area_data_arrs = self._group_by_area(datasets) datasets_to_save = [] output_filenames = [] - creation_time = datetime.utcnow() + creation_time = dt.datetime.utcnow() area_tile_data_gen = self._iter_area_tile_info_and_datasets( area_data_arrs, template, 
lettered_grid, sector_id, num_subtiles, tile_size, tile_count, use_sector_reference) @@ -1775,7 +1776,7 @@ def create_debug_lettered_tiles(**writer_kwargs): sector_info = writer.awips_sectors[sector_id] area_def, arr = _create_debug_array(sector_info, save_kwargs["num_subtiles"]) - now = datetime.utcnow() + now = dt.datetime.utcnow() product = xr.DataArray(da.from_array(arr, chunks="auto"), attrs=dict( name="debug_{}".format(sector_id), platform_name="DEBUG", @@ -1824,7 +1825,7 @@ def main(): group_2.add_argument("--letters", dest="lettered_grid", action="store_true", help="Create tiles from a static letter-based grid based on the product projection") group_2.add_argument("--letter-subtiles", nargs=2, type=int, default=(2, 2), - help="Specify number of subtiles in each lettered tile: \'row col\'") + help="Specify number of subtiles in each lettered tile: 'row col'") group_2.add_argument("--output-pattern", default=DEFAULT_OUTPUT_PATTERN, help="output filenaming pattern") group_2.add_argument("--source-name", default="SSEC", diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 205f924b33..0000000000 --- a/setup.cfg +++ /dev/null @@ -1,25 +0,0 @@ -[bdist_rpm] -requires=h5py pyresample python2-numexpr pyhdf xarray dask h5netcdf -release=1 -doc_files = doc/Makefile doc/source/*.rst doc/examples/*.py - -[bdist_wheel] -universal=1 - -[flake8] -max-line-length = 120 -exclude = - satpy/readers/li_l2.py - satpy/readers/scatsat1_l2b.py - satpy/version.py - satpy/tests/features -per-file-ignores = - satpy/tests/*/conftest.py:F401 - satpy/tests/*/*/conftest.py:F401 - doc/source/doi_role.py:D103 - satpy/tests/features/steps/*.py:F811 - -[coverage:run] -relative_files = True -omit = - satpy/version.py diff --git a/setup.py b/setup.py deleted file mode 100644 index 3439e8fa89..0000000000 --- a/setup.py +++ /dev/null @@ -1,171 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# Copyright (c) 2009-2023 Satpy developers -# -# This file is part of satpy. 
-# -# satpy is free software: you can redistribute it and/or modify it under the -# terms of the GNU General Public License as published by the Free Software -# Foundation, either version 3 of the License, or (at your option) any later -# version. -# -# satpy is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# satpy. If not, see . -"""Setup file for satpy.""" - -import os.path -from glob import glob - -from setuptools import find_packages, setup - -requires = ["numpy >=1.21", "pillow", "pyresample >=1.24.0", "trollsift", - "trollimage >=1.20", "pykdtree", "pyyaml >=5.1", "xarray >=0.14.1", - "dask[array] >=0.17.1", "pyproj>=2.2", "zarr", "donfig", "appdirs", - "packaging", "pooch", "pyorbital"] - -test_requires = ["behave", "h5py", "netCDF4", "pyhdf", "imageio", - "rasterio", "geoviews", "trollimage", "fsspec", "bottleneck", - "rioxarray", "pytest", "pytest-lazy-fixture", "defusedxml", - "s3fs", "eccodes", "h5netcdf", "xarray-datatree", - "skyfield", "ephem", "pint-xarray", "astropy", "dask-image"] - -extras_require = { - # Readers: - "avhrr_l1b_gaclac": ["pygac >= 1.3.0"], - "modis_l1b": ["pyhdf", "python-geotiepoints >= 1.1.7"], - "geocat": ["pyhdf"], - "acspo": ["netCDF4 >= 1.1.8"], - "clavrx": ["netCDF4 >= 1.1.8"], - "viirs_l1b": ["netCDF4 >= 1.1.8"], - "viirs_sdr": ["h5py >= 2.7.0"], - "viirs_compact": ["h5py >= 2.7.0"], - "omps_edr": ["h5py >= 2.7.0"], - "amsr2_l1b": ["h5py >= 2.7.0"], - "hrpt": ["pyorbital >= 1.3.1", "pygac", "python-geotiepoints >= 1.1.7"], - "hrit_msg": ["pytroll-schedule"], - "msi_safe": ["rioxarray", "bottleneck", "python-geotiepoints"], - "nc_nwcsaf_msg": ["netCDF4 >= 1.1.8"], - "sar_c": ["python-geotiepoints >= 1.1.7", "rasterio", "rioxarray", "defusedxml"], - "abi_l1b": 
["h5netcdf"], - "seviri_l1b_hrit": ["pyorbital >= 1.3.1"], - "seviri_l1b_native": ["pyorbital >= 1.3.1"], - "seviri_l1b_nc": ["pyorbital >= 1.3.1", "netCDF4 >= 1.1.8"], - "seviri_l2_bufr": ["eccodes"], - "seviri_l2_grib": ["eccodes"], - "hsaf_grib": ["pygrib"], - "remote_reading": ["fsspec"], - "insat_3d": ["xarray-datatree"], - "gms5-vissr_l1b": ["numba"], - # Writers: - "cf": ["h5netcdf >= 0.7.3"], - "awips_tiled": ["netCDF4 >= 1.1.8"], - "geotiff": ["rasterio", "trollimage[geotiff]"], - "ninjo": ["pyninjotiff", "pint"], - "units": ["pint-xarray"], - # Composites/Modifiers: - "rayleigh": ["pyspectral >= 0.10.1"], - "angles": ["pyorbital >= 1.3.1"], - "filters": ["dask-image"], - # MultiScene: - "animations": ["imageio"], - # Documentation: - "doc": ["sphinx", "sphinx_rtd_theme", "sphinxcontrib-apidoc"], - # Other - "geoviews": ["geoviews"], - "holoviews": ["holoviews"], - "hvplot": ["hvplot", "geoviews", "cartopy", "holoviews"], - "overlays": ["pycoast", "pydecorate"], - "satpos_from_tle": ["skyfield", "astropy"], - "tests": test_requires, -} -all_extras = [] -for extra_deps in extras_require.values(): - all_extras.extend(extra_deps) -extras_require["all"] = list(set(all_extras)) - - -def _config_data_files(base_dirs, extensions=(".cfg", )): - """Find all subdirectory configuration files. - - Searches each base directory relative to this setup.py file and finds - all files ending in the extensions provided. - - :param base_dirs: iterable of relative base directories to search - :param extensions: iterable of file extensions to include (with '.' 
prefix) - :returns: list of 2-element tuples compatible with `setuptools.setup` - """ - data_files = [] - pkg_root = os.path.realpath(os.path.dirname(__file__)) + "/" - for base_dir in base_dirs: - new_data_files = [] - for ext in extensions: - configs = glob(os.path.join(pkg_root, base_dir, "*" + ext)) - configs = [c.replace(pkg_root, "") for c in configs] - new_data_files.extend(configs) - data_files.append((base_dir, new_data_files)) - - return data_files - - -entry_points = { - "console_scripts": [ - "satpy_retrieve_all_aux_data=satpy.aux_download:retrieve_all_cmd", - ], -} - - -NAME = "satpy" -with open("README.rst", "r") as readme: - README = readme.read() - -setup(name=NAME, - description="Python package for earth-observing satellite data processing", - long_description=README, - author="The Pytroll Team", - author_email="pytroll@googlegroups.com", - classifiers=["Development Status :: 5 - Production/Stable", - "Intended Audience :: Science/Research", - "License :: OSI Approved :: GNU General Public License v3 " + - "or later (GPLv3+)", - "Operating System :: OS Independent", - "Programming Language :: Python", - "Topic :: Scientific/Engineering"], - url="https://github.com/pytroll/satpy", - download_url="https://pypi.python.org/pypi/satpy", - project_urls={ - "Bug Tracker": "https://github.com/pytroll/satpy/issues", - "Documentation": "https://satpy.readthedocs.io/en/stable/", - "Source Code": "https://github.com/pytroll/satpy", - "Organization": "https://pytroll.github.io/", - "Slack": "https://pytroll.slack.com/", - "Twitter": "https://twitter.com/hashtag/satpy?src=hashtag_click", - "Release Notes": "https://github.com/pytroll/satpy/blob/main/CHANGELOG.md", - "Mastodon": "https://fosstodon.org/tags/satpy", - }, - packages=find_packages(), - # Always use forward '/', even on Windows - # See https://setuptools.readthedocs.io/en/latest/userguide/datafiles.html#data-files-support - package_data={"satpy": ["etc/geo_image.cfg", - 
"etc/areas.yaml", - "etc/satpy.cfg", - "etc/himawari-8.cfg", - "etc/eps_avhrrl1b_6.5.xml", - "etc/readers/*.yaml", - "etc/writers/*.yaml", - "etc/composites/*.yaml", - "etc/enhancements/*.cfg", - "etc/enhancements/*.yaml", - "tests/etc/readers/*.yaml", - "tests/etc/composites/*.yaml", - "tests/etc/writers/*.yaml", - ]}, - zip_safe=False, - install_requires=requires, - python_requires=">=3.9", - extras_require=extras_require, - entry_points=entry_points, - )