diff --git a/.flake8 b/.flake8 index 0335236f..b2811baa 100644 --- a/.flake8 +++ b/.flake8 @@ -16,3 +16,6 @@ ignore = # Unused imports - pb with import_and_args.py # F401 +per-file-ignores = + # __init__ files + dataset_product_mapping.py: E501 diff --git a/.github/workflows/binaries.yml b/.github/workflows/binaries.yml new file mode 100644 index 00000000..fc95a490 --- /dev/null +++ b/.github/workflows/binaries.yml @@ -0,0 +1,56 @@ +name: Create Binaries + +on: + push: + branches: + - main + - 'pre-releases/**' + +jobs: + build: + strategy: + fail-fast: false + matrix: + os: [macos-latest, ubuntu-latest, macos-13] + runs-on: ${{ matrix.os }} + timeout-minutes: 20 + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - uses: mamba-org/setup-micromamba@v1 + with: + micromamba-version: '1.5.6-0' + micromamba-binary-path: ${{ runner.temp }}/bin/micromamba + environment-file: conda_environment_binary.yaml + environment-name: copernicusmarine-binary + condarc-file: .condarc + cache-environment: true + post-cleanup: 'all' + + - name: Build with PyInstaller + shell: micromamba-shell {0} + run: | + make run-using-pyinstaller-${{ matrix.os }} + + - name: Set VERSION environment variable + id: set-version + shell: micromamba-shell {0} + run: echo "VERSION=$(poetry version --short)" >> $GITHUB_OUTPUT + + - name: Upload binaries to macos or ubuntu + shell: micromamba-shell {0} + env: + GH_TOKEN: ${{ github.token }} + run: | + if [ "${{ matrix.os }}" == "macos-latest" ]; then + ARCH="macos-arm64" + elif [ "${{ matrix.os }}" == "macos-13" ]; then + ARCH="macos-x86_64" + elif [ "${{ matrix.os }}" == "ubuntu-latest" ]; then + ARCH="linux" + fi + gh release upload v${{steps.set-version.outputs.VERSION}} dist/copernicusmarine_${ARCH}.cli#copernicusmarine-binary-${ARCH}-for-v${{steps.set-version.outputs.VERSION}} diff --git a/.github/workflows/binary-windows.yml b/.github/workflows/binary-windows.yml new file mode 100644 index 00000000..0c597621 --- /dev/null +++ b/.github/workflows/binary-windows.yml @@ -0,0 +1,45 @@ +name: Create Binaries + +on: + push: + branches: + - main + - 'pre-releases/**' + +jobs: + build: + runs-on: windows-latest + timeout-minutes: 20 + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - uses: mamba-org/setup-micromamba@v1 + with: + micromamba-version: '1.5.6-0' + micromamba-binary-path: ${{ runner.temp }}\Scripts\micromamba.exe + environment-file: conda_environment_binary.yaml + environment-name: copernicusmarine-binary + condarc-file: .condarc + cache-environment: true + post-cleanup: 'all' + + - name: Build with PyInstaller + shell: bash -el {0} + run: | + make run-using-pyinstaller-windows-latest + + - name: Set VERSION environment variable + id: set-version + shell: bash -el {0} + run: echo "VERSION=$(poetry version --short)" >> $GITHUB_OUTPUT + + - name: Upload binaries to windows + shell: bash -el {0} + env: + GH_TOKEN: ${{ github.token }} + run: | + gh release upload v${{steps.set-version.outputs.VERSION}} dist/copernicusmarine.exe#copernicusmarine-binary-windows-for-v${{steps.set-version.outputs.VERSION}} diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 517b1012..a4888f52 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -2,7 +2,10 @@ name: Run tests on: pull_request: - branches: [ "main", "copernicusmarine-toolbox-v2" ] + branches: + - "main" + - "copernicusmarine-toolbox-v2" + - "pre-releases/**" jobs: tests: diff --git a/.gitignore 
b/.gitignore index 14d2bc10..f33af1c4 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,9 @@ copernicusmarine/**/__pycache__/ **/.DS_Store __debug.py dist/ + +# Sphinx documentation html build +doc/_build/ + +# Jupyter notebook checkpoints +**/.ipynb_checkpoints/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 06fd3682..a08a91f9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -28,6 +28,7 @@ repos: args: [--pytest-test-first] exclude: ^tests/resources/ - id: pretty-format-json + exclude: ^doc/usage/quickoverview.ipynb - repo: https://github.com/pre-commit/mirrors-isort rev: v5.10.1 hooks: diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 00000000..16dc60ad --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,38 @@ +# .readthedocs.yaml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the OS, Python version and other tools you might need +build: + os: ubuntu-22.04 + tools: + python: "mambaforge-22.9" + # You can also specify other tool versions: + # nodejs: "19" + # rust: "1.64" + # golang: "1.19" + +# Build documentation in the "docs/" directory with Sphinx +sphinx: + configuration: doc/conf.py + +conda: + environment: conda_environment_sphinx.yaml +# Optionally build your docs in additional formats such as PDF and ePub +# formats: +# - pdf +# - epub + +# Optional but recommended, declare the Python requirements required +# to build your documentation +# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html +python: + install: + - method: pip + path: . + +#TODO: add environment variables if we want to build the notebook when building the documentation +# https://docs.readthedocs.io/en/stable/environment-variables.html diff --git a/CONTRIBUTION.md b/CONTRIBUTION.md index 6c1ff1e5..4328c2a0 100644 --- a/CONTRIBUTION.md +++ b/CONTRIBUTION.md @@ -112,3 +112,40 @@ First, here is the link to the conda-forge feedstock repository: [https://github All the conda-forge information about this repository is available [here in the README](https://github.com/orgs/conda-forge/teams/copernicusmarine). To update it (new version, new maintainer...), please follow the indicated procedure. Please also take a look at [this conda-forge documentation](https://conda-forge.org/docs/maintainer/updating_pkgs/#example-workflow-for-updating-a-package) for more information about the update procedure. + +## Documentation + +We use Sphinx to build the documentation of the toolbox and Read the Docs to distribute it. + +### Sphinx + +We use: + +- autodoc: to create the documentation from the docstrings of the Python interface or the comments in the models +- numpydoc: to convert numpydoc-style docstrings to reStructuredText +- sphinx-click: to generate the documentation from the click CLI +- furo: as the base theme + +The Sphinx configuration can be found in the `doc/conf.py` file and the pinned versions are in the `conda_environment_sphinx.yaml` file. The `_build` folder is gitignored. + +To build the documentation, run: + +```bash +cd doc/ +make html +``` + +### ReadTheDocs + +Please see the admin page of the toolbox: [here](https://readthedocs.org/projects/copernicusmarine/). + +To access admin rights, you need to be added to the Read the Docs project (after creating an account). For the moment, Mathis and Simon are the admins of this page.
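Before pushing changes that Read the Docs will build, you can check the documentation locally. A minimal sketch using the pinned environment (the environment name `conda-environment-sphinx` comes from `conda_environment_sphinx.yaml`; substitute `conda` for `mamba` if that is what you have installed):

```bash
# Create and activate the pinned documentation environment
mamba env create --file conda_environment_sphinx.yaml
mamba activate conda-environment-sphinx

# autodoc imports the package, so install it too (Read the Docs does the same via pip)
pip install -e .

# Build the HTML documentation; the output lands in doc/_build/ (gitignored)
cd doc/
make html
```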
+ +Example: [toolbox documentation](https://copernicusmarine.readthedocs.io) + +Read the Docs has a webhook on the copernicusmarine repository that is triggered when a commit is pushed, when a tag is created, and on other events. + +We defined automation rules that listen to these events and trigger actions: + +- If a release tag is pushed (e.g. v1.3.3), the documentation is built and v1.3.3 becomes available as the default version, i.e. any user going to the root of the documentation [https://copernicusmarine.readthedocs.io](https://copernicusmarine.readthedocs.io) will be directed to the newest version: `copernicusmarine.readthedocs.io/en/v1.3.3`. +- If a pre-release tag is pushed (e.g. v2.0.0a1), the documentation is built and v2.0.0a1 becomes available, but it won't be the default version. diff --git a/Makefile b/Makefile index 429ba99e..208f5789 100644 --- a/Makefile +++ b/Makefile @@ -54,6 +54,23 @@ run-tests-dependencie-versions: ${ACTIVATE_ENVIRONMENT} tox run +run-using-pyinstaller-windows-latest: + pip install -e . + python -m PyInstaller --copy-metadata copernicusmarine --icon=toolbox_icon.png --copy-metadata xarray --name copernicusmarine.exe --add-data "C:\Users\runneradmin\micromamba\envs\copernicusmarine-binary\Lib\site-packages\distributed\distributed.yaml;.\distributed" copernicusmarine/command_line_interface/copernicus_marine.py --onefile + +run-using-pyinstaller-macos-latest: + pip install -e . + python -m PyInstaller --collect-all tzdata --copy-metadata copernicusmarine --name copernicusmarine_macos-arm64.cli --icon=toolbox_icon.png copernicusmarine/command_line_interface/copernicus_marine.py --onefile --target-architecture=arm64 + +run-using-pyinstaller-macos-13: + pip install -e . + python -m PyInstaller --collect-all tzdata --copy-metadata copernicusmarine --name copernicusmarine_macos-x86_64.cli --icon=toolbox_icon.png copernicusmarine/command_line_interface/copernicus_marine.py --onefile --target-architecture=x86_64 + +run-using-pyinstaller-ubuntu-latest: + pip install -e . + python3 -m PyInstaller --collect-all tzdata --copy-metadata copernicusmarine --icon=toolbox_icon.png --name copernicusmarine_linux.cli --add-data="/home/runner/micromamba/envs/copernicusmarine-binary/lib/python3.12/site-packages/distributed/distributed.yaml:./distributed" copernicusmarine/command_line_interface/copernicus_marine.py --onefile --path /opt/hostedtoolcache/Python/3.12.6/x64/lib/python3.12/site-packages --copy-metadata xarray + chmod +rwx /home/runner/work/copernicus-marine-toolbox/copernicus-marine-toolbox/dist/copernicusmarine_linux.cli + release: SELECTED_ENVIRONMENT_NAME = ${ENVIRONMENT_NAME} release: ${ACTIVATE_ENVIRONMENT} @@ -90,3 +107,14 @@ build-and-publish-dockerhub-image: docker build --ulimit nofile=65536:65536 --tag copernicusmarine/copernicusmarine:$${VERSION} --tag copernicusmarine/copernicusmarine:latest -f Dockerfile.dockerhub --build-arg VERSION="$${VERSION}" . docker push copernicusmarine/copernicusmarine:$${VERSION} docker push copernicusmarine/copernicusmarine:latest + +build-and-prepare-for-binary: + python -m pip install --upgrade pip + pip install pyinstaller + pip install -e .
+ pip install poetry + pip install distributed + echo "VERSION=$$(poetry version --short)" >> ${GITHUB_OUTPUT} + +update-snapshots-tests: + pytest --snapshot-update tests/test_command_line_interface.py::TestCommandLineInterface::test_describe_including_datasets diff --git a/README.md b/README.md index 3f8a0486..d2bba14e 100644 --- a/README.md +++ b/README.md @@ -11,103 +11,50 @@ ## Features -The `copernicusmarine` offers capabilities through both **Command Line Interface (CLI)** and **Python API**: +The Copernicus Marine toolbox offers capabilities through both **Command Line Interface (CLI)** and **Python API**: -- **Metadata Information**: List and retrieve metadata information on all variables, datasets, products, and their associated documentation. +- **Metadata Information**: List and retrieve metadata information on all products, datasets, variables. - **Subset Datasets**: Subset datasets to extract only the parts of interest, in preferred format, such as Analysis-Ready Cloud-Optimized (ARCO) Zarr or NetCDF file format. - **Advanced Filters**: Apply simple or advanced filters to get multiple files, in original formats like NetCDF/GeoTIFF, via direct Marine Data Store connections. - **No Quotas**: Enjoy no quotas, neither on volume size nor bandwidth. +## Documentation + +The full documentation of the toolbox is available here: [Copernicusmarine Documentation](https://toolbox-docs.marine.copernicus.eu/). Please refer to it for the most exhaustive and up-to-date documentation. + +You can also find more comprehensive details on how to use the `copernicusmarine` Toolbox in our [Help Center](https://help.marine.copernicus.eu/en/collections/9080063-copernicus-marine-toolbox). It ensures a smooth migration for existing users of legacy services such as MOTU, OPeNDAP or FTP. + ## Installation For installation, multiple options are available depending on your setup: ### Mamba | Conda -A `conda` package is available on [Anaconda](https://anaconda.org/conda-forge/copernicusmarine). - -You can install it using `mamba` (or conda) through the `conda-forge` channel with the following command: - ```bash mamba install conda-forge::copernicusmarine --yes ``` -To upgrade the Toolbox with mamba (or conda): +or conda: ```bash -mamba update --name copernicusmarine copernicusmarine --yes +conda install -c conda-forge copernicusmarine ``` ### Docker -A docker image is also available here: [https://hub.docker.com/r/copernicusmarine/copernicusmarine](https://hub.docker.com/r/copernicusmarine/copernicusmarine) - -First step is to pull the container image: - ```bash docker pull copernicusmarine/copernicusmarine:latest ``` -Then run it: - -```bash -docker run -it --rm copernicusmarine/copernicusmarine --version -``` - ### Pip -Otherwise, if you already have an environment (safer to clone it), the package can be installed using the `pip` command: - ```bash python -m pip install copernicusmarine ``` -And to **upgrade the package** to the newest available version, run: - -```bash -python -m pip install copernicusmarine --upgrade -``` - -## User Guide - -For more comprehensive details on how to use the `copernicusmarine` Toolbox, please refer to our [Help Center](https://help.marine.copernicus.eu/en/collections/9080063-copernicus-marine-toolbox). It ensures a smooth migration for existing users of legacy services such as MOTU, OPeNDAP or FTP. - -### General configuration - -#### Cache Usage - -The Cachier library is used for caching part of the requests (results of `describe` or `login`).
By default, the cache will be located in the home folder. If you need to change the location of the cache, you can set the environment variable `COPERNICUSMARINE_CACHE_DIRECTORY` to point to the desired directory: - -- on **UNIX** platforms: `export COPERNICUSMARINE_CACHE_DIRECTORY=` -- on **Windows** platforms: `set COPERNICUSMARINE_CACHE_DIRECTORY=` - -### Network configuration - -#### Disable SSL - -A global SSL context is used when making HTTP calls using the `copernicusmarine` Toolbox. In some cases, this can lead to unexpected behavior depending on your network configuration. You can set the `COPERNICUSMARINE_DISABLE_SSL_CONTEXT` environment variable to any value to globally disable the usage of SSL in the toolbox: - -- on **UNIX** platforms: `export COPERNICUSMARINE_DISABLE_SSL_CONTEXT=True` -- on **Windows** platforms: `set COPERNICUSMARINE_DISABLE_SSL_CONTEXT=True` - -#### Trust Env for Python libraries - -To make HTTP calls, the Copernicus Marine Toolbox uses two Python libraries: requests and aiohttp. By default, those libraries have `trust_env` set to `True`. If you want to deactivate this, you can set `COPERNICUSMARINE_TRUST_ENV=False` (default `True`). This can be useful, for example, if you don't want those libraries to read your `.netrc` file, as it has been reported that having a `.netrc` with the line "default login anonymous password user@site" is incompatible with the S3 connections required by the toolbox. +### Dependencies -#### Proxy - -To use proxies, as described in the [aiohttp documentation](https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support), you can use two options: - -- set the `HTTPS_PROXY` variable, e.g. `HTTPS_PROXY="http://user:pass@some.proxy.com"`. It should work even with `COPERNICUSMARINE_TRUST_ENV=False`. -- use a `.netrc` file, but be aware that having the line "default login anonymous password user@site" is incompatible with the S3 connections required by the toolbox. Also note that if you have `COPERNICUSMARINE_TRUST_ENV=True` (the default value) and the `NETRC` environment variable is set to a specific location, the `.netrc` file will be read from that location rather than from `~/.netrc`. - -#### Number of concurrent requests - -The toolbox makes many requests to STAC to be able to parse the full Marine Data Store STAC catalog. For that, it uses asynchronous calls. Sending too many requests at the same time can be problematic, so you can limit the number of simultaneous requests with `COPERNICUSMARINE_MAX_CONCURRENT_REQUESTS`. The default value is `15` and the minimum value is `1`. - -Note that this concerns only the catalog parsing step, i.e. the `describe` command and the start of the `get` and `subset` commands. It does not apply when downloading files or listing files from the `get` command, or when requesting the data chunks for the `subset` command. - -For the `get` command, you can use the `COPERNICUSMARINE_GET_CONCURRENT_DOWNLOADS` variable to set the number of threads opened to download in parallel. There is no default value. By default, the toolbox uses the Python `multiprocessing.pool.ThreadPool`. You can set the environment variable to 0 if you don't want to use the `multiprocessing` library at all; downloads will then go through `boto3` only. +Note that the use of `xarray<2024.7.0` with `numpy>=2.0.0` leads to inconsistent results. See this issue: [xarray issue](https://github.com/pydata/xarray/issues/9179). ## Command Line Interface (CLI) @@ -131,473 +78,34 @@ Options: -h, --help Show this message and exit.
Commands: - describe Print Copernicus Marine catalog as JSON. + describe Print Copernicus Marine catalogue as JSON. get Download originally produced data files. login Create a configuration file with your Copernicus Marine credentials. subset Download subsets of datasets as NetCDF files or Zarr stores. ``` -### Command `describe` - -Retrieve metadata information about all products/datasets and display as JSON output: - -```bash -copernicusmarine describe --include-datasets -``` - -The JSON output can also be saved as follows: - -```bash -copernicusmarine describe --include-datasets > all_datasets_copernicusmarine.json -``` - -### Command `login` - -Create a single configuration file `.copernicusmarine-credentials` allowing access to all Copernicus Marine Data Store data services. By default, the file is saved in the user's home directory. - -Example: - -```bash -> copernicusmarine login -username : johndoe -password : -INFO - Configuration files stored in /Users/foo/.copernicusmarine -``` - -If `.copernicusmarine-credentials` already exists, the user is asked for confirmation to overwrite it (`--overwrite`/`--overwrite-configuration-file`). - -You can use the `--skip-if-user-logged-in` option to skip the configuration file overwrite if the user is already logged in. - -#### Access points migration and evolution - -If you still have a configuration for legacy services (e.g. `~/motuclient/motuclient-python.ini`, `~/.netrc` or `~/_netrc`) in your home directory, it will automatically be taken into account with commands `get` and `subset` without the need for running the `login` command. -If the configuration files are already available in another directory, when running commands `subset` or `get`, you can use the `--credentials-file` option to point to the files. - -### Command `subset` - -Remotely subset a dataset, based on variable names, geographical and temporal parameters. - -Example: - -```bash -copernicusmarine subset --dataset-id cmems_mod_ibi_phy_my_0.083deg-3D_P1D-m --variable thetao --variable so --start-datetime 2021-01-01 --end-datetime 2021-01-03 --minimum-longitude 0.0 --maximum-longitude 0.1 --minimum-latitude 28.0 --maximum-latitude 28.1 -``` - -Returns: - -```bash -INFO - 2024-04-03T10:18:18Z - Size: 3kB -Dimensions: (depth: 50, latitude: 2, longitude: 1, time: 3) -Coordinates: - * depth (depth) float32 200B 0.5058 1.556 2.668 ... 5.292e+03 5.698e+03 - * latitude (latitude) float32 8B 28.0 28.08 - * longitude (longitude) float32 4B 0.08333 - * time (time) datetime64[ns] 24B 2021-01-01 2021-01-02 2021-01-03 -Data variables: - thetao (time, depth, latitude, longitude) float32 1kB dask.array - so (time, depth, latitude, longitude) float32 1kB dask.array -Attributes: (12/20) - Conventions: CF-1.0 - bulletin_date: 2020-12-01 - ... ... - references: http://marine.copernicus.eu - copernicusmarine_version: 1.1.0 -INFO - 2024-04-03T10:18:18Z - Estimated size of the dataset file is 0.002 MB. - -Do you want to proceed with download? [Y/n]: -``` - -By default, after the display of the summary of the dataset subset, a download confirmation is requested. To skip this confirmation, use the option `--force-download`. - -#### Note about `--subset-method` option - -By default, the `subset` feature uses the `nearest` method of xarray. By specifying `--subset-method strict`, you can only request dimensions strictly inside the dataset, which is useful for **operational use cases**.
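As an illustration, a sketch reusing the `subset` example above: the request is identical, but with `strict` the toolbox is expected to fail rather than approximate if any requested bound falls outside the dataset:

```bash
# Same request as above, but errors out instead of snapping to the
# nearest available coordinates when a bound is outside the dataset
copernicusmarine subset --dataset-id cmems_mod_ibi_phy_my_0.083deg-3D_P1D-m --variable thetao --start-datetime 2021-01-01 --end-datetime 2021-01-03 --subset-method strict
```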
- -#### Note about longitude range - -Options `--minimum-longitude` and `--maximum-longitude` work as follows: - -- If the result of the subtraction (`--maximum-longitude` minus `--minimum-longitude`) is greater than or equal to 360, then the full dataset is returned. -- If the requested longitude range: - - **does not cross** the antemeridian, then the dataset is returned in the range [-180, 180]. - - **does cross** the antemeridian, then the dataset is returned in the range [0, 360]. - -Note that you can request any longitudes you want. A modulus is applied to bring the result between -180° and 360°. For example, if you request [530, 560], the result dataset will be in [170, 200]. - -#### Note about `--netcdf-compression-enabled` and `--netcdf-compression-level` options - -When subsetting data, if you decide to write your data as a NetCDF file (which is the default behavior), then you can provide the extra option `--netcdf-compression-enabled`. The downloaded file will be lighter, but it will take more time to write (because of the compression task). If you don't provide it, the task will be faster, but the file will be heavier. -Otherwise, if you decide to write your data in Zarr format (`.zarr` extension), the original compression used in the Copernicus Marine Data Store will be applied, which means that the download task will be fast **and** the file compressed. In that case, you cannot use `--netcdf-compression-enabled`. - -Here are the default parameters added to xarray in the background when using the option: `{'zlib': True, 'complevel': 1, 'contiguous': False, 'shuffle': True}` - -In addition to this option, you can also provide the `--netcdf-compression-level` option and customize the NetCDF compression level between 0 (no compression) and 9 (maximal compression). - -#### Note about `--netcdf3-compatible` option - -The `--netcdf3-compatible` option has been added to allow the downloaded dataset to be compatible with the netCDF3 format. It uses the `format="NETCDF3_CLASSIC"` option of the xarray [to_netcdf](https://docs.xarray.dev/en/latest/generated/xarray.Dataset.to_netcdf.html) method. - -### Command `get` - -Download the dataset file(s) as originally produced, based on the datasetID or the path to files. - -Example: - -```bash -copernicusmarine get --dataset-id cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m --service original-files -``` - -Returns: - -```bash -INFO - 2024-04-03T11:39:18Z - Dataset version was not specified, the latest one was selected: "202211" -INFO - 2024-04-03T11:39:18Z - Dataset part was not specified, the first one was selected: "default" -INFO - 2024-04-03T11:39:18Z - Service was not specified, the default one was selected: "original-files" -INFO - 2024-04-03T11:39:18Z - Downloading using service original-files... -INFO - 2024-04-03T11:39:19Z - You requested the download of the following files: -s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m_202211/CMEMS_v5r1_IBI_PHY_MY_NL_01yav_19930101_19931231_R20221101_RE01.nc - 8.83 MB - 2023-11-12T23:47:13Z -[... truncated for brevity..] -s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m_202211/CMEMS_v5r1_IBI_PHY_MY_NL_01yav_20120101_20121231_R20221101_RE01.nc - 8.62 MB - 2023-11-12T23:47:14Z -Printed 20 out of 29 files - -Total size of the download: 252.94 MB -Do you want to proceed with download? [Y/n]: -``` - -By default: - -- After the header displays a summary of the request, a download confirmation is requested.
To skip this confirmation, add the option `--force-download`. -Files are downloaded to the current directory applying the original folder structure. To avoid this behavior, add `--no-directories` and specify a destination with `-o/--output-directory`. - -Option `--show-outputnames` displays the full paths of the output files, if required. - -Option `--create-file-list` only creates a file containing the names of the targeted files instead of downloading them. You have to input a file name, e.g. `--create-file-list my_files.txt`. The format needs to be `.txt` or `.csv`: - -- If the user inputs a filename that ends in `.txt`, then the file contains only the full s3 paths to the targeted files and is compatible with the `--file-list` option. - -Example: - -```bash -copernicusmarine get --dataset-id cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m --filter "*2021*" --create-file-list selected_files_for_2021.txt -``` - -The content of `selected_files_for_2021.txt` would be: - -```txt -s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m_202012/2021/CMEMS_v5r1_IBI_PHY_MY_PdE_01mav_20210101_20210131_R20230101_RE01.nc -s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m_202012/2021/CMEMS_v5r1_IBI_PHY_MY_PdE_01mav_20210201_20210228_R20230101_RE01.nc -[... truncated for brevity..] -s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m_202012/2021/CMEMS_v5r1_IBI_PHY_MY_PdE_01mav_20211101_20211130_R20230101_RE01.nc -s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m_202012/2021/CMEMS_v5r1_IBI_PHY_MY_PdE_01mav_20211201_20211231_R20230101_RE01.nc -``` - -- If the user inputs a filename that ends in `.csv`, the file contains the following columns, separated by a comma: `filename`, `size` (in Bytes), `last_modified_datetime`, and `etag`. It is **not** compatible "as is" with the `--file-list` option and would need further post-processing from the user's side. - -Example: - -```bash -copernicusmarine get --dataset-id cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m --filter "*2021*" --create-file-list selected_files_for_2021.csv -``` - -The content of `selected_files_for_2021.csv` would be: - -```txt -filename,size,last_modified_datetime,etag -s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m_202012/2021/CMEMS_v5r1_IBI_PHY_MY_PdE_01mav_20210101_20210131_R20230101_RE01.nc,12295906,2023-11-12 23:47:05.466000+00:00,"e8a7e564f676a08bf601bcdeaebdc563" -s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m_202012/2021/CMEMS_v5r1_IBI_PHY_MY_PdE_01mav_20210201_20210228_R20230101_RE01.nc,12436177,2023-11-12 23:47:05.540000+00:00,"d4a22dfb6c7ed85860c4a122c45eb953" -[... truncated for brevity..] -s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m_202012/2021/CMEMS_v5r1_IBI_PHY_MY_PdE_01mav_20211101_20211130_R20230101_RE01.nc,12386940,2023-11-12 23:47:06.358000+00:00,"ea15d1f70fcc7f2ce404184d983530ff" -s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m_202012/2021/CMEMS_v5r1_IBI_PHY_MY_PdE_01mav_20211201_20211231_R20230101_RE01.nc,12398208,2023-11-12 23:47:06.456000+00:00,"585f49867aaefa2ce9d6e68dd468b5e1" -``` - -If specified, no other action will be performed. - -#### Note about sync option - -Option `--sync` allows downloading original files only if they do not exist locally or are not up to date.
The Toolbox checks the destination folder against the source folder. It can be combined with filters. Note that if set with `--overwrite-output-data`, the latter will be ignored. -The logic is largely inspired by the [s5cmd package sync command](https://github.com/peak/s5cmd#sync). -Option `--sync-delete` will work as `--sync` with the added functionality that it deletes any local file that has not been found on the remote server. Note that the files found on the server are also filtered. Hence, a file present locally might be deleted even if it is on the server because, for example, the executed `get` command contains a filter that excludes this specific file. - -Limitations: - -- `--sync` is not compatible with `--no-directories`. -- `--sync` only works with `--dataset-version`. -- `--sync` functionality is not available for datasets with several parts (like INSITU or static datasets for example). - -#### Note about filtering options - -Option `--filter` allows specifying a Unix shell-style wildcard pattern (see [fnmatch — Unix filename pattern matching](https://docs.python.org/3/library/fnmatch.html)) to select specific files: - -```bash -copernicusmarine get --dataset-id cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m --filter "*01yav_200[0-2]*" -``` - -Returns: - -```bash -INFO - 2024-04-03T11:51:15Z - Dataset version was not specified, the latest one was selected: "202211" -INFO - 2024-04-03T11:51:15Z - Dataset part was not specified, the first one was selected: "default" -INFO - 2024-04-03T11:51:15Z - Service was not specified, the default one was selected: "original-files" -INFO - 2024-04-03T11:51:15Z - Downloading using service original-files... -INFO - 2024-04-03T11:51:17Z - You requested the download of the following files: -s3://mdl-native/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m_202211/CMEMS_v5r1_IBI_PHY_MY_NL_01yav_20000101_20001231_R20221101_RE01.nc - 8.93 MB -s3://mdl-native/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m_202211/CMEMS_v5r1_IBI_PHY_MY_NL_01yav_20010101_20011231_R20221101_RE01.nc - 8.91 MB -s3://mdl-native/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m_202211/CMEMS_v5r1_IBI_PHY_MY_NL_01yav_20020101_20021231_R20221101_RE01.nc - 8.75 MB - -Total size of the download: 26.59 MB -Do you want to proceed with download? [Y/n]: -``` - -Option `--regex` allows specifying a regular expression for more advanced file selection: - -```bash -copernicusmarine get -i cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m --regex ".*01yav_20(00|01|02).*.nc" -``` - -Returns: - -```bash -INFO - 2024-04-03T11:52:43Z - Dataset version was not specified, the latest one was selected: "202211" -INFO - 2024-04-03T11:52:43Z - Dataset part was not specified, the first one was selected: "default" -INFO - 2024-04-03T11:52:43Z - Service was not specified, the default one was selected: "original-files" -INFO - 2024-04-03T11:52:43Z - Downloading using service original-files...
-INFO - 2024-04-03T11:52:44Z - You requested the download of the following files: -s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m_202211/CMEMS_v5r1_IBI_PHY_MY_NL_01yav_20000101_20001231_R20221101_RE01.nc - 8.93 MB - 2023-11-12T23:47:13Z -s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m_202211/CMEMS_v5r1_IBI_PHY_MY_NL_01yav_20010101_20011231_R20221101_RE01.nc - 8.91 MB - 2023-11-12T23:47:13Z -s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m_202211/CMEMS_v5r1_IBI_PHY_MY_NL_01yav_20020101_20021231_R20221101_RE01.nc - 8.75 MB - 2023-11-12T23:47:13Z - -Total size of the download: 26.59 MB -Do you want to proceed with download? [Y/n]: -``` - -#### Notes about the file list option - -Option `--file-list` allows specifying a list of files for more advanced file selection. -The file can contain complete absolute paths for each target file (default behavior) or only a partial path defined by the user, as shown below. - -By default, the get functionality lists all the files on the bucket to be able to select the requested ones. This creates some overhead when there are a lot of files for a specific dataset. For example, a dataset with more than 100 000 files would create an overhead of around two minutes. The file list option directly downloads the files and avoids the listing if all the listed files are found. - -Be careful: a path can easily be misspelled or wrongly queried. The toolbox will display a warning if the file is not found on the bucket and try to find the file by listing all the files on the bucket. - -Example of `file_list.txt` with paths that would be directly downloaded without the listing overhead: - -```txt -# correct paths -> s3://mdl-native-01/native/INSITU_GLO_PHYBGCWAV_DISCRETE_MYNRT_013_030/cmems_obs-ins_glo_phybgcwav_mynrt_na_irr_202311/history/BO/AR_PR_BO_58JM.nc -> INSITU_GLO_PHYBGCWAV_DISCRETE_MYNRT_013_030/cmems_obs-ins_glo_phybgcwav_mynrt_na_irr_202311/history/BO/AR_PR_BO_58JM.nc -> cmems_obs-ins_glo_phybgcwav_mynrt_na_irr_202311/history/BO/AR_PR_BO_58JM.nc -> history/BO/AR_PR_BO_58JM.nc -> index_history.txt - -# incorrect paths -# version is missing -> INSITU_GLO_PHYBGCWAV_DISCRETE_MYNRT_013_030/cmems_obs-ins_glo_phybgcwav_mynrt_na_irr/history/BO/AR_PR_BO_58JM.nc -# only the file name and not the path to the file -> AR_PR_BO_58JM.nc -# not the same dataset -> another_dataset/history/BO/AR_PR_BO_58JM.nc -``` - - -Example of `file_list.txt` with absolute paths: - -```txt -s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m_202012/2021/CMEMS_v5r1_IBI_PHY_MY_PdE_01mav_20210101_20210131_R20230101_RE01.nc -s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m_202012/2021/CMEMS_v5r1_IBI_PHY_MY_PdE_01mav_20210201_20210228_R20230101_RE01.nc -s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m_202012/2021/CMEMS_v5r1_IBI_PHY_MY_PdE_01mav_20210301_20210331_R20230101_RE01.nc -``` - -Note that a path to a file can be seen in 3 parts: - -- the provenance that indicates in which bucket the data is. For example, `s3://mdl-native-10/`. It can be found in the metadata. -- the productID and datasetID. For example, `IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m_202012/`. It also contains the version when the dataset has one. -- the filename, which is everything that comes after the dataset ID.
For example, `2021/CMEMS_v5r1_IBI_PHY_MY_PdE_01mav_20210301_20210331_R20230101_RE01.nc`. It should be considered as a filename. If any component is absent, the filename is not complete and the file cannot be directly downloaded; a listing of all the files is then necessary in order to download it. For example, `2021/CMEMS_v5r1_IBI_PHY_MY_PdE_01mav_20210301_20210331_R20230101_RE01.nc` is a filename and `CMEMS_v5r1_IBI_PHY_MY_PdE_01mav_20210301_20210331_R20230101_RE01.nc` is an incomplete filename. - -> **_NOTE:_** This option is compatible with the file generated by the `--create-file-list` option if you generated a ".txt" file. - -Then the following command: - -```bash -copernicusmarine get -i cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m --file-list file_list.txt -``` - -Returns: - -```bash -INFO - 2024-04-03T12:57:44Z - Dataset version was not specified, the latest one was selected: "202211" -INFO - 2024-04-03T12:57:44Z - Dataset part was not specified, the first one was selected: "default" -INFO - 2024-04-03T12:57:44Z - Service was not specified, the default one was selected: "original-files" -INFO - 2024-04-03T12:57:44Z - Downloading using service original-files... -INFO - 2024-04-03T12:57:45Z - You requested the download of the following files: -s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m_202211/CMEMS_v5r1_IBI_PHY_MY_NL_01yav_20000101_20001231_R20221101_RE01.nc - 8.93 MB - 2023-11-12T23:47:13Z -s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m_202211/CMEMS_v5r1_IBI_PHY_MY_NL_01yav_20010101_20011231_R20221101_RE01.nc - 8.91 MB - 2023-11-12T23:47:13Z -s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m_202211/CMEMS_v5r1_IBI_PHY_MY_NL_01yav_20020101_20021231_R20221101_RE01.nc - 8.75 MB - 2023-11-12T23:47:13Z - -Total size of the download: 26.59 MB -Do you want to proceed with download? [Y/n]: -``` - -Also, there is a specific option `--index-parts` to retrieve the index files of INSITU datasets (as listed on the [Copernicus Marine File Browser](https://data.marine.copernicus.eu/product/INSITU_BLK_PHYBGCWAV_DISCRETE_MYNRT_013_034/files?subdataset=cmems_obs-ins_blk_phybgcwav_mynrt_na_irr_202311--ext--history&path=INSITU_BLK_PHYBGCWAV_DISCRETE_MYNRT_013_034%2Fcmems_obs-ins_blk_phybgcwav_mynrt_na_irr_202311%2F)). -> **_NOTE:_** In the future, it is planned to have the index files for those datasets directly available through the `--filter`, `--regex` and/or `--file-list` options. Meanwhile, check this [Help Center article for a working example](https://help.marine.copernicus.eu/en/articles/9133855-how-to-download-insitu-data-using-index-files). - -Then the following command: - -```bash -copernicusmarine get --dataset-id cmems_obs-ins_blk_phybgcwav_mynrt_na_irr --index-parts -``` - -Returns: - -```txt -INFO - 2024-04-03T12:58:40Z - Dataset version was not specified, the latest one was selected: "202311" -INFO - 2024-04-03T12:58:40Z - Dataset part was not specified, the first one was selected: "history" -INFO - 2024-04-03T12:58:40Z - You forced selection of service: original-files -INFO - 2024-04-03T12:58:40Z - Downloading using service original-files...
-INFO - 2024-04-03T12:58:41Z - You requested the download of the following files: -s3://mdl-native-08/native/INSITU_BLK_PHYBGCWAV_DISCRETE_MYNRT_013_034/cmems_obs-ins_blk_phybgcwav_mynrt_na_irr_202311/index_history.txt - 333.13 kB - 2024-04-02T08:40:30Z -s3://mdl-native-08/native/INSITU_BLK_PHYBGCWAV_DISCRETE_MYNRT_013_034/cmems_obs-ins_blk_phybgcwav_mynrt_na_irr_202311/index_latest.txt - 466.38 kB - 2024-04-03T12:51:52Z -s3://mdl-native-08/native/INSITU_BLK_PHYBGCWAV_DISCRETE_MYNRT_013_034/cmems_obs-ins_blk_phybgcwav_mynrt_na_irr_202311/index_monthly.txt - 1.51 MB - 2024-03-05T18:09:43Z -s3://mdl-native-08/native/INSITU_BLK_PHYBGCWAV_DISCRETE_MYNRT_013_034/cmems_obs-ins_blk_phybgcwav_mynrt_na_irr_202311/index_platform.txt - 209.27 kB - 2024-04-03T08:33:37Z - -Total size of the download: 2.52 MB -Do you want to proceed with download? [Y/n]: -``` - -### Shared options - -Both `subset` and `get` commands provide these options: - -#### Option `--overwrite-output-data` - -When specified, the existing files will be overwritten. -Otherwise, if the files already exist at the destination, new ones with a unique index will be created once the download has been accepted (or once `--force-download` is provided). - -#### Option `--create-template` - -Option to create a file in your current directory containing request parameters. If specified, no other action will be performed. -It will create the following files depending on the feature: - -- `subset` - -Example: - -```bash -copernicusmarine subset --create-template -``` - -Returns: - -```txt -INFO - 2024-04-04T14:38:09Z - Template created at: subset_template.json -``` - -- `get` -Example: - -```bash -copernicusmarine get --create-template -``` - -Returns: - -```txt -INFO - 2024-04-04T14:38:09Z - Template created at: get_template.json -``` - -#### Option `--request-file` - -This option allows specifying request parameters in a `.json` file, which is useful for batch processing. -You can try the following templates or use the `--create-template` option to create either a `subset` or a `get` template request file. - -- Template for `subset` data request: - -```json -{ - "dataset_id": "cmems_mod_glo_phy-thetao_anfc_0.083deg_P1D-m", - "start_datetime": "2022-04-11", - "end_datetime": "2023-08-11", - "minimum_longitude": -182.79, - "maximum_longitude": -179.69, - "minimum_latitude": -40, - "maximum_latitude": -36, - "minimum_depth": 0, - "maximum_depth": 0, - "variables": ["thetao"], - "output_directory": "./data/", - "force_download": true -} -``` - -Example: - -```bash -copernicusmarine subset --request-file template_subset_data_request.json -``` - -- Template for `get` data request: - -```json -{ - "dataset_id": "cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m", - "filter": "*01yav_200[0-2]*", - "force_download": false, - "log_level": "INFO", - "no_directories": false, - "no_metadata_cache": false, - "output_directory": "./data/", - "overwrite_output_data": false, - "overwrite_metadata_cache": false, - "show_outputnames": true -} -``` - -Example: - -```bash -copernicusmarine get --request-file template_get_data_request.json -``` - -#### Option `--credentials-file` - -You can use the `--credentials-file` option to point to a credentials file. The file can be either `.copernicusmarine-credentials`, `motuclient-python.ini`, `.netrc` or `_netrc`. - -#### Option `--dataset-version` - -You can use the `--dataset-version` option to fetch a specific dataset version. Particularly useful to keep an operational chain working when an evolution impacts the chosen dataset.
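For example, a sketch pinning the version label reported in the log excerpts above, so that an operational chain keeps targeting the same files even after a newer dataset version is published:

```bash
# Pin the dataset version (202211, taken from the logs above) alongside a filter
copernicusmarine get --dataset-id cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m --dataset-version 202211 --filter "*01yav_200[0-2]*"
```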
- -#### Option `--dataset-part` - -You can use the `--dataset-part` option to fetch a specific part for the chosen dataset version. - -#### Option `--log-level` - -Set the details printed to console by the command (based on the standard logging library). -Available values are: `[DEBUG|INFO|WARN|ERROR|CRITICAL|QUIET]` - -All logs of the library are sent to stderr by default, except the output of the `describe` command and the output of the `--show-outputnames` option, which are sent to stdout. - -_For versions <=1.2.4_, all logs are sent to stdout by default. - ## Python package (API) -The `copernicusmarine` exposes a Python interface to allow you to [call commands as functions](https://help.marine.copernicus.eu/en/collections/9054839-main-functionalities). - -## Documentation - -A detailed standalone API documentation is under construction and will come at a later stage. For the moment, see the [Help Center](https://help.marine.copernicus.eu/en/collections/9080063-copernicus-marine-toolbox). +The `copernicusmarine` package exposes a Python interface to allow you to [call commands as functions](https://toolbox-docs.marine.copernicus.eu/). ## Version management -We are using semantic versioning X.Y.Z → for example 1.0.2 +We are using semantic versioning X.Y.Z → MAJOR.MINOR.PATCH → for example 1.0.2. We follow the SEMVER principles: -- Z is bumped on minor non-breaking changes. -- Y is bumped on breaking changes. -- X is bumped on demand to highlight a new significant feature or for communication purposes (new Copernicus Marine Service release for example). +>Given a version number MAJOR.MINOR.PATCH, increment the: +> +>- MAJOR version when you make incompatible API changes +>- MINOR version when you add functionality in a backward compatible manner +>- PATCH version when you make backward compatible bug fixes +> +>Additional labels for pre-release and build metadata are available as extensions to the MAJOR.MINOR.PATCH format. ## Contribution We welcome contributions from the community to enhance this package. If you find any issues or have suggestions for improvements, please check out our [Report Template](https://help.marine.copernicus.eu/en/articles/8218546-reporting-an-issue-or-feature-request). +You are welcome to submit issues to the GitHub repository or create a pull request; however, please be advised that we may not respond to your request or may provide a negative response. + ## Future improvements & Roadmap To keep up to date with the most recent and planned advancements, including revisions, corrections, and feature requests generated from users' feedback, please refer to our [Roadmap](https://help.marine.copernicus.eu/en/articles/8218641-next-milestones-and-roadmap).
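Relatedly, since releases follow the SemVer scheme described above, you can pin an exact toolbox version at installation time when reproducibility matters more than novelty; a sketch (the version number is illustrative):

```bash
# Install a pinned version of the toolbox instead of the latest release
python -m pip install copernicusmarine==1.0.2
```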
diff --git a/conda_environment_binary.yaml b/conda_environment_binary.yaml new file mode 100644 index 00000000..de523683 --- /dev/null +++ b/conda_environment_binary.yaml @@ -0,0 +1,12 @@ +dependencies: + - python==3.12 + - poetry==1.8.2 + - git==2.44.0 + - pip==24.0 + - gh==2.45.0 + - pyinstaller==6.10.0 + - pip: + - xarray + - distributed + - tzdata + - pillow diff --git a/conda_environment_sphinx.yaml b/conda_environment_sphinx.yaml new file mode 100644 index 00000000..e61bed3c --- /dev/null +++ b/conda_environment_sphinx.yaml @@ -0,0 +1,14 @@ +name: conda-environment-sphinx +dependencies: + - python==3.9.18 + - pip + - pip: + - sphinx==7.4.7 + - sphinx-click==6.0.0 + - numpydoc==1.8.0 + - sphinx-copybutton==0.5.2 + - furo==2024.8.6 + - myst_parser==3.0.1 + - notebook==7.1.2 + - ipywidgets==8.1.5 + - matplotlib==3.9.2 diff --git a/conda_environment_test.yaml b/conda_environment_test.yaml index a0ea96f8..227a8992 100644 --- a/conda_environment_test.yaml +++ b/conda_environment_test.yaml @@ -2,10 +2,12 @@ dependencies: - python==3.12 - pytest==8.1.1 - pip - - xarray==2024.3.0 - pystac==1.8.3 - tox==4.11.4 - netcdf4==1.6.5 - syrupy==4.6.1 + - compliance-checker==5.1.1 + - numpydoc==1.8.0 - pip: - pytest-order==1.2.1 + - freezegun==1.5.1 diff --git a/copernicusmarine/__init__.py b/copernicusmarine/__init__.py index 651cafd4..24eb448c 100644 --- a/copernicusmarine/__init__.py +++ b/copernicusmarine/__init__.py @@ -1,7 +1,3 @@ -""" -. -""" - import logging.config import time from importlib.metadata import version @@ -13,15 +9,49 @@ logging.config.dictConfig(logging_configuration_dict) logging.Formatter.converter = time.gmtime +from copernicusmarine.catalogue_parser.models import ( + DatasetNotFound, + DatasetVersionNotFound, + DatasetVersionPartNotFound, + ServiceNotHandled, +) +from copernicusmarine.command_line_interface.utils import ( + OtherOptionsPassedWithCreateTemplate, +) +from copernicusmarine.core_functions.credentials_utils import ( + CouldNotConnectToAuthenticationSystem, + CredentialsCannotBeNone, + InvalidUsernameOrPassword, +) +from copernicusmarine.core_functions.exceptions import ( + CoordinatesOutOfDatasetBounds, + FormatNotSupported, + MinimumLongitudeGreaterThanMaximumLongitude, + NetCDFCompressionNotAvailable, + ServiceNotSupported, + VariableDoesNotExistInTheDataset, + WrongDatetimeFormat, +) +from copernicusmarine.core_functions.models import ( + DatasetCoordinatesExtent, + FileGet, + GeographicalExtent, + ResponseGet, + ResponseSubset, + TimeExtent, +) +from copernicusmarine.core_functions.services_utils import ( + NoServiceAvailable, + ServiceDoesNotExistForCommand, + ServiceNotAvailable, +) from copernicusmarine.python_interface.describe import describe from copernicusmarine.python_interface.get import get from copernicusmarine.python_interface.login import login from copernicusmarine.python_interface.open_dataset import ( - load_xarray_dataset, # deprecated open_dataset, ) from copernicusmarine.python_interface.read_dataframe import ( - load_pandas_dataframe, # deprecated read_dataframe, ) from copernicusmarine.python_interface.subset import subset diff --git a/copernicusmarine/aioretry/LICENSE b/copernicusmarine/aioretry/LICENSE deleted file mode 100644 index c7778359..00000000 --- a/copernicusmarine/aioretry/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (c) 2013 kaelzhang <>, contributors - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the
Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/copernicusmarine/aioretry/__init__.py b/copernicusmarine/aioretry/__init__.py deleted file mode 100644 index 867d4610..00000000 --- a/copernicusmarine/aioretry/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from .retry import BeforeRetry # noqa: F401 -from .retry import RetryInfo # noqa: F401 -from .retry import RetryPolicy # noqa: F401 -from .retry import RetryPolicyStrategy # noqa: F401 -from .retry import retry # noqa: F401 diff --git a/copernicusmarine/aioretry/retry.py b/copernicusmarine/aioretry/retry.py deleted file mode 100644 index b4d9bc03..00000000 --- a/copernicusmarine/aioretry/retry.py +++ /dev/null @@ -1,162 +0,0 @@ -import asyncio -import inspect -import warnings -from datetime import datetime -from typing import Awaitable, Callable, Optional, Tuple, TypeVar, Union - - -# Copyright from: https://github.com/kaelzhang/python-aioretry -class RetryInfo: - __slots__ = ("fails", "exception", "since") - - fails: int - exception: Exception - since: datetime - - def __init__( - self, fails: int, exception: Exception, since: datetime - ) -> None: - self.fails = fails - self.exception = exception - self.since = since - - def update(self, exception: Exception) -> "RetryInfo": - """Create a new RetryInfo and update fails and exception - - Why? - The object might be collected by user, - so we need to create a new object every time it fails. - """ - - return RetryInfo(self.fails + 1, exception, self.since) - - -RetryPolicyStrategy = Tuple[bool, Union[int, float]] - -RetryPolicy = Callable[[RetryInfo], RetryPolicyStrategy] -BeforeRetry = Callable[[RetryInfo], Optional[Awaitable[None]]] - -ParamRetryPolicy = Union[RetryPolicy, str] -ParamBeforeRetry = Union[BeforeRetry, str] - -TargetFunction = Callable[..., Awaitable] -Exceptions = Tuple[Exception, ...] 
-ExceptionsOrException = Union[Exceptions, Exception] - -T = TypeVar("T", RetryPolicy, BeforeRetry) - - -async def await_coro(coro): - if inspect.isawaitable(coro): - return await coro - - return coro - - -def warn(method_name: str, exception: Exception): - warnings.warn( - f"""[aioretry] {method_name} raises an exception: - {exception} -It is usually a bug that you should fix!""", - UserWarning, - stacklevel=2, - ) - - -async def perform( - fn: TargetFunction, - retry_policy: RetryPolicy, - before_retry: Optional[BeforeRetry], - *args, - **kwargs, -): - info = None - - while True: - try: - return await fn(*args, **kwargs) - except Exception as e: - if info is None: - info = RetryInfo(1, e, datetime.now()) - else: - info = info.update(e) - - try: - abandon, delay = retry_policy(info) - except Exception as e2: - warn("retry_policy", e2) - raise e2 - - if abandon: - raise e - - if before_retry is not None: - try: - await await_coro(before_retry(info)) - except Exception as e: - warn("before_retry", e) - raise e - - # `delay` could be 0 - if delay > 0: - await asyncio.sleep(delay) - - -def get_method( - target: Union[T, str], - args: Tuple, - name: str, -) -> T: - if not isinstance(target, str): - return target - - if len(args) == 0: - raise RuntimeError( - f"[aioretry] decorator should be used for instance method" - f" if {name} as a str '{target}', which should be fixed" - ) - - self = args[0] - - return getattr(self, target) # type: ignore - - -def retry( - retry_policy: ParamRetryPolicy, - before_retry: Optional[ParamBeforeRetry] = None, -) -> Callable[[TargetFunction], TargetFunction]: - """Creates a decorator function - - Args: - retry_policy (RetryPolicy, str): the retry policy - before_retry (BeforeRetry, str, None): the function to - be called after each failure of fn - and before the corresponding retry. - - Returns: - A wrapped function which accepts the same arguments as - fn and returns an Awaitable - - Usage:: - @retry(retry_policy) - async def coro_func(): - ... 
- """ - - def wrapper(fn: TargetFunction) -> TargetFunction: - async def wrapped(*args, **kwargs): - return await perform( - fn, - get_method(retry_policy, args, "retry_policy"), - ( - get_method(before_retry, args, "before_retry") - if before_retry is not None - else None - ), - *args, - **kwargs, - ) - - return wrapped - - return wrapper diff --git a/copernicusmarine/catalogue_parser/catalogue_parser.py b/copernicusmarine/catalogue_parser/catalogue_parser.py index 81c41b32..8bc91cfb 100644 --- a/copernicusmarine/catalogue_parser/catalogue_parser.py +++ b/copernicusmarine/catalogue_parser/catalogue_parser.py @@ -1,743 +1,207 @@ -import asyncio import logging -import re -from abc import ABC, abstractmethod from dataclasses import dataclass -from datetime import timedelta from enum import Enum -from importlib.metadata import version as package_version from itertools import groupby -from typing import Any, Dict, Iterator, List, Optional, Tuple, Union +from typing import Any, Optional -import nest_asyncio import pystac -from aiohttp import ContentTypeError, ServerDisconnectedError -from cachier.core import cachier from tqdm import tqdm -from copernicusmarine.aioretry import RetryInfo, RetryPolicyStrategy, retry -from copernicusmarine.command_line_interface.exception_handler import ( - log_exception_debug, -) -from copernicusmarine.core_functions.environment_variables import ( - COPERNICUSMARINE_MAX_CONCURRENT_REQUESTS, +from copernicusmarine.catalogue_parser.models import ( + CopernicusMarineCatalogue, + CopernicusMarineProduct, + CopernicusMarineProductDataset, + DatasetNotFound, + get_version_and_part_from_full_dataset_id, ) from copernicusmarine.core_functions.sessions import ( - get_configured_aiohttp_session, - get_https_proxy, + get_configured_requests_session, ) from copernicusmarine.core_functions.utils import ( - CACHE_BASE_DIRECTORY, construct_query_params_for_marine_data_store_monitoring, - datetime_parser, map_reject_none, - next_or_raise_exception, - rolling_batch_gather, + run_concurrently, ) -logger = logging.getLogger("copernicus_marine_root_logger") - - -class _ServiceName(str, Enum): - GEOSERIES = "arco-geo-series" - TIMESERIES = "arco-time-series" - FILES = "original-files" - WMTS = "wmts" - OMI_ARCO = "omi-arco" - STATIC_ARCO = "static-arco" - - -class _ServiceShortName(str, Enum): - GEOSERIES = "geoseries" - TIMESERIES = "timeseries" - FILES = "files" - WMTS = "wmts" - OMI_ARCO = "omi-arco" - STATIC_ARCO = "static-arco" +logger = logging.getLogger("copernicusmarine") +MARINE_DATA_STORE_ROOT_METADATA_URL = ( + "https://s3.waw3-1.cloudferro.com/mdl-metadata" +) -MARINE_DATA_STORE_STAC_BASE_URL = ( - "https://s3.waw3-1.cloudferro.com/mdl-metadata/metadata" +MARINE_DATA_STORE_ROOT_METADATA_URL_STAGING = ( + "https://s3.waw3-1.cloudferro.com/mdl-metadata-dta" ) + +MARINE_DATA_STORE_STAC_URL = f"{MARINE_DATA_STORE_ROOT_METADATA_URL}/metadata" MARINE_DATA_STORE_STAC_ROOT_CATALOG_URL = ( - MARINE_DATA_STORE_STAC_BASE_URL + "/catalog.stac.json" + MARINE_DATA_STORE_STAC_URL + "/catalog.stac.json" ) -MARINE_DATA_STORE_STAC_BASE_URL_STAGING = ( - "https://s3.waw3-1.cloudferro.com/mdl-metadata-dta/metadata" +MARINE_DATA_STORE_STAC_URL_STAGING = ( + f"{MARINE_DATA_STORE_ROOT_METADATA_URL_STAGING}/metadata" ) MARINE_DATA_STORE_STAC_ROOT_CATALOG_URL_STAGING = ( - MARINE_DATA_STORE_STAC_BASE_URL_STAGING + "/catalog.stac.json" + MARINE_DATA_STORE_STAC_URL_STAGING + "/catalog.stac.json" ) -MAX_CONCURRENT_REQUESTS = int(COPERNICUSMARINE_MAX_CONCURRENT_REQUESTS) - - -@dataclass(frozen=True) 
-class _Service: - service_name: _ServiceName - short_name: _ServiceShortName - - def aliases(self) -> List[str]: - return ( - [self.service_name.value, self.short_name.value] - if self.short_name.value != self.service_name.value - else [self.service_name.value] - ) - - def to_json_dict(self): - return { - "service_name": self.service_name.value, - "short_name": self.short_name.value, - } - - -class CopernicusMarineDatasetServiceType(_Service, Enum): - GEOSERIES = _ServiceName.GEOSERIES, _ServiceShortName.GEOSERIES - TIMESERIES = ( - _ServiceName.TIMESERIES, - _ServiceShortName.TIMESERIES, - ) - FILES = _ServiceName.FILES, _ServiceShortName.FILES - WMTS = _ServiceName.WMTS, _ServiceShortName.WMTS - OMI_ARCO = _ServiceName.OMI_ARCO, _ServiceShortName.OMI_ARCO - STATIC_ARCO = _ServiceName.STATIC_ARCO, _ServiceShortName.STATIC_ARCO - - -class CopernicusMarineServiceFormat(str, Enum): - ZARR = "zarr" - SQLITE = "sqlite" - - -def _service_type_from_web_api_string( - name: str, -) -> CopernicusMarineDatasetServiceType: - class WebApi(Enum): - GEOSERIES = "timeChunked" - TIMESERIES = "geoChunked" - FILES = "native" - WMTS = "wmts" - OMI_ARCO = "omi" - STATIC_ARCO = "static" - - web_api_mapping = { - WebApi.GEOSERIES: CopernicusMarineDatasetServiceType.GEOSERIES, - WebApi.TIMESERIES: CopernicusMarineDatasetServiceType.TIMESERIES, - WebApi.FILES: CopernicusMarineDatasetServiceType.FILES, - WebApi.WMTS: CopernicusMarineDatasetServiceType.WMTS, - WebApi.OMI_ARCO: CopernicusMarineDatasetServiceType.OMI_ARCO, - WebApi.STATIC_ARCO: CopernicusMarineDatasetServiceType.STATIC_ARCO, - } - - return next_or_raise_exception( - ( - service_type - for service_web_api, service_type in web_api_mapping.items() - if service_web_api.value == name - ), - ServiceNotHandled(name), - ) - - -class ServiceNotHandled(Exception): - ... 
- - -VERSION_DEFAULT = "default" -PART_DEFAULT = "default" - - -@dataclass -class CopernicusMarineCoordinates: - coordinates_id: str - units: str - minimum_value: Optional[float] - maximum_value: Optional[float] - step: Optional[float] - values: Optional[list[Union[float, int]]] - chunking_length: Optional[int] - chunk_type: Optional[str] - chunk_reference_coordinate: Optional[int] - chunk_geometric_factor: Optional[int] - - def convert_elevation_to_depth(self): - self.coordinates_id = "depth" - minimum_elevation = self.minimum_value - maximum_elevation = self.maximum_value - if minimum_elevation is not None: - self.maximum_value = -minimum_elevation - else: - self.maximum_value = None - if maximum_elevation is not None: - self.minimum_value = -maximum_elevation - else: - self.minimum_value = None - if self.values is not None: - self.values = [-value for value in self.values] +class CatalogParserConnection: + def __init__(self) -> None: + self.session = get_configured_requests_session() -@dataclass -class CopernicusMarineVariable: - short_name: str - standard_name: str - units: str - bbox: Tuple[float, float, float, float] - coordinates: list[CopernicusMarineCoordinates] + def get_json_file(self, url: str) -> dict[str, Any]: + logger.debug(f"Fetching json file at this url: {url}") + with self.session.get( + url, + params=construct_query_params_for_marine_data_store_monitoring(), + proxies=self.session.proxies, + ) as response: + return response.json() + def __enter__(self): + return self -@dataclass -class CopernicusMarineService: - service_type: CopernicusMarineDatasetServiceType - service_format: Optional[CopernicusMarineServiceFormat] - uri: str - variables: list[CopernicusMarineVariable] + def __exit__(self, exc_type, exc_val, exc_tb): + self.session.close() -@dataclass -class CopernicusMarineVersionPart: - name: str - services: list[CopernicusMarineService] - retired_date: Optional[str] - released_date: Optional[str] - - def get_service_by_service_type( - self, service_type: CopernicusMarineDatasetServiceType - ): - return next( - service - for service in self.services - if service.service_type == service_type +def get_dataset_metadata( + dataset_id: str, staging: bool +) -> Optional[CopernicusMarineProductDataset]: + with CatalogParserConnection() as connection: + stac_url = ( + MARINE_DATA_STORE_STAC_URL + if not staging + else MARINE_DATA_STORE_STAC_URL_STAGING ) - - -@dataclass -class CopernicusMarineDatasetVersion: - label: str - parts: list[CopernicusMarineVersionPart] - - def get_part( - self, force_part: Optional[str] - ) -> CopernicusMarineVersionPart: - wanted_part = force_part or PART_DEFAULT - for part in self.parts: - if part.name == wanted_part: - return part - elif not force_part: - return part - raise dataset_version_part_not_found_exception(self) - - def sort_parts(self) -> tuple[Optional[str], Optional[str]]: - not_released_parts = { - part.name - for part in self.parts - if part.released_date - and datetime_parser(part.released_date) > datetime_parser("now") - } - will_be_retired_parts = { - part.name: datetime_parser(part.retired_date).timestamp() - for part in self.parts - if part.retired_date - } - max_retired_timestamp = 0 - if will_be_retired_parts: - max_retired_timestamp = max(will_be_retired_parts.values()) + 1 - self.parts = sorted( - self.parts, - key=lambda x: ( - x.name in not_released_parts, - max_retired_timestamp - - will_be_retired_parts.get(x.name, max_retired_timestamp), - -(x.name == PART_DEFAULT), - -(x.name == "latest"), # for INSITU datasets - 
-(x.name == "bathy"), # for STATIC datasets - x.name, - ), + root_url = ( + MARINE_DATA_STORE_ROOT_METADATA_URL + if not staging + else MARINE_DATA_STORE_ROOT_METADATA_URL_STAGING ) - return self.parts[0].released_date, self.parts[0].retired_date - - -class DatasetVersionPartNotFound(Exception): - ... - - -class DatasetVersionNotFound(Exception): - ... - - -@dataclass -class CopernicusMarineProductDataset: - dataset_id: str - dataset_name: str - versions: list[CopernicusMarineDatasetVersion] - - def _seperate_version_and_default( - self, - ) -> Tuple[ - Optional[CopernicusMarineDatasetVersion], - List[CopernicusMarineDatasetVersion], - ]: - default_version = None - versions = [] - for version in self.versions: - if version.label == VERSION_DEFAULT: - default_version = version - else: - versions.append(version) - return default_version, versions - - def get_latest_version_or_raise(self) -> CopernicusMarineDatasetVersion: - default_version, versions = self._seperate_version_and_default() - sorted_versions = sorted(versions, key=lambda x: x.label) - if sorted_versions: - return sorted_versions[-1] - if default_version: - return default_version - raise dataset_version_not_found_exception(self) - - def get_version( - self, force_version: Optional[str] - ) -> CopernicusMarineDatasetVersion: - wanted_version = force_version or VERSION_DEFAULT - for version in self.versions: - if version.label == wanted_version: - return version - elif not force_version: - return version - raise dataset_version_not_found_exception(self) - - def sort_versions(self) -> None: - not_released_versions: set[str] = set() - retired_dates = {} - for version in self.versions: - released_date, retired_date = version.sort_parts() - if released_date and datetime_parser( - released_date - ) > datetime_parser("now"): - not_released_versions.add(version.label) - if retired_date: - retired_dates[version.label] = retired_date - - self.versions = sorted( - self.versions, - key=lambda x: ( - -(x.label in not_released_versions), - retired_dates.get(x.label, "9999-12-31"), - -(x.label == VERSION_DEFAULT), - x.label, - ), - reverse=True, + dataset_product_mapping_url = ( + f"{root_url}/dataset_product_id_mapping.json" ) - - -def dataset_version_part_not_found_exception( - version: CopernicusMarineDatasetVersion, -) -> DatasetVersionPartNotFound: - return DatasetVersionPartNotFound( - f"No part found for version {version.label}" - ) - - -def dataset_version_not_found_exception( - dataset: CopernicusMarineProductDataset, -) -> DatasetVersionNotFound: - return DatasetVersionNotFound( - f"No version found for dataset {dataset.dataset_id}" - ) - - -@dataclass -class CopernicusMarineProductProvider: - name: str - roles: list[str] - url: str - email: str - - -@dataclass -class CopernicusMarineProduct: - title: str - product_id: str - thumbnail_url: str - description: str - digital_object_identifier: Optional[str] - sources: List[str] - processing_level: Optional[str] - production_center: str - keywords: dict[str, str] - datasets: list[CopernicusMarineProductDataset] - - -@dataclass -class ProductDatasetParser(ABC): - dataset_id: str - dataset_name: str - versions: list[CopernicusMarineDatasetVersion] - - @abstractmethod - def to_copernicus_marine_dataset( - self, - ) -> CopernicusMarineProductDataset: - ... 
- - -@dataclass -class ProductParser(ABC): - title: str - product_id: str - thumbnail_url: str - description: str - digital_object_identifier: Optional[str] - sources: List[str] - processing_level: Optional[str] - production_center: str - keywords: dict[str, str] - - -@dataclass -class ProductDatasetFromMarineDataStore(ProductDatasetParser): - def to_copernicus_marine_dataset(self) -> CopernicusMarineProductDataset: - dataset = CopernicusMarineProductDataset( - dataset_id=self.dataset_id, - dataset_name=self.dataset_name, - versions=self.versions, + product_id = connection.get_json_file(dataset_product_mapping_url).get( + dataset_id ) - dataset.sort_versions() - return dataset - - -@dataclass -class ProductFromMarineDataStore(ProductParser): - datasets: list[ProductDatasetFromMarineDataStore] - - def to_copernicus_marine_product(self) -> CopernicusMarineProduct: - return CopernicusMarineProduct( - title=self.title, - product_id=self.product_id, - thumbnail_url=self.thumbnail_url, - description=self.description, - digital_object_identifier=self.digital_object_identifier, - sources=self.sources, - processing_level=self.processing_level, - production_center=self.production_center, - keywords=self.keywords, - datasets=[ - dataset.to_copernicus_marine_dataset() - for dataset in self.datasets - ], - ) - - -@dataclass -class CopernicusMarineCatalogue: - products: list[CopernicusMarineProduct] - - def filter(self, tokens: list[str]): - return filter_catalogue_with_strings(self, tokens) - - def filter_only_official_versions_and_parts(self): - products_to_remove = [] - for product in self.products: - datasets_to_remove = [] - for dataset in product.datasets: - latest_version = dataset.versions[0] - parts_to_remove = [] - for part in latest_version.parts: - if part.released_date and datetime_parser( - part.released_date - ) > datetime_parser("now"): - parts_to_remove.append(part) - for part_to_remove in parts_to_remove: - latest_version.parts.remove(part_to_remove) - if not latest_version.parts: - datasets_to_remove.append(dataset) - else: - dataset.versions = [latest_version] - for dataset_to_remove in datasets_to_remove: - product.datasets.remove(dataset_to_remove) - if not product.datasets: - products_to_remove.append(product) - for product_to_remove in products_to_remove: - self.products.remove(product_to_remove) - - -class CatalogParserConnection: - def __init__(self, proxy: Optional[str] = None) -> None: - self.proxy = proxy - self.session = get_configured_aiohttp_session() - self.proxy = get_https_proxy() - self.__max_retries = 5 - self.__sleep_time = 1 - - @retry("_retry_policy") - async def get_json_file(self, url: str) -> dict[str, Any]: - logger.debug(f"Fetching json file at this url: {url}") - async with self.session.get( - url, - params=construct_query_params_for_marine_data_store_monitoring(), - proxy=self.proxy, - ) as response: - return await response.json() + if not product_id: + raise DatasetNotFound(dataset_id) + url = f"{stac_url}/{product_id}/product.stac.json" + product_json = connection.get_json_file(url) + product_collection = pystac.Collection.from_dict(product_json) + product_datasets_metadata_links = product_collection.get_item_links() + datasets_metadata_links = [ + dataset_metadata_link + for dataset_metadata_link in product_datasets_metadata_links + if dataset_id in dataset_metadata_link.href + ] + if not datasets_metadata_links: + return None + dataset_jsons: list[dict] = [ + connection.get_json_file(f"{stac_url}/{product_id}/{link.href}") + for link in 
datasets_metadata_links
+    ]
+
+    dataset_items = [
+        dataset_item
+        for dataset_json in dataset_jsons
+        if (
+            dataset_item := _parse_dataset_json_to_pystac_item(
+                dataset_json
+            )
+        )
+    ]
+    return _parse_and_sort_dataset_items(dataset_items)

-    async def close(self) -> None:
-        await self.session.close()
-
-    def _retry_policy(self, info: RetryInfo) -> RetryPolicyStrategy:
-        if not isinstance(
-            info.exception,
-            (
-                TimeoutError,
-                ConnectionResetError,
-                ContentTypeError,
-                ServerDisconnectedError,
-            ),
-        ):
-            logger.error(
-                f"Unexpected error while downloading: {info.exception}"
-            )
-            return True, 0
-        logger.debug(
-            f"Retrying {info.fails} times after error: {info.exception}"
+def _parse_dataset_json_to_pystac_item(
+    metadata_json: dict,
+) -> Optional[pystac.Item]:
+    try:
+        return pystac.Item.from_dict(metadata_json)
+    except pystac.STACError as exception:
+        message = (
+            "Invalid Item: If datetime is None, a start_datetime "
+            + "and end_datetime must be supplied."
         )
-        return info.fails >= self.__max_retries, info.fails * self.__sleep_time
+        if exception.args[0] != message:
+            logger.error(exception)
+            raise pystac.STACError(exception.args)
+        return None


-def _construct_copernicus_marine_service(
-    stac_service_name, stac_asset, datacube
-) -> Optional[CopernicusMarineService]:
+def _parse_product_json_to_pystac_collection(
+    metadata_json: dict,
+) -> Optional[pystac.Collection]:
     try:
-        service_uri = stac_asset.get_absolute_href()
-        service_type = _service_type_from_web_api_string(stac_service_name)
-        service_format = None
-        admp_in_preparation = datacube.properties.get("admp_in_preparation")
-        if stac_asset.media_type and "zarr" in stac_asset.media_type:
-            service_format = CopernicusMarineServiceFormat.ZARR
-        elif stac_asset.media_type and "sqlite3" in stac_asset.media_type:
-            service_format = CopernicusMarineServiceFormat.SQLITE
-
-        if not service_uri.endswith("/"):
-            if admp_in_preparation and (
-                service_type == CopernicusMarineDatasetServiceType.GEOSERIES
-                or service_type
-                == CopernicusMarineDatasetServiceType.TIMESERIES
-            ):
-                return None
-            else:
-                return CopernicusMarineService(
-                    service_type=service_type,
-                    uri=service_uri,
-                    variables=_get_variables(datacube, stac_asset),
-                    service_format=service_format,
-                )
-        return None
-    except ServiceNotHandled as service_not_handled:
-        log_exception_debug(service_not_handled)
+        return pystac.Collection.from_dict(metadata_json)
+    except KeyError as exception:
+        messages = ["spatial", "temporal"]
+        if exception.args[0] not in messages:
+            logger.error(exception)
         return None


-def _get_versions_from_marine_datastore(
-    datacubes: List[pystac.Item],
-) -> List[CopernicusMarineDatasetVersion]:
-    copernicus_marine_dataset_versions: List[
-        CopernicusMarineDatasetVersion
-    ] = []
-
-    datacubes_by_version = groupby(
-        datacubes,
-        key=lambda datacube: get_version_and_part_from_full_dataset_id(
-            datacube.id
-        )[1],
+def _parse_and_sort_dataset_items(
+    dataset_items: list[pystac.Item],
+) -> Optional[CopernicusMarineProductDataset]:
+    """
+    Return all dataset metadata parsed and sorted.
+    The first version and part are the default.
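+
+    Illustrative sketch (``items`` is a hypothetical list of pystac
+    items fetched beforehand from a product's STAC entries)::
+
+        dataset = _parse_and_sort_dataset_items(items)
+        if dataset is not None:
+            default_version = dataset.versions[0]
+            default_part = default_version.parts[0]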
+ """ + dataset_item_example = dataset_items[0] + dataset_id, _, _ = get_version_and_part_from_full_dataset_id( + dataset_item_example.id ) - for dataset_version, datacubes in datacubes_by_version: # type: ignore - parts = _get_parts(datacubes) - - if parts: - version = CopernicusMarineDatasetVersion( - label=dataset_version, - parts=parts, - ) - copernicus_marine_dataset_versions.append(version) - - return copernicus_marine_dataset_versions - - -def _get_parts( - datacubes: List[pystac.Item], -) -> List[CopernicusMarineVersionPart]: - parts: List[CopernicusMarineVersionPart] = [] - for datacube in datacubes: - released_date = datacube.properties.get("admp_released_date") - retired_date = datacube.properties.get("admp_retired_date") - if retired_date and datetime_parser(retired_date) < datetime_parser( - "now" - ): - continue - - services = _get_services(datacube) - _, _, part = get_version_and_part_from_full_dataset_id(datacube.id) - - if services: - parts.append( - CopernicusMarineVersionPart( - name=part, - services=services, - retired_date=retired_date, - released_date=released_date, - ) - ) - - if parts: - return parts - return [] - - -def _get_services( - datacube: pystac.Item, -) -> list[CopernicusMarineService]: - stac_assets_dict = datacube.get_assets() - return [ - dataset_service - for stac_service_name, stac_asset in stac_assets_dict.items() - if ( - dataset_service := _construct_copernicus_marine_service( - stac_service_name, stac_asset, datacube - ) - ) - is not None - ] - - -def _format_arco_data_metadata_producer_valid_start_date( - arco_data_metadata_producer_valid_start_date: str, - to_timestamp: bool = False, -) -> Union[str, int]: - if to_timestamp: - return int( - datetime_parser( - arco_data_metadata_producer_valid_start_date.split(".")[0] - ).timestamp() - * 1000 - ) - return arco_data_metadata_producer_valid_start_date - - -def _get_variables( - stac_dataset: pystac.Item, - stac_asset: pystac.Asset, -) -> list[CopernicusMarineVariable]: - bbox = stac_dataset.bbox - return [ - CopernicusMarineVariable( - short_name=var_cube["id"], - standard_name=var_cube["standardName"], - units=var_cube.get("unit") or "", - bbox=bbox, - coordinates=_get_coordinates( - var_cube["id"], - stac_asset, - stac_dataset.properties.get("admp_valid_start_date"), - ) - or [], - ) - for var_cube in stac_dataset.properties["cube:variables"].values() - ] - + dataset_part_version_merged = CopernicusMarineProductDataset( + dataset_id=dataset_id, + dataset_name=dataset_item_example.properties.get("title", dataset_id), + versions=[], + ) + dataset_part_version_merged.parse_dataset_metadata_items(dataset_items) -def _get_coordinates( - variable_id: str, - stac_asset: pystac.Asset, - arco_data_metadata_producer_valid_start_date: Optional[str], -) -> Optional[list[CopernicusMarineCoordinates]]: - extra_fields_asset = stac_asset.extra_fields - dimensions = extra_fields_asset.get("viewDims") - if dimensions: - coordinates = [] - for dimension, dimension_metadata in dimensions.items(): - coordinates_info = dimension_metadata.get("coords", {}) - if ( - arco_data_metadata_producer_valid_start_date - and dimension == "time" - ): - minimum_value = ( - _format_arco_data_metadata_producer_valid_start_date( - arco_data_metadata_producer_valid_start_date, - to_timestamp=isinstance( - coordinates_info.get("min"), int - ), - ) - ) - else: - minimum_value = coordinates_info.get("min") - chunking_length = dimension_metadata.get("chunkLen") - if isinstance(chunking_length, dict): - chunking_length = 
chunking_length.get(variable_id) - coordinate = CopernicusMarineCoordinates( - coordinates_id=( - "depth" if dimension == "elevation" else dimension - ), - units=dimension_metadata.get("units") or "", - minimum_value=minimum_value, # type: ignore - maximum_value=coordinates_info.get("max"), - step=coordinates_info.get("step"), - values=coordinates_info.get("values"), - chunking_length=chunking_length, - chunk_type=dimension_metadata.get("chunkType"), - chunk_reference_coordinate=dimension_metadata.get( - "chunkRefCoord" - ), - chunk_geometric_factor=dimension_metadata.get( - "chunkGeometricFactor", {} - ).get(variable_id), - ) - if dimension == "elevation": - coordinate.convert_elevation_to_depth() - coordinates.append(coordinate) - return coordinates - else: + if dataset_part_version_merged.versions == []: return None - -def _construct_marine_data_store_dataset( - datacubes_by_id: List, -) -> Optional[ProductDatasetFromMarineDataStore]: - dataset_id = datacubes_by_id[0] - datacubes = list(datacubes_by_id[1]) - dataset_name = ( - datacubes[0].properties["title"] if len(datacubes) == 1 else dataset_id - ) - if datacubes: - versions = _get_versions_from_marine_datastore(datacubes) - if versions: - return ProductDatasetFromMarineDataStore( - dataset_id=dataset_id, - dataset_name=dataset_name, - versions=versions, - ) - return None + dataset_part_version_merged.sort_versions() + return dataset_part_version_merged def _construct_marine_data_store_product( - stac_tuple: Tuple[pystac.Collection, List[pystac.Item]], -) -> ProductFromMarineDataStore: + stac_tuple: tuple[pystac.Collection, list[pystac.Item]], +) -> CopernicusMarineProduct: stac_product, stac_datasets = stac_tuple stac_datasets_sorted = sorted(stac_datasets, key=lambda x: x.id) - datacubes_by_id = groupby( + dataset_items_by_dataset_id = groupby( stac_datasets_sorted, key=lambda x: get_version_and_part_from_full_dataset_id(x.id)[0], ) - datasets = map( - _construct_marine_data_store_dataset, # type: ignore - datacubes_by_id, # type: ignore - ) + datasets = [ + dataset_metadata + for _, dataset_items in dataset_items_by_dataset_id + if ( + dataset_metadata := _parse_and_sort_dataset_items( + list(dataset_items) + ) + ) + ] production_center = [ provider.name for provider in stac_product.providers or [] - if "producer" in provider.roles + if provider.roles and "producer" in provider.roles ] production_center_name = production_center[0] if production_center else "" - thumbnail = stac_product.assets and stac_product.assets.get("thumbnail") + thumbnail_url = None + if stac_product.assets: + thumbnail = stac_product.assets.get("thumbnail") + if thumbnail: + thumbnail_url = thumbnail.get_absolute_href() + digital_object_identifier = ( stac_product.extra_fields.get("sci:doi", None) if stac_product.extra_fields @@ -748,27 +212,24 @@ def _construct_marine_data_store_product( stac_product, "processingLevel" ) - return ProductFromMarineDataStore( + return CopernicusMarineProduct( title=stac_product.title or stac_product.id, product_id=stac_product.id, - thumbnail_url=thumbnail.get_absolute_href() if thumbnail else "", + thumbnail_url=thumbnail_url or "", description=stac_product.description, digital_object_identifier=digital_object_identifier, sources=sources, processing_level=processing_level, production_center=production_center_name, keywords=stac_product.keywords, - datasets=sorted( - [dataset for dataset in datasets if dataset], - key=lambda dataset: dataset.dataset_id, - ), + datasets=datasets, ) def _get_stac_product_property( 
    stac_product: pystac.Collection, property_key: str
 ) -> Optional[Any]:
-    properties: Dict[str, str] = (
+    properties: dict[str, str] = (
         stac_product.extra_fields.get("properties", {})
         if stac_product.extra_fields
         else {}
@@ -776,164 +237,131 @@ def _get_stac_product_property(
     return properties.get(property_key)


-async def async_fetch_items_from_collection(
+def fetch_dataset_items(
     root_url: str,
     connection: CatalogParserConnection,
     collection: pystac.Collection,
-) -> List[pystac.Item]:
+) -> list[pystac.Item]:
     items = []
     for link in collection.get_item_links():
         if not link.owner:
             logger.warning(f"Invalid Item, no owner for: {link.href}")
             continue
         url = root_url + "/" + link.owner.id + "/" + link.href
-        try:
-            item_json = await connection.get_json_file(url)
-            items.append(pystac.Item.from_dict(item_json))
-        except pystac.STACError as exception:
-            message = (
-                "Invalid Item: If datetime is None, a start_datetime "
-                + "and end_datetime must be supplied."
-            )
-            if exception.args[0] != message:
-                logger.error(exception)
-                raise pystac.STACError(exception.args)
+        item_json = connection.get_json_file(url)
+        item = _parse_dataset_json_to_pystac_item(item_json)
+        if item:
+            items.append(item)
     return items


-async def async_fetch_collection(
-    root_url: str, connection: CatalogParserConnection, url: str
-) -> Optional[Tuple[pystac.Collection, List[pystac.Item]]]:
-    json_collection = await connection.get_json_file(url)
-    try:
-        collection = pystac.Collection.from_dict(json_collection)
-        items = await async_fetch_items_from_collection(
-            root_url, connection, collection
-        )
+def fetch_collection(
+    root_url: str,
+    connection: CatalogParserConnection,
+    url: str,
+) -> Optional[tuple[pystac.Collection, list[pystac.Item]]]:
+    json_collection = connection.get_json_file(url)
+    collection = _parse_product_json_to_pystac_collection(json_collection)
+    if collection:
+        items = fetch_dataset_items(root_url, connection, collection)
         return (collection, items)
-
-    except KeyError as exception:
-        messages = ["spatial", "temporal"]
-        if exception.args[0] not in messages:
-            logger.error(exception)
-        return None
+    return None


-async def async_fetch_childs(
+def fetch_product_items(
     root_url: str,
     connection: CatalogParserConnection,
-    child_links: List[pystac.Link],
-) -> Iterator[Optional[Tuple[pystac.Collection, List[pystac.Item]]]]:
+    child_links: list[pystac.Link],
+    max_concurrent_requests: int,
+    disable_progress_bar: bool,
+) -> list[Optional[tuple[pystac.Collection, list[pystac.Item]]]]:
     tasks = []
     for link in child_links:
-        tasks.append(
-            async_fetch_collection(root_url, connection, link.absolute_href)
+        tasks.append((root_url, connection, link.absolute_href))
+    tqdm_bar_configuration = {
+        "desc": "Fetching products",
+        "disable": disable_progress_bar,
+        "leave": False,
+    }
+    return [
+        result
+        for result in run_concurrently(
+            fetch_collection,
+            tasks,
+            max_concurrent_requests,
+            tqdm_bar_configuration,
         )
-    return filter(
-        lambda x: x is not None,
-        await rolling_batch_gather(tasks, MAX_CONCURRENT_REQUESTS),
-    )
+        if result is not None
+    ]


-async def async_fetch_catalog(
+def fetch_all_products_items(
     connection: CatalogParserConnection,
-    staging: bool = False,
-) -> Iterator[pystac.Collection]:
+    max_concurrent_requests: int,
+    staging: bool,
+    disable_progress_bar: bool,
+) -> list[Optional[tuple[pystac.Collection, list[pystac.Item]]]]:
     catalog_root_url = (
         MARINE_DATA_STORE_STAC_ROOT_CATALOG_URL
         if not staging
         else MARINE_DATA_STORE_STAC_ROOT_CATALOG_URL_STAGING
     )
-    json_catalog = await
connection.get_json_file(catalog_root_url) + json_catalog = connection.get_json_file(catalog_root_url) catalog = pystac.Catalog.from_dict(json_catalog) catalog.set_self_href(catalog_root_url) child_links = catalog.get_child_links() root_url = ( - MARINE_DATA_STORE_STAC_BASE_URL + MARINE_DATA_STORE_STAC_URL if not staging - else (MARINE_DATA_STORE_STAC_BASE_URL_STAGING) + else (MARINE_DATA_STORE_STAC_URL_STAGING) ) - childs = await async_fetch_childs(root_url, connection, child_links) - return childs - - -def _retrieve_marine_data_store_products( - connection: CatalogParserConnection, - staging: bool = False, -) -> list[ProductFromMarineDataStore]: - nest_asyncio.apply() - loop = asyncio.get_event_loop() - marine_data_store_root_collections = loop.run_until_complete( - async_fetch_catalog(connection=connection, staging=staging) - ) - - products = map_reject_none( - _construct_marine_data_store_product, - marine_data_store_root_collections, + childs = fetch_product_items( + root_url, + connection, + child_links, + max_concurrent_requests, + disable_progress_bar, ) - - return list(products) + return childs def parse_catalogue( - no_metadata_cache: bool, + max_concurrent_requests: int, disable_progress_bar: bool, staging: bool = False, ) -> CopernicusMarineCatalogue: logger.debug("Parsing catalogue...") - try: - catalog = _parse_catalogue( - ignore_cache=no_metadata_cache, - _versions=package_version("copernicusmarine"), - disable_progress_bar=disable_progress_bar, - staging=staging, - ) - except ValueError as e: - logger.debug(f"Error while parsing catalogue: {e}") - logger.debug( - "Now retrying without cache. If the problem with " - "the cache persists, try running " - "copernicusmarine describe --overwrite-metadata-cache" - ) - catalog = _parse_catalogue( - ignore_cache=True, - _versions=package_version("copernicusmarine"), - disable_progress_bar=disable_progress_bar, - staging=staging, - ) - logger.debug("Catalogue parsed") - return catalog - - -@cachier(cache_dir=CACHE_BASE_DIRECTORY, stale_after=timedelta(hours=24)) -def _parse_catalogue( - _versions: str, # force cachier to overwrite cache in case of version update - disable_progress_bar: bool, - staging: bool = False, -) -> CopernicusMarineCatalogue: progress_bar = tqdm( - total=3, desc="Fetching catalog", disable=disable_progress_bar + total=2, desc="Fetching catalog", disable=disable_progress_bar ) - connection = CatalogParserConnection() - marine_data_store_products = _retrieve_marine_data_store_products( - connection=connection, staging=staging - ) + with CatalogParserConnection() as connection: + marine_data_store_root_collections = fetch_all_products_items( + connection=connection, + max_concurrent_requests=max_concurrent_requests, + staging=staging, + disable_progress_bar=disable_progress_bar, + ) progress_bar.update() - products_merged: List[CopernicusMarineProduct] = [ - marine_data_store_product.to_copernicus_marine_product() - for marine_data_store_product in marine_data_store_products - if marine_data_store_product.datasets + products_metadata = [ + product_metadata + for product_item in marine_data_store_root_collections + if product_item + and ( + ( + product_metadata := _construct_marine_data_store_product( + product_item + ) + ).datasets + ) ] - products_merged.sort(key=lambda x: x.product_id) - progress_bar.update() + products_metadata.sort(key=lambda x: x.product_id) - full_catalog = CopernicusMarineCatalogue(products=products_merged) + full_catalog = CopernicusMarineCatalogue(products=products_metadata) 
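+    # Note: products without any parsable dataset were filtered out in the
+    # comprehension above, so the resulting catalogue only lists products
+    # that expose at least one dataset.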
progress_bar.update() - asyncio.run(connection.close()) - + logger.debug("Catalogue parsed") return full_catalog @@ -942,31 +370,9 @@ class DistinctDatasetVersionPart: dataset_id: str dataset_version: str dataset_part: str - layer_elements: List - raw_services: Dict - stac_items_values: Optional[Dict] - - -REGEX_PATTERN_DATE_YYYYMM = r"[12]\d{3}(0[1-9]|1[0-2])" -PART_SEPARATOR = "--ext--" - - -def get_version_and_part_from_full_dataset_id( - full_dataset_id: str, -) -> Tuple[str, str, str]: - if PART_SEPARATOR in full_dataset_id: - name_with_maybe_version, part = full_dataset_id.split(PART_SEPARATOR) - else: - name_with_maybe_version = full_dataset_id - part = PART_DEFAULT - pattern = rf"^(.*?)(?:_({REGEX_PATTERN_DATE_YYYYMM}))?$" - match = re.match(pattern, name_with_maybe_version) - if match: - dataset_name = match.group(1) - version = match.group(2) or VERSION_DEFAULT - else: - raise Exception(f"Could not parse dataset id: {full_dataset_id}") - return dataset_name, version, part + layer_elements: list + raw_services: dict + stac_items_values: Optional[dict] # --------------------------------------- @@ -974,23 +380,6 @@ def get_version_and_part_from_full_dataset_id( # --------------------------------------- -def get_product_from_url( - catalogue: CopernicusMarineCatalogue, dataset_url: str -) -> CopernicusMarineProduct: - """ - Return the product object, with its dataset list filtered - """ - filtered_catalogue = filter_catalogue_with_strings( - catalogue, [dataset_url] - ) - if filtered_catalogue is None: - error = TypeError("filtered catalogue is empty") - raise error - if isinstance(filtered_catalogue, CopernicusMarineCatalogue): - return filtered_catalogue.products[0] - return filtered_catalogue["products"][0] - - def filter_catalogue_with_strings( catalogue: CopernicusMarineCatalogue, tokens: list[str] ) -> dict[str, Any]: @@ -1022,7 +411,7 @@ def find_match_enum(enum: Enum, tokens: list[str]) -> Any: return find_match_object(enum.value, tokens) -def find_match_tuple(tuple: Tuple, tokens: list[str]) -> Optional[list[Any]]: +def find_match_tuple(tuple: tuple, tokens: list[str]) -> Optional[list[Any]]: return find_match_list(list(tuple), tokens) diff --git a/copernicusmarine/catalogue_parser/models.py b/copernicusmarine/catalogue_parser/models.py new file mode 100644 index 00000000..5f435482 --- /dev/null +++ b/copernicusmarine/catalogue_parser/models.py @@ -0,0 +1,602 @@ +import re + +# TODO: change to pydantic +from dataclasses import dataclass +from enum import Enum +from typing import Optional, Type, TypeVar, Union + +import pystac + +from copernicusmarine.command_line_interface.exception_handler import ( + log_exception_debug, +) +from copernicusmarine.core_functions.utils import ( + datetime_parser, + next_or_raise_exception, +) + +# Output Types definitions + +VERSION_DEFAULT = "default" +PART_DEFAULT = "default" + + +# Service types +class _ServiceName(str, Enum): + GEOSERIES = "arco-geo-series" + TIMESERIES = "arco-time-series" + FILES = "original-files" + WMTS = "wmts" + OMI_ARCO = "omi-arco" + STATIC_ARCO = "static-arco" + + +class _ServiceShortName(str, Enum): + GEOSERIES = "geoseries" + TIMESERIES = "timeseries" + FILES = "files" + WMTS = "wmts" + OMI_ARCO = "omi-arco" + STATIC_ARCO = "static-arco" + + +@dataclass(frozen=True) +class _Service: + service_name: _ServiceName + short_name: _ServiceShortName + + def aliases(self) -> list[str]: + return ( + [self.service_name.value, self.short_name.value] + if self.short_name.value != self.service_name.value + else 
[self.service_name.value]
+        )
+
+    def to_json_dict(self):
+        return {
+            "service_name": self.service_name.value,
+            "short_name": self.short_name.value,
+        }
+
+
+class CopernicusMarineDatasetServiceType(_Service, Enum):
+    GEOSERIES = _ServiceName.GEOSERIES, _ServiceShortName.GEOSERIES
+    TIMESERIES = (
+        _ServiceName.TIMESERIES,
+        _ServiceShortName.TIMESERIES,
+    )
+    FILES = _ServiceName.FILES, _ServiceShortName.FILES
+    WMTS = _ServiceName.WMTS, _ServiceShortName.WMTS
+    OMI_ARCO = _ServiceName.OMI_ARCO, _ServiceShortName.OMI_ARCO
+    STATIC_ARCO = _ServiceName.STATIC_ARCO, _ServiceShortName.STATIC_ARCO
+
+
+def _service_type_from_web_api_string(
+    name: str,
+) -> CopernicusMarineDatasetServiceType:
+    class WebApi(Enum):
+        GEOSERIES = "timeChunked"
+        TIMESERIES = "geoChunked"
+        FILES = "native"
+        WMTS = "wmts"
+        OMI_ARCO = "omi"
+        STATIC_ARCO = "static"
+
+    web_api_mapping = {
+        WebApi.GEOSERIES: CopernicusMarineDatasetServiceType.GEOSERIES,
+        WebApi.TIMESERIES: CopernicusMarineDatasetServiceType.TIMESERIES,
+        WebApi.FILES: CopernicusMarineDatasetServiceType.FILES,
+        WebApi.WMTS: CopernicusMarineDatasetServiceType.WMTS,
+        WebApi.OMI_ARCO: CopernicusMarineDatasetServiceType.OMI_ARCO,
+        WebApi.STATIC_ARCO: CopernicusMarineDatasetServiceType.STATIC_ARCO,
+    }
+
+    return next_or_raise_exception(
+        (
+            service_type
+            for service_web_api, service_type in web_api_mapping.items()
+            if service_web_api.value == name
+        ),
+        ServiceNotHandled(name),
+    )
+
+
+class ServiceNotHandled(Exception):
+    """
+    Exception raised when the dataset does not support the requested service type.
+
+    Please verify that the requested service type can be found in
+    the result of the :func:`~copernicusmarine.describe` command
+    for this specific dataset, version and part.
+    """
+
+    pass
+
+
+# service formats
+class CopernicusMarineServiceFormat(str, Enum):
+    ZARR = "zarr"
+    SQLITE = "sqlite"
+
+
+@dataclass
+class CopernicusMarineCoordinate:
+    coordinate_id: str
+    units: str
+    minimum_value: Optional[float]
+    maximum_value: Optional[float]
+    step: Optional[float]
+    values: Optional[list[Union[float, int]]]
+    chunking_length: Optional[int]
+    chunk_type: Optional[str]
+    chunk_reference_coordinate: Optional[int]
+    chunk_geometric_factor: Optional[int]
+
+    Coordinate = TypeVar("Coordinate", bound="CopernicusMarineCoordinate")
+
+    @classmethod
+    def from_metadata_item(
+        cls: Type[Coordinate],
+        variable_id: str,
+        dimension: str,
+        dimension_metadata: dict,
+        arco_data_metadata_producer_valid_start_date: Optional[str],
+        arco_data_metadata_producer_valid_start_index: Optional[int],
+    ) -> Coordinate:
+        coordinates_info = dimension_metadata.get("coords", {})
+        minimum_value = None
+        coordinate_values = None
+        if dimension == "time":
+            if (
+                arco_data_metadata_producer_valid_start_date
+            ) and coordinates_info.get("min"):
+                minimum_value = (
+                    CopernicusMarineCoordinate._format_admp_valid_start_date(
+                        arco_data_metadata_producer_valid_start_date,
+                        to_timestamp=isinstance(
+                            coordinates_info.get("min"), int
+                        ),
+                    )
+                )
+            elif (
+                arco_data_metadata_producer_valid_start_index
+                and coordinates_info.get("values")
+            ):
+                coordinate_values = coordinates_info.get("values")[
+                    arco_data_metadata_producer_valid_start_index:
+                ]
+        chunking_length = dimension_metadata.get("chunkLen")
+        if isinstance(chunking_length, dict):
+            chunking_length = chunking_length.get(variable_id)
+
+        coordinate = cls(
+            coordinate_id=dimension,
+            units=dimension_metadata.get("units") or "",
+            minimum_value=minimum_value or coordinates_info.get("min"),  # type:
ignore + maximum_value=coordinates_info.get("max"), + step=coordinates_info.get("step"), + values=coordinate_values or coordinates_info.get("values"), + chunking_length=chunking_length, + chunk_type=dimension_metadata.get("chunkType"), + chunk_reference_coordinate=dimension_metadata.get("chunkRefCoord"), + chunk_geometric_factor=dimension_metadata.get( + "chunkGeometricFactor", {} + ).get(variable_id), + ) + if dimension == "elevation": + coordinate._convert_elevation_to_depth() + return coordinate + + @staticmethod + def _format_admp_valid_start_date( + arco_data_metadata_producer_valid_start_date: str, + to_timestamp: bool = False, + ) -> Union[str, int]: + if to_timestamp: + return int( + datetime_parser( + arco_data_metadata_producer_valid_start_date + ).timestamp() + * 1000 + ) + return arco_data_metadata_producer_valid_start_date + + def _convert_elevation_to_depth(self): + self.coordinate_id = "depth" + minimum_elevation = self.minimum_value + maximum_elevation = self.maximum_value + if minimum_elevation is not None: + self.maximum_value = -minimum_elevation + else: + self.maximum_value = None + if maximum_elevation is not None: + self.minimum_value = -maximum_elevation + else: + self.minimum_value = None + if self.values is not None: + self.values = [-value for value in self.values] + + +@dataclass +class CopernicusMarineVariable: + short_name: str + standard_name: str + units: str + bbox: Optional[list[float]] + coordinates: list[CopernicusMarineCoordinate] + + Variable = TypeVar("Variable", bound="CopernicusMarineVariable") + + @classmethod + def from_metadata_item( + cls: Type[Variable], + metadata_item: pystac.Item, + asset: pystac.Asset, + variable_id: str, + bbox: Optional[list[float]], + ) -> Variable: + cube_variables = metadata_item.properties["cube:variables"] + cube_variable = cube_variables[variable_id] + + extra_fields_asset = asset.extra_fields + dimensions = extra_fields_asset.get("viewDims") or {} + return cls( + short_name=variable_id, + standard_name=cube_variable["standardName"], + units=cube_variable.get("unit") or "", + bbox=bbox, + coordinates=[ + CopernicusMarineCoordinate.from_metadata_item( + variable_id, + dimension, + dimension_metadata, + metadata_item.properties.get("admp_valid_start_date"), + metadata_item.properties.get("admp_valid_start_index"), + ) + for dimension, dimension_metadata in dimensions.items() + if dimension in cube_variable["dimensions"] + ], + ) + + +@dataclass +class CopernicusMarineService: + service_type: CopernicusMarineDatasetServiceType + service_format: Optional[CopernicusMarineServiceFormat] + uri: str + variables: list[CopernicusMarineVariable] + + Service = TypeVar("Service", bound="CopernicusMarineService") + + @classmethod + def from_metadata_item( + cls: Type[Service], + metadata_item: pystac.Item, + service_name: str, + asset: pystac.Asset, + ) -> Optional[Service]: + try: + service_uri = asset.get_absolute_href() + if not service_uri: + raise ServiceNotHandled(service_name) + service_type = _service_type_from_web_api_string(service_name) + service_format = None + admp_in_preparation = metadata_item.properties.get( + "admp_in_preparation" + ) + if asset.media_type and "zarr" in asset.media_type: + service_format = CopernicusMarineServiceFormat.ZARR + elif asset.media_type and "sqlite3" in asset.media_type: + service_format = CopernicusMarineServiceFormat.SQLITE + + if not service_uri.endswith("/"): + if admp_in_preparation and ( + service_type + == CopernicusMarineDatasetServiceType.GEOSERIES + or service_type + == 
CopernicusMarineDatasetServiceType.TIMESERIES
+                ):
+                    return None
+                else:
+                    bbox = metadata_item.bbox
+                    return cls(
+                        service_type=service_type,
+                        uri=service_uri,
+                        variables=[
+                            CopernicusMarineVariable.from_metadata_item(
+                                metadata_item, asset, var_cube["id"], bbox
+                            )
+                            for var_cube in metadata_item.properties[
+                                "cube:variables"
+                            ].values()
+                        ],
+                        service_format=service_format,
+                    )
+            return None
+        except ServiceNotHandled as service_not_handled:
+            log_exception_debug(service_not_handled)
+            return None
+
+
+@dataclass
+class CopernicusMarineVersionPart:
+    name: str
+    services: list[CopernicusMarineService]
+    retired_date: Optional[str]
+    released_date: Optional[str]
+
+    VersionPart = TypeVar("VersionPart", bound="CopernicusMarineVersionPart")
+
+    @classmethod
+    def from_metadata_item(
+        cls: Type[VersionPart], metadata_item: pystac.Item, part_name: str
+    ) -> Optional[VersionPart]:
+        retired_date = metadata_item.properties.get("admp_retired_date")
+        released_date = metadata_item.properties.get("admp_released_date")
+        if retired_date and datetime_parser(retired_date) < datetime_parser(
+            "now"
+        ):
+            return None
+        services = [
+            service
+            for metadata_service_name, asset in metadata_item.get_assets().items()
+            if (
+                service := CopernicusMarineService.from_metadata_item(
+                    metadata_item,
+                    metadata_service_name,
+                    asset,
+                )
+            )
+        ]
+        if not services:
+            return None
+        return cls(
+            name=part_name,
+            services=services,
+            retired_date=retired_date,
+            released_date=released_date,
+        )
+
+    def get_service_by_service_type(
+        self, service_type: CopernicusMarineDatasetServiceType
+    ):
+        return next(
+            service
+            for service in self.services
+            if service.service_type == service_type
+        )
+
+
+@dataclass
+class CopernicusMarineDatasetVersion:
+    label: str
+    parts: list[CopernicusMarineVersionPart]
+
+    def get_part(
+        self, force_part: Optional[str]
+    ) -> CopernicusMarineVersionPart:
+        wanted_part = force_part or PART_DEFAULT
+        for part in self.parts:
+            if part.name == wanted_part:
+                return part
+            elif not force_part:
+                return part
+        raise DatasetVersionPartNotFound(self)
+
+    def sort_parts(self) -> tuple[Optional[str], Optional[str]]:
+        not_released_parts = {
+            part.name
+            for part in self.parts
+            if part.released_date
+            and datetime_parser(part.released_date) > datetime_parser("now")
+        }
+        will_be_retired_parts = {
+            part.name: datetime_parser(part.retired_date).timestamp()
+            for part in self.parts
+            if part.retired_date
+        }
+        max_retired_timestamp = 0.0
+        if will_be_retired_parts:
+            max_retired_timestamp = max(will_be_retired_parts.values()) + 1
+        self.parts = sorted(
+            self.parts,
+            key=lambda x: (
+                x.name in not_released_parts,
+                max_retired_timestamp
+                - will_be_retired_parts.get(x.name, max_retired_timestamp),
+                -(x.name == PART_DEFAULT),
+                -(x.name == "latest"),  # for INSITU datasets
+                -(x.name == "bathy"),  # for STATIC datasets
+                x.name,
+            ),
+        )
+        return self.parts[0].released_date, self.parts[0].retired_date
+
+
+@dataclass
+class CopernicusMarineProductDataset:
+    dataset_id: str
+    dataset_name: str
+    versions: list[CopernicusMarineDatasetVersion]
+
+    def get_version(
+        self, force_version: Optional[str]
+    ) -> CopernicusMarineDatasetVersion:
+        wanted_version = force_version or VERSION_DEFAULT
+        for version in self.versions:
+            if version.label == wanted_version:
+                return version
+            elif not force_version:
+                return version
+        raise DatasetVersionNotFound(self)
+
+    def sort_versions(self) -> None:
+        not_released_versions: set[str] = set()
+        retired_dates = {}
+        for version in self.versions:
+            released_date, retired_date = version.sort_parts()
+            if released_date and datetime_parser(
+                released_date
+            ) > datetime_parser("now"):
+                not_released_versions.add(version.label)
+            if retired_date:
+                retired_dates[version.label] = retired_date
+
+        self.versions = sorted(
+            self.versions,
+            key=lambda x: (
+                -(x.label in not_released_versions),
+                retired_dates.get(x.label, "9999-12-31"),
+                -(x.label == VERSION_DEFAULT),
+                x.label,
+            ),
+            reverse=True,
+        )
+
+    def parse_dataset_metadata_items(
+        self, metadata_items: list[pystac.Item]
+    ) -> None:
+        all_versions = set()
+        for metadata_item in metadata_items:
+            (
+                _,
+                dataset_version,
+                dataset_part,
+            ) = get_version_and_part_from_full_dataset_id(metadata_item.id)
+            part = CopernicusMarineVersionPart.from_metadata_item(
+                metadata_item, dataset_part
+            )
+            if not part:
+                continue
+            if dataset_version in all_versions:
+                for version in self.versions:
+                    if version.label == dataset_version:
+                        version.parts.append(part)
+                        break
+            else:
+                all_versions.add(dataset_version)
+                version = CopernicusMarineDatasetVersion(
+                    label=dataset_version, parts=[part]
+                )
+                self.versions.append(version)
+
+
+@dataclass
+class CopernicusMarineProduct:
+    title: str
+    product_id: str
+    thumbnail_url: str
+    description: str
+    digital_object_identifier: Optional[str]
+    sources: list[str]
+    processing_level: Optional[str]
+    production_center: str
+    keywords: Optional[list[str]]
+    datasets: list[CopernicusMarineProductDataset]
+
+
+@dataclass
+class CopernicusMarineCatalogue:
+    products: list[CopernicusMarineProduct]
+
+    def filter_only_official_versions_and_parts(self):
+        products_to_remove = []
+        for product in self.products:
+            datasets_to_remove = []
+            for dataset in product.datasets:
+                latest_version = dataset.versions[0]
+                parts_to_remove = []
+                for part in latest_version.parts:
+                    if part.released_date and datetime_parser(
+                        part.released_date
+                    ) > datetime_parser("now"):
+                        parts_to_remove.append(part)
+                for part_to_remove in parts_to_remove:
+                    latest_version.parts.remove(part_to_remove)
+                if not latest_version.parts:
+                    datasets_to_remove.append(dataset)
+                else:
+                    dataset.versions = [latest_version]
+            for dataset_to_remove in datasets_to_remove:
+                product.datasets.remove(dataset_to_remove)
+            if not product.datasets:
+                products_to_remove.append(product)
+        for product_to_remove in products_to_remove:
+            self.products.remove(product_to_remove)
+
+
+# Errors
+class DatasetVersionPartNotFound(Exception):
+    """
+    Exception raised when the requested part of the dataset version cannot be found.
+
+    Please verify that the requested part can be found in
+    the result of the :func:`~copernicusmarine.describe` command
+    for this specific dataset version and dataset id.
+    If yes, please contact user support.
+    """
+
+    def __init__(self, version: CopernicusMarineDatasetVersion):
+        message = f"No part found for version {version.label}"
+        super().__init__(message)
+
+
+class DatasetVersionNotFound(Exception):
+    """
+    Exception raised when the requested version of the dataset cannot be found.
+
+    Please verify that the requested version can be found in
+    the result of the :func:`~copernicusmarine.describe` command
+    for this specific dataset.
+    If yes, please contact user support.
+    """
+
+    def __init__(self, dataset: CopernicusMarineProductDataset):
+        message = f"No version found for dataset {dataset.dataset_id}"
+        super().__init__(message)
+
+
+class DatasetNotFound(Exception):
+    """
+    Exception raised when the dataset is not found in the catalogue.
+
+    Possible reasons:
+
+    - The dataset id is incorrect and not present in the catalogue.
+    - The dataset has been retired.
+
+    Please verify that the dataset id can be found in
+    the result of the :func:`~copernicusmarine.describe` command.
+    If yes, please contact user support.
+    """
+
+    def __init__(self, dataset_id: str):
+        message = (
+            f"{dataset_id} "
+            f"Please check that the dataset exists and "
+            f"the input datasetID is correct."
+        )
+        super().__init__(message)
+
+
+REGEX_PATTERN_DATE_YYYYMM = r"[12]\d{3}(0[1-9]|1[0-2])"
+PART_SEPARATOR = "--ext--"
+
+
+def get_version_and_part_from_full_dataset_id(
+    full_dataset_id: str,
+) -> tuple[str, str, str]:
+    if PART_SEPARATOR in full_dataset_id:
+        name_with_maybe_version, part = full_dataset_id.split(PART_SEPARATOR)
+    else:
+        name_with_maybe_version = full_dataset_id
+        part = PART_DEFAULT
+    pattern = rf"^(.*?)(?:_({REGEX_PATTERN_DATE_YYYYMM}))?$"
+    match = re.match(pattern, name_with_maybe_version)
+    if match:
+        dataset_name = match.group(1)
+        version = match.group(2) or VERSION_DEFAULT
+    else:
+        raise Exception(f"Could not parse dataset id: {full_dataset_id}")
+    return dataset_name, version, part
diff --git a/copernicusmarine/catalogue_parser/request_structure.py b/copernicusmarine/catalogue_parser/request_structure.py
index c3b893c1..308bb64e 100644
--- a/copernicusmarine/catalogue_parser/request_structure.py
+++ b/copernicusmarine/catalogue_parser/request_structure.py
@@ -3,18 +3,21 @@
 import pathlib
 import re
 from dataclasses import dataclass, field
-from datetime import datetime
 from json import load
 from typing import Any, Dict, List, Optional

+from pendulum import DateTime
+
 from copernicusmarine.core_functions.deprecated_options import (
     DEPRECATED_OPTIONS,
 )
 from copernicusmarine.core_functions.models import (
+    DEFAULT_COORDINATES_SELECTION_METHOD,
     DEFAULT_FILE_FORMAT,
-    DEFAULT_SUBSET_METHOD,
+    DEFAULT_VERTICAL_DIMENSION_OUTPUT,
+    CoordinatesSelectionMethod,
     FileFormat,
-    SubsetMethod,
+    VerticalDimensionOutput,
 )
 from copernicusmarine.core_functions.utils import datetime_parser
 from copernicusmarine.download_functions.subset_parameters import (
@@ -23,7 +26,7 @@
     TemporalParameters,
 )

-logger = logging.getLogger("copernicus_marine_root_logger")
+logger = logging.getLogger("copernicusmarine")


 MAPPING_REQUEST_FILES_AND_REQUEST_OPTIONS: dict[str, str] = {
@@ -34,19 +37,21 @@


 @dataclass
-class DatasetTimeAndGeographicalSubset:
+class DatasetTimeAndSpaceSubset:
     minimum_longitude: Optional[float] = None
     maximum_longitude: Optional[float] = None
     minimum_latitude: Optional[float] = None
     maximum_latitude: Optional[float] = None
-    start_datetime: Optional[datetime] = None
-    end_datetime: Optional[datetime] = None
+    minimum_depth: Optional[float] = None
+    maximum_depth: Optional[float] = None
+    start_datetime: Optional[DateTime] = None
+    end_datetime: Optional[DateTime] = None


 @dataclass
 class SubsetRequest:
+    dataset_id: str
     dataset_url: Optional[str] = None
-    dataset_id: Optional[str] = None
     force_dataset_version: Optional[str] = None
     force_dataset_part: Optional[str] = None
     variables: Optional[List[str]] = None
@@ -56,19 +61,23 @@ class SubsetRequest:
     maximum_latitude: Optional[float] = None
     minimum_depth: Optional[float] = None
     maximum_depth: Optional[float] = None
-    vertical_dimension_as_originally_produced: bool = True
-    start_datetime: Optional[datetime] = None
-    end_datetime: Optional[datetime] = None
-    subset_method: SubsetMethod = DEFAULT_SUBSET_METHOD
+    vertical_dimension_output: VerticalDimensionOutput = (
DEFAULT_VERTICAL_DIMENSION_OUTPUT + ) + start_datetime: Optional[DateTime] = None + end_datetime: Optional[DateTime] = None + coordinates_selection_method: CoordinatesSelectionMethod = ( + DEFAULT_COORDINATES_SELECTION_METHOD + ) output_filename: Optional[str] = None file_format: FileFormat = DEFAULT_FILE_FORMAT force_service: Optional[str] = None output_directory: pathlib.Path = pathlib.Path(".") force_download: bool = False overwrite_output_data: bool = False - netcdf_compression_enabled: bool = False - netcdf_compression_level: Optional[int] = None + netcdf_compression_level: int = 0 netcdf3_compatible: bool = False + dry_run: bool = False def update(self, new_dict: dict): """Method to update values in SubsetRequest object. @@ -112,45 +121,40 @@ def enforce_types(self): type_enforced_dict[key] = new_value self.__dict__.update(type_enforced_dict) - def from_file(self, filepath: pathlib.Path): - self.update(subset_request_from_file(filepath).__dict__) - return self - - def get_time_and_geographical_subset( + def get_time_and_space_subset( self, - ) -> DatasetTimeAndGeographicalSubset: - return DatasetTimeAndGeographicalSubset( + ) -> DatasetTimeAndSpaceSubset: + return DatasetTimeAndSpaceSubset( minimum_longitude=self.minimum_longitude, maximum_longitude=self.maximum_longitude, minimum_latitude=self.minimum_latitude, maximum_latitude=self.maximum_latitude, + minimum_depth=self.minimum_depth, + maximum_depth=self.maximum_depth, start_datetime=self.start_datetime, end_datetime=self.end_datetime, ) + def from_file(self, filepath: pathlib.Path): + json_file = open(filepath) + json_content = load(json_file) + + json_with_deprecated_options_replace = {} + + for key, val in json_content.items(): + if key in DEPRECATED_OPTIONS: + deprecated_option = DEPRECATED_OPTIONS[key] + json_with_deprecated_options_replace[ + deprecated_option.new_name + ] = val + elif key in MAPPING_REQUEST_FILES_AND_REQUEST_OPTIONS: + new_key = MAPPING_REQUEST_FILES_AND_REQUEST_OPTIONS[key] + json_with_deprecated_options_replace[new_key] = val + else: + json_with_deprecated_options_replace[key] = val -def subset_request_from_file(filepath: pathlib.Path) -> SubsetRequest: - json_file = open(filepath) - json_content = load(json_file) - - json_with_deprecated_options_replace = {} - - for key, val in json_content.items(): - if key in DEPRECATED_OPTIONS: - deprecated_option = DEPRECATED_OPTIONS[key] - json_with_deprecated_options_replace[ - deprecated_option.new_name - ] = val - elif key in MAPPING_REQUEST_FILES_AND_REQUEST_OPTIONS: - new_key = MAPPING_REQUEST_FILES_AND_REQUEST_OPTIONS[key] - json_with_deprecated_options_replace[new_key] = val - else: - json_with_deprecated_options_replace[key] = val - - subset_request = SubsetRequest() - subset_request.__dict__.update(json_with_deprecated_options_replace) - subset_request.enforce_types() - return subset_request + self.__dict__.update(json_with_deprecated_options_replace) + self.enforce_types() def convert_motu_api_request_to_structure( @@ -172,6 +176,7 @@ def convert_motu_api_request_to_structure( else: motu_api_request_dict[arg] = value subset_request = SubsetRequest( + dataset_id="", output_directory=pathlib.Path("."), force_download=False, output_filename=None, @@ -198,8 +203,8 @@ def convert_motu_api_request_to_structure( @dataclass class GetRequest: + dataset_id: str dataset_url: Optional[str] = None - dataset_id: Optional[str] = None force_dataset_version: Optional[str] = None force_dataset_part: Optional[str] = None no_directories: bool = False @@ -207,7 +212,6 @@ class 
GetRequest: output_directory: str = "." force_download: bool = False overwrite_output_data: bool = False - force_service: Optional[str] = None filter: Optional[str] = None regex: Optional[str] = None file_list: Optional[pathlib.Path] = None @@ -215,6 +219,7 @@ class GetRequest: sync_delete: bool = False index_parts: bool = False direct_download: Optional[list[str]] = None + dry_run: bool = False def update(self, new_dict: dict): """Method to update values in GetRequest object. @@ -241,43 +246,34 @@ def enforce_types(self): self.__dict__.update(type_enforced_dict) def from_file(self, filepath: pathlib.Path): - self = get_request_from_file(filepath) - logger.info(filepath) - return self - - -def get_request_from_file(filepath: pathlib.Path) -> GetRequest: - json_file = load(open(filepath)) - json_with_mapped_options = {} - for key, val in json_file.items(): - if key in MAPPING_REQUEST_FILES_AND_REQUEST_OPTIONS: - new_key = MAPPING_REQUEST_FILES_AND_REQUEST_OPTIONS[key] - json_with_mapped_options[new_key] = val - else: - json_with_mapped_options[key] = val - get_request = GetRequest() - get_request.__dict__.update(json_with_mapped_options) - get_request.enforce_types() - full_regex = get_request.regex - if get_request.filter: - filter_regex = filter_to_regex(get_request.filter) - full_regex = overload_regex_with_additionnal_filter( - filter_regex, full_regex - ) - if get_request.file_list: - file_list_regex = file_list_to_regex(get_request.file_list) - full_regex = overload_regex_with_additionnal_filter( - file_list_regex, full_regex - ) - get_request.regex = full_regex - - return get_request + json_file = load(open(filepath)) + json_with_mapped_options = {} + for key, val in json_file.items(): + if key in MAPPING_REQUEST_FILES_AND_REQUEST_OPTIONS: + new_key = MAPPING_REQUEST_FILES_AND_REQUEST_OPTIONS[key] + json_with_mapped_options[new_key] = val + else: + json_with_mapped_options[key] = val + self.__dict__.update(json_with_mapped_options) + self.enforce_types() + full_regex = self.regex + if self.filter: + filter_regex = filter_to_regex(self.filter) + full_regex = overload_regex_with_additionnal_filter( + filter_regex, full_regex + ) + if self.file_list: + file_list_regex = file_list_to_regex(self.file_list) + full_regex = overload_regex_with_additionnal_filter( + file_list_regex, full_regex + ) + self.regex = full_regex @dataclass class LoadRequest: + dataset_id: str dataset_url: Optional[str] = None - dataset_id: Optional[str] = None force_dataset_version: Optional[str] = None force_dataset_part: Optional[str] = None username: Optional[str] = None @@ -290,20 +286,22 @@ class LoadRequest: default_factory=TemporalParameters ) depth_parameters: DepthParameters = field(default_factory=DepthParameters) - subset_method: SubsetMethod = DEFAULT_SUBSET_METHOD + coordinates_selection_method: CoordinatesSelectionMethod = ( + DEFAULT_COORDINATES_SELECTION_METHOD + ) force_service: Optional[str] = None credentials_file: Optional[pathlib.Path] = None - overwrite_metadata_cache: bool = False - no_metadata_cache: bool = False - def get_time_and_geographical_subset( + def get_time_and_space_subset( self, - ) -> DatasetTimeAndGeographicalSubset: - return DatasetTimeAndGeographicalSubset( + ) -> DatasetTimeAndSpaceSubset: + return DatasetTimeAndSpaceSubset( minimum_longitude=self.geographical_parameters.longitude_parameters.minimum_longitude, # noqa maximum_longitude=self.geographical_parameters.longitude_parameters.maximum_longitude, # noqa 
minimum_latitude=self.geographical_parameters.latitude_parameters.minimum_latitude, # noqa maximum_latitude=self.geographical_parameters.latitude_parameters.maximum_latitude, # noqa + minimum_depth=self.depth_parameters.minimum_depth, + maximum_depth=self.depth_parameters.maximum_depth, start_datetime=self.temporal_parameters.start_datetime, end_datetime=self.temporal_parameters.end_datetime, ) diff --git a/copernicusmarine/command_line_interface/__init__.py b/copernicusmarine/command_line_interface/__init__.py index 0c01cec5..e69de29b 100644 --- a/copernicusmarine/command_line_interface/__init__.py +++ b/copernicusmarine/command_line_interface/__init__.py @@ -1,3 +0,0 @@ -""" -. -""" diff --git a/copernicusmarine/command_line_interface/copernicus_marine.py b/copernicusmarine/command_line_interface/copernicus_marine.py index 074ab61c..beaafec0 100644 --- a/copernicusmarine/command_line_interface/copernicus_marine.py +++ b/copernicusmarine/command_line_interface/copernicus_marine.py @@ -1,22 +1,18 @@ import click -from copernicusmarine.command_line_interface.group_describe import ( - cli_group_describe, -) -from copernicusmarine.command_line_interface.group_get import cli_group_get -from copernicusmarine.command_line_interface.group_login import cli_group_login -from copernicusmarine.command_line_interface.group_subset import ( - cli_group_subset, -) +from copernicusmarine.command_line_interface.group_describe import cli_describe +from copernicusmarine.command_line_interface.group_get import cli_get +from copernicusmarine.command_line_interface.group_login import cli_login +from copernicusmarine.command_line_interface.group_subset import cli_subset @click.command( cls=click.CommandCollection, sources=[ - cli_group_describe, - cli_group_login, - cli_group_subset, - cli_group_get, + cli_describe, + cli_login, + cli_subset, + cli_get, ], context_settings=dict(help_option_names=["-h", "--help"]), ) diff --git a/copernicusmarine/command_line_interface/exception_handler.py b/copernicusmarine/command_line_interface/exception_handler.py index 5b1cd559..ef286388 100644 --- a/copernicusmarine/command_line_interface/exception_handler.py +++ b/copernicusmarine/command_line_interface/exception_handler.py @@ -5,7 +5,7 @@ from sys import exit from typing import Callable -logger = logging.getLogger("copernicus_marine_root_logger") +logger = logging.getLogger("copernicusmarine") def _log_exception(log_function: Callable, exception: Exception): diff --git a/copernicusmarine/command_line_interface/group_describe.py b/copernicusmarine/command_line_interface/group_describe.py index 47f86ab2..7290c128 100644 --- a/copernicusmarine/command_line_interface/group_describe.py +++ b/copernicusmarine/command_line_interface/group_describe.py @@ -5,48 +5,39 @@ from copernicusmarine.command_line_interface.exception_handler import ( log_exception_and_exit, ) -from copernicusmarine.command_line_interface.utils import ( - MutuallyExclusiveOption, - tqdm_disable_option, -) -from copernicusmarine.core_functions.deprecated import ( - DeprecatedClickOption, - DeprecatedClickOptionsCommand, +from copernicusmarine.command_line_interface.utils import tqdm_disable_option +from copernicusmarine.core_functions import documentation_utils +from copernicusmarine.core_functions.click_custom_class import ( + CustomClickOptionsCommand, ) from copernicusmarine.core_functions.describe import describe_function -logger = logging.getLogger("copernicus_marine_root_logger") -blank_logger = logging.getLogger("copernicus_marine_blank_logger") 
+logger = logging.getLogger("copernicusmarine") +blank_logger = logging.getLogger("copernicusmarine_blank_logger") @click.group() -def cli_group_describe() -> None: +def cli_describe() -> None: pass -@cli_group_describe.command( +@cli_describe.command( "describe", - cls=DeprecatedClickOptionsCommand, - short_help="Print Copernicus Marine catalog as JSON.", - help=""" - Print Copernicus Marine catalog as JSON. - - The default display contains information on the products, and more data - can be displayed using the --include- flags. - - The --contains option allows the user to specify one or several strings to - filter through the catalogue display. The search is performed recursively - on all attributes of the catalogue, and the tokens only need to be - contained in one of the attributes (i.e. not exact match). - """, + cls=CustomClickOptionsCommand, + short_help="Print Copernicus Marine catalogue as JSON.", + help=documentation_utils.DESCRIBE["DESCRIBE_DESCRIPTION_HELP"] + + " \n\nReturns\n " + + documentation_utils.DESCRIBE["DESCRIBE_RESPONSE_HELP"], # noqa epilog=""" Examples: - \b - copernicusmarine describe --contains METOFFICE-GLO-SST-L4-NRT-OBS-SST-V2 --include-datasets + .. code-block:: bash + + copernicusmarine describe --contains METOFFICE-GLO-SST-L4-NRT-OBS-SST-V2 --include-datasets - \b - copernicusmarine describe -c METOFFICE-GLO-SST-L4-NRT-OBS-SST-V2 + .. code-block:: bash + + copernicusmarine describe -c METOFFICE-GLO-SST-L4-NRT-OBS-SST-V2 \n """, # noqa ) @click.option( @@ -54,33 +45,28 @@ def cli_group_describe() -> None: type=bool, is_flag=True, default=False, - help="Include product description in output.", + help=documentation_utils.DESCRIBE["INCLUDE_DESCRIPTION_HELP"], ) @click.option( "--include-datasets", type=bool, is_flag=True, default=False, - help="Include product dataset details in output.", + help=documentation_utils.DESCRIBE["INCLUDE_DATASETS_HELP"], ) @click.option( "--include-keywords", type=bool, is_flag=True, default=False, - help="Include product keyword details in output.", + help=documentation_utils.DESCRIBE["INCLUDE_KEYWORDS_HELP"], ) @click.option( "--include-versions", - "--include-all-versions", - cls=DeprecatedClickOption, - deprecated=["--include-all-versions"], - preferred="--include-versions", type=bool, is_flag=True, default=False, - help="Include dataset versions in output. " - "By default, shows only the default version.", + help=documentation_utils.DESCRIBE["INCLUDE_VERSIONS_HELP"], ) @click.option( "-a", @@ -88,44 +74,27 @@ def cli_group_describe() -> None: type=bool, is_flag=True, default=False, - help="Include all the possible data in output: " - "description, datasets, keywords, and versions.", + help=documentation_utils.DESCRIBE["INCLUDE_ALL_HELP"], ) @click.option( "--contains", "-c", type=str, multiple=True, - help="Filter catalogue output. 
Returns products with attributes " - "matching a string token.", + help=documentation_utils.DESCRIBE["CONTAINS_HELP"], ) @click.option( - "--overwrite-metadata-cache", - cls=MutuallyExclusiveOption, - type=bool, - is_flag=True, - default=False, - help="Force to refresh the catalogue by overwriting the local cache.", - mutually_exclusive=["no_metadata_cache"], -) -@click.option( - "--no-metadata-cache", - cls=MutuallyExclusiveOption, - type=bool, - is_flag=True, - default=False, - help="Bypass the use of cache.", - mutually_exclusive=["overwrite_metadata_cache"], + "--max-concurrent-requests", + type=int, + default=15, + help=documentation_utils.DESCRIBE["MAX_CONCURRENT_REQUESTS_HELP"], ) @tqdm_disable_option @click.option( "--log-level", type=click.Choice(["DEBUG", "INFO", "WARN", "ERROR", "CRITICAL", "QUIET"]), default="INFO", - help=( - "Set the details printed to console by the command " - "(based on standard logging library)." - ), + help=documentation_utils.DESCRIBE["LOG_LEVEL_HELP"], ) @click.option( "--staging", @@ -142,8 +111,7 @@ def describe( include_versions: bool, include_all: bool, contains: list[str], - overwrite_metadata_cache: bool, - no_metadata_cache: bool, + max_concurrent_requests: int, disable_progress_bar: bool, log_level: str, staging: bool, @@ -169,8 +137,7 @@ def describe( include_keywords=include_keywords, include_versions=include_versions, contains=contains, - overwrite_metadata_cache=overwrite_metadata_cache, - no_metadata_cache=no_metadata_cache, + max_concurrent_requests=max_concurrent_requests, disable_progress_bar=disable_progress_bar, staging=staging, ) diff --git a/copernicusmarine/command_line_interface/group_get.py b/copernicusmarine/command_line_interface/group_get.py index eec1ef79..06d58cf4 100644 --- a/copernicusmarine/command_line_interface/group_get.py +++ b/copernicusmarine/command_line_interface/group_get.py @@ -14,58 +14,45 @@ force_dataset_version_option, tqdm_disable_option, ) -from copernicusmarine.core_functions.deprecated import ( - DeprecatedClickOption, - DeprecatedClickOptionsCommand, +from copernicusmarine.core_functions import documentation_utils +from copernicusmarine.core_functions.click_custom_class import ( + CustomClickOptionsCommand, ) from copernicusmarine.core_functions.get import ( create_get_template, get_function, ) -from copernicusmarine.core_functions.services_utils import CommandType -from copernicusmarine.core_functions.utils import ( - OVERWRITE_LONG_OPTION, - OVERWRITE_OPTION_HELP_TEXT, - OVERWRITE_SHORT_OPTION, -) -logger = logging.getLogger("copernicus_marine_root_logger") +logger = logging.getLogger("copernicusmarine") +blank_logger = logging.getLogger("copernicusmarine_blank_logger") @click.group() -def cli_group_get() -> None: +def cli_get() -> None: pass -@cli_group_get.command( +@cli_get.command( "get", - cls=DeprecatedClickOptionsCommand, + cls=CustomClickOptionsCommand, short_help="Download originally produced data files.", - help=""" - Download originally produced data files. - - Either one of --dataset-id or --dataset-url is required (can be found via the "describe" command). - The function fetches the files recursively if a folder path is passed as URL. - When provided a datasetID, all the files in the corresponding folder will be downloaded if none of the --filter or --regex options is specified. - """, # noqa + help=documentation_utils.GET["GET_DESCRIPTION_HELP"] + + " See :ref:`describe `. 
\n\nReturns\n " + + documentation_utils.GET["GET_RESPONSE_HELP"], epilog=""" - Examples: + Example to download all the files from a given dataset: - \b - copernicusmarine get -nd -o data_folder --dataset-id cmems_mod_nws_bgc-pft_myint_7km-3D-diato_P1M-m + .. code-block:: bash + + copernicusmarine get -i cmems_mod_nws_bgc-pft_myint_7km-3D-diato_P1M-m \n """, # noqa ) -@click.option( - "--dataset-url", - "-u", - type=str, - help="URL to the data files.", -) @click.option( "--dataset-id", "-i", type=str, - help="The datasetID.", + default=None, + help=documentation_utils.GET["DATASET_ID_HELP"], ) @force_dataset_version_option @force_dataset_part_option @@ -73,164 +60,97 @@ def cli_group_get() -> None: "--username", type=str, default=None, - help="If not set, search for environment variable" - + " COPERNICUSMARINE_SERVICE_USERNAME" - + ", or else look for configuration files, or else ask for user input.", + help=documentation_utils.GET["USERNAME_HELP"], ) @click.option( "--password", type=str, default=None, - help="If not set, search for environment variable" - + " COPERNICUSMARINE_SERVICE_PASSWORD" - + ", or else look for configuration files, or else ask for user input.", + help=documentation_utils.GET["PASSWORD_HELP"], ) @click.option( "--no-directories", "-nd", cls=MutuallyExclusiveOption, is_flag=True, - help="Option to not recreate folder hierarchy in ouput directory.", + help=documentation_utils.GET["NO_DIRECTORIES_HELP"], default=False, mutually_exclusive=["sync"], ) @click.option( "--show-outputnames", is_flag=True, - help="Option to display the names of the" - + " output files before download.", + help=documentation_utils.GET["SHOW_OUTPUTNAMES_HELP"], default=False, ) @click.option( "--output-directory", "-o", type=click.Path(path_type=pathlib.Path), - help="The destination directory for the downloaded files." - + " Default is the current directory.", + help=documentation_utils.GET["OUTPUT_DIRECTORY_HELP"], ) @click.option( "--credentials-file", type=click.Path(path_type=pathlib.Path), - help=( - "Path to a credentials file if not in its default directory. " - "Accepts .copernicusmarine-credentials / .netrc or _netrc / " - "motuclient-python.ini files." - ), + help=documentation_utils.GET["CREDENTIALS_FILE_HELP"], ) @click.option( "--force-download", is_flag=True, default=False, - help="Flag to skip confirmation before download.", + help=documentation_utils.GET["FORCE_DOWNLOAD_HELP"], ) @click.option( - OVERWRITE_LONG_OPTION, - OVERWRITE_SHORT_OPTION, + documentation_utils.GET["OVERWRITE_LONG_OPTION"], + documentation_utils.GET["OVERWRITE_SHORT_OPTION"], is_flag=True, default=False, - help=OVERWRITE_OPTION_HELP_TEXT, -) -@click.option( - "--service", - "--force-service", - "-s", - cls=DeprecatedClickOption, - deprecated=["--force-service"], - preferred="--service", - type=str, - help=( - "Force download through one of the available services " - f"using the service name among {CommandType.GET.service_names()} " - f"or its short name among {CommandType.GET.service_short_names()}." - ), + help=documentation_utils.GET["OVERWRITE_OUTPUT_DATA_HELP"], ) @click.option( "--create-template", type=bool, is_flag=True, default=False, - help="Option to create a file get_template.json in your current directory " - "containing CLI arguments. If specified, no other action will be performed.", + help=documentation_utils.GET["CREATE_TEMPLATE_HELP"], ) @click.option( "--request-file", type=click.Path(exists=True, path_type=pathlib.Path), - help="Option to pass a file containing CLI arguments. 
" - "The file MUST follow the structure of dataclass 'GetRequest'." - " For more information please refer to the README.", -) -@click.option( - "--overwrite-metadata-cache", - cls=MutuallyExclusiveOption, - type=bool, - is_flag=True, - default=False, - help="Force to refresh the catalogue by overwriting the local cache.", - mutually_exclusive=["no_metadata_cache"], -) -@click.option( - "--no-metadata-cache", - cls=MutuallyExclusiveOption, - type=bool, - is_flag=True, - default=False, - help="Bypass the use of cache.", - mutually_exclusive=["overwrite_metadata_cache"], + help=documentation_utils.GET["REQUEST_FILE_HELP"], ) @click.option( "--filter", "--filter-with-globbing-pattern", type=str, default=None, - help="A pattern that must match the absolute paths of " - "the files to download.", + help=documentation_utils.GET["FILTER_HELP"], ) @click.option( "--regex", "--filter-with-regular-expression", type=str, default=None, - help="The regular expression that must match the absolute paths of " - "the files to download.", + help=documentation_utils.GET["REGEX_HELP"], ) @click.option( "--file-list", type=pathlib.Path, default=None, - help="Path to a .txt file containing a list of file paths," - " line by line, that will be downloaded directly." - " These files must be from the specified dataset using the --dataset-id." - " If no files can be found, the Toolbox will list all" - " files on the remote server and attempt to find a match.", + help=documentation_utils.GET["FILE_LIST_HELP"], ) @click.option( "--create-file-list", type=str, default=None, - help="Option to only create a file containing " - "the names of the targeted files instead of downloading them. " - "It writes the file in the directory specified with the " - "--output-directory option (default to current directory). " - "The file name specified should end with '.txt' or '.csv' " - "If specified, no other action will be performed. " - "Please find more information in the README.", -) -@click.option( - "--download-file-list", - type=bool, - is_flag=True, - cls=DeprecatedClickOption, - deprecated=["--download-file-list"], - preferred="--create-file-list", - hidden=True, + help=documentation_utils.GET["CREATE_FILE_LIST_HELP"], ) @click.option( "--sync", cls=MutuallyExclusiveOption, is_flag=True, default=False, - help="Option to synchronize the local directory with " - "the remote directory. See the documentation for more details.", + help=documentation_utils.GET["SYNC_HELP"], mutually_exclusive=["no-directories"], ) @click.option( @@ -238,8 +158,7 @@ def cli_group_get() -> None: cls=MutuallyExclusiveOption, is_flag=True, default=False, - help="Option to delete local files that are not present on " - "the remote server while applying sync.", + help=documentation_utils.GET["SYNC_DELETE_HELP"], mutually_exclusive=["no-directories"], ) @click.option( @@ -247,17 +166,27 @@ def cli_group_get() -> None: type=bool, is_flag=True, default=False, - help="Option to get the index files of an INSITU dataset. 
Temporary option.", + help=documentation_utils.GET["INDEX_PARTS_HELP"], +) +@click.option( + "--dry-run", + type=bool, + is_flag=True, + default=False, + help=documentation_utils.GET["DRY_RUN_HELP"], +) +@click.option( + "--max-concurrent-requests", + type=int, + default=15, + help=documentation_utils.GET["MAX_CONCURRENT_REQUESTS_HELP"], ) @tqdm_disable_option @click.option( "--log-level", type=click.Choice(["DEBUG", "INFO", "WARN", "ERROR", "CRITICAL", "QUIET"]), default="INFO", - help=( - "Set the details printed to console by the command " - "(based on standard logging library)." - ), + help=documentation_utils.GET["LOG_LEVEL_HELP"], ) @click.option( "--staging", @@ -268,7 +197,6 @@ def cli_group_get() -> None: ) @log_exception_and_exit def get( - dataset_url: Optional[str], dataset_id: Optional[str], dataset_version: Optional[str], dataset_part: Optional[str], @@ -282,17 +210,15 @@ def get( overwrite_output_data: bool, create_template: bool, request_file: Optional[pathlib.Path], - service: Optional[str], - overwrite_metadata_cache: bool, - no_metadata_cache: bool, filter: Optional[str], regex: Optional[str], file_list: Optional[pathlib.Path], create_file_list: Optional[str], - download_file_list: bool, sync: bool, sync_delete: bool, index_parts: bool, + dry_run: bool, + max_concurrent_requests: int, disable_progress_bar: bool, log_level: str, staging: bool, @@ -313,8 +239,7 @@ def get( create_get_template() return - return get_function( - dataset_url=dataset_url, + result = get_function( dataset_id=dataset_id, force_dataset_version=dataset_version, force_dataset_part=dataset_part, @@ -327,17 +252,16 @@ def get( force_download=force_download, overwrite_output_data=overwrite_output_data, request_file=request_file, - force_service=service, - overwrite_metadata_cache=overwrite_metadata_cache, - no_metadata_cache=no_metadata_cache, filter=filter, regex=regex, file_list_path=file_list, create_file_list=create_file_list, - download_file_list=download_file_list, sync=sync, sync_delete=sync_delete, index_parts=index_parts, + dry_run=dry_run, + max_concurrent_requests=max_concurrent_requests, disable_progress_bar=disable_progress_bar, staging=staging, ) + blank_logger.info(result.model_dump_json(indent=2)) diff --git a/copernicusmarine/command_line_interface/group_login.py b/copernicusmarine/command_line_interface/group_login.py index cb787329..8df8c1d8 100644 --- a/copernicusmarine/command_line_interface/group_login.py +++ b/copernicusmarine/command_line_interface/group_login.py @@ -7,81 +7,88 @@ from copernicusmarine.command_line_interface.exception_handler import ( log_exception_and_exit, ) +from copernicusmarine.core_functions import documentation_utils +from copernicusmarine.core_functions.click_custom_class import ( + CustomClickOptionsCommand, +) +from copernicusmarine.core_functions.credentials_utils import ( + DEFAULT_CLIENT_BASE_DIRECTORY, +) from copernicusmarine.core_functions.login import login_function -from copernicusmarine.core_functions.utils import DEFAULT_CLIENT_BASE_DIRECTORY -logger = logging.getLogger("copernicus_marine_root_logger") +logger = logging.getLogger("copernicusmarine") @click.group() -def cli_group_login() -> None: +def cli_login() -> None: pass -@cli_group_login.command( +@cli_login.command( "login", + cls=CustomClickOptionsCommand, short_help="Create a configuration file with your Copernicus Marine credentials.", - help=""" - Create a configuration file with your Copernicus Marine credentials. 
- - Create a configuration file under the $HOME/.copernicusmarine directory (overwritable with option --credentials-file). - """, # noqa + help=documentation_utils.LOGIN["LOGIN_DESCRIPTION_HELP"] + + " \n\nReturns\n " + + documentation_utils.LOGIN["LOGIN_RESPONSE_HELP"], # noqa epilog=""" Examples: - \b - COPERNICUSMARINE_SERVICE_USERNAME= COPERNICUSMARINE_SERVICE_PASSWORD= copernicusmarine login + Using environment variables: + + .. code-block:: bash + + COPERNICUSMARINE_SERVICE_USERNAME= COPERNICUSMARINE_SERVICE_PASSWORD= copernicusmarine login + + Using command line arguments: + + .. code-block:: bash + + copernicusmarine login --username --password + + Using user input directly: - \b - copernicusmarine login --username --password + .. code-block:: bash - \b - copernicusmarine login - > Username: [USER-INPUT] - > Password: [USER-INPUT] + copernicusmarine login + > Username: [USER-INPUT] + > Password: [USER-INPUT] """, # noqa ) @click.option( "--username", hide_input=False, - help="If not set, search for environment variable" - + " COPERNICUSMARINE_SERVICE_USERNAME" - + ", or else ask for user input.", + help=documentation_utils.LOGIN["USERNAME_HELP"], ) @click.option( "--password", hide_input=True, - help="If not set, search for environment variable" - + " COPERNICUSMARINE_SERVICE_PASSWORD" - + ", or else ask for user input.", + help=documentation_utils.LOGIN["PASSWORD_HELP"], ) @click.option( "--configuration-file-directory", type=click.Path(path_type=pathlib.Path), default=DEFAULT_CLIENT_BASE_DIRECTORY, - help="Path to the directory where the configuration file is stored.", + help=documentation_utils.LOGIN["CONFIGURATION_FILE_DIRECTORY_HELP"], ) @click.option( "--overwrite-configuration-file", "-overwrite", is_flag=True, default=False, - help="Flag to skip confirmation before overwriting configuration file.", + help=documentation_utils.LOGIN["OVERWRITE_CONFIGURATION_FILE_HELP"], ) @click.option( - "--skip-if-user-logged-in", + "--check-credentials-valid", is_flag=True, default=False, - help="Flag to skip the logging process if the user is already logged in.", + help=documentation_utils.LOGIN["CHECK_CREDENTIALS_VALID_HELP"], ) @click.option( "--log-level", type=click.Choice(["DEBUG", "INFO", "WARN", "ERROR", "CRITICAL", "QUIET"]), default="INFO", - help=( - "Set the details printed to console by the command " - "(based on standard logging library)."
- ), + help=documentation_utils.LOGIN["LOG_LEVEL_HELP"], ) @log_exception_and_exit def login( @@ -89,7 +96,7 @@ def login( password: Optional[str], configuration_file_directory: pathlib.Path, overwrite_configuration_file: bool, - skip_if_user_logged_in: bool, + check_credentials_valid: bool, log_level: str = "INFO", ) -> None: if log_level == "QUIET": @@ -97,10 +104,11 @@ def login( logger.setLevel(level="CRITICAL") else: logger.setLevel(level=log_level) - login_function( + if not login_function( username=username, password=password, configuration_file_directory=configuration_file_directory, overwrite_configuration_file=overwrite_configuration_file, - skip_if_user_logged_in=skip_if_user_logged_in, - ) + check_credentials_valid=check_credentials_valid, + ): + exit(1) diff --git a/copernicusmarine/command_line_interface/group_subset.py b/copernicusmarine/command_line_interface/group_subset.py index 83745fef..a06b6146 100644 --- a/copernicusmarine/command_line_interface/group_subset.py +++ b/copernicusmarine/command_line_interface/group_subset.py @@ -1,6 +1,5 @@ import logging import pathlib -from datetime import datetime from typing import List, Optional import click @@ -9,82 +8,69 @@ log_exception_and_exit, ) from copernicusmarine.command_line_interface.utils import ( - MutuallyExclusiveOption, assert_cli_args_are_not_set_except_create_template, force_dataset_part_option, force_dataset_version_option, tqdm_disable_option, ) -from copernicusmarine.core_functions.deprecated import ( - DeprecatedClickOption, - DeprecatedClickOptionsCommand, +from copernicusmarine.core_functions import documentation_utils +from copernicusmarine.core_functions.click_custom_class import ( + CustomClickOptionsCommand, ) from copernicusmarine.core_functions.models import ( + DEFAULT_COORDINATES_SELECTION_METHOD, + DEFAULT_COORDINATES_SELECTION_METHODS, DEFAULT_FILE_FORMAT, DEFAULT_FILE_FORMATS, - DEFAULT_SUBSET_METHOD, - DEFAULT_SUBSET_METHODS, + DEFAULT_VERTICAL_DIMENSION_OUTPUT, + DEFAULT_VERTICAL_DIMENSION_OUTPUTS, + CoordinatesSelectionMethod, FileFormat, - SubsetMethod, + VerticalDimensionOutput, ) -from copernicusmarine.core_functions.services_utils import CommandType from copernicusmarine.core_functions.subset import ( create_subset_template, subset_function, ) -from copernicusmarine.core_functions.utils import ( - DATETIME_SUPPORTED_FORMATS, - OVERWRITE_LONG_OPTION, - OVERWRITE_OPTION_HELP_TEXT, - OVERWRITE_SHORT_OPTION, -) +from copernicusmarine.core_functions.utils import datetime_parser -logger = logging.getLogger("copernicus_marine_root_logger") +logger = logging.getLogger("copernicusmarine") +blank_logger = logging.getLogger("copernicusmarine_blank_logger") @click.group() -def cli_group_subset() -> None: +def cli_subset() -> None: pass -@cli_group_subset.command( +@cli_subset.command( "subset", - cls=DeprecatedClickOptionsCommand, + cls=CustomClickOptionsCommand, short_help="Download subsets of datasets as NetCDF files or Zarr stores.", - help=""" - Download subsets of datasets as NetCDF files or Zarr stores. - - Either one of --dataset-id or --dataset-url is required (can be found via the "describe" command). - The argument values passed individually through the CLI take precedence over the values from the --motu-api-request option, - which takes precedence over the ones from the --request-file option. - """, # noqa + help=documentation_utils.SUBSET["SUBSET_DESCRIPTION_HELP"] + + "See :ref:`describe `." 
+ + " \n\nReturns\n " + + documentation_utils.SUBSET["SUBSET_RESPONSE_HELP"], epilog=""" Examples: - \b - copernicusmarine subset - --dataset-id cmems_mod_glo_phy-thetao_anfc_0.083deg_PT6H-i - --variable thetao - --start-datetime 2022-01-01T00:00:00 --end-datetime 2022-12-31T23:59:59 - --minimum-longitude -6.17 --maximum-longitude -5.08 - --minimum-latitude 35.75 --maximum-latitude 36.30 - --minimum-depth 0.0 --maximum-depth 5.0 + .. code-block:: bash + + copernicusmarine subset --dataset-id cmems_mod_ibi_phy_my_0.083deg-3D_P1D-m --variable thetao --variable so --start-datetime 2021-01-01 --end-datetime 2021-01-03 --minimum-longitude 0.0 --maximum-longitude 0.1 --minimum-latitude 28.0 --maximum-latitude 28.1 --minimum-depth 1 --maximum-depth 2 + + Equivalent to: + + .. code-block:: bash - \b - copernicusmarine subset -i cmems_mod_glo_phy-thetao_anfc_0.083deg_PT6H-i -v thetao -t 2022-01-01T00:00:00 -T 2022-12-31T23:59:59 -x -6.17 -X -5.08 -y 35.75 -Y 36.30 -z 0.0 -Z 5.0 + copernicusmarine subset -i cmems_mod_ibi_phy_my_0.083deg-3D_P1D-m -v thetao -v so -t 2021-01-01 -T 2021-01-03 -x 0.0 -X 0.1 -y 28.0 -Y 28.1 -z 1 -Z 2 \n """, # noqa ) -@click.option( - "--dataset-url", - "-u", - type=str, - help="The full dataset URL.", -) @click.option( "--dataset-id", "-i", type=str, - help="The datasetID.", + default=None, + help=documentation_utils.SUBSET["DATASET_ID_HELP"], ) @force_dataset_version_option @force_dataset_part_option @@ -92,238 +78,159 @@ def cli_group_subset() -> None: "--username", type=str, default=None, - help="If not set, search for environment variable" - + " COPERNICUSMARINE_SERVICE_USERNAME" - + ", or else look for configuration files, or else ask for user input.", + help=documentation_utils.SUBSET["USERNAME_HELP"], ) @click.option( "--password", type=str, default=None, - help="If not set, search for environment variable" - + " COPERNICUSMARINE_SERVICE_PASSWORD" - + ", or else look for configuration files, or else ask for user input.", + help=documentation_utils.SUBSET["PASSWORD_HELP"], ) @click.option( "--variable", "-v", "variables", type=str, - help="Specify dataset variable. Can be used multiple times.", + help=documentation_utils.SUBSET["VARIABLES_HELP"], multiple=True, ) @click.option( "--minimum-longitude", - "--minimal-longitude", "-x", - cls=DeprecatedClickOption, - deprecated=["--minimal-longitude"], - preferred="--minimum-longitude", type=float, - help=( - "Minimum longitude for the subset. " - "The value will be reduced to the interval [-180; 360[." - ), + help=documentation_utils.SUBSET["MINIMUM_LONGITUDE_HELP"], ) @click.option( "--maximum-longitude", - "--maximal-longitude", "-X", - cls=DeprecatedClickOption, - deprecated=["--maximal-longitude"], - preferred="--maximum-longitude", type=float, - help=( - "Maximum longitude for the subset. " - "The value will be reduced to the interval [-180; 360[." - ), + help=documentation_utils.SUBSET["MAXIMUM_LONGITUDE_HELP"], ) @click.option( "--minimum-latitude", - "--minimal-latitude", "-y", - cls=DeprecatedClickOption, - deprecated=["--minimal-latitude"], - preferred="--minimum-latitude", type=click.FloatRange(min=-90, max=90), - help="Minimum latitude for the subset." - " Requires a float within this range:", + help=documentation_utils.SUBSET["MINIMUM_LATITUDE_HELP"], ) @click.option( "--maximum-latitude", - "--maximal-latitude", "-Y", - cls=DeprecatedClickOption, - deprecated=["--maximal-latitude"], - preferred="--maximum-latitude", type=click.FloatRange(min=-90, max=90), - help="Maximum latitude for the subset." 
- " Requires a float within this range:", + help=documentation_utils.SUBSET["MAXIMUM_LATITUDE_HELP"], ) @click.option( "--minimum-depth", - "--minimal-depth", "-z", - cls=DeprecatedClickOption, - deprecated=["--minimal-depth"], - preferred="--minimum-depth", type=click.FloatRange(min=0), - help="Minimum depth for the subset. Requires a float within this range:", + help=documentation_utils.SUBSET["MINIMUM_DEPTH_HELP"], ) @click.option( "--maximum-depth", - "--maximal-depth", "-Z", - cls=DeprecatedClickOption, - deprecated=["--maximal-depth"], - preferred="--maximum-depth", type=click.FloatRange(min=0), - help="Maximum depth for the subset. Requires a float within this range:", + help=documentation_utils.SUBSET["MAXIMUM_DEPTH_HELP"], ) @click.option( - "--vertical-dimension-as-originally-produced", - type=bool, - default=True, - show_default=True, - help=( - "Consolidate the vertical dimension (the z-axis) as it is in the " - "dataset originally produced, " - "named `depth` with descending positive values." - ), + "--vertical-dimension-output", + "-V", + type=click.Choice(DEFAULT_VERTICAL_DIMENSION_OUTPUTS), + default=DEFAULT_VERTICAL_DIMENSION_OUTPUT, + help=documentation_utils.SUBSET["VERTICAL_DIMENSION_OUTPUT_HELP"], ) @click.option( "--start-datetime", "-t", - type=click.DateTime(DATETIME_SUPPORTED_FORMATS), - help="The start datetime of the temporal subset. " - "Caution: encapsulate date " - + 'with " " to ensure valid expression for format "%Y-%m-%d %H:%M:%S".', + type=str, + help=documentation_utils.SUBSET["START_DATETIME_HELP"] + + "Caution: encapsulate date with “ “ to ensure valid " + "expression for format “%Y-%m-%d %H:%M:%S”.", ) @click.option( "--end-datetime", "-T", - type=click.DateTime(DATETIME_SUPPORTED_FORMATS), - help="The end datetime of the temporal subset. Caution: encapsulate date " - + 'with " " to ensure valid expression for format "%Y-%m-%d %H:%M:%S".', + type=str, + help=documentation_utils.SUBSET["END_DATETIME_HELP"] + + "Caution: encapsulate date with “ “ to ensure valid " + "expression for format “%Y-%m-%d %H:%M:%S”.", ) @click.option( - "--subset-method", - type=click.Choice(DEFAULT_SUBSET_METHODS), - default=DEFAULT_SUBSET_METHOD, - help=( - "The subset method when requesting the dataset. If strict, you can only " - "request dimension strictly inside the dataset." - ), + "--coordinates-selection-method", + type=click.Choice(DEFAULT_COORDINATES_SELECTION_METHODS), + default=DEFAULT_COORDINATES_SELECTION_METHOD, + help=documentation_utils.SUBSET["COORDINATES_SELECTION_METHOD_HELP"], ) @click.option( "--output-directory", "-o", type=click.Path(path_type=pathlib.Path), - help="The destination folder for the downloaded files." - + " Default is the current directory.", + help=documentation_utils.SUBSET["OUTPUT_DIRECTORY_HELP"], ) @click.option( "--credentials-file", type=click.Path(path_type=pathlib.Path), - help=( - "Path to a credentials file if not in its default directory. " - "Accepts .copernicusmarine-credentials / .netrc or _netrc / " - "motuclient-python.ini files." - ), + help=documentation_utils.SUBSET["CREDENTIALS_FILE_HELP"], ) @click.option( "--output-filename", "-f", type=str, - help=( - "Concatenate the downloaded data in the given file name " - "(under the output directory)." - ), + help=documentation_utils.SUBSET["OUTPUT_FILENAME_HELP"], ) @click.option( "--file-format", type=click.Choice(DEFAULT_FILE_FORMATS), default=DEFAULT_FILE_FORMAT, - help=("Format of the downloaded dataset. 
Default to NetCDF (.nc)."), + help=documentation_utils.SUBSET["FILE_FORMAT_HELP"], ) @click.option( "--force-download", is_flag=True, default=False, - help="Flag to skip confirmation before download.", + help=documentation_utils.SUBSET["FORCE_DOWNLOAD_HELP"], ) @click.option( - OVERWRITE_LONG_OPTION, - OVERWRITE_SHORT_OPTION, + documentation_utils.SUBSET["OVERWRITE_LONG_OPTION"], + documentation_utils.SUBSET["OVERWRITE_SHORT_OPTION"], is_flag=True, default=False, - help=OVERWRITE_OPTION_HELP_TEXT, + help=documentation_utils.SUBSET["OVERWRITE_OUTPUT_DATA_HELP"], ) @click.option( "--service", - "--force-service", "-s", - cls=DeprecatedClickOption, - deprecated=["--force-service"], - preferred="--service", type=str, - help=( - "Force download through one of the available services " - f"using the service name among {CommandType.SUBSET.service_names()} " - f"or its short name among {CommandType.SUBSET.service_short_names()}." - ), + help=documentation_utils.SUBSET["SERVICE_HELP"], ) @click.option( "--create-template", type=bool, is_flag=True, default=False, - help="Option to create a file subset_template.json in your current directory " - "containing CLI arguments. If specified, no other action will be performed.", + help=documentation_utils.SUBSET["CREATE_TEMPLATE_HELP"], ) @click.option( "--request-file", type=click.Path(exists=True, path_type=pathlib.Path), - help="Option to pass a file containing CLI arguments. " - "The file MUST follow the structure of dataclass 'SubsetRequest'." - " For more information please refer to the README.", + help=documentation_utils.SUBSET["REQUEST_FILE_HELP"], ) @click.option( "--motu-api-request", type=str, - help=( - "Option to pass a complete MOTU API request as a string. " - 'Caution, user has to replace double quotes " with single ' - "quotes ' in the request." - ), -) -@click.option( - "--overwrite-metadata-cache", - cls=MutuallyExclusiveOption, - type=bool, - is_flag=True, - default=False, - help="Force to refresh the catalogue by overwriting the local cache.", - mutually_exclusive=["no_metadata_cache"], + help=documentation_utils.SUBSET["MOTU_API_REQUEST_HELP"], ) @click.option( - "--no-metadata-cache", - cls=MutuallyExclusiveOption, + "--dry-run", type=bool, is_flag=True, default=False, - help="Bypass the use of cache.", - mutually_exclusive=["overwrite_metadata_cache"], + help=documentation_utils.SUBSET["DRY_RUN_HELP"], ) @tqdm_disable_option @click.option( "--log-level", type=click.Choice(["DEBUG", "INFO", "WARN", "ERROR", "CRITICAL", "QUIET"]), default="INFO", - help=( - "Set the details printed to console by the command " - "(based on standard logging library)." - ), + help=documentation_utils.SUBSET["LOG_LEVEL_HELP"], ) @click.option( "--staging", @@ -332,37 +239,25 @@ def cli_group_subset() -> None: is_flag=True, hidden=True, ) -@click.option( - "--netcdf-compression-enabled", - type=bool, - default=False, - is_flag=True, - help=( - "Enable compression level 1 to the NetCDF output file. " - "Use --netcdf-compression-level option to customize the compression " - "level" - ), -) @click.option( "--netcdf-compression-level", type=click.IntRange(0, 9), - help=( - "Specify a compression level to apply on the NetCDF output file. 
" - "A value of 0 means no compression, and 9 is the highest level of " - "compression available" - ), + is_flag=False, + flag_value=1, + default=0, + help=documentation_utils.SUBSET["NETCDF_COMPRESSION_LEVEL_HELP"] + + " If used as a flag, the assigned value will be 1.", ) @click.option( "--netcdf3-compatible", type=bool, default=False, is_flag=True, - help=("Enable downloading the dataset in a netCDF 3 compatible format."), + help=documentation_utils.SUBSET["NETCDF3_COMPATIBLE_HELP"], ) @log_exception_and_exit def subset( - dataset_url: Optional[str], - dataset_id: Optional[str], + dataset_id: str, dataset_version: Optional[str], dataset_part: Optional[str], username: Optional[str], @@ -374,14 +269,13 @@ def subset( maximum_latitude: Optional[float], minimum_depth: Optional[float], maximum_depth: Optional[float], - vertical_dimension_as_originally_produced: bool, - start_datetime: Optional[datetime], - end_datetime: Optional[datetime], - subset_method: SubsetMethod, + vertical_dimension_output: VerticalDimensionOutput, + start_datetime: Optional[str], + end_datetime: Optional[str], + coordinates_selection_method: CoordinatesSelectionMethod, output_filename: Optional[str], file_format: FileFormat, - netcdf_compression_enabled: bool, - netcdf_compression_level: Optional[int], + netcdf_compression_level: int, netcdf3_compatible: bool, service: Optional[str], create_template: bool, @@ -391,8 +285,7 @@ def subset( motu_api_request: Optional[str], force_download: bool, overwrite_output_data: bool, - overwrite_metadata_cache: bool, - no_metadata_cache: bool, + dry_run: bool, disable_progress_bar: bool, log_level: str, staging: bool = False, @@ -413,38 +306,38 @@ def subset( create_subset_template() return - subset_function( - dataset_url, - dataset_id, - dataset_version, - dataset_part, - username, - password, - variables, - minimum_longitude, - maximum_longitude, - minimum_latitude, - maximum_latitude, - minimum_depth, - maximum_depth, - vertical_dimension_as_originally_produced, - start_datetime, - end_datetime, - subset_method, - output_filename, - file_format, - service, - request_file, - output_directory, - credentials_file, - motu_api_request, - force_download, - overwrite_output_data, - overwrite_metadata_cache, - no_metadata_cache, - disable_progress_bar, - staging, - netcdf_compression_enabled, - netcdf_compression_level, + response = subset_function( + dataset_id=dataset_id, + force_dataset_version=dataset_version, + force_dataset_part=dataset_part, + username=username, + password=password, + variables=variables, + minimum_longitude=minimum_longitude, + maximum_longitude=maximum_longitude, + minimum_latitude=minimum_latitude, + maximum_latitude=maximum_latitude, + minimum_depth=minimum_depth, + maximum_depth=maximum_depth, + vertical_dimension_output=vertical_dimension_output, + start_datetime=( + datetime_parser(start_datetime) if start_datetime else None + ), + end_datetime=datetime_parser(end_datetime) if end_datetime else None, + coordinates_selection_method=coordinates_selection_method, + output_filename=output_filename, + file_format=file_format, + force_service=service, + request_file=request_file, + output_directory=output_directory, + credentials_file=credentials_file, + motu_api_request=motu_api_request, + force_download=force_download, + overwrite_output_data=overwrite_output_data, + dry_run=dry_run, + disable_progress_bar=disable_progress_bar, + staging=staging, + netcdf_compression_level=netcdf_compression_level, netcdf3_compatible=netcdf3_compatible, ) + 
blank_logger.info(response.model_dump_json(indent=2)) diff --git a/copernicusmarine/command_line_interface/utils.py b/copernicusmarine/command_line_interface/utils.py index a9a65342..7d1a3d1c 100644 --- a/copernicusmarine/command_line_interface/utils.py +++ b/copernicusmarine/command_line_interface/utils.py @@ -2,7 +2,7 @@ from click import Context, Option, UsageError from click.core import ParameterSource -from copernicusmarine.core_functions.deprecated import DeprecatedClickOption +from copernicusmarine.core_functions import documentation_utils class MutuallyExclusiveOption(Option): @@ -33,7 +33,14 @@ def handle_parse_result(self, ctx, opts, args): class OtherOptionsPassedWithCreateTemplate(Exception): - ... + """ + Exception raised when other options are passed with create_template. + + Please note that create_template should be passed with no other option + except log_level. + """ + + pass def assert_cli_args_are_not_set_except_create_template( @@ -50,27 +57,19 @@ def assert_cli_args_are_not_set_except_create_template( "--disable-progress-bar", is_flag=True, default=False, - help="Flag to hide progress bar.", + help=documentation_utils.SUBSET["DISABLE_PROGRESS_BAR_HELP"], ) force_dataset_version_option = click.option( "--dataset-version", - "--force-dataset-version", - cls=DeprecatedClickOption, - deprecated=["--force-dataset-version"], - preferred="--dataset-version", type=str, default=None, - help="Force the selection of a specific dataset version.", + help=documentation_utils.SUBSET["DATASET_VERSION_HELP"], ) force_dataset_part_option = click.option( "--dataset-part", - "--force-dataset-part", - cls=DeprecatedClickOption, - deprecated=["--force-dataset-part"], - preferred="--dataset-part", type=str, default=None, - help="Force the selection of a specific dataset part.", + help=documentation_utils.SUBSET["DATASET_PART_HELP"], ) diff --git a/copernicusmarine/core_functions/deprecated.py b/copernicusmarine/core_functions/click_custom_class.py similarity index 53% rename from copernicusmarine/core_functions/deprecated.py rename to copernicusmarine/core_functions/click_custom_class.py index 48e8ed75..53a8027a 100644 --- a/copernicusmarine/core_functions/deprecated.py +++ b/copernicusmarine/core_functions/click_custom_class.py @@ -1,28 +1,11 @@ -import functools import inspect import logging -from typing import Any, Callable, Dict import click -logger = logging.getLogger("copernicus_marine_root_logger") +from copernicusmarine.core_functions.utils import log_deprecated_message - -def get_deprecated_message(old_value, preferred_value): - return ( - f"'{old_value}' has been deprecated, use '{preferred_value}' instead" - ) - - -def log_deprecated_message(old_value, preferred_value): - logger.warning(get_deprecated_message(old_value, preferred_value)) - - -def raise_both_old_and_new_value_error(old_value, new_value): - raise TypeError( - f"Received both {old_value} and {new_value} as arguments! 
" - f"{get_deprecated_message(old_value, new_value)}" - ) +logger = logging.getLogger("copernicusmarine") class DeprecatedClickOption(click.Option): @@ -32,7 +15,7 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) -class DeprecatedClickOptionsCommand(click.Command): +class CustomClickOptionsCommand(click.Command): def make_parser(self, ctx): parser = super().make_parser(ctx) @@ -68,25 +51,10 @@ def process(value, state): return parser - -def deprecated_python_option(**aliases: str) -> Callable: - def deco(f: Callable): - @functools.wraps(f) - def wrapper(*args, **kwargs): - rename_kwargs(f.__name__, kwargs, aliases) - return f(*args, **kwargs) - - return wrapper - - return deco - - -def rename_kwargs( - func_name: str, kwargs: Dict[str, Any], aliases: Dict[str, str] -): - for alias, new in aliases.items(): - if alias in kwargs: - if new in kwargs: - raise_both_old_and_new_value_error(alias, new) - log_deprecated_message(alias, new) - kwargs[new] = kwargs.pop(alias) + def format_epilog(self, ctx, formatter): + if self.epilog: + formatter.write_paragraph() + for line in self.epilog.split("\n"): + if ".. code-block::" in line: + continue + formatter.write(line + "\n") diff --git a/copernicusmarine/core_functions/credentials_utils.py b/copernicusmarine/core_functions/credentials_utils.py index a554f48d..9e823677 100644 --- a/copernicusmarine/core_functions/credentials_utils.py +++ b/copernicusmarine/core_functions/credentials_utils.py @@ -2,7 +2,6 @@ import configparser import logging import pathlib -from datetime import timedelta from netrc import netrc from platform import system from typing import Literal, Optional, Tuple @@ -10,34 +9,85 @@ import click import lxml.html import requests -from cachier.core import cachier from copernicusmarine.core_functions.environment_variables import ( + COPERNICUSMARINE_CREDENTIALS_DIRECTORY, COPERNICUSMARINE_SERVICE_PASSWORD, COPERNICUSMARINE_SERVICE_USERNAME, ) from copernicusmarine.core_functions.sessions import ( get_configured_requests_session, ) -from copernicusmarine.core_functions.utils import ( - CACHE_BASE_DIRECTORY, - DEFAULT_CLIENT_BASE_DIRECTORY, -) -logger = logging.getLogger("copernicus_marine_root_logger") +logger = logging.getLogger("copernicusmarine") +USER_DEFINED_CACHE_DIRECTORY: str = ( + COPERNICUSMARINE_CREDENTIALS_DIRECTORY or "" +) +DEFAULT_CLIENT_BASE_DIRECTORY: pathlib.Path = ( + pathlib.Path(USER_DEFINED_CACHE_DIRECTORY) + if USER_DEFINED_CACHE_DIRECTORY + else pathlib.Path.home() +) / ".copernicusmarine" DEFAULT_CLIENT_CREDENTIALS_FILENAME = ".copernicusmarine-credentials" DEFAULT_CLIENT_CREDENTIALS_FILEPATH = ( DEFAULT_CLIENT_BASE_DIRECTORY / DEFAULT_CLIENT_CREDENTIALS_FILENAME ) +RECOVER_YOUR_CREDENTIALS_MESSAGE = ( + "Learn how to recover your credentials at: " + "https://help.marine.copernicus.eu/en/articles/" + "4444552-i-forgot-my-username-or-my-password-what-should-i-do" +) + +COPERNICUS_MARINE_AUTH_SYSTEM_URL = "https://auth.marine.copernicus.eu/" +COPERNICUS_MARINE_AUTH_SYSTEM_TOKEN_ENDPOINT = ( + COPERNICUS_MARINE_AUTH_SYSTEM_URL + + "realms/MIS/protocol/openid-connect/token" +) +COPERNICUS_MARINE_AUTH_SYSTEM_USERINFO_ENDPOINT = ( + COPERNICUS_MARINE_AUTH_SYSTEM_URL + + "realms/MIS/protocol/openid-connect/userinfo" +) + + +class CredentialsCannotBeNone(Exception): + """ + Exception raised when credentials are not set. + To use the Copernicus Marine Service, you need to provide a username and + a password. 
You can set them as environment variables, pass them as + arguments to the function, or use the :func:`~copernicusmarine.login` command. + To register and create your valid credentials, please visit: + `copernicusmarine registration page `_ + """ -class CredentialCannotBeNone(Exception): - ... + pass class InvalidUsernameOrPassword(Exception): - ... + """ + Exception raised when the username or password is invalid. + + To register and create your valid credentials, please visit: + `copernicusmarine registration page `_ + """ + + pass + + +class CouldNotConnectToAuthenticationSystem(Exception): + """ + Exception raised when the client could not connect to the authentication system. + + Please check the following common problems: + + - Check your internet connection + - Make sure to authorize ``cmems-cas.cls.fr`` and/or ``auth.marine.copernicus.eu`` domains + + If none of this works, the authentication system may be down; please try again later. + """ # noqa + + pass def _load_credential_from_copernicus_marine_configuration_file( @@ -87,17 +137,25 @@ def _load_credential_from_motu_configuration_file( def _retrieve_credential_from_prompt( credential_type: Literal["username", "password"], hide_input: bool ) -> str: - return click.prompt(credential_type, hide_input=hide_input) + if credential_type == "username": + logger.info( + "Downloading Copernicus Marine data requires a Copernicus Marine username " + "and password, sign up for free at:" + " https://data.marine.copernicus.eu/register" + ) + return click.prompt( + "Copernicus Marine " + credential_type, hide_input=hide_input + ) def _retrieve_credential_from_environment_variable( credential_type: Literal["username", "password"] ) -> Optional[str]: if credential_type == "username": - logger.debug("username loaded from environment variable") + logger.debug("Tried to load username from environment variable") return COPERNICUSMARINE_SERVICE_USERNAME if credential_type == "password": - logger.debug("password loaded from environment variable") + logger.debug("Tried to load password from environment variable") return COPERNICUSMARINE_SERVICE_PASSWORD @@ -178,9 +236,60 @@ def copernicusmarine_configuration_file_exists( return configuration_filename.exists() +def copernicusmarine_credentials_are_valid( + configuration_file_directory: pathlib.Path, + username: Optional[str], + password: Optional[str], +): + if username and password: + if _are_copernicus_marine_credentials_valid(username, password): + logger.info("Valid credentials from input username and password.") + return True + else: + logger.info( + "Invalid credentials from input username and password." + ) + logger.info(RECOVER_YOUR_CREDENTIALS_MESSAGE) + return False + elif ( + COPERNICUSMARINE_SERVICE_USERNAME and COPERNICUSMARINE_SERVICE_PASSWORD + ): + if _are_copernicus_marine_credentials_valid( + COPERNICUSMARINE_SERVICE_USERNAME, + COPERNICUSMARINE_SERVICE_PASSWORD, + ): + logger.info( + "Valid credentials from environment variables: " + "COPERNICUSMARINE_SERVICE_USERNAME and " + "COPERNICUSMARINE_SERVICE_PASSWORD." + ) + return True + else: + logger.info( + "Invalid credentials from environment variables: " + "COPERNICUSMARINE_SERVICE_USERNAME and " + "COPERNICUSMARINE_SERVICE_PASSWORD."
+ ) + logger.info(RECOVER_YOUR_CREDENTIALS_MESSAGE) + return False + elif copernicusmarine_configuration_file_is_valid( + configuration_file_directory + ): + logger.info("Valid credentials from configuration file.") + return True + else: + logger.info("Invalid credentials from configuration file.") + logger.info(RECOVER_YOUR_CREDENTIALS_MESSAGE) + return False + + def copernicusmarine_configuration_file_is_valid( configuration_file_directory: pathlib.Path, ) -> bool: + if not copernicusmarine_configuration_file_exists( + configuration_file_directory + ): + return False configuration_filename = pathlib.Path( configuration_file_directory / DEFAULT_CLIENT_CREDENTIALS_FILENAME ) @@ -193,7 +302,7 @@ def copernicusmarine_configuration_file_is_valid( return ( username is not None and password is not None - and _check_credentials_with_cas(username, password) + and _are_copernicus_marine_credentials_valid(username, password) ) @@ -226,7 +335,7 @@ def create_copernicusmarine_configuration_file( return configuration_filename -def _check_credentials_with_cas(username: str, password: str) -> bool: +def _check_credentials_with_old_cas(username: str, password: str) -> bool: logger.debug("Checking user credentials...") service = "copernicus-marine-client" cmems_cas_login_url = ( @@ -255,12 +364,79 @@ def _check_credentials_with_cas(username: str, password: str) -> bool: return login_success -@cachier(stale_after=timedelta(hours=48), cache_dir=CACHE_BASE_DIRECTORY) +def _check_credentials_with_cas(username: str, password: str) -> bool: + keycloak_url = COPERNICUS_MARINE_AUTH_SYSTEM_TOKEN_ENDPOINT + client_id = "toolbox" + scope = "openid profile email" + + data = { + "client_id": client_id, + "grant_type": "password", + "username": username, + "password": password, + "scope": scope, + } + conn_session = get_configured_requests_session() + response = conn_session.post(keycloak_url, data=data) + response.raise_for_status() + if response.status_code == 200: + token_response = response.json() + access_token = token_response["access_token"] + userinfo_url = COPERNICUS_MARINE_AUTH_SYSTEM_USERINFO_ENDPOINT + headers = {"Authorization": f"Bearer {access_token}"} + response = conn_session.get(userinfo_url, headers=headers) + response.raise_for_status() + if response.status_code == 200: + return True + else: + return False + else: + return False + + +def _are_copernicus_marine_credentials_valid_old_system( + username: str, password: str +) -> bool: + number_of_retry = 3 + user_is_active = None + while (user_is_active not in [True, False]) and number_of_retry > 0: + try: + user_is_active = _check_credentials_with_old_cas( + username=username, password=password + ) + except requests.exceptions.ConnectTimeout: + number_of_retry -= 1 + except requests.exceptions.ConnectionError: + number_of_retry -= 1 + if user_is_active is None: + raise CouldNotConnectToAuthenticationSystem() + return user_is_active + + def _are_copernicus_marine_credentials_valid( username: str, password: str -) -> Optional[bool]: +) -> bool: + try: + result = _are_copernicus_marine_credentials_valid_new_system( + username, password + ) + return result + + except Exception as e: + logger.debug( + f"Could not connect with new authentication system because of: {e}" + ) + logger.debug("Trying with old authentication system...") + return _are_copernicus_marine_credentials_valid_old_system( + username, password + ) + + +def _are_copernicus_marine_credentials_valid_new_system( + username: str, password: str +) -> bool: number_of_retry = 3 - user_is_active = 
None # Not cached by cachier + user_is_active = None while (user_is_active not in [True, False]) and number_of_retry > 0: try: user_is_active = _check_credentials_with_cas( @@ -270,6 +446,8 @@ def _are_copernicus_marine_credentials_valid( number_of_retry -= 1 except requests.exceptions.ConnectionError: number_of_retry -= 1 + if user_is_active is None: + raise CouldNotConnectToAuthenticationSystem() return user_is_active @@ -310,14 +488,14 @@ def get_and_check_username_password( username: Optional[str], password: Optional[str], credentials_file: Optional[pathlib.Path], - no_metadata_cache: bool, ) -> Tuple[str, str]: username, password = get_username_password( username=username, password=password, credentials_file=credentials_file ) copernicus_marine_credentials_are_valid = ( _are_copernicus_marine_credentials_valid( - username, password, ignore_cache=no_metadata_cache + username, + password, ) ) if not copernicus_marine_credentials_are_valid: @@ -363,7 +541,7 @@ def _get_credential_from_environment_variable_or_prompt( credential_type, hide_input ) if not credential: - raise CredentialCannotBeNone(credential_type) + raise CredentialsCannotBeNone(credential_type) return credential @@ -380,9 +558,7 @@ def credentials_file_builder( password, "password", True ) copernicus_marine_credentials_are_valid = ( - _are_copernicus_marine_credentials_valid( - username, password, ignore_cache=False - ) + _are_copernicus_marine_credentials_valid(username, password) ) if copernicus_marine_credentials_are_valid: configuration_file = create_copernicusmarine_configuration_file( diff --git a/copernicusmarine/core_functions/custom_open_zarr.py b/copernicusmarine/core_functions/custom_open_zarr.py index a8947570..304524c2 100644 --- a/copernicusmarine/core_functions/custom_open_zarr.py +++ b/copernicusmarine/core_functions/custom_open_zarr.py @@ -11,12 +11,12 @@ from copernicusmarine.core_functions.sessions import ( PROXIES, TRUST_ENV, - _get_ssl_context, get_configured_boto3_session, + get_ssl_context, ) from copernicusmarine.core_functions.utils import parse_access_dataset_url -logger = logging.getLogger("copernicus_marine_root_logger") +logger = logging.getLogger("copernicusmarine") class CustomS3Store(MutableMapping): @@ -160,7 +160,7 @@ def open_zarr( { "storage_options": { "client_kwargs": {"trust_env": TRUST_ENV, "proxies": PROXIES}, - "ssl": _get_ssl_context(), + "ssl": get_ssl_context(), } } ) diff --git a/copernicusmarine/core_functions/deprecated_options.py b/copernicusmarine/core_functions/deprecated_options.py index e22e6a30..02c37636 100644 --- a/copernicusmarine/core_functions/deprecated_options.py +++ b/copernicusmarine/core_functions/deprecated_options.py @@ -37,37 +37,4 @@ def dict_old_names_to_new_names(self): return result_dict -DEPRECATED_OPTIONS: DeprecatedOptionMapping = DeprecatedOptionMapping( - [ - DeprecatedOption( - old_name="minimal_longitude", new_name="minimum_longitude" - ), - DeprecatedOption( - old_name="maximal_longitude", new_name="maximum_longitude" - ), - DeprecatedOption( - old_name="minimal_latitude", new_name="minimum_latitude" - ), - DeprecatedOption( - old_name="maximal_latitude", new_name="maximum_latitude" - ), - DeprecatedOption(old_name="minimal_depth", new_name="minimum_depth"), - DeprecatedOption(old_name="maximal_depth", new_name="maximum_depth"), - DeprecatedOption( - old_name="force_dataset_version", new_name="dataset_version" - ), - DeprecatedOption( - old_name="force_dataset_part", new_name="dataset_part" - ), - DeprecatedOption(old_name="force_service", 
new_name="service"), - DeprecatedOption( - old_name="download_file_list", - new_name="create_file_list", - replace=False, - ), - DeprecatedOption( - old_name="include_all_versions", - new_name="include_versions", - ), - ] -) +DEPRECATED_OPTIONS: DeprecatedOptionMapping = DeprecatedOptionMapping([]) diff --git a/copernicusmarine/core_functions/describe.py b/copernicusmarine/core_functions/describe.py index 1e0338cc..e0759fbc 100644 --- a/copernicusmarine/core_functions/describe.py +++ b/copernicusmarine/core_functions/describe.py @@ -2,18 +2,16 @@ import logging from copernicusmarine.catalogue_parser.catalogue_parser import ( - CopernicusMarineCatalogue, - CopernicusMarineDatasetServiceType, filter_catalogue_with_strings, parse_catalogue, ) -from copernicusmarine.core_functions.utils import ( - create_cache_directory, - delete_cache_folder, +from copernicusmarine.catalogue_parser.models import ( + CopernicusMarineCatalogue, + CopernicusMarineDatasetServiceType, ) from copernicusmarine.core_functions.versions_verifier import VersionVerifier -logger = logging.getLogger("copernicus_marine_root_logger") +logger = logging.getLogger("copernicusmarine") def describe_function( @@ -22,11 +20,11 @@ def describe_function( include_keywords: bool, include_versions: bool, contains: list[str], - overwrite_metadata_cache: bool, - no_metadata_cache: bool, + max_concurrent_requests: int, disable_progress_bar: bool, staging: bool, ) -> str: + VersionVerifier.check_version_describe(staging) if staging: logger.warning( @@ -34,14 +32,8 @@ def describe_function( "Data will come from the staging environment." ) - if overwrite_metadata_cache: - delete_cache_folder(quiet=True) - - if not no_metadata_cache: - create_cache_directory() - base_catalogue: CopernicusMarineCatalogue = parse_catalogue( - no_metadata_cache=no_metadata_cache, + max_concurrent_requests=max_concurrent_requests, disable_progress_bar=disable_progress_bar, staging=staging, ) diff --git a/copernicusmarine/core_functions/documentation_utils.py b/copernicusmarine/core_functions/documentation_utils.py new file mode 100644 index 00000000..798b2f5e --- /dev/null +++ b/copernicusmarine/core_functions/documentation_utils.py @@ -0,0 +1,246 @@ +from copernicusmarine.core_functions.services_utils import CommandType + +SHARED: dict[str, str] = { + "OVERWRITE_SHORT_OPTION": "--overwrite", + "OVERWRITE_LONG_OPTION": "--overwrite-output-data", + "OVERWRITE_OUTPUT_DATA_HELP": ( + "If specified and if the file already exists on destination, then it will be " + "overwritten instead of creating new one with unique index." + ), + "USERNAME_HELP": ( + "The username for authentication." + ), # a little hardcoding in Python API + "PASSWORD_HELP": ( + "The password for authentication." + ), # a little hardcoding in Python API + "LOG_LEVEL_HELP": ( + "Set the details printed to console by the command " + "(based on standard logging library)." + ), + "CREATE_TEMPLATE_HELP": ( + "Option to create a file _template.json in your current directory " + "containing the arguments. If specified, no other action will be performed." + ), + "CREDENTIALS_FILE_HELP": ( + "Path to a credentials file if not in its default directory" + " (``$HOME/.copernicusmarine``). Accepts " + ".copernicusmarine-credentials / .netrc or _netrc / motuclient-python.ini " + "files." 
diff --git a/copernicusmarine/core_functions/documentation_utils.py b/copernicusmarine/core_functions/documentation_utils.py new file mode 100644 index 00000000..798b2f5e --- /dev/null +++ b/copernicusmarine/core_functions/documentation_utils.py @@ -0,0 +1,246 @@ +from copernicusmarine.core_functions.services_utils import CommandType + +SHARED: dict[str, str] = { + "OVERWRITE_SHORT_OPTION": "--overwrite", + "OVERWRITE_LONG_OPTION": "--overwrite-output-data", + "OVERWRITE_OUTPUT_DATA_HELP": ( + "If specified and if the file already exists on destination, then it will be " + "overwritten instead of creating a new one with a unique index." + ), + "USERNAME_HELP": ( + "The username for authentication." + ), # a little hardcoding in Python API + "PASSWORD_HELP": ( + "The password for authentication." + ), # a little hardcoding in Python API + "LOG_LEVEL_HELP": ( + "Set the details printed to console by the command " + "(based on standard logging library)." + ), + "CREATE_TEMPLATE_HELP": ( + "Option to create a file _template.json in your current directory " + "containing the arguments. If specified, no other action will be performed." + ), + "CREDENTIALS_FILE_HELP": ( + "Path to a credentials file if not in its default directory" + " (``$HOME/.copernicusmarine``). Accepts " + ".copernicusmarine-credentials / .netrc or _netrc / motuclient-python.ini " + "files." + ), + "DATASET_VERSION_HELP": "Force the selection of a specific dataset version.", + "DATASET_PART_HELP": "Force the selection of a specific dataset part.", + "DATASET_ID_HELP": ( + "The datasetID, required either as an argument or in the request_file option." + ), + "DISABLE_PROGRESS_BAR_HELP": "Flag to hide progress bar.", + "FORCE_DOWNLOAD_HELP": "Flag to skip confirmation before download.", + "DRY_RUN_HELP": "If True, runs query without downloading data.", + "OUTPUT_DIRECTORY_HELP": ( + "The destination folder for the downloaded files. Default is the current " + "directory." + ), + "REQUEST_FILE_HELP": ( + "Option to pass a file containing the arguments. For more information " + "please refer to the documentation or use option ``--create-template`` " + "from the command line interface for an example template." + ), +} + +LOGIN: dict[str, str] = { + "LOGIN_DESCRIPTION_HELP": ( + "Create a configuration file with your Copernicus Marine credentials" + " under the ``$HOME/.copernicusmarine`` directory" + " (overwritable with the ``overwrite_configuration_file`` option)." + ), + "LOGIN_RESPONSE_HELP": ( + "Exit code\n 0 exit code if the login was successfully " + "completed, 1 otherwise." + ), + "CONFIGURATION_FILE_DIRECTORY_HELP": ( + "Path to the directory where the configuration file is stored." + ), + "OVERWRITE_CONFIGURATION_FILE_HELP": ( + "Flag to skip confirmation before overwriting configuration file." + ), + "CHECK_CREDENTIALS_VALID_HELP": ( + "Flag to check if the credentials are valid. " + "No other action will be performed. " + "The validity will be checked in this order: " + "1. Check if the credentials are valid" + " with the provided username and password. " + "2. Check if the credentials are valid in the environment variables. " + "3. Check if the credentials are valid in the configuration file. " + "As soon as any credentials are found (valid or not), the check " + "returns immediately." + ), +}
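The LOGIN help strings above document the new ``--check-credentials-valid`` flow. A hedged sketch, assuming the Python-level login mirrors the CLI flag and returns a boolean (the CLI maps a falsy return to exit code 1, as in group_login.py earlier in this diff):

```python
# Sketch under stated assumptions: check stored or provided credentials
# without performing any other action; `check_credentials_valid` is assumed
# to be exposed by the Python API exactly as by the CLI.
import sys

import copernicusmarine

if not copernicusmarine.login(check_credentials_valid=True):
    sys.exit(1)  # same convention as the CLI login command
```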
+ ), + "VARIABLES_HELP": "Specify dataset variable. Can be used multiple times.", + "MINIMUM_LONGITUDE_HELP": ( + "Minimum longitude for the subset. The value will be transposed " + "to the interval [-180; 360[." + ), + "MAXIMUM_LONGITUDE_HELP": ( + "Maximum longitude for the subset. The value will be transposed" + " to the interval [-180; 360[." + ), + "MINIMUM_LATITUDE_HELP": ( + "Minimum latitude for the subset. Requires a float from -90 " + "degrees to +90." + ), + "MAXIMUM_LATITUDE_HELP": ( + "Maximum latitude for the subset. Requires a float from -90 degrees " + "to +90." + ), + "MINIMUM_DEPTH_HELP": ( + "Minimum depth for the subset. Requires a positive float (or 0)." + ), + "MAXIMUM_DEPTH_HELP": ( + "Maximum depth for the subset. Requires a positive float (or 0)." + ), + "VERTICAL_DIMENSION_OUTPUT_HELP": ( + "Consolidate the vertical dimension (the z-axis) as requested: depth with " + "descending positive values, elevation with ascending positive values. " + "Default is depth." + ), + "START_DATETIME_HELP": ( + "The start datetime of the temporal subset. Supports common " + "format parsed by pendulum (https://pendulum.eustace.io/docs/#parsing)." + ), # hardocded in cli: Caution: encapsulate date with “ “ to ensure valid + # expression for format “%Y-%m-%d %H:%M:%S”. + "END_DATETIME_HELP": ( + "The end datetime of the temporal subset. Supports common " + "format parsed by pendulum (https://pendulum.eustace.io/docs/#parsing)." + ), # hardocded in cli: Caution: encapsulate date with “ “ + # to ensure valid expression for format “%Y-%m-%d %H:%M:%S”. + "COORDINATES_SELECTION_METHOD_HELP": ( + "If ``inside``, the " + "selection retrieved will be inside the requested range. If ``strict-" + "inside``, the selection retrieved will be inside the requested range, " + "and an error will be raised if the values don't exist. " + "If ``nearest``, the extremes closest to the requested values will " + "be returned. If ``outside``," + " the extremes will be taken to contain all the requested interval." + " The methods ``inside``, ``nearest`` and ``outside`` will display" + " a warning if the request is out of bounds." + ), + "OUTPUT_FILENAME_HELP": ( + "Save the downloaded data with the given file name (under the output " + "directory)." + ), + "FILE_FORMAT_HELP": "Format of the downloaded dataset. Default to NetCDF '.nc'.", + "MOTU_API_REQUEST_HELP": ( + "Option to pass a complete MOTU API request as a string. Caution, user has to " + "replace double quotes “ with single quotes ' in the request." + ), + "NETCDF_COMPRESSION_LEVEL_HELP": ( + "Specify a compression level to apply on the NetCDF output file. A value of 0 " + "means no compression, and 9 is the highest level of compression available." + ), # some hardcoding in CLI to add the flag value + "NETCDF3_COMPATIBLE_HELP": ( + "Enable downloading the dataset in a netCDF3 compatible format." + ), +} + +GET: dict[str, str] = { + "GET_DESCRIPTION_HELP": ( + "Download originally produced data files.\n\n" + "The datasetID is required (either as an " + "argument or in a request file) and can be found via the ``describe``" + " command." + ), # has some hardcoding in CLI + "MAX_CONCURRENT_REQUESTS_HELP": ( + "Maximum number of concurrent requests. Default 15. The command uses a thread " + "pool executor to manage concurrent requests. If set to 0, no parallel" + " executions are used." + ), + "GET_RESPONSE_HELP": ( + "JSON \n A list of files that were downloaded and some metadata." 
+ ), + "SHOW_OUTPUTNAMES_HELP": ( + "Option to display the names of the output files before download." + ), + "FILTER_HELP": ( + "A pattern that must match the absolute paths of the files to download." + ), + "REGEX_HELP": ( + "The regular expression that must match the absolute paths of the files to " + "download." + ), + "FILE_LIST_HELP": ( + "Path to a '.txt' file containing a " + "list of file paths, line by line, that will " + "be downloaded directly. These files must be from the same dataset as the one s" + "pecified dataset with the datasetID option. If no files can be found, the " + "Toolbox will list all files on the remote server and attempt to find a match." + ), + "CREATE_FILE_LIST_HELP": ( + "Option to only create a file containing the names of the targeted files " + "instead of downloading them. It writes the file to the specified output" + " directory (default to current directory). The file " + "name specified should end with '.txt' or '.csv'. If specified, no other " + "action will be performed." + ), + "SYNC_HELP": ( + "Option to synchronize the local directory with the remote directory. See the " + "documentation for more details." + ), + "SYNC_DELETE_HELP": ( + "Option to delete local files that are not present on the remote server while " + "applying sync." + ), + "INDEX_PARTS_HELP": ( + "Option to get the index files of an INSITU dataset." + ), + "NO_DIRECTORIES_HELP": ( + "If True, downloaded files will not be organized into directories." + ), +} + + +SUBSET.update(SHARED) +GET.update(SHARED) +LOGIN.update(SHARED) +DESCRIBE.update(SHARED) diff --git a/copernicusmarine/core_functions/environment_variables.py b/copernicusmarine/core_functions/environment_variables.py index ee22f9e6..cb0f9416 100644 --- a/copernicusmarine/core_functions/environment_variables.py +++ b/copernicusmarine/core_functions/environment_variables.py @@ -1,7 +1,7 @@ import logging import os -logger = logging.getLogger("copernicus_marine_root_logger") +logger = logging.getLogger("copernicusmarine") COPERNICUSMARINE_SERVICE_USERNAME = os.getenv( "COPERNICUSMARINE_SERVICE_USERNAME" @@ -20,22 +20,27 @@ "Please use COPERNICUSMARINE_SERVICE_PASSWORD instead." 
) -COPERNICUSMARINE_CACHE_DIRECTORY = os.getenv( - "COPERNICUSMARINE_CACHE_DIRECTORY", "" +COPERNICUSMARINE_CREDENTIALS_DIRECTORY = os.getenv( + "COPERNICUSMARINE_CREDENTIALS_DIRECTORY" ) -COPERNICUSMARINE_MAX_CONCURRENT_REQUESTS = os.getenv( - "COPERNICUSMARINE_MAX_CONCURRENT_REQUESTS", "15" +COPERNICUSMARINE_DISABLE_SSL_CONTEXT = os.getenv( + "COPERNICUSMARINE_DISABLE_SSL_CONTEXT" ) -COPERNICUSMARINE_GET_CONCURRENT_DOWNLOADS = os.getenv( - "COPERNICUSMARINE_GET_CONCURRENT_DOWNLOADS", None +COPERNICUSMARINE_SET_SSL_CERTIFICATE_PATH = os.getenv( + "COPERNICUSMARINE_SET_SSL_CERTIFICATE_PATH" ) -COPERNICUSMARINE_DISABLE_SSL_CONTEXT = os.getenv( - "COPERNICUSMARINE_DISABLE_SSL_CONTEXT" -) COPERNICUSMARINE_TRUST_ENV = os.getenv("COPERNICUSMARINE_TRUST_ENV", "True") +COPERNICUSMARINE_HTTPS_TIMEOUT = os.getenv( + "COPERNICUSMARINE_HTTPS_TIMEOUT", "60" +) + +COPERNICUSMARINE_HTTPS_RETRIES = os.getenv( + "COPERNICUSMARINE_HTTPS_RETRIES", "5" +) + PROXY_HTTPS = os.getenv("HTTPS_PROXY", "") PROXY_HTTP = os.getenv("HTTP_PROXY", "") diff --git a/copernicusmarine/core_functions/exceptions.py b/copernicusmarine/core_functions/exceptions.py index 12b7b955..58e396da 100644 --- a/copernicusmarine/core_functions/exceptions.py +++ b/copernicusmarine/core_functions/exceptions.py @@ -1,8 +1,21 @@ class MinimumLongitudeGreaterThanMaximumLongitude(Exception): + """ + Exception raised when the minimum longitude is greater than the maximum longitude. + + Please make sure the minimum longitude is less or equal than the maximum longitude. + """ + pass class VariableDoesNotExistInTheDataset(Exception): + """ + Exception raised when the variable does not exist in the dataset. + + Please sure the variable exists in the dataset + and/or that you use the standard name. + """ + def __init__(self, variable): super().__init__( f"The variable '{variable}' is neither a variable or a standard name in" @@ -16,10 +29,64 @@ def __init__(self, variable): class CoordinatesOutOfDatasetBounds(Exception): + """ + Exception raised when the coordinates are out of the dataset bounds. + + Please make sure the coordinates are within the dataset bounds. If you are using the + strict-inside mode, the coordinates must be within the dataset bounds. + """ + def __init__(self, message: str): super().__init__(message) self.__setattr__("custom_exception_message", message) class NetCDFCompressionNotAvailable(Exception): + """ + Exception raised when the NetCDF compression is not available. + + Please make sure the NetCDF compression is available + with the current python libraries. + """ + pass + + +class WrongDatetimeFormat(Exception): + """ + Exception raised when the datetime format is wrong. + + Supported formats are: + + * the string "now" + * all formats supported by pendulum python library + + see `pendulum parsing page `_. + """ + + pass + + +class FormatNotSupported(Exception): + """ + Exception raised when the format is not supported for the subset. + + For now, we are not able to subset sparse datasets which are in sqlite format. + This feature will be available in the future. + """ + + def __init__(self, format_type): + super().__init__( + f"Subsetting format type {format_type} not supported yet." + ) + + +class ServiceNotSupported(Exception): + """ + Exception raised when the service type is not supported. + + Some services are not supported by the current implementation of the toolbox. 
+ """ + + def __init__(self, service_type): + super().__init__(f"Service type {service_type} not supported.") diff --git a/copernicusmarine/core_functions/get.py b/copernicusmarine/core_functions/get.py index 196b2ef9..aa1b4405 100644 --- a/copernicusmarine/core_functions/get.py +++ b/copernicusmarine/core_functions/get.py @@ -2,38 +2,32 @@ import logging import os import pathlib -from typing import List, Optional +from typing import Optional -from copernicusmarine.catalogue_parser.catalogue_parser import parse_catalogue from copernicusmarine.catalogue_parser.request_structure import ( GetRequest, filter_to_regex, - get_request_from_file, overload_regex_with_additionnal_filter, ) from copernicusmarine.core_functions.credentials_utils import ( get_and_check_username_password, ) +from copernicusmarine.core_functions.models import ResponseGet from copernicusmarine.core_functions.services_utils import ( CommandType, RetrievalService, get_retrieval_service, ) -from copernicusmarine.core_functions.utils import ( - create_cache_directory, - delete_cache_folder, - get_unique_filename, -) +from copernicusmarine.core_functions.utils import get_unique_filename from copernicusmarine.core_functions.versions_verifier import VersionVerifier from copernicusmarine.download_functions.download_original_files import ( download_original_files, ) -logger = logging.getLogger("copernicus_marine_root_logger") +logger = logging.getLogger("copernicusmarine") def get_function( - dataset_url: Optional[str], dataset_id: Optional[str], force_dataset_version: Optional[str], force_dataset_part: Optional[str], @@ -46,20 +40,18 @@ def get_function( force_download: bool, overwrite_output_data: bool, request_file: Optional[pathlib.Path], - force_service: Optional[str], - overwrite_metadata_cache: bool, - no_metadata_cache: bool, filter: Optional[str], regex: Optional[str], file_list_path: Optional[pathlib.Path], create_file_list: Optional[str], - download_file_list: bool, sync: bool, sync_delete: bool, index_parts: bool, + dry_run: bool, + max_concurrent_requests: int, disable_progress_bar: bool, staging: bool, -) -> List[pathlib.Path]: +) -> ResponseGet: VersionVerifier.check_version_get(staging) if staging: logger.warning( @@ -67,24 +59,17 @@ def get_function( "Data will come from the staging environment." ) - if overwrite_metadata_cache: - delete_cache_folder() - - get_request = GetRequest() + get_request = GetRequest(dataset_id=dataset_id or "") if request_file: - get_request = get_request_from_file(request_file) + get_request.from_file(request_file) + if not get_request.dataset_id: + raise ValueError("Please provide a dataset id for a get request.") request_update_dict = { - "dataset_url": dataset_url, - "dataset_id": dataset_id, "force_dataset_version": force_dataset_version, "output_directory": output_directory, - "force_service": force_service, } get_request.update(request_update_dict) - if not no_metadata_cache: - create_cache_directory() - # Specific treatment for default values: # In order to not overload arguments with default values # TODO is this really useful? 
@@ -100,8 +85,6 @@ def get_function( get_request.force_download = force_download if overwrite_output_data: get_request.overwrite_output_data = overwrite_output_data - if force_service: - get_request.force_service = force_service if filter: get_request.regex = filter_to_regex(filter) if regex: @@ -119,21 +102,20 @@ def get_function( get_request.sync_delete = sync_delete if index_parts: get_request.index_parts = index_parts - get_request.force_service = "files" get_request.regex = overload_regex_with_additionnal_filter( filter_to_regex("*index_*"), get_request.regex ) - if download_file_list and not create_file_list: - create_file_list = "files_to_download.txt" if create_file_list is not None: assert create_file_list.endswith(".txt") or create_file_list.endswith( ".csv" - ), "Download file list must be a .txt or .csv file. " + ), "Download file list must be a '.txt' or '.csv' file. " f"Got '{create_file_list}' instead." if file_list_path: direct_download_files = get_direct_download_files(file_list_path) if direct_download_files: get_request.direct_download = direct_download_files + if create_file_list or dry_run: + get_request.dry_run = True return _run_get_request( username=username, @@ -141,7 +123,7 @@ def get_function( get_request=get_request, create_file_list=create_file_list, credentials_file=credentials_file, - no_metadata_cache=no_metadata_cache, + max_concurrent_requests=max_concurrent_requests, disable_progress_bar=disable_progress_bar, staging=staging, ) @@ -153,32 +135,25 @@ def _run_get_request( get_request: GetRequest, create_file_list: Optional[str], credentials_file: Optional[pathlib.Path], - no_metadata_cache: bool, + max_concurrent_requests: int, disable_progress_bar: bool, staging: bool = False, -) -> List[pathlib.Path]: +) -> ResponseGet: + logger.debug("Checking username and password...") username, password = get_and_check_username_password( - username, - password, - credentials_file, - no_metadata_cache=no_metadata_cache, + username, password, credentials_file ) + logger.debug("Checking dataset metadata...") - catalogue = parse_catalogue( - no_metadata_cache=no_metadata_cache, - disable_progress_bar=disable_progress_bar, - staging=staging, - ) retrieval_service: RetrievalService = get_retrieval_service( - catalogue, get_request.dataset_id, - get_request.dataset_url, get_request.force_dataset_version, get_request.force_dataset_part, - get_request.force_service, + None, CommandType.GET, get_request.index_parts, dataset_sync=get_request.sync, + staging=staging, ) get_request.dataset_url = retrieval_service.uri logger.info( @@ -189,6 +164,7 @@ def _run_get_request( username, password, get_request, + max_concurrent_requests, disable_progress_bar, create_file_list, ) @@ -213,7 +189,6 @@ def create_get_template() -> None: "regex": None, "output_directory": "copernicusmarine_data", "show_outputnames": True, - "service": "files", "force_download": False, "file_list": None, "sync": False, @@ -221,8 +196,6 @@ def create_get_template() -> None: "index_parts": False, "disable_progress_bar": False, "overwrite_output_data": False, - "overwrite_metadata_cache": False, - "no_metadata_cache": False, "log_level": "INFO", }, output_file, @@ -238,7 +211,7 @@ def get_direct_download_files( if not os.path.exists(file_list_path): raise FileNotFoundError( f"File {file_list_path} does not exist." - " Please provide a valid path to a .txt file." + " Please provide a valid path to a '.txt' file." 
) with open(file_list_path) as f: direct_download_files = [line.strip() for line in f.readlines()] diff --git a/copernicusmarine/core_functions/login.py b/copernicusmarine/core_functions/login.py index 6635d017..cf4e6097 100644 --- a/copernicusmarine/core_functions/login.py +++ b/copernicusmarine/core_functions/login.py @@ -3,12 +3,12 @@ from typing import Optional from copernicusmarine.core_functions.credentials_utils import ( - copernicusmarine_configuration_file_exists, - copernicusmarine_configuration_file_is_valid, + RECOVER_YOUR_CREDENTIALS_MESSAGE, + copernicusmarine_credentials_are_valid, credentials_file_builder, ) -logger = logging.getLogger("copernicus_marine_root_logger") +logger = logging.getLogger("copernicusmarine") def login_function( @@ -16,19 +16,16 @@ def login_function( password: Optional[str], configuration_file_directory: pathlib.Path, overwrite_configuration_file: bool, - skip_if_user_logged_in: bool, + check_credentials_valid: bool, ) -> bool: - if ( - skip_if_user_logged_in - and copernicusmarine_configuration_file_exists( - configuration_file_directory - ) - and copernicusmarine_configuration_file_is_valid( - configuration_file_directory - ) - ): - logger.info("You are already logged in. Skipping login.") - return True + if check_credentials_valid: + logger.info("Checking if credentials are valid.") + if copernicusmarine_credentials_are_valid( + configuration_file_directory, username, password + ): + return True + else: + return False credentials_file = credentials_file_builder( username=username, password=password, @@ -42,9 +39,5 @@ logger.info( "Invalid credentials. No configuration file has been modified." ) - logger.info( - "Learn how to recover your credentials at: " - "https://help.marine.copernicus.eu/en/articles/" - "4444552-i-forgot-my-username-or-my-password-what-should-i-do" - ) + logger.info(RECOVER_YOUR_CREDENTIALS_MESSAGE) return False diff --git a/copernicusmarine/core_functions/models.py b/copernicusmarine/core_functions/models.py index 606efd55..3671083b 100644 --- a/copernicusmarine/core_functions/models.py +++ b/copernicusmarine/core_functions/models.py @@ -1,4 +1,7 @@ -from typing import Literal, get_args +import pathlib +from typing import Literal, Optional, get_args + +from pydantic import BaseModel, model_serializer FileFormat = Literal["netcdf", "zarr"] DEFAULT_FILE_FORMAT: FileFormat = "netcdf" @@ -8,6 +11,84 @@ DEFAULT_FILE_EXTENSION: FileExtension = ".nc" DEFAULT_FILE_EXTENSIONS = list(get_args(FileExtension)) -SubsetMethod = Literal["nearest", "strict"] -DEFAULT_SUBSET_METHOD: SubsetMethod = "nearest" -DEFAULT_SUBSET_METHODS = list(get_args(SubsetMethod)) +CoordinatesSelectionMethod = Literal[ + "inside", "strict-inside", "nearest", "outside" +] +DEFAULT_COORDINATES_SELECTION_METHOD: CoordinatesSelectionMethod = "inside" +DEFAULT_COORDINATES_SELECTION_METHODS = list( + get_args(CoordinatesSelectionMethod) +) + +VerticalDimensionOutput = Literal["depth", "elevation"] +DEFAULT_VERTICAL_DIMENSION_OUTPUT: VerticalDimensionOutput = "depth" +DEFAULT_VERTICAL_DIMENSION_OUTPUTS = list(get_args(VerticalDimensionOutput)) + + +class FileGet(BaseModel): + #: Full URL of the location of the file on the remote server. + url: str + #: Size of the file in MB. + size: float + #: Last modified date.
+ last_modified: str + #: Path to the local downloaded file. + output: pathlib.Path + + +class ResponseGet(BaseModel): + """Metadata returned when using :func:`~copernicusmarine.get`""" + + #: Description of the files concerned by the query. + files: list[FileGet] + + +class GeographicalExtent(BaseModel): + """Interval for geographical coordinates.""" + + minimum: Optional[float] + maximum: Optional[float] + + +class TimeExtent(BaseModel): + """Interval for time coordinates.""" + + minimum: Optional[str] + maximum: Optional[str] + + +class DatasetCoordinatesExtent(BaseModel): + #: Longitude interval of the subsetted data. + longitude: GeographicalExtent + #: Latitude interval of the subsetted data. + latitude: GeographicalExtent + #: Time interval of the subsetted data as an ISO 8601 string. + time: TimeExtent + #: Depth interval of the subsetted data. + depth: Optional[GeographicalExtent] = None + #: Elevation interval of the subsetted data. + #: Relevant if data are requested for elevation + #: instead of depth. + elevation: Optional[GeographicalExtent] = None + + @model_serializer(mode="wrap") + def _serialize(self, handler): + d = handler(self) + if not self.depth: + del d["depth"] + if not self.elevation: + del d["elevation"] + return d + + +class ResponseSubset(BaseModel): + """Metadata returned when using :func:`~copernicusmarine.subset`""" + + #: Path to the result file. + output: pathlib.Path + #: Estimation of the size of the final result file in MB. + size: Optional[float] + #: Estimation of the maximum amount of data needed to + #: get the final result in MB. + data_needed: Optional[float] + #: The bounds of the subsetted dataset. + coordinates_extent: DatasetCoordinatesExtent diff --git a/copernicusmarine/core_functions/services_utils.py b/copernicusmarine/core_functions/services_utils.py index f0c796d5..6678d99e 100644 --- a/copernicusmarine/core_functions/services_utils.py +++ b/copernicusmarine/core_functions/services_utils.py @@ -2,10 +2,12 @@ from dataclasses import dataclass from enum import Enum from itertools import chain -from typing import List, Literal, Optional, Tuple, Union +from typing import List, Literal, Optional, Union from copernicusmarine.catalogue_parser.catalogue_parser import ( - CopernicusMarineCatalogue, + get_dataset_metadata, +) +from copernicusmarine.catalogue_parser.models import ( CopernicusMarineDatasetServiceType, CopernicusMarineDatasetVersion, CopernicusMarineProductDataset, @@ -14,11 +16,11 @@ CopernicusMarineService, CopernicusMarineVersionPart, ) from copernicusmarine.catalogue_parser.request_structure import ( - DatasetTimeAndGeographicalSubset, + DatasetTimeAndSpaceSubset, ) from copernicusmarine.core_functions import custom_open_zarr +from copernicusmarine.core_functions.exceptions import FormatNotSupported from copernicusmarine.core_functions.utils import ( - FormatNotSupported, datetime_parser, next_or_raise_exception, ) @@ -26,7 +28,7 @@ get_size_of_coordinate_subset, ) -logger = logging.getLogger("copernicus_marine_root_logger") +logger = logging.getLogger("copernicusmarine") class _Command(Enum): @@ -107,6 +109,12 @@ def assert_service_type_for_command( class ServiceDoesNotExistForCommand(Exception): + """ + Exception raised when the service does not exist for the command. + + Please make sure the service exists for the command.
+ """ # TODO: list available services per command + def __init__(self, service_name, command_name, available_services): super().__init__() self.__setattr__( @@ -152,7 +160,7 @@ def _select_forced_service( def _get_best_arco_service_type( - dataset_subset: DatasetTimeAndGeographicalSubset, + dataset_subset: DatasetTimeAndSpaceSubset, dataset_url: str, username: Optional[str], ) -> Literal[ @@ -178,8 +186,16 @@ time_size = get_size_of_coordinate_subset( dataset, "time", - dataset_subset.start_datetime, - dataset_subset.end_datetime, + ( + dataset_subset.start_datetime.in_tz("UTC").naive() + if dataset_subset.start_datetime + else dataset_subset.start_datetime + ), + ( + dataset_subset.end_datetime.in_tz("UTC").naive() + if dataset_subset.end_datetime + else dataset_subset.end_datetime + ), ) dataset_coordinates = dataset.coords @@ -219,7 +235,7 @@ def _get_first_available_service_type( def _select_service_by_priority( dataset_version_part: CopernicusMarineVersionPart, command_type: CommandType, - dataset_subset: Optional[DatasetTimeAndGeographicalSubset], + dataset_subset: Optional[DatasetTimeAndSpaceSubset], username: Optional[str], ) -> CopernicusMarineService: dataset_available_service_types = [ @@ -258,129 +274,44 @@ return first_available_service -def parse_dataset_id_and_service_and_suffix_path_from_url( - catalogue: CopernicusMarineCatalogue, - dataset_url: Optional[str], -) -> Tuple[str, CopernicusMarineDatasetServiceType, str,]: - if dataset_url is None: - syntax_error = SyntaxError( - "Must specify at least one of " - "'dataset_url' or 'dataset_id' options" - ) - raise syntax_error - return next_or_raise_exception( - ( - ( - dataset.dataset_id, - service.service_type, - dataset_url.split(service.uri)[1], - ) - for product in catalogue.products - for dataset in product.datasets - for dataset_version in dataset.versions - for dataset_part in dataset_version.parts - for service in dataset_part.services - if dataset_url.startswith(service.uri) - ), - KeyError( - f"The requested dataset URL '{dataset_url}' " - "was not found in the catalogue, " - "you can use 'copernicusmarine describe --include-datasets " - "--contains <search_token>' to find datasets" - ), - ) - - @dataclass class RetrievalService: dataset_id: str service_type: CopernicusMarineDatasetServiceType service_format: Optional[CopernicusMarineServiceFormat] uri: str - dataset_valid_start_date: Optional[Union[str, int]] + dataset_valid_start_date: Optional[Union[str, int, float]] + service: CopernicusMarineService def get_retrieval_service( - catalogue: CopernicusMarineCatalogue, - dataset_id: Optional[str], - dataset_url: Optional[str], + dataset_id: str, force_dataset_version_label: Optional[str], force_dataset_part_label: Optional[str], force_service_type_string: Optional[str], command_type: CommandType, index_parts: bool = False, - dataset_subset: Optional[DatasetTimeAndGeographicalSubset] = None, + dataset_subset: Optional[DatasetTimeAndSpaceSubset] = None, dataset_sync: bool = False, username: Optional[str] = None, + staging: bool = False, ) -> RetrievalService: + dataset_metadata = get_dataset_metadata(dataset_id, staging=staging) + # logger.debug(dataset_metadata) + if not dataset_metadata: + raise KeyError( + f"The requested dataset '{dataset_id}' was not found in the catalogue," + " you can use 'copernicusmarine describe --include-datasets " + "--contains <search_token>' to find datasets" + ) force_service_type: Optional[CopernicusMarineDatasetServiceType] = (
_service_type_from_string(force_service_type_string, command_type) if force_service_type_string else None ) - if dataset_id is None: - ( - dataset_id, - service_type, - suffix_path, - ) = parse_dataset_id_and_service_and_suffix_path_from_url( - catalogue, dataset_url - ) - force_service_type = ( - service_type if not force_service_type else force_service_type - ) - else: - if dataset_url is not None: - syntax_error = SyntaxError( - "Must specify only one of 'dataset_url' or 'dataset_id' options" - ) - raise syntax_error - suffix_path = "" - return _get_retrieval_service_from_dataset_id( - catalogue=catalogue, - dataset_id=dataset_id, - suffix_path=suffix_path, - force_dataset_version_label=force_dataset_version_label, - force_dataset_part_label=force_dataset_part_label, - force_service_type=force_service_type, - command_type=command_type, - index_parts=index_parts, - dataset_subset=dataset_subset, - dataset_sync=dataset_sync, - username=username, - ) - - -def _get_retrieval_service_from_dataset_id( - catalogue: CopernicusMarineCatalogue, - dataset_id: str, - suffix_path: str, - force_dataset_version_label: Optional[str], - force_dataset_part_label: Optional[str], - force_service_type: Optional[CopernicusMarineDatasetServiceType], - command_type: CommandType, - index_parts: bool, - dataset_subset: Optional[DatasetTimeAndGeographicalSubset], - dataset_sync: bool, - username: Optional[str], -) -> RetrievalService: - dataset: CopernicusMarineProductDataset = next_or_raise_exception( - ( - dataset - for product in catalogue.products - for dataset in product.datasets - if dataset_id == dataset.dataset_id - ), - KeyError( - f"The requested dataset '{dataset_id}' was not found in the catalogue," - " you can use 'copernicusmarine describe --include-datasets " - "--contains <search_token>' to find datasets" - ), - ) return _get_retrieval_service_from_dataset( - dataset=dataset, - suffix_path=suffix_path, + dataset=dataset_metadata, force_dataset_version_label=force_dataset_version_label, force_dataset_part_label=force_dataset_part_label, force_service_type=force_service_type, @@ -394,13 +325,12 @@ def _get_retrieval_service_from_dataset_id( def _get_retrieval_service_from_dataset( dataset: CopernicusMarineProductDataset, - suffix_path: str, force_dataset_version_label: Optional[str], force_dataset_part_label: Optional[str], force_service_type: Optional[CopernicusMarineDatasetServiceType], command_type: CommandType, index_parts: bool, - dataset_subset: Optional[DatasetTimeAndGeographicalSubset], + dataset_subset: Optional[DatasetTimeAndSpaceSubset], dataset_sync: bool, username: Optional[str], ) -> RetrievalService: @@ -419,7 +349,6 @@ def _get_retrieval_service_from_dataset( dataset_id=dataset.dataset_id, dataset_version=dataset_version, force_dataset_part_label=force_dataset_part_label, - suffix_path=suffix_path, force_service_type=force_service_type, command_type=command_type, index_parts=index_parts, @@ -433,11 +362,10 @@ def _get_retrieval_service_from_dataset_version( dataset_id: str, dataset_version: CopernicusMarineDatasetVersion, force_dataset_part_label: Optional[str], - suffix_path: str, force_service_type: Optional[CopernicusMarineDatasetServiceType], command_type: CommandType, index_parts: bool, - dataset_subset: Optional[DatasetTimeAndGeographicalSubset], + dataset_subset: Optional[DatasetTimeAndSpaceSubset], dataset_sync: bool, username: Optional[str], ) -> RetrievalService: @@ -445,17 +373,6 @@ def _get_retrieval_service_from_dataset_version( raise Exception( "Sync is not supported for datasets
with multiple parts." ) - if ( - force_service_type == CopernicusMarineDatasetServiceType.FILES - and not force_dataset_part_label - and not index_parts - and len(dataset_version.parts) > 1 - ): - raise Exception( - "When dataset has multiple parts and using 'files' service" - ", please indicate the part you want to download " - "with the dataset-part option" - ) if force_dataset_part_label: logger.info( f"You forced selection of dataset part " @@ -505,29 +422,34 @@ def _get_retrieval_service_from_dataset_version( return RetrievalService( dataset_id=dataset_id, service_type=service.service_type, - uri=service.uri + suffix_path, + uri=service.uri, dataset_valid_start_date=dataset_start_date, service_format=service.service_format, + service=service, ) def _get_dataset_start_date_from_service( service: CopernicusMarineService, -) -> Optional[Union[str, int]]: +) -> Optional[Union[str, int, float]]: for variable in service.variables: for coordinate in variable.coordinates: - if ( - coordinate.coordinates_id == "time" - and coordinate.minimum_value - ): - if isinstance(coordinate.minimum_value, str): - return coordinate.minimum_value.replace("Z", "") - return int(coordinate.minimum_value) + if coordinate.coordinate_id == "time": + if coordinate.minimum_value: + return coordinate.minimum_value + if coordinate.values: + return min(coordinate.values) return None class ServiceNotAvailable(Exception): - ... + """ + Exception raised when the service is not available for the dataset. + + Please make sure the service is available for the specific dataset. + """ + + pass def _warning_dataset_will_be_deprecated( @@ -581,7 +503,14 @@ def _service_not_available_error( class NoServiceAvailable(Exception): - ... + """ + Exception raised when no service is available for the dataset. + + We could not find a service for this dataset. + Please make sure there is a service available for the dataset. 
+ """ + + pass def _no_service_available_for_command( diff --git a/copernicusmarine/core_functions/sessions.py b/copernicusmarine/core_functions/sessions.py index edc162ce..0ba5879e 100644 --- a/copernicusmarine/core_functions/sessions.py +++ b/copernicusmarine/core_functions/sessions.py @@ -1,17 +1,18 @@ import ssl from typing import Any, List, Literal, Optional, Tuple -import aiohttp import boto3 import botocore import botocore.config import certifi -import nest_asyncio import requests from requests.adapters import HTTPAdapter, Retry from copernicusmarine.core_functions.environment_variables import ( COPERNICUSMARINE_DISABLE_SSL_CONTEXT, + COPERNICUSMARINE_HTTPS_RETRIES, + COPERNICUSMARINE_HTTPS_TIMEOUT, + COPERNICUSMARINE_SET_SSL_CERTIFICATE_PATH, COPERNICUSMARINE_TRUST_ENV, PROXY_HTTP, PROXY_HTTPS, @@ -24,20 +25,22 @@ PROXIES["http"] = PROXY_HTTP if PROXY_HTTPS: PROXIES["https"] = PROXY_HTTPS +try: + HTTPS_TIMEOUT = float(COPERNICUSMARINE_HTTPS_TIMEOUT) +except ValueError: + HTTPS_TIMEOUT = 60 +try: + HTTPS_RETRIES = int(COPERNICUSMARINE_HTTPS_RETRIES) +except ValueError: + HTTPS_RETRIES = 5 -def _get_ssl_context() -> Optional[ssl.SSLContext]: +def get_ssl_context() -> Optional[ssl.SSLContext]: if COPERNICUSMARINE_DISABLE_SSL_CONTEXT is not None: return None return ssl.create_default_context(cafile=certifi.where()) -def get_configured_aiohttp_session() -> aiohttp.ClientSession: - nest_asyncio.apply() - connector = aiohttp.TCPConnector(ssl=_get_ssl_context()) - return aiohttp.ClientSession(connector=connector, trust_env=TRUST_ENV) - - def get_https_proxy() -> Optional[str]: return PROXIES.get("https") @@ -76,19 +79,33 @@ def get_configured_boto3_session( return s3_client, s3_resource -def get_configured_requests_session() -> requests.Session: - session = requests.Session() - session.trust_env = TRUST_ENV - session.verify = certifi.where() - session.proxies = PROXIES - session.mount( - "https://", - HTTPAdapter( - max_retries=Retry( - total=5, - backoff_factor=1, - status_forcelist=[500, 502, 503, 504], +# TODO: add tests +# example: with https://httpbin.org/delay/10 or +# https://medium.com/@mpuig/testing-robust-requests-with-python-a06537d97771 +class ConfiguredRequestsSession(requests.Session): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.trust_env = TRUST_ENV + self.verify = ( + COPERNICUSMARINE_SET_SSL_CERTIFICATE_PATH or certifi.where() + ) + self.proxies = PROXIES + if HTTPS_RETRIES: + self.mount( + "https://", + HTTPAdapter( + max_retries=Retry( + total=HTTPS_RETRIES, + backoff_factor=1, + status_forcelist=[408, 429, 500, 502, 503, 504], + ) + ), ) - ), - ) - return session + + def request(self, *args, **kwargs): + kwargs.setdefault("timeout", HTTPS_TIMEOUT) + return super().request(*args, **kwargs) + + +def get_configured_requests_session() -> requests.Session: + return ConfiguredRequestsSession() diff --git a/copernicusmarine/core_functions/subset.py b/copernicusmarine/core_functions/subset.py index 5a75ff32..8085594c 100644 --- a/copernicusmarine/core_functions/subset.py +++ b/copernicusmarine/core_functions/subset.py @@ -1,35 +1,33 @@ import json import logging import pathlib -from datetime import datetime from typing import List, Optional -from copernicusmarine.catalogue_parser.catalogue_parser import ( +from pendulum import DateTime + +from copernicusmarine.catalogue_parser.models import ( CopernicusMarineDatasetServiceType, CopernicusMarineServiceFormat, - parse_catalogue, ) from copernicusmarine.catalogue_parser.request_structure 
import ( SubsetRequest, convert_motu_api_request_to_structure, - subset_request_from_file, ) from copernicusmarine.core_functions.credentials_utils import ( get_and_check_username_password, ) -from copernicusmarine.core_functions.models import SubsetMethod +from copernicusmarine.core_functions.exceptions import ServiceNotSupported +from copernicusmarine.core_functions.models import ( + CoordinatesSelectionMethod, + ResponseSubset, + VerticalDimensionOutput, +) from copernicusmarine.core_functions.services_utils import ( CommandType, RetrievalService, get_retrieval_service, - parse_dataset_id_and_service_and_suffix_path_from_url, -) -from copernicusmarine.core_functions.utils import ( - ServiceNotSupported, - create_cache_directory, - delete_cache_folder, - get_unique_filename, ) +from copernicusmarine.core_functions.utils import get_unique_filename from copernicusmarine.core_functions.versions_verifier import VersionVerifier from copernicusmarine.download_functions.download_arco_series import ( download_zarr, @@ -39,11 +37,10 @@ ) from copernicusmarine.download_functions.utils import FileFormat -logger = logging.getLogger("copernicus_marine_root_logger") +logger = logging.getLogger("copernicusmarine") def subset_function( - dataset_url: Optional[str], dataset_id: Optional[str], force_dataset_version: Optional[str], force_dataset_part: Optional[str], @@ -56,10 +53,10 @@ def subset_function( maximum_latitude: Optional[float], minimum_depth: Optional[float], maximum_depth: Optional[float], - vertical_dimension_as_originally_produced: bool, - start_datetime: Optional[datetime], - end_datetime: Optional[datetime], - subset_method: SubsetMethod, + vertical_dimension_output: VerticalDimensionOutput, + start_datetime: Optional[DateTime], + end_datetime: Optional[DateTime], + coordinates_selection_method: CoordinatesSelectionMethod, output_filename: Optional[str], file_format: FileFormat, force_service: Optional[str], @@ -69,14 +66,12 @@ def subset_function( motu_api_request: Optional[str], force_download: bool, overwrite_output_data: bool, - overwrite_metadata_cache: bool, - no_metadata_cache: bool, + dry_run: bool, disable_progress_bar: bool, staging: bool, - netcdf_compression_enabled: bool, - netcdf_compression_level: Optional[int], + netcdf_compression_level: int, netcdf3_compatible: bool, -) -> pathlib.Path: +) -> ResponseSubset: VersionVerifier.check_version_subset(staging) if staging: logger.warning( @@ -84,32 +79,15 @@ def subset_function( "Data will come from the staging environment." 
) - if overwrite_metadata_cache: - delete_cache_folder() - - if not no_metadata_cache: - create_cache_directory() - - if ( - netcdf_compression_level is not None - and netcdf_compression_enabled is False - ): - raise ValueError( - "You must provide --netcdf-compression-enabled if you want to use " - "--netcdf-compression-level option" - ) - - subset_request = SubsetRequest() + subset_request = SubsetRequest(dataset_id=dataset_id or "") if request_file: - subset_request = subset_request_from_file(request_file) + subset_request.from_file(request_file) if motu_api_request: motu_api_subset_request = convert_motu_api_request_to_structure( motu_api_request ) subset_request.update(motu_api_subset_request.__dict__) request_update_dict = { - "dataset_url": dataset_url, - "dataset_id": dataset_id, "force_dataset_version": force_dataset_version, "force_dataset_part": force_dataset_part, "variables": variables, @@ -119,24 +97,25 @@ def subset_function( "maximum_latitude": maximum_latitude, "minimum_depth": minimum_depth, "maximum_depth": maximum_depth, - "vertical_dimension_as_originally_produced": vertical_dimension_as_originally_produced, # noqa + "vertical_dimension_output": vertical_dimension_output, "start_datetime": start_datetime, "end_datetime": end_datetime, - "subset_method": subset_method, + "coordinates_selection_method": coordinates_selection_method, "output_filename": output_filename, "file_format": file_format, "force_service": force_service, "output_directory": output_directory, - "netcdf_compression_enabled": netcdf_compression_enabled, "netcdf_compression_level": netcdf_compression_level, "netcdf3_compatible": netcdf3_compatible, + "dry_run": dry_run, } subset_request.update(request_update_dict) + if not subset_request.dataset_id: + raise ValueError("Please provide a dataset id for a subset request.") username, password = get_and_check_username_password( username, password, credentials_file, - no_metadata_cache=no_metadata_cache, ) if all( e is None @@ -152,31 +131,9 @@ def subset_function( subset_request.end_datetime, ] ): - if not subset_request.dataset_id: - if subset_request.dataset_url: - catalogue = parse_catalogue( - no_metadata_cache=no_metadata_cache, - disable_progress_bar=disable_progress_bar, - staging=staging, - ) - ( - dataset_id, - _, - _, - ) = parse_dataset_id_and_service_and_suffix_path_from_url( - catalogue, subset_request.dataset_url - ) - else: - syntax_error = SyntaxError( - "Must specify at least one of " - "'dataset_url' or 'dataset_id' options" - ) - raise syntax_error - else: - dataset_id = subset_request.dataset_id logger.info( "To retrieve a complete dataset, please use instead: " - f"copernicusmarine get --dataset-id {dataset_id}" + f"copernicusmarine get --dataset-id {subset_request.dataset_id}" ) raise ValueError( "Missing subset option. Try 'copernicusmarine subset --help'." 
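`subset_function` above now fills the request from an optional request file before applying explicit arguments, and the new `dry_run` flag turns the call into a size/metadata query. A sketch of that round trip, assuming the public Python API mirrors `subset_function`'s `request_file` and `dry_run` parameters; the template file is first generated with the CLI option `--create-template` described earlier:

```python
# Illustrative sketch, not part of the diff: replaying an edited request file.
# Generate the template beforehand with:
#   copernicusmarine subset --create-template
import copernicusmarine

response = copernicusmarine.subset(
    request_file="subset_template.json",  # edited by hand after generation
    dry_run=True,  # size the request without downloading anything
)
# `response` is a ResponseSubset (see core_functions/models.py above).
print(response.output, response.size, response.coordinates_extent)
```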
@@ -188,20 +145,13 @@ def subset_function( if overwrite_output_data: subset_request.overwrite_output_data = overwrite_output_data - catalogue = parse_catalogue( - no_metadata_cache=no_metadata_cache, - disable_progress_bar=disable_progress_bar, - staging=staging, - ) retrieval_service: RetrievalService = get_retrieval_service( - catalogue, subset_request.dataset_id, - subset_request.dataset_url, subset_request.force_dataset_version, subset_request.force_dataset_part, subset_request.force_service, CommandType.SUBSET, - dataset_subset=subset_request.get_time_and_geographical_subset(), + dataset_subset=subset_request.get_time_and_space_subset(), ) subset_request.dataset_url = retrieval_service.uri check_dataset_subset_bounds( @@ -209,8 +159,8 @@ def subset_function( password=password, dataset_url=subset_request.dataset_url, service_type=retrieval_service.service_type, - dataset_subset=subset_request.get_time_and_geographical_subset(), - subset_method=subset_request.subset_method, + dataset_subset=subset_request.get_time_and_space_subset(), + coordinates_selection_method=subset_request.coordinates_selection_method, dataset_valid_date=retrieval_service.dataset_valid_start_date, ) logger.info( @@ -227,17 +177,18 @@ def subset_function( retrieval_service.service_format == CopernicusMarineServiceFormat.ZARR ): - output_path = download_zarr( + response = download_zarr( username, password, subset_request, retrieval_service.dataset_id, disable_progress_bar, retrieval_service.dataset_valid_start_date, + service=retrieval_service.service, ) else: raise ServiceNotSupported(retrieval_service.service_type) - return output_path + return response def create_subset_template() -> None: @@ -247,24 +198,22 @@ def create_subset_template() -> None: with open(filename, "w") as output_file: json.dump( { - "dataset_id": "cmems_mod_glo_phy_anfc_0.083deg_P1D-m", + "dataset_id": "cmems_mod_glo_phy_myint_0.083deg_P1M-m", "start_datetime": "2023-10-07", "end_datetime": "2023-10-12", "minimum_longitude": -85, "maximum_longitude": -10, "minimum_latitude": 35, "maximum_latitude": 43, - "minimum_depth": False, - "maximum_depth": False, - "variables": ["zos", "tob"], + "minimum_depth": 1, + "maximum_depth": 10, + "variables": ["so", "thetao"], "output_directory": "copernicusmarine_data", "force_service": False, "force_download": False, "request_file": False, "motu_api_request": False, "overwrite_output_data": False, - "overwrite_metadata_cache": False, - "no_metadata_cache": False, }, output_file, indent=4, diff --git a/copernicusmarine/core_functions/utils.py b/copernicusmarine/core_functions/utils.py index 01d2ec36..3e08e290 100644 --- a/copernicusmarine/core_functions/utils.py +++ b/copernicusmarine/core_functions/utils.py @@ -1,20 +1,17 @@ -import asyncio +import concurrent.futures +import functools import logging -import os import pathlib import re -from datetime import datetime from importlib.metadata import version from typing import ( Any, - Awaitable, Callable, - Coroutine, Iterable, Iterator, - List, + Literal, Optional, - Tuple, + Sequence, TypeVar, Union, ) @@ -22,40 +19,30 @@ import cftime import numpy import pandas as pd +import pendulum +import pendulum.exceptions import xarray +from pendulum import DateTime from requests import PreparedRequest +from tqdm import tqdm from copernicusmarine import __version__ as copernicusmarine_version -from copernicusmarine.core_functions.environment_variables import ( - COPERNICUSMARINE_CACHE_DIRECTORY, -) +from copernicusmarine.core_functions.exceptions import 
WrongDatetimeFormat -logger = logging.getLogger("copernicus_marine_root_logger") +logger = logging.getLogger("copernicusmarine") -OVERWRITE_SHORT_OPTION = "--overwrite" -OVERWRITE_LONG_OPTION = "--overwrite-output-data" -OVERWRITE_OPTION_HELP_TEXT = ( - "If specified and if the file already exists on destination, then it will be " - "overwritten instead of creating new one with unique index." -) FORCE_DOWNLOAD_CLI_PROMPT_MESSAGE = "Do you want to proceed with download?" -USER_DEFINED_CACHE_DIRECTORY: str = COPERNICUSMARINE_CACHE_DIRECTORY -DEFAULT_CLIENT_BASE_DIRECTORY: pathlib.Path = ( - pathlib.Path(USER_DEFINED_CACHE_DIRECTORY) - if USER_DEFINED_CACHE_DIRECTORY - else pathlib.Path.home() -) / ".copernicusmarine" - -CACHE_BASE_DIRECTORY: pathlib.Path = DEFAULT_CLIENT_BASE_DIRECTORY / "cache" DATETIME_SUPPORTED_FORMATS = [ "%Y", "%Y-%m-%d", "%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S", + "%Y-%m-%dT%H:%M:%S.%f", "%Y-%m-%dT%H:%M:%S.%fZ", + "%Y-%m-%d %H:%M:%S.%f%Z", ] @@ -77,18 +64,6 @@ def get_unique_filename( return filepath -class ServiceNotSupported(Exception): - def __init__(self, service_type): - super().__init__(f"Service type {service_type} not supported.") - - -class FormatNotSupported(Exception): - def __init__(self, format_type): - super().__init__( - f"Subsetting format type {format_type} not supported yet." - ) - - _T = TypeVar("_T") _S = TypeVar("_S") @@ -108,10 +83,6 @@ def next_or_raise_exception( raise exception_to_raise from exception -def flatten(list: list[list[_T]]) -> list[_T]: - return [item for sublist in list for item in sublist] - - def construct_url_with_query_params(url, query_params: dict) -> Optional[str]: req = PreparedRequest() req.prepare_url(url, query_params) @@ -130,19 +101,41 @@ def construct_query_params_for_marine_data_store_monitoring( return query_params -class WrongDatetimeFormat(Exception): - ... - - -def datetime_parser(string: str): - if string == "now": - return datetime.now() - for format in DATETIME_SUPPORTED_FORMATS: - try: - return datetime.strptime(string, format) - except ValueError: - pass - raise WrongDatetimeFormat(string) +def datetime_parser(date: Union[str, numpy.datetime64]) -> DateTime: + if date == "now": + return pendulum.now(tz="UTC") + try: + if isinstance(date, numpy.datetime64): + date = str(date) + parsed_datetime = pendulum.parse(date) + # ignoring types because one needs to pass + # `exact=True` to `parse` method to get + # something else than `pendulum.DateTime` + return parsed_datetime # type: ignore + except pendulum.exceptions.ParserError: + pass + raise WrongDatetimeFormat(date) + + +def timestamp_parser( + timestamp: Union[int, float], unit: Literal["s", "ms"] = "ms" +) -> DateTime: + """ + Convert a timestamp in milliseconds to a pendulum DateTime object + by default. The unit can be changed to seconds by passing "s" as + the unit. 
+ """ + conversion_factor = 1 if unit == "s" else 1e3 + return pendulum.from_timestamp(timestamp / conversion_factor, tz="UTC") + + +def timestamp_or_datestring_to_datetime( + date: Union[str, int, float, numpy.datetime64] +) -> DateTime: + if isinstance(date, int) or isinstance(date, float): + return timestamp_parser(date) + else: + return datetime_parser(date) def convert_datetime64_to_netcdf_timestamp( @@ -160,40 +153,30 @@ def add_copernicusmarine_version_in_dataset_attributes( return dataset -def create_cache_directory(): - pathlib.Path(CACHE_BASE_DIRECTORY).mkdir(parents=True, exist_ok=True) - - -def delete_cache_folder(quiet: bool = False): - try: - elements = pathlib.Path(CACHE_BASE_DIRECTORY).glob("*") - files = [x for x in elements if x.is_file()] - for file in files: - os.remove(file) - if not quiet: - logger.info("Old cache successfully deleted") - except Exception as exc: - logger.warning("Error occurred while deleting old cache files") - raise exc - - -async def rolling_batch_gather( - promises: Union[List[Coroutine[Any, Any, Any]], List[Awaitable[Any]]], - per_batch: int, -) -> List[Any]: - tasks: asyncio.Queue = asyncio.Queue() - for promise in promises: - tasks.put_nowait(promise) - - async def worker(): - res = [] - while not tasks.empty(): - res.append(await tasks.get_nowait()) - - return res - - results = await asyncio.gather(*[worker() for _ in range(per_batch)]) - return [s for r in results for s in r] +# From: https://stackoverflow.com/a/46144596/20983727 +def run_concurrently( + func: Callable[..., _T], + function_arguments: Sequence[tuple[Any, ...]], + max_concurrent_requests: int, + tdqm_bar_configuration: dict = {}, +) -> list[_T]: + out = [] + with tqdm( + total=len(function_arguments), + **tdqm_bar_configuration, + ) as pbar: + with concurrent.futures.ThreadPoolExecutor( + max_workers=max_concurrent_requests + ) as executor: + future_to_url = ( + executor.submit(func, *function_argument) + for function_argument in function_arguments + ) + for future in concurrent.futures.as_completed(future_to_url): + data = future.result() + out.append(data) + pbar.update(1) + return out # Example data_path @@ -202,7 +185,7 @@ async def worker(): # https://s3.region.cloudferro.com:443/bucket/arco/product/dataset/geoChunked.zarr def parse_access_dataset_url( data_path: str, only_dataset_root_path: bool = False -) -> Tuple[str, str, str]: +) -> tuple[str, str, str]: match = re.search( r"^(http|https):\/\/([\w\-\.]+)(:[\d]+)?(\/.*)", data_path @@ -233,3 +216,44 @@ def _add_custom_query_param(params, context, **kwargs): ) return _add_custom_query_param + + +# Deprecation utils +def get_deprecated_message(old_value, preferred_value): + return ( + f"'{old_value}' has been deprecated, use '{preferred_value}' instead" + ) + + +def log_deprecated_message(old_value, preferred_value): + logger.warning(get_deprecated_message(old_value, preferred_value)) + + +def raise_both_old_and_new_value_error(old_value, new_value): + raise TypeError( + f"Received both {old_value} and {new_value} as arguments! 
" + f"{get_deprecated_message(old_value, new_value)}" + ) + + +def deprecated_python_option(**aliases: str) -> Callable: + def deco(f: Callable): + @functools.wraps(f) + def wrapper(*args, **kwargs): + rename_kwargs(f.__name__, kwargs, aliases) + return f(*args, **kwargs) + + return wrapper + + return deco + + +def rename_kwargs( + func_name: str, kwargs: dict[str, Any], aliases: dict[str, str] +): + for alias, new in aliases.items(): + if alias in kwargs: + if new in kwargs: + raise_both_old_and_new_value_error(alias, new) + log_deprecated_message(alias, new) + kwargs[new] = kwargs.pop(alias) diff --git a/copernicusmarine/core_functions/versions_verifier.py b/copernicusmarine/core_functions/versions_verifier.py index 5878db6a..e5ced404 100644 --- a/copernicusmarine/core_functions/versions_verifier.py +++ b/copernicusmarine/core_functions/versions_verifier.py @@ -10,7 +10,7 @@ construct_query_params_for_marine_data_store_monitoring, ) -logger = logging.getLogger("copernicus_marine_root_logger") +logger = logging.getLogger("copernicusmarine") class VersionVerifier: @@ -43,17 +43,25 @@ def _check_version(function_name: str, staging: bool): function_name ]: required_version = marine_data_store_versions[service] - if not semver.Version.parse(client_version).match( - required_version - ): - logger.debug( - f"Client version {client_version} is not compatible with " - f"{service}. Service needs version {required_version}." - ) - logger.error( - f"Client version {client_version} is not compatible with current " - "backend service. Please update to the latest client version." + try: + if not semver.Version.parse(client_version).match( + required_version + ): + logger.debug( + f"Client version {client_version} is not compatible with " + f"{service}. Service needs version {required_version}." + ) + logger.error( + f"Client version {client_version} is not " + f"compatible with current backend service. " + f"Please update to the latest client version." + ) + except ValueError: + logger.warning( + f"Using a pre-release or a non-official version " + f"of the client. 
Client version: {client_version}" ) + return @staticmethod def _get_client_required_versions( diff --git a/copernicusmarine/download_functions/common_download.py b/copernicusmarine/download_functions/common_download.py index 4316354c..84d52ef3 100644 --- a/copernicusmarine/download_functions/common_download.py +++ b/copernicusmarine/download_functions/common_download.py @@ -1,6 +1,5 @@ import logging import pathlib -from typing import Optional import xarray import zarr @@ -11,18 +10,17 @@ NetCDFCompressionNotAvailable, ) -logger = logging.getLogger("copernicus_marine_root_logger") +logger = logging.getLogger("copernicusmarine") def get_delayed_download( dataset: xarray.Dataset, output_path: pathlib.Path, - netcdf_compression_enabled: bool, - netcdf_compression_level: Optional[int], + netcdf_compression_level: int, netcdf3_compatible: bool, ): if output_path.suffix == ".zarr": - if netcdf_compression_enabled: + if netcdf_compression_level > 0: raise NetCDFCompressionNotAvailable( "--netcdf-compression-level option cannot be used when " "writing to ZARR" ) @@ -32,7 +30,6 @@ delayed = _prepare_download_dataset_as_netcdf( dataset, output_path, - netcdf_compression_enabled, netcdf_compression_level, netcdf3_compatible, ) @@ -52,21 +49,19 @@ def download_delayed_dataset( def _prepare_download_dataset_as_netcdf( dataset: xarray.Dataset, output_path: pathlib.Path, - netcdf_compression_enabled: bool, - netcdf_compression_level: Optional[int], + netcdf_compression_level: int, netcdf3_compatible: bool, ): logger.debug("Writing dataset to NetCDF") for coord in dataset.coords: dataset[coord].encoding["_FillValue"] = None - if netcdf_compression_enabled: - complevel = ( - 1 if netcdf_compression_level is None else netcdf_compression_level + if netcdf_compression_level > 0: + logger.info( + f"NetCDF compression enabled with level {netcdf_compression_level}" ) - logger.info(f"NetCDF compression enabled with level {complevel}") comp = dict( zlib=True, - complevel=complevel, + complevel=netcdf_compression_level, contiguous=False, shuffle=True, ) @@ -81,6 +76,7 @@ def _prepare_download_dataset_as_netcdf( compute=False, encoding=encoding, format=xarray_download_format, + engine="h5netcdf", ) diff --git a/copernicusmarine/download_functions/download_arco_series.py b/copernicusmarine/download_functions/download_arco_series.py index 92d83b25..703b4301 100644 --- a/copernicusmarine/download_functions/download_arco_series.py +++ b/copernicusmarine/download_functions/download_arco_series.py @@ -6,8 +6,13 @@ import pandas import xarray +from copernicusmarine.catalogue_parser.models import CopernicusMarineService from copernicusmarine.catalogue_parser.request_structure import SubsetRequest from copernicusmarine.core_functions import custom_open_zarr +from copernicusmarine.core_functions.models import ( + CoordinatesSelectionMethod, + ResponseSubset, +) from copernicusmarine.core_functions.utils import ( FORCE_DOWNLOAD_CLI_PROMPT_MESSAGE, add_copernicusmarine_version_in_dataset_attributes, @@ -24,17 +29,18 @@ LongitudeParameters, TemporalParameters, ) -from copernicusmarine.download_functions.subset_xarray import ( - date_to_datetime, - subset, -) +from copernicusmarine.download_functions.subset_xarray import subset from copernicusmarine.download_functions.utils import ( FileFormat, + get_approximation_size_data_downloaded, + get_approximation_size_final_result, + get_dataset_coordinates_extent, get_filename, - get_formatted_dataset_size_estimation, +
get_message_formatted_dataset_size_estimation, + timestamp_or_datestring_to_datetime, ) -logger = logging.getLogger("copernicus_marine_root_logger") +logger = logging.getLogger("copernicusmarine") def _rechunk(dataset: xarray.Dataset) -> xarray.Dataset: @@ -66,18 +72,20 @@ def download_dataset( geographical_parameters: GeographicalParameters, temporal_parameters: TemporalParameters, depth_parameters: DepthParameters, + coordinates_selection_method: CoordinatesSelectionMethod, dataset_url: str, output_directory: pathlib.Path, output_filename: Optional[str], file_format: FileFormat, variables: Optional[list[str]], disable_progress_bar: bool, - netcdf_compression_enabled: bool, - netcdf_compression_level: Optional[int], + netcdf_compression_level: int, netcdf3_compatible: bool, - force_download: bool = False, - overwrite_output_data: bool = False, -): + service: CopernicusMarineService, + dry_run: bool, + force_download: bool, + overwrite_output_data: bool, +) -> ResponseSubset: dataset = _rechunk( open_dataset_from_arco_series( username=username, @@ -87,6 +95,7 @@ def download_dataset( geographical_parameters=geographical_parameters, temporal_parameters=temporal_parameters, depth_parameters=depth_parameters, + coordinates_selection_method=coordinates_selection_method, chunks="auto", ) ) @@ -95,15 +104,20 @@ def download_dataset( filename = get_filename(output_filename, dataset, dataset_id, file_format) output_path = pathlib.Path(output_directory, filename) + final_result_size_estimation = get_approximation_size_final_result(dataset) + data_needed_approximation = get_approximation_size_data_downloaded( + dataset, service + ) + message_formatted_dataset_size_estimation = ( + get_message_formatted_dataset_size_estimation( + final_result_size_estimation, data_needed_approximation + ) + ) if not output_directory.is_dir(): pathlib.Path.mkdir(output_directory, parents=True) - if not force_download: logger.info(dataset) - logger.info( - "Estimated size of the dataset file is " - f"{get_formatted_dataset_size_estimation(dataset)}." - ) + logger.info(message_formatted_dataset_size_estimation) click.confirm( FORCE_DOWNLOAD_CLI_PROMPT_MESSAGE, default=True, @@ -111,27 +125,32 @@ def download_dataset( err=True, ) else: - logger.info( - "Estimated size of the dataset file is " - f"{get_formatted_dataset_size_estimation(dataset)}." - ) - logger.info("Writing to local storage. Please wait...") + logger.info(message_formatted_dataset_size_estimation) output_path = get_unique_filename( filepath=output_path, overwrite_option=overwrite_output_data ) + response = ResponseSubset( + output=output_path, + size=final_result_size_estimation, + data_needed=data_needed_approximation, + coordinates_extent=get_dataset_coordinates_extent(dataset), + ) + if dry_run: + return response + + logger.info("Writing to local storage. 
Please wait...") delayed = get_delayed_download( dataset, output_path, - netcdf_compression_enabled, netcdf_compression_level, netcdf3_compatible, ) download_delayed_dataset(delayed, disable_progress_bar) logger.info(f"Successfully downloaded to {output_path}") - return output_path + return response def download_zarr( @@ -140,8 +159,9 @@ def download_zarr( subset_request: SubsetRequest, dataset_id: str, disable_progress_bar: bool, - dataset_valid_start_date: Optional[Union[str, int]], -): + dataset_valid_start_date: Optional[Union[str, int, float]], + service: CopernicusMarineService, +) -> ResponseSubset: geographical_parameters = GeographicalParameters( latitude_parameters=LatitudeParameters( minimum_latitude=subset_request.minimum_latitude, @@ -154,7 +174,9 @@ def download_zarr( ) start_datetime = subset_request.start_datetime if dataset_valid_start_date: - minimum_start_date = date_to_datetime(dataset_valid_start_date) + minimum_start_date = timestamp_or_datestring_to_datetime( + dataset_valid_start_date + ) if ( not subset_request.start_datetime or subset_request.start_datetime < minimum_start_date @@ -168,7 +190,7 @@ def download_zarr( depth_parameters = DepthParameters( minimum_depth=subset_request.minimum_depth, maximum_depth=subset_request.maximum_depth, - vertical_dimension_as_originally_produced=subset_request.vertical_dimension_as_originally_produced, # noqa + vertical_dimension_output=subset_request.vertical_dimension_output, ) dataset_url = str(subset_request.dataset_url) output_directory = ( @@ -179,13 +201,14 @@ def download_zarr( variables = subset_request.variables force_download = subset_request.force_download - output_path = download_dataset( + response = download_dataset( username=username, password=password, dataset_id=dataset_id, geographical_parameters=geographical_parameters, temporal_parameters=temporal_parameters, depth_parameters=depth_parameters, + coordinates_selection_method=subset_request.coordinates_selection_method, dataset_url=dataset_url, output_directory=output_directory, output_filename=subset_request.output_filename, @@ -194,11 +217,12 @@ def download_zarr( disable_progress_bar=disable_progress_bar, force_download=force_download, overwrite_output_data=subset_request.overwrite_output_data, - netcdf_compression_enabled=subset_request.netcdf_compression_enabled, netcdf_compression_level=subset_request.netcdf_compression_level, netcdf3_compatible=subset_request.netcdf3_compatible, + dry_run=subset_request.dry_run, + service=service, ) - return output_path + return response def open_dataset_from_arco_series( @@ -209,6 +233,7 @@ def open_dataset_from_arco_series( geographical_parameters: GeographicalParameters, temporal_parameters: TemporalParameters, depth_parameters: DepthParameters, + coordinates_selection_method: CoordinatesSelectionMethod, chunks=Optional[Literal["auto"]], ) -> xarray.Dataset: dataset = custom_open_zarr.open_zarr( @@ -222,6 +247,7 @@ def open_dataset_from_arco_series( geographical_parameters=geographical_parameters, temporal_parameters=temporal_parameters, depth_parameters=depth_parameters, + coordinates_selection_method=coordinates_selection_method, ) return dataset @@ -234,6 +260,7 @@ def read_dataframe_from_arco_series( geographical_parameters: GeographicalParameters, temporal_parameters: TemporalParameters, depth_parameters: DepthParameters, + coordinates_selection_method: CoordinatesSelectionMethod, chunks: Optional[Literal["auto"]], ) -> pandas.DataFrame: dataset = open_dataset_from_arco_series( @@ -244,6 +271,7 @@ def 
read_dataframe_from_arco_series( geographical_parameters=geographical_parameters, temporal_parameters=temporal_parameters, depth_parameters=depth_parameters, + coordinates_selection_method=coordinates_selection_method, chunks=chunks, ) return dataset.to_dataframe() diff --git a/copernicusmarine/download_functions/download_original_files.py b/copernicusmarine/download_functions/download_original_files.py index 2aa2b2e0..89e62228 100644 --- a/copernicusmarine/download_functions/download_original_files.py +++ b/copernicusmarine/download_functions/download_original_files.py @@ -1,57 +1,50 @@ -import datetime import logging import os import pathlib import re from itertools import chain -from multiprocessing.pool import ThreadPool from pathlib import Path -from typing import Iterator, List, Optional, Tuple +from typing import Optional import click +import pendulum from botocore.client import ClientError -from numpy import append, arange +from pendulum import DateTime from tqdm import tqdm from copernicusmarine.catalogue_parser.request_structure import ( GetRequest, overload_regex_with_additionnal_filter, ) -from copernicusmarine.core_functions.environment_variables import ( - COPERNICUSMARINE_GET_CONCURRENT_DOWNLOADS, -) +from copernicusmarine.core_functions.models import FileGet, ResponseGet from copernicusmarine.core_functions.sessions import ( get_configured_boto3_session, ) from copernicusmarine.core_functions.utils import ( FORCE_DOWNLOAD_CLI_PROMPT_MESSAGE, - flatten, get_unique_filename, parse_access_dataset_url, + run_concurrently, + timestamp_parser, ) -logger = logging.getLogger("copernicus_marine_root_logger") -blank_logger = logging.getLogger("copernicus_marine_blank_logger") - -NUMBER_THREADS = ( - int(COPERNICUSMARINE_GET_CONCURRENT_DOWNLOADS) - if COPERNICUSMARINE_GET_CONCURRENT_DOWNLOADS - else None -) +logger = logging.getLogger("copernicusmarine") +blank_logger = logging.getLogger("copernicusmarine_blank_logger") def download_original_files( username: str, password: str, get_request: GetRequest, + max_concurrent_requests: int, disable_progress_bar: bool, create_file_list: Optional[str], -) -> list[pathlib.Path]: +) -> ResponseGet: files_not_found: list[str] = [] filenames_in_sync_ignored: list[str] = [] total_size: float = 0.0 sizes: list[float] = [] - last_modified_datetimes: list[datetime.datetime] = [] + last_modified_datetimes: list[DateTime] = [] filenames_in: list[str] = [] if get_request.direct_download: ( @@ -88,6 +81,7 @@ def download_original_files( get_request.sync, create_file_list, pathlib.Path(get_request.output_directory), + disable_progress_bar, only_list_root_path=get_request.index_parts, overwrite=get_request.overwrite_output_data, ) @@ -108,7 +102,7 @@ def download_original_files( elif not get_request.direct_download or len(files_not_found) == len( get_request.direct_download ): - return [] + return ResponseGet(files=[]) message = _create_information_message_before_download( filenames_in, sizes, last_modified_datetimes, total_size ) @@ -122,6 +116,22 @@ def download_original_files( else False ), ) + response = ResponseGet( + files=[ + FileGet( + url=s3_url, + size=size_to_MB(size), + last_modified=last_modified.to_iso8601_string(), + output=filename_out, + ) + for s3_url, size, last_modified, filename_out in zip( + filenames_in, + sizes, + last_modified_datetimes, + filenames_out, + ) + ] + ) if not get_request.force_download and total_size: logger.info(message) if get_request.show_outputnames: @@ -148,7 +158,7 @@ def download_original_files( if not 
total_size: logger.info("No data to download") if not files_to_delete: - return [] + return ResponseGet(files=[]) if not get_request.force_download: click.confirm( FORCE_DOWNLOAD_CLI_PROMPT_MESSAGE, @@ -162,14 +172,18 @@ def download_original_files( if get_request.sync_delete and files_to_delete: for file_to_delete in files_to_delete: file_to_delete.unlink() - return download_files( + if get_request.dry_run: + return response + download_files( username, endpoint, bucket, filenames_in, filenames_out, + max_concurrent_requests, disable_progress_bar, ) + return response def _get_files_to_delete_with_sync( @@ -194,64 +208,43 @@ def download_files( username: str, endpoint_url: str, bucket: str, - filenames_in: List[str], - filenames_out: List[pathlib.Path], + filenames_in: list[str], + filenames_out: list[pathlib.Path], + max_concurrent_requests: int, disable_progress_bar: bool, -) -> list[pathlib.Path]: - nfiles_per_process, nfiles = 1, len(filenames_in) - indexes = append( - arange(0, nfiles, nfiles_per_process, dtype=int), - nfiles, - ) - groups_in_files = [ - filenames_in[indexes[i] : indexes[i + 1]] - for i in range(len(indexes) - 1) - ] - groups_out_files = [ - filenames_out[indexes[i] : indexes[i + 1]] - for i in range(len(indexes) - 1) - ] - - for groups_out_file in groups_out_files: - parent_dir = Path(groups_out_file[0]).parent +) -> None: + for filename_out in filenames_out: + parent_dir = Path(filename_out).parent if not parent_dir.is_dir(): pathlib.Path.mkdir(parent_dir, parents=True) - - # TODO: v2 It would be proably better to use an async approach - # TODO: v2 probably better to use an argument for the number - # of threads instead of using the environment variable - if NUMBER_THREADS is None or NUMBER_THREADS: - pool = ThreadPool(processes=NUMBER_THREADS) - download_summary_list: Iterator[List[Path]] = pool.imap( - _download_files, - zip( - [username] * len(groups_in_files), - [endpoint_url] * len(groups_in_files), - [bucket] * len(groups_in_files), - groups_in_files, - groups_out_files, - ), + if max_concurrent_requests: + run_concurrently( + _download_one_file, + [ + (username, endpoint_url, bucket, in_file, out_file) + for in_file, out_file in zip( + filenames_in, + filenames_out, + ) + ], + max_concurrent_requests, + tdqm_bar_configuration={ + "disable": disable_progress_bar, + "desc": "Downloading files", + }, ) else: logger.info("Downloading files one by one...") - download_summary_list = map( - _download_files, - zip( - [username] * len(groups_in_files), - [endpoint_url] * len(groups_in_files), - [bucket] * len(groups_in_files), - groups_in_files, - groups_out_files, - ), - ) - download_summary = list( - tqdm( - download_summary_list, - total=len(groups_in_files), + with tqdm( + total=len(filenames_in), disable=disable_progress_bar, - ) - ) - return flatten(download_summary) + desc="Downloading files", + ) as pbar: + for in_file, out_file in zip(filenames_in, filenames_out): + _download_one_file( + username, endpoint_url, bucket, in_file, out_file + ) + pbar.update(1) def _download_header( @@ -262,14 +255,15 @@ def _download_header( sync: bool, create_file_list: Optional[str], directory_out: pathlib.Path, + disable_progress_bar: bool, only_list_root_path: bool = False, overwrite: bool = False, ) -> Optional[ - Tuple[ - Tuple[str, str], + tuple[ + tuple[str, str], list[str], - List[float], - List[datetime.datetime], + list[float], + list[DateTime], float, list[str], ] @@ -281,15 +275,21 @@ def _download_header( filenames: list[str] = [] sizes: list[float] = [] total_size 
= 0.0 - last_modified_datetimes: list[datetime.datetime] = [] + last_modified_datetimes: list[DateTime] = [] etags: list[str] = [] raw_filenames = _list_files_on_marine_data_lake_s3( - username, endpoint_url, bucket, path, not only_list_root_path + username, + endpoint_url, + bucket, + path, + not only_list_root_path, + disable_progress_bar, ) filenames_without_sync = [] for filename, size, last_modified_datetime, etag in raw_filenames: if not regex or re.search(regex, filename): filenames_without_sync.append(filename) + last_modified_datetime = pendulum.instance(last_modified_datetime) if not sync or _check_needs_to_be_synced( filename, size, last_modified_datetime, directory_out ): @@ -341,11 +341,11 @@ def _download_header_for_direct_download( sync: bool, directory_out: pathlib.Path, username: str, -) -> Tuple[ - Tuple[str, str], - List[str], - List[float], - List[datetime.datetime], +) -> tuple[ + tuple[str, str], + list[str], + list[float], + list[DateTime], float, list[str], list[str], @@ -410,7 +410,7 @@ def _download_header_for_direct_download( def _check_needs_to_be_synced( filename: str, size: int, - last_modified_datetime: datetime.datetime, + last_modified_datetime: DateTime, directory_out: pathlib.Path, ) -> bool: filename_out = _local_path_from_s3_url(filename, directory_out) @@ -421,16 +421,19 @@ def _check_needs_to_be_synced( if file_stats.st_size != size: return True else: - last_created_datetime_out = datetime.datetime.fromtimestamp( - file_stats.st_ctime, tz=datetime.timezone.utc + last_created_datetime_out = timestamp_parser( + file_stats.st_mtime, unit="s" ) + # boto3.s3_resource.Object.last_modified is without microsecond + # boto3.paginate s3_object["LastModified"] is with microsecond + last_modified_datetime = last_modified_datetime.set(microsecond=0) return last_modified_datetime > last_created_datetime_out def _create_information_message_before_download( filenames: list[str], sizes: list[float], - last_modified_datetimes: list[datetime.datetime], + last_modified_datetimes: list[DateTime], total_size: float, ) -> str: message = "You requested the download of the following files:\n" @@ -438,13 +441,7 @@ def _create_information_message_before_download( filenames[:20], sizes[:20], last_modified_datetimes[:20] ): message += str(filename) - datetime_iso = re.sub( - r"\+00:00$", - "Z", - last_modified_datetime.astimezone(datetime.timezone.utc).isoformat( - timespec="seconds" - ), - ) + datetime_iso = last_modified_datetime.in_tz("UTC").to_iso8601_string() message += f" - {format_file_size(float(size))} - {datetime_iso}\n" if len(filenames) > 20: message += f"Printed 20 out of {len(filenames)} files\n" @@ -466,8 +463,8 @@ def _list_files_on_marine_data_lake_s3( bucket: str, prefix: str, recursive: bool, -) -> list[tuple[str, int, datetime.datetime, str]]: - + disable_progress_bar: bool, +) -> list[tuple[str, int, DateTime, str]]: s3_client, _ = get_configured_boto3_session( endpoint_url, ["ListObjects"], username ) @@ -478,12 +475,14 @@ def _list_files_on_marine_data_lake_s3( Prefix=prefix, Delimiter="/" if not recursive else "", ) - + logger.info("Listing files on remote server...") s3_objects = chain( - *map(lambda page: page.get("Contents", []), page_iterator) + *map( + lambda page: page.get("Contents", []), + tqdm(page_iterator, disable=disable_progress_bar), + ) ) - - files_already_found: list[tuple[str, int, datetime.datetime, str]] = [] + files_already_found: list[tuple[str, int, DateTime, str]] = [] for s3_object in s3_objects: files_already_found.append( ( @@ 
-498,7 +497,7 @@ def _list_files_on_marine_data_lake_s3( def _get_file_size_and_last_modified( endpoint_url: str, bucket: str, file_in: str, username: str -) -> Optional[Tuple[int, datetime.datetime]]: +) -> Optional[tuple[int, DateTime]]: s3_client, _ = get_configured_boto3_session( endpoint_url, ["HeadObject"], username ) @@ -508,7 +507,9 @@ def _get_file_size_and_last_modified( Bucket=bucket, Key=file_in.replace(f"s3://{bucket}/", ""), ) - return s3_object["ContentLength"], s3_object["LastModified"] + return s3_object["ContentLength"], pendulum.instance( + s3_object["LastModified"] + ) except ClientError as e: if "404" in str(e): logger.warning( @@ -519,55 +520,38 @@ def _get_file_size_and_last_modified( raise e -def _download_files( - tuple_original_files_filename: Tuple[ - str, str, str, list[str], list[pathlib.Path] - ], -) -> list[pathlib.Path]: - ( - username, +def _download_one_file( + username, + endpoint_url: str, + bucket: str, + file_in: str, + file_out: pathlib.Path, +) -> None: + s3_client, s3_resource = get_configured_boto3_session( endpoint_url, + ["GetObject", "HeadObject"], + username, + return_ressources=True, + ) + last_modified_date_epoch = s3_resource.Object( + bucket, file_in.replace(f"s3://{bucket}/", "") + ).last_modified.timestamp() + + s3_client.download_file( bucket, - filenames_in, - filenames_out, - ) = tuple_original_files_filename - - def _original_files_file_download( - endpoint_url: str, bucket: str, file_in: str, file_out: pathlib.Path - ) -> pathlib.Path: - """ - Download ONE file and return a string of the result - """ - s3_client, s3_resource = get_configured_boto3_session( - endpoint_url, - ["GetObject", "HeadObject"], - username, - return_ressources=True, - ) - last_modified_date_epoch = s3_resource.Object( - bucket, file_in.replace(f"s3://{bucket}/", "") - ).last_modified.timestamp() - - s3_client.download_file( - bucket, - file_in.replace(f"s3://{bucket}/", ""), - file_out, - ) + file_in.replace(f"s3://{bucket}/", ""), + file_out, + ) + try: os.utime( file_out, (last_modified_date_epoch, last_modified_date_epoch) ) - - return file_out - - download_summary = [] - for file_in, file_out in zip(filenames_in, filenames_out): - download_summary.append( - _original_files_file_download( - endpoint_url, bucket, file_in, file_out - ) + except PermissionError: + logger.warning( + f"Permission to modify the last modified date " + f"of the file {file_out} is denied." ) - return download_summary # ///////////////////////////// @@ -629,3 +613,7 @@ def format_file_size( size /= step return ("%." 
+ str(decimals) + "f %s") % (size, largest_unit) + + +def size_to_MB(size: float) -> float: + return size / 1024**2 diff --git a/copernicusmarine/download_functions/subset_parameters.py b/copernicusmarine/download_functions/subset_parameters.py index 343e81a8..1da5d7ed 100644 --- a/copernicusmarine/download_functions/subset_parameters.py +++ b/copernicusmarine/download_functions/subset_parameters.py @@ -1,7 +1,13 @@ from dataclasses import dataclass, field -from datetime import datetime from typing import Optional +from pendulum import DateTime + +from copernicusmarine.core_functions.models import ( + DEFAULT_VERTICAL_DIMENSION_OUTPUT, + VerticalDimensionOutput, +) + @dataclass class LatitudeParameters: @@ -27,12 +33,14 @@ class GeographicalParameters: @dataclass class TemporalParameters: - start_datetime: Optional[datetime] = None - end_datetime: Optional[datetime] = None + start_datetime: Optional[DateTime] = None + end_datetime: Optional[DateTime] = None @dataclass class DepthParameters: minimum_depth: Optional[float] = None maximum_depth: Optional[float] = None - vertical_dimension_as_originally_produced: bool = True + vertical_dimension_output: VerticalDimensionOutput = ( + DEFAULT_VERTICAL_DIMENSION_OUTPUT + ) diff --git a/copernicusmarine/download_functions/subset_xarray.py b/copernicusmarine/download_functions/subset_xarray.py index 18791cae..fd1ad02d 100644 --- a/copernicusmarine/download_functions/subset_xarray.py +++ b/copernicusmarine/download_functions/subset_xarray.py @@ -1,29 +1,28 @@ import logging import typing -from datetime import datetime from decimal import Decimal from typing import List, Literal, Optional, Union import numpy import xarray -from pandas import Timestamp +from pendulum import DateTime -from copernicusmarine.catalogue_parser.catalogue_parser import ( +from copernicusmarine.catalogue_parser.models import ( CopernicusMarineDatasetServiceType, ) from copernicusmarine.catalogue_parser.request_structure import ( - DatasetTimeAndGeographicalSubset, + DatasetTimeAndSpaceSubset, ) from copernicusmarine.core_functions import custom_open_zarr from copernicusmarine.core_functions.exceptions import ( CoordinatesOutOfDatasetBounds, MinimumLongitudeGreaterThanMaximumLongitude, + ServiceNotSupported, VariableDoesNotExistInTheDataset, ) -from copernicusmarine.core_functions.models import SubsetMethod +from copernicusmarine.core_functions.models import CoordinatesSelectionMethod from copernicusmarine.core_functions.utils import ( - ServiceNotSupported, - convert_datetime64_to_netcdf_timestamp, + timestamp_or_datestring_to_datetime, ) from copernicusmarine.download_functions.subset_parameters import ( DepthParameters, @@ -33,7 +32,7 @@ TemporalParameters, ) -logger = logging.getLogger("copernicus_marine_root_logger") +logger = logging.getLogger("copernicusmarine") COORDINATES_LABEL = { "latitude": ["latitude", "nav_lat", "x", "lat"], @@ -42,18 +41,136 @@ "depth": ["depth", "deptht", "elevation"], } +NETCDF_CONVENTION_VARIABLE_ATTRIBUTES = [ + "standard_name", + "long_name", + "units", + "unit_long", + "valid_min", + "valid_max", +] +NETCDF_CONVENTION_COORDINATE_ATTRIBUTES = [ + "standard_name", + "long_name", + "units", + "unit_long", + "axis", +] +NETCDF_CONVENTION_DATASET_ATTRIBUTES = [ + "title", + "institution", + "source", + "history", + "references", + "comment", + "Conventions", + "producer", + "credit", + "contact", +] + + +@typing.no_type_check +def _choose_extreme_point( + dataset: xarray.Dataset, + coord_label: str, + actual_extreme: Union[float, DateTime], + 
method: Literal["pad", "backfill", "nearest"], +) -> Union[float, DateTime]: + if ( + coord_label == "time" + and actual_extreme + >= timestamp_or_datestring_to_datetime( + dataset[coord_label].values.min() + ).naive() + and actual_extreme + <= timestamp_or_datestring_to_datetime( + dataset[coord_label].values.max() + ).naive() + ): + external_point = dataset.sel( + {coord_label: actual_extreme}, method=method + )[coord_label].values + external_point = timestamp_or_datestring_to_datetime( + external_point + ).naive() + elif ( + coord_label != "time" + and actual_extreme > dataset[coord_label].min() + and actual_extreme < dataset[coord_label].max() + ): + external_point = dataset.sel( + {coord_label: actual_extreme}, method=method + )[coord_label].values + else: + external_point = actual_extreme + return external_point + + +def _enlarge_selection( + dataset: xarray.Dataset, + coord_label: str, + coord_selection: slice, +) -> slice: + external_minimum = _choose_extreme_point( + dataset, coord_label, coord_selection.start, "pad" + ) + + external_maximum = _choose_extreme_point( + dataset, coord_label, coord_selection.stop, "backfill" + ) + + return slice(external_minimum, external_maximum) + + +def _nearest_selection( + dataset: xarray.Dataset, + coord_label: str, + coord_selection: slice, +) -> slice: + external_minimum = _choose_extreme_point( + dataset, coord_label, coord_selection.start, "nearest" + ) + + external_maximum = _choose_extreme_point( + dataset, coord_label, coord_selection.stop, "nearest" + ) + + return slice(external_minimum, external_maximum) + def _dataset_custom_sel( dataset: xarray.Dataset, coord_type: Literal["latitude", "longitude", "depth", "time"], - coord_selection: Union[float, slice, datetime, None], - method: Union[str, None] = None, + coord_selection: Union[float, slice, DateTime, None], + coordinates_selection_method: CoordinatesSelectionMethod, ) -> xarray.Dataset: for coord_label in COORDINATES_LABEL[coord_type]: if coord_label in dataset.sizes: - tmp_dataset = dataset.sel( - {coord_label: coord_selection}, method=method - ) + if coordinates_selection_method == "outside": + if ( + isinstance(coord_selection, slice) + and coord_selection.stop is not None + ): + coord_selection = _enlarge_selection( + dataset, coord_label, coord_selection + ) + if coordinates_selection_method == "nearest": + if ( + isinstance(coord_selection, slice) + and coord_selection.stop is not None + ): + coord_selection = _nearest_selection( + dataset, coord_label, coord_selection + ) + if isinstance(coord_selection, slice): + tmp_dataset = dataset.sel( + {coord_label: coord_selection}, method=None + ) + else: + tmp_dataset = dataset.sel( + {coord_label: coord_selection}, method="nearest" + ) if tmp_dataset.coords[coord_label].size == 0 or ( coord_label not in tmp_dataset.sizes ): @@ -80,8 +197,8 @@ def _dataset_custom_sel( def get_size_of_coordinate_subset( dataset: xarray.Dataset, coordinate: str, - minimum: Optional[Union[float, datetime]], - maximum: Optional[Union[float, datetime]], + minimum: Optional[Union[float, DateTime]], + maximum: Optional[Union[float, DateTime]], ) -> int: for label in COORDINATES_LABEL[coordinate]: if label in dataset.sizes: @@ -127,6 +244,7 @@ def _shift_longitude_dimension( def _latitude_subset( dataset: xarray.Dataset, latitude_parameters: LatitudeParameters, + coordinates_selection_method: CoordinatesSelectionMethod, ) -> xarray.Dataset: minimum_latitude = latitude_parameters.minimum_latitude maximum_latitude = latitude_parameters.maximum_latitude @@ 
-136,11 +254,11 @@ def _latitude_subset( if minimum_latitude == maximum_latitude else slice(minimum_latitude, maximum_latitude) ) - latitude_method = ( - "nearest" if minimum_latitude == maximum_latitude else None - ) dataset = _dataset_custom_sel( - dataset, "latitude", latitude_selection, latitude_method + dataset, + "latitude", + latitude_selection, + coordinates_selection_method, ) return dataset @@ -149,10 +267,10 @@ def _latitude_subset( def _longitude_subset( dataset: xarray.Dataset, longitude_parameters: LongitudeParameters, + coordinates_selection_method: CoordinatesSelectionMethod, ) -> xarray.Dataset: minimum_longitude = longitude_parameters.minimum_longitude maximum_longitude = longitude_parameters.maximum_longitude - longitude_method = None if minimum_longitude is not None or maximum_longitude is not None: if minimum_longitude is not None and maximum_longitude is not None: if minimum_longitude > maximum_longitude: @@ -162,9 +280,6 @@ def _longitude_subset( ) if maximum_longitude - minimum_longitude >= 360: longitude_selection: Union[float, slice, None] = None - elif minimum_longitude == maximum_longitude: - longitude_selection = longitude_modulus(minimum_longitude) - longitude_method = "nearest" else: minimum_longitude_modulus = longitude_modulus( minimum_longitude @@ -174,8 +289,15 @@ def _longitude_subset( ) if maximum_longitude_modulus < minimum_longitude_modulus: maximum_longitude_modulus += 360 + if coordinates_selection_method == "outside": + minimum_longitude_modulus = _choose_extreme_point( + dataset, + "longitude", + minimum_longitude_modulus, + "pad", + ) dataset = _shift_longitude_dimension( - dataset, minimum_longitude_modulus + dataset, minimum_longitude_modulus # type: ignore ) longitude_selection = slice( minimum_longitude_modulus, @@ -186,7 +308,10 @@ def _longitude_subset( if longitude_selection is not None: dataset = _dataset_custom_sel( - dataset, "longitude", longitude_selection, longitude_method + dataset, + "longitude", + longitude_selection, + coordinates_selection_method, ) return dataset @@ -194,18 +319,29 @@ def _longitude_subset( def _temporal_subset( dataset: xarray.Dataset, temporal_parameters: TemporalParameters, + coordinates_selection_method: CoordinatesSelectionMethod, ) -> xarray.Dataset: - start_datetime = temporal_parameters.start_datetime - end_datetime = temporal_parameters.end_datetime + start_datetime = ( + temporal_parameters.start_datetime.in_tz("UTC").naive() + if temporal_parameters.start_datetime + else temporal_parameters.start_datetime + ) + end_datetime = ( + temporal_parameters.end_datetime.in_tz("UTC").naive() + if temporal_parameters.end_datetime + else temporal_parameters.end_datetime + ) if start_datetime is not None or end_datetime is not None: temporal_selection = ( start_datetime if start_datetime == end_datetime else slice(start_datetime, end_datetime) ) - temporal_method = "nearest" if start_datetime == end_datetime else None dataset = _dataset_custom_sel( - dataset, "time", temporal_selection, temporal_method + dataset, + "time", + temporal_selection, + coordinates_selection_method, ) return dataset @@ -213,6 +349,7 @@ def _temporal_subset( def _depth_subset( dataset: xarray.Dataset, depth_parameters: DepthParameters, + coordinates_selection_method: CoordinatesSelectionMethod, ) -> xarray.Dataset: def convert_elevation_to_depth(dataset: xarray.Dataset): if "elevation" in dataset.sizes: @@ -237,7 +374,7 @@ def update_elevation_attributes(dataset: xarray.Dataset): dataset["elevation"].attrs = attrs return dataset - if 
depth_parameters.vertical_dimension_as_originally_produced: + if depth_parameters.vertical_dimension_output == "depth": dataset = convert_elevation_to_depth(dataset) else: dataset = update_elevation_attributes(dataset) @@ -261,9 +398,11 @@ def update_elevation_attributes(dataset: xarray.Dataset): if minimum_depth == maximum_depth else slice(minimum_depth, maximum_depth) ) - depth_method = "nearest" if minimum_depth == maximum_depth else None dataset = _dataset_custom_sel( - dataset, "depth", depth_selection, depth_method + dataset, + "depth", + depth_selection, + coordinates_selection_method, ) return dataset @@ -280,6 +419,35 @@ def _get_variable_name_from_standard_name( return None +def _adequate_dtypes_of_valid_minmax( + dataset: xarray.Dataset, variable: str +) -> xarray.Dataset: + dataset[variable].attrs["valid_min"] = numpy.array( + [dataset[variable].attrs["valid_min"]], + dtype=dataset[variable].encoding["dtype"], + )[0] + dataset[variable].attrs["valid_max"] = numpy.array( + [dataset[variable].attrs["valid_max"]], + dtype=dataset[variable].encoding["dtype"], + )[0] + return dataset + + +def _update_variables_attributes( + dataset: xarray.Dataset, variables: List[str] +) -> xarray.Dataset: + for variable in variables: + dataset[variable].attrs = _filter_attributes( + dataset[variable].attrs, NETCDF_CONVENTION_VARIABLE_ATTRIBUTES + ) + if ( + "valid_min" in dataset[variable].attrs + and "valid_max" in dataset[variable].attrs + ): + _adequate_dtypes_of_valid_minmax(dataset, variable) + return dataset + + def _variables_subset( dataset: xarray.Dataset, variables: List[str] ) -> xarray.Dataset: @@ -298,10 +466,16 @@ def _variables_subset( ) else: raise VariableDoesNotExistInTheDataset(variable) - return dataset[numpy.array(dataset_variables_filter)] + dataset = dataset[numpy.array(dataset_variables_filter)] + return _update_variables_attributes(dataset, dataset_variables_filter) + + +def _filter_attributes(attributes: dict, attributes_to_keep: List[str]): + attributes_that_exist = set(attributes).intersection(attributes_to_keep) + return {key: attributes[key] for key in attributes_that_exist} -def _update_dataset_coordinate_valid_minmax_attributes( +def _update_dataset_coordinate_attributes( dataset: xarray.Dataset, ) -> xarray.Dataset: for coordinate_label in COORDINATES_LABEL: @@ -309,23 +483,24 @@ def _update_dataset_coordinate_valid_minmax_attributes( if coordinate_alias in dataset.sizes: coord = dataset[coordinate_alias] attrs = coord.attrs + coordinate_attributes = ( + NETCDF_CONVENTION_COORDINATE_ATTRIBUTES.copy() + ) if "time" in coordinate_label: - min_time_dimension = coord.values.min() - max_time_dimension = coord.values.max() - netcdf_unit = coord.encoding["units"] - valid_min = convert_datetime64_to_netcdf_timestamp( - min_time_dimension, netcdf_unit - ) - valid_max = convert_datetime64_to_netcdf_timestamp( - max_time_dimension, netcdf_unit + attrs["standard_name"] = "time" + attrs["long_name"] = "Time" + attrs["axis"] = "T" + attrs["unit_long"] = ( + coord.encoding["units"].replace("_", " ").title() ) - attrs["valid_min"] = valid_min - attrs["valid_max"] = valid_max - else: - attrs["valid_min"] = coord.values.min() - attrs["valid_max"] = coord.values.max() + coordinate_attributes.remove("units") + elif coordinate_label in ["depth", "elevation"]: + coordinate_attributes.append("positive") + coord.attrs = _filter_attributes(attrs, coordinate_attributes) - coord.attrs = attrs + dataset.attrs = _filter_attributes( + dataset.attrs, NETCDF_CONVENTION_DATASET_ATTRIBUTES + ) 
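A quick aside on the attribute handling added in this hunk: `_filter_attributes` reduces a dataset's, variable's, or coordinate's attribute dict to a NetCDF-convention whitelist before writing. A minimal standalone sketch of that idea follows — the sample dataset and attribute values are invented for illustration, and only the helper's logic mirrors the diff:

```python
import numpy
import xarray

# Whitelist mirroring NETCDF_CONVENTION_COORDINATE_ATTRIBUTES in the diff above.
COORDINATE_ATTRIBUTES = ["standard_name", "long_name", "units", "unit_long", "axis"]


def filter_attributes(attributes: dict, attributes_to_keep: list[str]) -> dict:
    # Keep only the keys present in both the source dict and the whitelist;
    # anything else (internal chunking hints, stale valid_min/valid_max, ...)
    # is dropped from the output file.
    attributes_that_exist = set(attributes).intersection(attributes_to_keep)
    return {key: attributes[key] for key in attributes_that_exist}


dataset = xarray.Dataset(
    coords={"latitude": ("latitude", numpy.arange(-5.0, 5.0, 1.0))}
)
dataset["latitude"].attrs = {
    "standard_name": "latitude",
    "units": "degrees_north",
    "valid_min": -90.0,  # not in the coordinate whitelist: removed
    "_ChunkSizes": 512,  # internal detail: removed
}
dataset["latitude"].attrs = filter_attributes(
    dataset["latitude"].attrs, COORDINATE_ATTRIBUTES
)
print(dataset["latitude"].attrs)  # only standard_name and units survive
```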
return dataset @@ -336,22 +511,31 @@ def subset( geographical_parameters: GeographicalParameters, temporal_parameters: TemporalParameters, depth_parameters: DepthParameters, + coordinates_selection_method: CoordinatesSelectionMethod, ) -> xarray.Dataset: if variables: dataset = _variables_subset(dataset, variables) dataset = _latitude_subset( - dataset, geographical_parameters.latitude_parameters + dataset, + geographical_parameters.latitude_parameters, + coordinates_selection_method, ) dataset = _longitude_subset( - dataset, geographical_parameters.longitude_parameters + dataset, + geographical_parameters.longitude_parameters, + coordinates_selection_method, ) - dataset = _temporal_subset(dataset, temporal_parameters) + dataset = _temporal_subset( + dataset, temporal_parameters, coordinates_selection_method + ) - dataset = _depth_subset(dataset, depth_parameters) + dataset = _depth_subset( + dataset, depth_parameters, coordinates_selection_method + ) - dataset = _update_dataset_coordinate_valid_minmax_attributes(dataset) + dataset = _update_dataset_coordinate_attributes(dataset) return dataset @@ -361,11 +545,11 @@ def longitude_modulus(longitude: float) -> float: Returns the equivalent longitude in [-180, 180[ """ # We are using Decimal to avoid issue with rounding - modulus = float(Decimal(str(longitude + 180)) % 360) + modulus = (Decimal(str(longitude)) + 180) % 360 # Modulus with python return a negative value if the denominator is negative # To counteract that, we add 360 if the result is < 0 modulus = modulus if modulus >= 0 else modulus + 360 - return modulus - 180 + return float(modulus - 180) def longitude_modulus_upper_bound(longitude: float) -> float: @@ -383,9 +567,9 @@ def check_dataset_subset_bounds( password: str, dataset_url: str, service_type: CopernicusMarineDatasetServiceType, - dataset_subset: DatasetTimeAndGeographicalSubset, - subset_method: SubsetMethod, - dataset_valid_date: Optional[Union[str, int]], + dataset_subset: DatasetTimeAndSpaceSubset, + coordinates_selection_method: CoordinatesSelectionMethod, + dataset_valid_date: Optional[Union[str, int, float]], ) -> None: if service_type in [ CopernicusMarineDatasetServiceType.GEOSERIES, @@ -418,7 +602,7 @@ def check_dataset_subset_bounds( user_maximum_coordinate_value=user_maximum_coordinate_value, dataset_minimum_coordinate_value=latitudes.min(), dataset_maximum_coordinate_value=latitudes.max(), - is_strict=subset_method == "strict", + is_strict=coordinates_selection_method == "strict-inside", ) for coordinate_label in COORDINATES_LABEL["longitude"]: if coordinate_label in dataset.sizes: @@ -439,7 +623,7 @@ def check_dataset_subset_bounds( ), dataset_minimum_coordinate_value=longitudes.min(), dataset_maximum_coordinate_value=longitudes.max(), - is_strict=subset_method == "strict", + is_strict=coordinates_selection_method == "strict-inside", ) for coordinate_label in COORDINATES_LABEL["time"]: if coordinate_label in dataset.sizes: @@ -448,8 +632,12 @@ def check_dataset_subset_bounds( times_min = dataset_valid_date else: times_min = times.min() - dataset_minimum_coordinate_value = date_to_datetime(times_min) - dataset_maximum_coordinate_value = date_to_datetime(times.max()) + dataset_minimum_coordinate_value = ( + timestamp_or_datestring_to_datetime(times_min) + ) + dataset_maximum_coordinate_value = ( + timestamp_or_datestring_to_datetime(times.max()) + ) user_minimum_coordinate_value = ( dataset_subset.start_datetime if dataset_subset.start_datetime is not None @@ -466,30 +654,42 @@ def check_dataset_subset_bounds( 
user_maximum_coordinate_value=user_maximum_coordinate_value, dataset_minimum_coordinate_value=dataset_minimum_coordinate_value, dataset_maximum_coordinate_value=dataset_maximum_coordinate_value, - is_strict=subset_method == "strict", + is_strict=coordinates_selection_method == "strict-inside", + ) + for coordinate_label in COORDINATES_LABEL["depth"]: + if coordinate_label in dataset.sizes: + depths = -1 * dataset_coordinates[coordinate_label].values + _check_coordinate_overlap( + dimension="depth", + user_minimum_coordinate_value=( + dataset_subset.minimum_depth + if dataset_subset.minimum_depth is not None + else depths.min() + ), + user_maximum_coordinate_value=( + dataset_subset.maximum_depth + if dataset_subset.maximum_depth is not None + else depths.max() + ), + dataset_minimum_coordinate_value=depths.min(), + dataset_maximum_coordinate_value=depths.max(), + is_strict=coordinates_selection_method == "strict-inside", ) - - -def date_to_datetime(date: Union[str, int]) -> datetime: - if isinstance(date, int): - return Timestamp(date * 1e6).to_pydatetime() - else: - return Timestamp(date).to_pydatetime().replace(tzinfo=None) @typing.no_type_check def _check_coordinate_overlap( dimension: str, - user_minimum_coordinate_value: Union[float, datetime], - user_maximum_coordinate_value: Union[float, datetime], - dataset_minimum_coordinate_value: Union[float, datetime], - dataset_maximum_coordinate_value: Union[float, datetime], + user_minimum_coordinate_value: Union[float, DateTime], + user_maximum_coordinate_value: Union[float, DateTime], + dataset_minimum_coordinate_value: Union[float, DateTime], + dataset_maximum_coordinate_value: Union[float, DateTime], is_strict: bool, ) -> None: message = ( - f"Some or all of your subset selection " + f"Some of your subset selection " f"[{user_minimum_coordinate_value}, {user_maximum_coordinate_value}] " - f"for the {dimension} dimension exceed the dataset coordinates " + f"for the {dimension} dimension exceed the dataset coordinates " f"[{dataset_minimum_coordinate_value}, " f"{dataset_maximum_coordinate_value}]" ) diff --git a/copernicusmarine/download_functions/utils.py b/copernicusmarine/download_functions/utils.py index 7a23100f..986f6cb1 100644 --- a/copernicusmarine/download_functions/utils.py +++ b/copernicusmarine/download_functions/utils.py @@ -1,18 +1,29 @@ +import bisect import logging -from datetime import datetime +import math from pathlib import Path -from typing import Optional +from typing import Any, Optional import xarray -from pandas import Timestamp +from pendulum import DateTime +from copernicusmarine.catalogue_parser.models import ( + CopernicusMarineCoordinate, + CopernicusMarineService, +) from copernicusmarine.core_functions.models import ( DEFAULT_FILE_EXTENSIONS, + DatasetCoordinatesExtent, FileFormat, + GeographicalExtent, + TimeExtent, +) +from copernicusmarine.core_functions.utils import ( + timestamp_or_datestring_to_datetime, ) from copernicusmarine.download_functions.subset_xarray import COORDINATES_LABEL -logger = logging.getLogger("copernicus_marine_root_logger") +logger = logging.getLogger("copernicusmarine") def get_file_extension(file_format: FileFormat) -> str: @@ -63,15 +74,14 @@ def _build_filename_from_dataset( min_time_coordinate = _get_min_coordinate(dataset, "time") max_time_coordinate = _get_max_coordinate(dataset, "time") - datetimes = _format_datetimes( ( - Timestamp(min_time_coordinate).to_pydatetime() + timestamp_or_datestring_to_datetime(min_time_coordinate) if min_time_coordinate is not None else None 
), ( - Timestamp(max_time_coordinate).to_pydatetime() + timestamp_or_datestring_to_datetime(max_time_coordinate) if max_time_coordinate is not None else None ), @@ -88,14 +98,14 @@ def _build_filename_from_dataset( return filename + get_file_extension(file_format) -def _get_min_coordinate(dataset: xarray.Dataset, coordinate: str): +def _get_min_coordinate(dataset: xarray.Dataset, coordinate: str) -> Any: for coord_label in COORDINATES_LABEL[coordinate]: if coord_label in dataset.sizes: return min(dataset[coord_label].values) return None -def _get_max_coordinate(dataset: xarray.Dataset, coordinate: str): +def _get_max_coordinate(dataset: xarray.Dataset, coordinate: str) -> Any: for coord_label in COORDINATES_LABEL[coordinate]: if coord_label in dataset.sizes: return max(dataset[coord_label].values) @@ -154,29 +164,214 @@ def _format_depths( def _format_datetimes( - minimum_datetime: Optional[datetime], maximum_datetime: Optional[datetime] + minimum_datetime: Optional[DateTime], maximum_datetime: Optional[DateTime] ) -> str: if minimum_datetime is None or maximum_datetime is None: return "" else: if minimum_datetime == maximum_datetime: - formatted_datetime = f"{minimum_datetime.strftime('%Y-%m-%d')}" + formatted_datetime = f"{minimum_datetime.format('YYYY-MM-DD')}" else: formatted_datetime = ( - f"{minimum_datetime.strftime('%Y-%m-%d')}-" - f"{maximum_datetime.strftime('%Y-%m-%d')}" + f"{minimum_datetime.format('YYYY-MM-DD')}-" + f"{maximum_datetime.format('YYYY-MM-DD')}" ) return formatted_datetime -def get_formatted_dataset_size_estimation(dataset: xarray.Dataset) -> str: +def get_dataset_coordinates_extent( + dataset: xarray.Dataset, +) -> DatasetCoordinatesExtent: + minimum_time = _get_min_coordinate(dataset, "time") + if minimum_time: + minimum_time = timestamp_or_datestring_to_datetime( + minimum_time + ).to_iso8601_string() + maximum_time = _get_max_coordinate(dataset, "time") + if maximum_time: + maximum_time = timestamp_or_datestring_to_datetime( + maximum_time + ).to_iso8601_string() + coordinates_extent = DatasetCoordinatesExtent( + longitude=GeographicalExtent( + minimum=_get_min_coordinate(dataset, "longitude"), + maximum=_get_max_coordinate(dataset, "longitude"), + ), + latitude=GeographicalExtent( + minimum=_get_min_coordinate(dataset, "latitude"), + maximum=_get_max_coordinate(dataset, "latitude"), + ), + time=TimeExtent( + minimum=minimum_time, + maximum=maximum_time, + ), + ) + if "depth" in dataset.sizes: + coordinates_extent.depth = GeographicalExtent( + minimum=_get_min_coordinate(dataset, "depth"), + maximum=_get_max_coordinate(dataset, "depth"), + ) + elif "elevation" in dataset.sizes: + coordinates_extent.elevation = GeographicalExtent( + minimum=_get_min_coordinate(dataset, "depth"), + maximum=_get_max_coordinate(dataset, "depth"), + ) + return coordinates_extent + + +def get_message_formatted_dataset_size_estimation( + estimation_size_final_result: Optional[float], + estimation_data_downloaded: Optional[float], +) -> str: + return ( + f"Estimated size of the dataset file is " + f"{estimation_size_final_result:.3f} MB" + f"\nEstimated size of the data that needs " + f"to be downloaded to obtain the result:" + f" {estimation_data_downloaded:.0f} MB" + "\nThis is a very rough estimate that is" + " generally higher than the actual size of the" + " data that needs to be downloaded." 
+ ) + + +def get_approximation_size_final_result( + dataset: xarray.Dataset, +) -> Optional[float]: coordinates_size = 1 - for coordinate in dataset.sizes: - coordinates_size *= dataset[coordinate].size + for coordinate_name in dataset.sizes: + coordinates_size *= dataset[coordinate_name].size estimate_size = ( coordinates_size * len(list(dataset.data_vars)) * dataset[list(dataset.data_vars)[0]].dtype.itemsize / 1048e3 ) - return f"{estimate_size:.3f} MB" + return estimate_size + + +def get_approximation_size_data_downloaded( + dataset: xarray.Dataset, service: CopernicusMarineService +) -> Optional[float]: + temp_dataset = dataset.copy() + if "elevation" in dataset.sizes: + temp_dataset["elevation"] = temp_dataset.elevation * (-1) + temp_dataset = temp_dataset.rename({"elevation": "depth"}) + + download_estimated_size = 0 + for variable_name in temp_dataset.data_vars: + coordinates_size = 1 + variable = [ + var for var in service.variables if var.short_name == variable_name + ][0] + for coordinate_name in temp_dataset.sizes: + if coordinate_name == "elevation": + coordinate_name = "depth" + temp_dataset["elevation"] = temp_dataset.elevation * (-1) + possible_coordinate_id = [ + coordinate_names + for coordinate_names in COORDINATES_LABEL.values() + if coordinate_name in coordinate_names + ][0] + coordinates = [ + coord + for coord in variable.coordinates + if coord.coordinate_id in possible_coordinate_id + ] + if not coordinates: + continue + coordinate = coordinates[0] + chunking_length = coordinate.chunking_length + if not chunking_length: + continue + number_of_chunks_needed = get_number_of_chunks_for_coordinate( + temp_dataset, coordinate, chunking_length + ) + if number_of_chunks_needed is None: + return None + coordinates_size *= number_of_chunks_needed * chunking_length + download_estimated_size += ( + coordinates_size + * temp_dataset[list(temp_dataset.data_vars)[0]].dtype.itemsize + / 1048e3 + ) + + return download_estimated_size + + +def get_number_of_chunks_for_coordinate( + dataset: xarray.Dataset, + coordinate: CopernicusMarineCoordinate, + chunking_length: int, +) -> Optional[int]: + maximum_value = coordinate.maximum_value + minimum_value = coordinate.minimum_value + values = coordinate.values + step_value = coordinate.step + if not values and ( + maximum_value is not None + and minimum_value is not None + and step_value is not None + ): + values = [minimum_value] + for _ in range( + 0, math.ceil((maximum_value - minimum_value) / step_value) + ): + values.append(values[-1] + step_value) + elif not values: + return None + + if coordinate.coordinate_id == "time": + requested_maximum = ( + timestamp_or_datestring_to_datetime( + dataset[coordinate.coordinate_id].values.max() + ).timestamp() + * 1e3 + ) + requested_minimum = ( + timestamp_or_datestring_to_datetime( + dataset[coordinate.coordinate_id].values.min() + ).timestamp() + * 1e3 + ) + else: + requested_maximum = float( + dataset[coordinate.coordinate_id].max().values + ) + requested_minimum = float( + dataset[coordinate.coordinate_id].min().values + ) + + values.sort() + index_left = bisect.bisect_left(values, requested_minimum) + if index_left == len(values) - 1: + chunk_of_requested_minimum = math.floor((index_left) / chunking_length) + elif abs(values[index_left] - requested_minimum) <= abs( + values[index_left + 1] - requested_minimum + ): + chunk_of_requested_minimum = math.floor(index_left / chunking_length) + else: + chunk_of_requested_minimum = math.floor( + (index_left + 1) / chunking_length + ) + + index_left = 
bisect.bisect_left(values, requested_maximum) + if index_left == len(values) - 1 or index_left == len(values): + chunk_of_requested_maximum = math.floor((index_left) / chunking_length) + elif abs(values[index_left] - requested_maximum) <= abs( + values[index_left + 1] - requested_maximum + ): + chunk_of_requested_maximum = math.floor(index_left / chunking_length) + else: + chunk_of_requested_maximum = math.floor( + (index_left + 1) / chunking_length + ) + return chunk_of_requested_maximum - chunk_of_requested_minimum + 1 + + +def early_exit_message(message: str) -> str: + return ( + message + + "\n Couldn't compute the approximation " + + "of the downloaded data size." + ) diff --git a/copernicusmarine/logging_conf.py b/copernicusmarine/logging_conf.py index ac201d7b..9a763c30 100644 --- a/copernicusmarine/logging_conf.py +++ b/copernicusmarine/logging_conf.py @@ -22,12 +22,12 @@ }, }, "loggers": { - "copernicus_marine_blank_logger": { + "copernicusmarine_blank_logger": { "handlers": ["console_blank"], "level": "INFO", "propagate": False, }, - "copernicus_marine_root_logger": { + "copernicusmarine": { "handlers": ["console"], "level": "INFO", }, diff --git a/copernicusmarine/python_interface/describe.py b/copernicusmarine/python_interface/describe.py index efd2a354..e367ff98 100644 --- a/copernicusmarine/python_interface/describe.py +++ b/copernicusmarine/python_interface/describe.py @@ -1,11 +1,11 @@ import json from typing import Any -from copernicusmarine.core_functions.deprecated import deprecated_python_option from copernicusmarine.core_functions.deprecated_options import ( DEPRECATED_OPTIONS, ) from copernicusmarine.core_functions.describe import describe_function +from copernicusmarine.core_functions.utils import deprecated_python_option from copernicusmarine.python_interface.exception_handler import ( log_exception_and_exit, ) @@ -20,29 +20,37 @@ def describe( include_versions: bool = False, include_all: bool = False, contains: list[str] = [], - overwrite_metadata_cache: bool = False, - no_metadata_cache: bool = False, + max_concurrent_requests: int = 15, disable_progress_bar: bool = False, staging: bool = False, ) -> dict[str, Any]: """ - Retrieve metadata information from the Copernicus Marine catalogue. + Retrieve and parse the metadata information from the Copernicus Marine catalogue. - This function fetches metadata information from the Copernicus Marine catalogue - based on specified parameters and options. + Parameters + ---------- + include_description : bool, optional + Include product description in output. + include_datasets : bool, optional + Include product dataset details in output. + include_keywords : bool, optional + Include product keyword details in output. + include_versions : bool, optional + Include dataset versions in output. By default, shows only the default version. + include_all : bool, optional + Include all the possible data in output: description, datasets, keywords, and versions. + contains : list[str], optional + Filter catalogue output. Returns products with attributes matching a string token. + max_concurrent_requests : int, optional + Maximum number of concurrent requests (>=1). Default 15. The command uses a thread pool executor to manage concurrent requests. + disable_progress_bar : bool, optional + Flag to hide progress bar. - Args: - include_description (bool, optional): Whether to include description for each product. Defaults to False. - include_datasets (bool, optional): Whether to include dataset information. Defaults to False. 
- include_keywords (bool, optional): Whether to include keywords for each product. Defaults to False. - include_versions (bool, optional): Whether to include all versions of each dataset. Defaults to False. - include_all (bool, optional): Whether to include all metadata information. Defaults to False. - contains (list[str], optional): List of strings to filter items containing these values. Defaults to []. - overwrite_metadata_cache (bool, optional): Whether to overwrite the metadata cache. Defaults to False. - no_metadata_cache (bool, optional): Whether to skip using the metadata cache. Defaults to False. + Returns + ------- + dict[str, Any] + A dictionary containing the retrieved metadata information. - Returns: - dict[str, Any]: A dictionary containing the retrieved metadata information. """ # noqa if not isinstance(contains, list): @@ -60,8 +68,7 @@ def describe( include_keywords, include_versions, contains, - overwrite_metadata_cache, - no_metadata_cache, + max_concurrent_requests, disable_progress_bar, staging=staging, ) diff --git a/copernicusmarine/python_interface/get.py b/copernicusmarine/python_interface/get.py index 93c9d28a..0dc67109 100644 --- a/copernicusmarine/python_interface/get.py +++ b/copernicusmarine/python_interface/get.py @@ -1,15 +1,12 @@ import pathlib -from typing import List, Optional, Union +from typing import Optional, Union -from copernicusmarine.core_functions.deprecated import ( - deprecated_python_option, - log_deprecated_message, - raise_both_old_and_new_value_error, -) from copernicusmarine.core_functions.deprecated_options import ( DEPRECATED_OPTIONS, ) from copernicusmarine.core_functions.get import get_function +from copernicusmarine.core_functions.models import ResponseGet +from copernicusmarine.core_functions.utils import deprecated_python_option from copernicusmarine.python_interface.exception_handler import ( log_exception_and_exit, ) @@ -18,7 +15,6 @@ @deprecated_python_option(**DEPRECATED_OPTIONS.dict_old_names_to_new_names) @log_exception_and_exit def get( - dataset_url: Optional[str] = None, dataset_id: Optional[str] = None, dataset_version: Optional[str] = None, dataset_part: Optional[str] = None, @@ -31,50 +27,75 @@ def get( force_download: bool = False, overwrite_output_data: bool = False, request_file: Optional[Union[pathlib.Path, str]] = None, - service: Optional[str] = None, - overwrite_metadata_cache: bool = False, - no_metadata_cache: bool = False, filter: Optional[str] = None, regex: Optional[str] = None, file_list: Optional[Union[pathlib.Path, str]] = None, create_file_list: Optional[str] = None, - download_file_list: bool = False, index_parts: bool = False, sync: bool = False, sync_delete: bool = False, + dry_run: bool = False, + max_concurrent_requests: int = 15, disable_progress_bar: bool = False, staging: bool = False, -) -> List[pathlib.Path]: +) -> ResponseGet: """ - Fetches data from the Copernicus Marine server based on the provided parameters. + Download originally produced data files. + + The datasetID is required (either as an argument or in a request file) and can be found via the ``describe`` command. + + Parameters + ---------- + dataset_id : str, optional + The datasetID, required either as an argument or in the request_file option. + dataset_version : str, optional + Force the selection of a specific dataset version. + dataset_part : str, optional + Force the selection of a specific dataset part. + username : str, optional + The username for authentication. + password : str, optional + The password for authentication. 
+ output_directory : Union[pathlib.Path, str], optional + The destination folder for the downloaded files. Default is the current directory. + credentials_file : Union[pathlib.Path, str], optional + Path to a credentials file if not in its default directory (``$HOME/.copernicusmarine``). Accepts .copernicusmarine-credentials / .netrc or _netrc / motuclient-python.ini files. + force_download : bool, optional + Flag to skip confirmation before download. + overwrite_output_data : bool, optional + If specified and if the file already exists at the destination, it will be overwritten instead of creating a new one with a unique index. + request_file : Union[pathlib.Path, str], optional + Option to pass a file containing the arguments. For more information please refer to the documentation or use option ``--create-template`` from the command line interface for an example template. + no_directories : bool, optional + If True, downloaded files will not be organized into directories. + show_outputnames : bool, optional + Option to display the names of the output files before download. + filter : str, optional + A pattern that must match the absolute paths of the files to download. + regex : str, optional + The regular expression that must match the absolute paths of the files to download. + file_list : Union[pathlib.Path, str], optional + Path to a '.txt' file containing a list of file paths, line by line, that will be downloaded directly. These files must be from the same dataset as the one specified with the datasetID option. If no files can be found, the Toolbox will list all files on the remote server and attempt to find a match. + create_file_list : str, optional + Option to only create a file containing the names of the targeted files instead of downloading them. It writes the file to the specified output directory (defaults to the current directory). The file name specified should end with '.txt' or '.csv'. If specified, no other action will be performed. + index_parts : bool, optional + Option to get the index files of an INSITU dataset. + sync : bool, optional + Option to synchronize the local directory with the remote directory. See the documentation for more details. + sync_delete : bool, optional + Option to delete local files that are not present on the remote server while applying sync. + dry_run : bool, optional + If True, runs the query without downloading data. + max_concurrent_requests : int, optional + Maximum number of concurrent requests. Default 15. The command uses a thread pool executor to manage concurrent requests. If set to 0, no parallel executions are used. + disable_progress_bar : bool, optional + Flag to hide progress bar. - Args: - dataset_url (str, optional): The URL of the dataset to retrieve. - dataset_id (str, optional): The unique identifier of the dataset. - dataset_version (str, optional): Force the use of a specific dataset version. - dataset_part (str, optional): Force the use of a specific dataset part. - username (str, optional): The username for authentication. - password (str, optional): The password for authentication. - output_directory (Union[pathlib.Path, str], optional): The directory where downloaded files will be saved. - credentials_file (Union[pathlib.Path, str], optional): Path to a file containing authentication credentials. - force_download (bool, optional): Skip confirmation before download. - overwrite_output_data (bool, optional): If True, overwrite existing output files.
- request_file (Union[pathlib.Path, str], optional): Path to a file containing request parameters. For more information please refer to the README. - service (str, optional): Force the use of a specific service. - overwrite_metadata_cache (bool, optional): If True, overwrite the metadata cache. - no_metadata_cache (bool, optional): If True, do not use the metadata cache. - no_directories (bool, optional): If True, downloaded files will not be organized into directories. - show_outputnames (bool, optional): If True, display the names of the downloaded files. - filter (str, optional): Apply a filter to the downloaded data. - regex (str, optional): Apply a regular expression filter to the downloaded data. - file_list (Union[pathlib.Path, str], optional): Path to a .txt file containing a list of file paths, line by line, that will be downloaded directly. These files must be from the specified dataset using the --dataset-id. If no files can be found, the Toolbox will list all files on the remote server and attempt to find a match. - create_file_list (str, optional): Option to only create a file containing the names of the the targeted files instead of downloading them. It writes the file in the directory specified with the --output-directory option (default to current directory). If specified, no other action will be performed. - index_parts (bool, optional): If True, download index files. Only for INSITU datasets. Temporary option. - sync (bool, optional): If True, synchronize the local directory with the remote directory. - sync_delete (bool, optional): If True, delete local files that are not present on the remote server while applying sync. + Returns + ------- + ResponseGet + A list of files that were downloaded and some metadata. - Returns: - List[pathlib.Path]: A list of paths to the downloaded files. 
""" # noqa output_directory = ( pathlib.Path(output_directory) if output_directory else None @@ -84,14 +105,7 @@ def get( ) file_list = pathlib.Path(file_list) if file_list else None request_file = pathlib.Path(request_file) if request_file else None - if download_file_list and create_file_list: - raise_both_old_and_new_value_error( - "download_file_list", "create_file_list" - ) - elif download_file_list: - log_deprecated_message("download_file_list", "create_file_list") return get_function( - dataset_url=dataset_url, dataset_id=dataset_id, force_dataset_version=dataset_version, force_dataset_part=dataset_part, @@ -104,17 +118,15 @@ def get( force_download=force_download, overwrite_output_data=overwrite_output_data, request_file=request_file, - force_service=service, - overwrite_metadata_cache=overwrite_metadata_cache, - no_metadata_cache=no_metadata_cache, filter=filter, regex=regex, file_list_path=file_list, create_file_list=create_file_list, - download_file_list=download_file_list, index_parts=index_parts, sync=sync, sync_delete=sync_delete, + dry_run=dry_run, + max_concurrent_requests=max_concurrent_requests, disable_progress_bar=disable_progress_bar, staging=staging, ) diff --git a/copernicusmarine/python_interface/load_utils.py b/copernicusmarine/python_interface/load_utils.py index dc742b7a..e9f4df4d 100644 --- a/copernicusmarine/python_interface/load_utils.py +++ b/copernicusmarine/python_interface/load_utils.py @@ -3,50 +3,36 @@ import pandas import xarray -from copernicusmarine.catalogue_parser.catalogue_parser import ( +from copernicusmarine.catalogue_parser.models import ( CopernicusMarineDatasetServiceType, - parse_catalogue, ) from copernicusmarine.catalogue_parser.request_structure import LoadRequest from copernicusmarine.core_functions.credentials_utils import ( get_username_password, ) +from copernicusmarine.core_functions.exceptions import ServiceNotSupported from copernicusmarine.core_functions.services_utils import ( CommandType, RetrievalService, get_retrieval_service, ) -from copernicusmarine.core_functions.utils import ( - ServiceNotSupported, - delete_cache_folder, -) from copernicusmarine.download_functions.subset_xarray import ( check_dataset_subset_bounds, - date_to_datetime, + timestamp_or_datestring_to_datetime, ) def load_data_object_from_load_request( load_request: LoadRequest, - disable_progress_bar: bool, arco_series_load_function: Callable, ) -> Union[xarray.Dataset, pandas.DataFrame]: - if load_request.overwrite_metadata_cache: - delete_cache_folder() - - catalogue = parse_catalogue( - no_metadata_cache=load_request.no_metadata_cache, - disable_progress_bar=disable_progress_bar, - ) retrieval_service: RetrievalService = get_retrieval_service( - catalogue=catalogue, dataset_id=load_request.dataset_id, - dataset_url=load_request.dataset_url, force_dataset_version_label=load_request.force_dataset_version, force_dataset_part_label=load_request.force_dataset_part, force_service_type_string=load_request.force_service, command_type=CommandType.LOAD, - dataset_subset=load_request.get_time_and_geographical_subset(), + dataset_subset=load_request.get_time_and_space_subset(), ) username, password = get_username_password( load_request.username, @@ -59,8 +45,8 @@ def load_data_object_from_load_request( password=password, dataset_url=load_request.dataset_url, service_type=retrieval_service.service_type, - dataset_subset=load_request.get_time_and_geographical_subset(), - subset_method=load_request.subset_method, + dataset_subset=load_request.get_time_and_space_subset(), 
+ coordinates_selection_method=load_request.coordinates_selection_method, dataset_valid_date=retrieval_service.dataset_valid_start_date, ) if retrieval_service.service_type in [ @@ -70,7 +56,7 @@ def load_data_object_from_load_request( CopernicusMarineDatasetServiceType.STATIC_ARCO, ]: if retrieval_service.dataset_valid_start_date: - parsed_start_datetime = date_to_datetime( + parsed_start_datetime = timestamp_or_datestring_to_datetime( retrieval_service.dataset_valid_start_date ) if ( @@ -89,6 +75,7 @@ def load_data_object_from_load_request( geographical_parameters=load_request.geographical_parameters, temporal_parameters=load_request.temporal_parameters, depth_parameters=load_request.depth_parameters, + coordinates_selection_method=load_request.coordinates_selection_method, chunks=None, ) else: diff --git a/copernicusmarine/python_interface/login.py b/copernicusmarine/python_interface/login.py index a193ebfc..23007141 100644 --- a/copernicusmarine/python_interface/login.py +++ b/copernicusmarine/python_interface/login.py @@ -1,8 +1,10 @@ import pathlib from typing import Optional +from copernicusmarine.core_functions.credentials_utils import ( + DEFAULT_CLIENT_BASE_DIRECTORY, +) from copernicusmarine.core_functions.login import login_function -from copernicusmarine.core_functions.utils import DEFAULT_CLIENT_BASE_DIRECTORY def login( @@ -10,22 +12,37 @@ def login( password: Optional[str] = None, configuration_file_directory: pathlib.Path = DEFAULT_CLIENT_BASE_DIRECTORY, overwrite_configuration_file: bool = False, - skip_if_user_logged_in: bool = False, + check_credentials_valid: bool = False, ) -> bool: """ - Create a configuration file with your Copernicus Marine credentials. + Create a configuration file with your Copernicus Marine credentials under the ``$HOME/.copernicusmarine`` directory (the file can be overwritten using the ``overwrite_configuration_file`` option). - Args: - username (str, optional): If not set, search for environment variable COPERNICUSMARINE_SERVICE_USERNAME, or else ask for user input. - password (str, optional): If not set, search for environment variable COPERNICUSMARINE_SERVICE_PASSWORD, or else ask for user input. - configuration_file_directory (Union[pathlib.Path, str]): Path to the directory where the configuration file is stored. - overwrite_configuration_file (bool): Flag to skip confirmation before overwriting configuration file. - skip_if_user_logged_in (bool): Flag to skip the logging process if the user is already logged in. + Parameters + ---------- + username : str, optional + The username for authentication. + password : str, optional + The password for authentication. + configuration_file_directory : Union[pathlib.Path, str] + Path to the directory where the configuration file is stored. + overwrite_configuration_file : bool + Flag to skip confirmation before overwriting configuration file. + check_credentials_valid : bool + Flag to check if the credentials are valid. No other action will be performed. The validity will be checked in this order: + 1. Check if the credentials are valid with the provided username and password. + 2. Check if the credentials are valid in the environment variables. + 3. Check if the credentials are valid in the configuration file. + As soon as any credentials are found (valid or not), the function returns immediately. + + Returns + ------- + bool + True if the login was successfully completed, False otherwise.
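As a rough illustration of the new ``check_credentials_valid`` flow described above (the username and password below are placeholders, not real credentials):

```python
import copernicusmarine

# First, probe the credentials already available (environment variables,
# then configuration file) without writing anything to disk.
if not copernicusmarine.login(check_credentials_valid=True):
    # No valid credentials found: create the configuration file under
    # $HOME/.copernicusmarine using placeholder credentials.
    copernicusmarine.login(username="johndoe", password="example-password")
```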
""" # noqa return login_function( username=username, password=password, configuration_file_directory=configuration_file_directory, overwrite_configuration_file=overwrite_configuration_file, - skip_if_user_logged_in=skip_if_user_logged_in, + check_credentials_valid=check_credentials_valid, ) diff --git a/copernicusmarine/python_interface/open_dataset.py b/copernicusmarine/python_interface/open_dataset.py index 96d44503..b15fb669 100644 --- a/copernicusmarine/python_interface/open_dataset.py +++ b/copernicusmarine/python_interface/open_dataset.py @@ -5,17 +5,16 @@ import xarray from copernicusmarine.catalogue_parser.request_structure import LoadRequest -from copernicusmarine.core_functions.deprecated import ( - deprecated_python_option, - log_deprecated_message, -) from copernicusmarine.core_functions.deprecated_options import ( DEPRECATED_OPTIONS, ) from copernicusmarine.core_functions.models import ( - DEFAULT_SUBSET_METHOD, - SubsetMethod, + DEFAULT_COORDINATES_SELECTION_METHOD, + DEFAULT_VERTICAL_DIMENSION_OUTPUT, + CoordinatesSelectionMethod, + VerticalDimensionOutput, ) +from copernicusmarine.core_functions.utils import deprecated_python_option from copernicusmarine.download_functions.download_arco_series import ( open_dataset_from_arco_series, ) @@ -35,20 +34,10 @@ from copernicusmarine.python_interface.utils import homogenize_datetime -@log_exception_and_exit -def load_xarray_dataset(*args, **kwargs): - """ - Deprecated function, use 'open_dataset' instead. - """ - log_deprecated_message("load_xarray_dataset", "open_dataset") - return open_dataset(*args, **kwargs) - - @deprecated_python_option(**DEPRECATED_OPTIONS.dict_old_names_to_new_names) @log_exception_and_exit def open_dataset( - dataset_url: Optional[str] = None, - dataset_id: Optional[str] = None, + dataset_id: str, dataset_version: Optional[str] = None, dataset_part: Optional[str] = None, username: Optional[str] = None, @@ -60,46 +49,66 @@ def open_dataset( maximum_latitude: Optional[float] = None, minimum_depth: Optional[float] = None, maximum_depth: Optional[float] = None, - vertical_dimension_as_originally_produced: bool = True, + vertical_dimension_output: VerticalDimensionOutput = DEFAULT_VERTICAL_DIMENSION_OUTPUT, # noqa start_datetime: Optional[Union[datetime, str]] = None, end_datetime: Optional[Union[datetime, str]] = None, - subset_method: SubsetMethod = DEFAULT_SUBSET_METHOD, + coordinates_selection_method: CoordinatesSelectionMethod = ( + DEFAULT_COORDINATES_SELECTION_METHOD + ), service: Optional[str] = None, credentials_file: Optional[Union[pathlib.Path, str]] = None, - overwrite_metadata_cache: bool = False, - no_metadata_cache: bool = False, - disable_progress_bar: bool = False, ) -> xarray.Dataset: """ - Load an xarray dataset using "lazy-loading" mode from a Copernicus Marine data source using either the ARCO series protocol. + Load an xarray dataset using 'lazy-loading' mode from a Copernicus Marine data source. + This means that data is only loaded into memory when a computation is called, optimizing RAM usage by avoiding immediate loading. - It supports various parameters for customization, such as specifying ge ographical bounds, temporal range, depth range, and more. + It supports various parameters for customization, such as specifying geographical bounds, temporal range, depth range, and more. + + + Parameters + ---------- + dataset_id : str + The datasetID, required. + dataset_version : str, optional + Force the selection of a specific dataset version. 
+ dataset_part : str, optional + Force the selection of a specific dataset part. + username : str, optional + The username for authentication. + password : str, optional + The password for authentication. + variables : List[str], optional + List of variable names to extract. + minimum_longitude : float, optional + Minimum longitude for the subset. The value will be transposed to the interval [-180; 360[. + maximum_longitude : float, optional + Maximum longitude for the subset. The value will be transposed to the interval [-180; 360[. + minimum_latitude : float, optional + Minimum latitude for the subset. Requires a float from -90 degrees to +90. + maximum_latitude : float, optional + Maximum latitude for the subset. Requires a float from -90 degrees to +90. + minimum_depth : float, optional + Minimum depth for the subset. Requires a positive float (or 0). + maximum_depth : float, optional + Maximum depth for the subset. Requires a positive float (or 0). + coordinates_selection_method : str, optional + If ``inside``, the selection retrieved will be inside the requested range. If ``strict-inside``, the selection retrieved will be inside the requested range, and an error will be raised if the values don't exist. If ``nearest``, the extremes closest to the requested values will be returned. If ``outside``, the extremes will be taken to contain all the requested interval. The methods ``inside``, ``nearest`` and ``outside`` will display a warning if the request is out of bounds. + vertical_dimension_output : str, optional + Consolidate the vertical dimension (the z-axis) as requested: depth with descending positive values, elevation with ascending positive values. Default is depth. + start_datetime : Union[datetime, str], optional + The start datetime of the temporal subset. Supports common formats parsed by pendulum (https://pendulum.eustace.io/docs/#parsing). + end_datetime : Union[datetime, str], optional + The end datetime of the temporal subset. Supports common formats parsed by pendulum (https://pendulum.eustace.io/docs/#parsing). + service : str, optional + Force download through one of the available services using the service name among ['arco-geo-series', 'arco-time-series', 'omi-arco', 'static-arco'] or its short name among ['geoseries', 'timeseries', 'omi-arco', 'static-arco']. + credentials_file : Union[pathlib.Path, str], optional + Path to a credentials file if not in its default directory (``$HOME/.copernicusmarine``). Accepts .copernicusmarine-credentials / .netrc or _netrc / motuclient-python.ini files. - Args: - dataset_url (str, optional): The URL of the dataset. Either `dataset_url` or `dataset_id` should be provided. - dataset_id (str, optional): The ID of the dataset. Either `dataset_url` or `dataset_id` should be provided. - dataset_version (str, optional): Force the use of a specific dataset version. - dataset_part (str, optional): Force the use of a specific dataset part. - username (str, optional): Username for authentication, if required. - password (str, optional): Password for authentication, if required. - variables (List[str], optional): List of variable names to be loaded from the dataset. - minimum_longitude (float, optional): The minimum longitude for subsetting the data. - maximum_longitude (float, optional): The maximum longitude for subsetting the data. - minimum_latitude (float, optional): The minimum latitude for subsetting the data. - maximum_latitude (float, optional): The maximum latitude for subsetting the data.
- minimum_depth (float, optional): The minimum depth for subsetting the data. - maximum_depth (float, optional): The maximum depth for subsetting the data. - subset_method (str, optional): The subset method ('nearest' or 'strict') when requesting the dataset. If strict, you can only request dimension strictly inside the dataset. - vertical_dimension_as_originally_produced (bool, optional): If True, use the vertical dimension as originally produced. - start_datetime (datetime, optional): The start datetime for temporal subsetting. - end_datetime (datetime, optional): The end datetime for temporal subsetting. - service (str, optional): Force the use of a specific service (ARCO geo series or time series). - credentials_file (Union[pathlib.Path, str], optional): Path to a file containing authentication credentials. - overwrite_metadata_cache (bool, optional): If True, overwrite the metadata cache. - no_metadata_cache (bool, optional): If True, do not use the metadata cache. - Returns: - xarray.Dataset: The loaded xarray dataset. + Returns + ------- + xarray.Dataset + The loaded xarray dataset. """ # noqa start_datetime = homogenize_datetime(start_datetime) end_datetime = homogenize_datetime(end_datetime) @@ -107,7 +116,6 @@ def open_dataset( pathlib.Path(credentials_file) if credentials_file else None ) load_request = LoadRequest( - dataset_url=dataset_url, dataset_id=dataset_id, force_dataset_version=dataset_version, force_dataset_part=dataset_part, @@ -131,17 +139,14 @@ def open_dataset( depth_parameters=DepthParameters( minimum_depth=minimum_depth, maximum_depth=maximum_depth, - vertical_dimension_as_originally_produced=vertical_dimension_as_originally_produced, # noqa + vertical_dimension_output=vertical_dimension_output, ), - subset_method=subset_method, + coordinates_selection_method=coordinates_selection_method, force_service=service, credentials_file=credentials_file, - overwrite_metadata_cache=overwrite_metadata_cache, - no_metadata_cache=no_metadata_cache, ) dataset = load_data_object_from_load_request( load_request, - disable_progress_bar, open_dataset_from_arco_series, ) return dataset diff --git a/copernicusmarine/python_interface/read_dataframe.py b/copernicusmarine/python_interface/read_dataframe.py index acd332ab..322618bb 100644 --- a/copernicusmarine/python_interface/read_dataframe.py +++ b/copernicusmarine/python_interface/read_dataframe.py @@ -5,17 +5,16 @@ import pandas from copernicusmarine.catalogue_parser.request_structure import LoadRequest -from copernicusmarine.core_functions.deprecated import ( - deprecated_python_option, - log_deprecated_message, -) from copernicusmarine.core_functions.deprecated_options import ( DEPRECATED_OPTIONS, ) from copernicusmarine.core_functions.models import ( - DEFAULT_SUBSET_METHOD, - SubsetMethod, + DEFAULT_COORDINATES_SELECTION_METHOD, + DEFAULT_VERTICAL_DIMENSION_OUTPUT, + CoordinatesSelectionMethod, + VerticalDimensionOutput, ) +from copernicusmarine.core_functions.utils import deprecated_python_option from copernicusmarine.download_functions.download_arco_series import ( read_dataframe_from_arco_series, ) @@ -35,20 +34,10 @@ from copernicusmarine.python_interface.utils import homogenize_datetime -@log_exception_and_exit -def load_pandas_dataframe(*args, **kwargs): - """ - Deprecated function, use 'read_dataframe' instead. 
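A short, hypothetical sketch of the lazy-loading behaviour documented above for ``open_dataset``. The dataset ID reuses the one from ``doc/response-types.rst``; the variable name and depth values are illustrative assumptions:

```python
import copernicusmarine

# Nothing is read into memory yet: the returned xarray.Dataset is lazy.
ds = copernicusmarine.open_dataset(
    dataset_id="cmems_mod_glo_phy-thetao_anfc_0.083deg_P1M-m",
    variables=["thetao"],  # illustrative variable name
    minimum_depth=0,
    maximum_depth=10,
)
print(ds)               # inspect coordinates and variables cheaply
global_mean = ds.mean()  # computations like this trigger the actual remote reads
```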
- """ - log_deprecated_message("load_pandas_dataframe", "read_dataframe") - return read_dataframe(*args, **kwargs) - - @deprecated_python_option(**DEPRECATED_OPTIONS.dict_old_names_to_new_names) @log_exception_and_exit def read_dataframe( - dataset_url: Optional[str] = None, - dataset_id: Optional[str] = None, + dataset_id: str, dataset_version: Optional[str] = None, dataset_part: Optional[str] = None, username: Optional[str] = None, @@ -60,53 +49,73 @@ def read_dataframe( maximum_latitude: Optional[float] = None, minimum_depth: Optional[float] = None, maximum_depth: Optional[float] = None, - vertical_dimension_as_originally_produced: bool = True, + vertical_dimension_output: VerticalDimensionOutput = DEFAULT_VERTICAL_DIMENSION_OUTPUT, # noqa start_datetime: Optional[Union[datetime, str]] = None, end_datetime: Optional[Union[datetime, str]] = None, - subset_method: SubsetMethod = DEFAULT_SUBSET_METHOD, - force_service: Optional[str] = None, + coordinates_selection_method: CoordinatesSelectionMethod = ( + DEFAULT_COORDINATES_SELECTION_METHOD + ), + service: Optional[str] = None, credentials_file: Optional[Union[pathlib.Path, str]] = None, - overwrite_metadata_cache: bool = False, - no_metadata_cache: bool = False, - disable_progress_bar: bool = False, ) -> pandas.DataFrame: """ Immediately loads a Pandas DataFrame into memory from a specified dataset. - Unlike “lazy-loading”, the data is loaded as soon as this function is executed, which may be preferable when rapid access to the entire data set is required, but may require careful memory management. - Args: - dataset_url (str, optional): The URL of the dataset. - dataset_id (str, optional): The identifier of the dataset. - dataset_version (str, optional): Force a specific dataset version. - dataset_part (str, optional): Force a specific dataset part. - username (str, optional): Username for authentication. - password (str, optional): Password for authentication. - variables (List[str], optional): List of variable names to load. - minimum_longitude (float, optional): Minimum longitude for spatial subset. - maximum_longitude (float, optional): Maximum longitude for spatial subset. - minimum_latitude (float, optional): Minimum latitude for spatial subset. - maximum_latitude (float, optional): Maximum latitude for spatial subset. - minimum_depth (float, optional): Minimum depth for vertical subset. - maximum_depth (float, optional): Maximum depth for vertical subset. - vertical_dimension_as_originally_produced (bool, optional): If True, use the vertical dimension as originally produced. - start_datetime (datetime, optional): Start datetime for temporal subset. - end_datetime (datetime, optional): End datetime for temporal subset. - subset_method (str, optional): The subset method ('nearest' or 'strict') when requesting the dataset. If strict, you can only request dimension strictly inside the dataset. - force_service (str, optional): Force a specific service for data download. - credentials_file (Union[pathlib.Path, str], optional): Path to a credentials file for authentication. - overwrite_metadata_cache (bool, optional): If True, overwrite the metadata cache. - no_metadata_cache (bool, optional): If True, do not use metadata caching. + Unlike 'lazy-loading,' the data is loaded as soon as this function is executed, which may be preferable when rapid access to the entire dataset is required, but may require careful memory management. + + + Parameters + ---------- + dataset_id : str + The datasetID, required. 
+ dataset_version : str, optional + Force the selection of a specific dataset version. + dataset_part : str, optional + Force the selection of a specific dataset part. + username : str, optional + The username for authentication. + password : str, optional + The password for authentication. + variables : List[str], optional + List of variable names to extract. + minimum_longitude : float, optional + Minimum longitude for the subset. The value will be transposed to the interval [-180; 360[. + maximum_longitude : float, optional + Maximum longitude for the subset. The value will be transposed to the interval [-180; 360[. + minimum_latitude : float, optional + Minimum latitude for the subset. Requires a float from -90 degrees to +90. + maximum_latitude : float, optional + Maximum latitude for the subset. Requires a float from -90 degrees to +90. + minimum_depth : float, optional + Minimum depth for the subset. Requires a positive float (or 0). + maximum_depth : float, optional + Maximum depth for the subset. Requires a positive float (or 0). + vertical_dimension_output : str, optional + Consolidate the vertical dimension (the z-axis) as requested: depth with descending positive values, elevation with ascending positive values. Default is depth. + start_datetime : Union[datetime, str], optional + The start datetime of the temporal subset. Supports common formats parsed by pendulum (https://pendulum.eustace.io/docs/#parsing). + end_datetime : Union[datetime, str], optional + The end datetime of the temporal subset. Supports common formats parsed by pendulum (https://pendulum.eustace.io/docs/#parsing). + coordinates_selection_method : str, optional + If ``inside``, the selection retrieved will be inside the requested range. If ``strict-inside``, the selection retrieved will be inside the requested range, and an error will be raised if the values don't exist. If ``nearest``, the extremes closest to the requested values will be returned. If ``outside``, the extremes will be taken to contain all the requested interval. The methods ``inside``, ``nearest`` and ``outside`` will display a warning if the request is out of bounds. + service : str, optional + Force download through one of the available services using the service name among ['arco-geo-series', 'arco-time-series', 'omi-arco', 'static-arco'] or its short name among ['geoseries', 'timeseries', 'omi-arco', 'static-arco']. + credentials_file : Union[pathlib.Path, str], optional + Path to a credentials file if not in its default directory (``$HOME/.copernicusmarine``). Accepts .copernicusmarine-credentials / .netrc or _netrc / motuclient-python.ini files. - Returns: - pandas.DataFrame: A DataFrame containing the loaded Copernicus Marine data. + + Returns + ------- + pandas.DataFrame + A DataFrame containing the loaded Copernicus Marine data.
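By contrast with the lazy ``open_dataset``, here is a sketch of an eager ``read_dataframe`` call using the parameters documented above. The dataset ID is the one used in ``doc/response-types.rst``; the variable name, bounding box, and dates are illustrative assumptions:

```python
import copernicusmarine

# The whole requested subset is loaded into memory immediately,
# so keep the bounding box and time range small.
df = copernicusmarine.read_dataframe(
    dataset_id="cmems_mod_glo_phy-thetao_anfc_0.083deg_P1M-m",
    variables=["thetao"],        # illustrative variable name
    minimum_longitude=-10.0,
    maximum_longitude=0.0,
    minimum_latitude=40.0,
    maximum_latitude=50.0,
    start_datetime="2024-01-01",  # parsed by pendulum
    end_datetime="2024-03-31",
)
print(df.head())
```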
""" # noqa + start_datetime = homogenize_datetime(start_datetime) end_datetime = homogenize_datetime(end_datetime) credentials_file = ( pathlib.Path(credentials_file) if credentials_file else None ) load_request = LoadRequest( - dataset_url=dataset_url, dataset_id=dataset_id, force_dataset_version=dataset_version, force_dataset_part=dataset_part, @@ -127,20 +136,17 @@ def read_dataframe( start_datetime=start_datetime, end_datetime=end_datetime, ), - subset_method=subset_method, + coordinates_selection_method=coordinates_selection_method, depth_parameters=DepthParameters( minimum_depth=minimum_depth, maximum_depth=maximum_depth, - vertical_dimension_as_originally_produced=vertical_dimension_as_originally_produced, # noqa + vertical_dimension_output=vertical_dimension_output, ), - force_service=force_service, + force_service=service, credentials_file=credentials_file, - overwrite_metadata_cache=overwrite_metadata_cache, - no_metadata_cache=no_metadata_cache, ) dataset = load_data_object_from_load_request( load_request, - disable_progress_bar, read_dataframe_from_arco_series, ) return dataset diff --git a/copernicusmarine/python_interface/subset.py b/copernicusmarine/python_interface/subset.py index dcc70411..5f836c20 100644 --- a/copernicusmarine/python_interface/subset.py +++ b/copernicusmarine/python_interface/subset.py @@ -2,17 +2,20 @@ from datetime import datetime from typing import List, Optional, Union -from copernicusmarine.core_functions.deprecated import deprecated_python_option from copernicusmarine.core_functions.deprecated_options import ( DEPRECATED_OPTIONS, ) from copernicusmarine.core_functions.models import ( + DEFAULT_COORDINATES_SELECTION_METHOD, DEFAULT_FILE_FORMAT, - DEFAULT_SUBSET_METHOD, + DEFAULT_VERTICAL_DIMENSION_OUTPUT, + CoordinatesSelectionMethod, FileFormat, - SubsetMethod, + ResponseSubset, + VerticalDimensionOutput, ) from copernicusmarine.core_functions.subset import subset_function +from copernicusmarine.core_functions.utils import deprecated_python_option from copernicusmarine.python_interface.exception_handler import ( log_exception_and_exit, ) @@ -22,7 +25,6 @@ @deprecated_python_option(**DEPRECATED_OPTIONS.dict_old_names_to_new_names) @log_exception_and_exit def subset( - dataset_url: Optional[str] = None, dataset_id: Optional[str] = None, dataset_version: Optional[str] = None, dataset_part: Optional[str] = None, @@ -35,10 +37,12 @@ def subset( maximum_latitude: Optional[float] = None, minimum_depth: Optional[float] = None, maximum_depth: Optional[float] = None, - vertical_dimension_as_originally_produced: bool = True, + vertical_dimension_output: VerticalDimensionOutput = DEFAULT_VERTICAL_DIMENSION_OUTPUT, # noqa start_datetime: Optional[Union[datetime, str]] = None, end_datetime: Optional[Union[datetime, str]] = None, - subset_method: SubsetMethod = DEFAULT_SUBSET_METHOD, + coordinates_selection_method: CoordinatesSelectionMethod = ( + DEFAULT_COORDINATES_SELECTION_METHOD + ), output_filename: Optional[str] = None, file_format: FileFormat = DEFAULT_FILE_FORMAT, service: Optional[str] = None, @@ -48,51 +52,81 @@ def subset( motu_api_request: Optional[str] = None, force_download: bool = False, overwrite_output_data: bool = False, - overwrite_metadata_cache: bool = False, - no_metadata_cache: bool = False, + dry_run: bool = False, disable_progress_bar: bool = False, staging: bool = False, - netcdf_compression_enabled: bool = False, - netcdf_compression_level: Optional[int] = None, + netcdf_compression_level: int = 0, netcdf3_compatible: bool = False, -) 
-> pathlib.Path: +) -> ResponseSubset: """ - Extracts a subset of data from a specified dataset using given parameters. + Extracts a subset of data from a specified dataset using the given parameters. + + The datasetID is required and can be found via the ``describe`` command. + + Parameters + ---------- + dataset_id : str, optional + The datasetID, required either as an argument or in the request_file option. + dataset_version : str, optional + Force the selection of a specific dataset version. + dataset_part : str, optional + Force the selection of a specific dataset part. + username : str, optional + The username for authentication. See also :func:`~copernicusmarine.login` + password : str, optional + The password for authentication. See also :func:`~copernicusmarine.login` + output_directory : Union[pathlib.Path, str], optional + The destination folder for the downloaded files. Default is the current directory. + credentials_file : Union[pathlib.Path, str], optional + Path to a credentials file if not in its default directory (``$HOME/.copernicusmarine``). Accepts .copernicusmarine-credentials / .netrc or _netrc / motuclient-python.ini files. + force_download : bool, optional + Flag to skip confirmation before download. + overwrite_output_data : bool, optional + If specified and the file already exists at the destination, it will be overwritten instead of creating a new one with a unique index. + request_file : Union[pathlib.Path, str], optional + Option to pass a file containing the arguments. For more information please refer to the documentation or use option ``--create-template`` from the command line interface for an example template. + service : str, optional + Force download through one of the available services using the service name among ['arco-geo-series', 'arco-time-series', 'omi-arco', 'static-arco'] or its short name among ['geoseries', 'timeseries', 'omi-arco', 'static-arco']. + variables : List[str], optional + List of variable names to extract. + minimum_longitude : float, optional + Minimum longitude for the subset. The value will be transposed to the interval [-180; 360[. + maximum_longitude : float, optional + Maximum longitude for the subset. The value will be transposed to the interval [-180; 360[. + minimum_latitude : float, optional + Minimum latitude for the subset. Requires a float from -90 degrees to +90. + maximum_latitude : float, optional + Maximum latitude for the subset. Requires a float from -90 degrees to +90. + minimum_depth : float, optional + Minimum depth for the subset. Requires a positive float (or 0). + maximum_depth : float, optional + Maximum depth for the subset. Requires a positive float (or 0). + vertical_dimension_output : str, optional + Consolidate the vertical dimension (the z-axis) as requested: depth with descending positive values, elevation with ascending positive values. Default is depth. + start_datetime : Union[datetime, str], optional + The start datetime of the temporal subset. Supports common formats parsed by pendulum (https://pendulum.eustace.io/docs/#parsing). + end_datetime : Union[datetime, str], optional + The end datetime of the temporal subset. Supports common formats parsed by pendulum (https://pendulum.eustace.io/docs/#parsing). + coordinates_selection_method : str, optional + If ``inside``, the selection retrieved will be inside the requested range. If ``strict-inside``, the selection retrieved will be inside the requested range, and an error will be raised if the values don't exist.
If ``nearest``, the extremes closest to the requested values will be returned. If ``outside``, the extremes will be taken to contain all the requested interval. The methods ``inside``, ``nearest`` and ``outside`` will display a warning if the request is out of bounds. + output_filename : str, optional + Save the downloaded data with the given file name (under the output directory). + file_format : str, optional + Format of the downloaded dataset. Defaults to NetCDF '.nc'. + motu_api_request : str, optional + Option to pass a complete MOTU API request as a string. Caution: the user has to replace double quotes " with single quotes ' in the request. + dry_run : bool, optional + If True, runs the query without downloading data. + netcdf_compression_level : int, optional + Specify a compression level to apply on the NetCDF output file. A value of 0 means no compression, and 9 is the highest level of compression available. + netcdf3_compatible : bool, optional + Enable downloading the dataset in a netCDF3 compatible format. + + Returns + ------- + ResponseSubset + A description of the downloaded data and its destination. - Args: - dataset_url (str, optional): The URL of the dataset to retrieve. - dataset_id (str, optional): The unique identifier of the dataset. - dataset_version (str, optional): Force the use of a specific dataset version. - dataset_part (str, optional): Force the use of a specific dataset part. - username (str, optional): The username for authentication. - password (str, optional): The password for authentication. - output_directory (Union[pathlib.Path, str], optional): The directory where downloaded files will be saved. - credentials_file (Union[pathlib.Path, str], optional): Path to a file containing authentication credentials. - force_download (bool, optional): Skip confirmation before download. - overwrite_output_data (bool, optional): If True, overwrite existing output files. - request_file (Union[pathlib.Path, str], optional): Path to a file containing request parameters. For more information please refer to the README. - service (str, optional): Force the use of a specific service. - overwrite_metadata_cache (bool, optional): If True, overwrite the metadata cache. - no_metadata_cache (bool, optional): If True, do not use the metadata cache. - variables (List[str], optional): List of variable names to extract. - minimum_longitude (float, optional): Minimum longitude value for spatial subset. - maximum_longitude (float, optional): Maximum longitude value for spatial subset. - minimum_latitude (float, optional): Minimum latitude value for spatial subset. - maximum_latitude (float, optional): Maximum latitude value for spatial subset. - minimum_depth (float, optional): Minimum depth value for vertical subset. - maximum_depth (float, optional): Maximum depth value for vertical subset. - vertical_dimension_as_originally_produced (bool, optional): Use original vertical dimension. - start_datetime (datetime, optional): Start datetime for temporal subset. - end_datetime (datetime, optional): End datetime for temporal subset. - subset_method (str, optional): The subset method ('nearest' or 'strict') when requesting the dataset. If strict, you can only request dimension strictly inside the dataset. - output_filename (str, optional): Output filename for the subsetted data. - file_format (str, optional): Extension format for the filename. - motu_api_request (str, optional): MOTU API request string.
- netcdf_compression_enabled (bool, optional): Enable compression level 1 to the NetCDF output file. Use 'netcdf_compression_level' option to customize the compression level. - netcdf_compression_level (int, optional): Specify a compression level to apply on the NetCDF output file. A value of 0 means no compression, and 9 is the highest level of compression available. - netcdf3_compatible (bool, optional): Enable downloading the dataset in a netCDF 3 compatible format. - Returns: - pathlib.Path: Path to the generated subsetted data file. """ # noqa request_file = pathlib.Path(request_file) if request_file else None output_directory = ( @@ -106,7 +140,6 @@ def subset( end_datetime = homogenize_datetime(end_datetime) return subset_function( - dataset_url, dataset_id, dataset_version, dataset_part, @@ -119,10 +152,10 @@ def subset( maximum_latitude, minimum_depth, maximum_depth, - vertical_dimension_as_originally_produced, + vertical_dimension_output, start_datetime, end_datetime, - subset_method, + coordinates_selection_method, output_filename, file_format, service, @@ -132,11 +165,9 @@ def subset( motu_api_request, force_download, overwrite_output_data, - overwrite_metadata_cache, - no_metadata_cache, + dry_run, disable_progress_bar, staging=staging, - netcdf_compression_enabled=netcdf_compression_enabled, netcdf_compression_level=netcdf_compression_level, netcdf3_compatible=netcdf3_compatible, ) diff --git a/copernicusmarine/python_interface/utils.py b/copernicusmarine/python_interface/utils.py index ab549b87..697b9cab 100644 --- a/copernicusmarine/python_interface/utils.py +++ b/copernicusmarine/python_interface/utils.py @@ -1,12 +1,17 @@ from datetime import datetime from typing import Optional, Union +import pendulum +from pendulum import DateTime + from copernicusmarine.core_functions.utils import datetime_parser def homogenize_datetime( input_datetime: Optional[Union[datetime, str]] -) -> Optional[datetime]: +) -> Optional[DateTime]: + if input_datetime is None: + return None if isinstance(input_datetime, str): return datetime_parser(input_datetime) - return input_datetime + return pendulum.instance(input_datetime) diff --git a/doc/Makefile b/doc/Makefile new file mode 100644 index 00000000..4ffae1c6 --- /dev/null +++ b/doc/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
+%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -Ea diff --git a/doc/_static/css/custom.css b/doc/_static/css/custom.css new file mode 100644 index 00000000..33244efa --- /dev/null +++ b/doc/_static/css/custom.css @@ -0,0 +1,18 @@ +.sidebar-brand-text { + font-size: 1.2rem; +} + +.sidebar-logo { + max-width: 70%; +} + +.github-link { + margin-bottom: 1em; +} + +a:visited, +a:active, +a:hover, +a:visited:hover { + color: var(--color-brand-primary); +} diff --git a/doc/_static/favicon_cmems.ico b/doc/_static/favicon_cmems.ico new file mode 100644 index 00000000..92601c61 Binary files /dev/null and b/doc/_static/favicon_cmems.ico differ diff --git a/doc/_static/under_construction.gif b/doc/_static/under_construction.gif new file mode 100644 index 00000000..f986624b Binary files /dev/null and b/doc/_static/under_construction.gif differ diff --git a/doc/_templates/sidebar/github.html b/doc/_templates/sidebar/github.html new file mode 100644 index 00000000..77cdfb61 --- /dev/null +++ b/doc/_templates/sidebar/github.html @@ -0,0 +1,3 @@ + diff --git a/doc/command-line-interface.rst b/doc/command-line-interface.rst new file mode 100644 index 00000000..301ff335 --- /dev/null +++ b/doc/command-line-interface.rst @@ -0,0 +1,34 @@ +.. _command-line-interface: + +Command line interface +=================================================== + +.. _cli-intro: +.. click:: copernicusmarine.command_line_interface.copernicus_marine:base_command_line_interface + :prog: copernicusmarine + :nested: short + :commands: login, get, subset, describe + +.. _cli-describe: +.. click:: copernicusmarine.command_line_interface.group_describe:describe + :prog: describe + :nested: short + :commands: describe + +.. _cli-subset: +.. click:: copernicusmarine.command_line_interface.group_subset:subset + :prog: subset + :nested: short + :commands: subset + +.. _cli-get: +.. click:: copernicusmarine.command_line_interface.group_get:get + :prog: get + :nested: short + :commands: get + +.. _cli-login: +.. click:: copernicusmarine.command_line_interface.group_login:login + :prog: login + :nested: short + :commands: login diff --git a/doc/conf.py b/doc/conf.py new file mode 100644 index 00000000..f6510af4 --- /dev/null +++ b/doc/conf.py @@ -0,0 +1,75 @@ +from sphinx.builders.html import StandaloneHTMLBuilder + +# Configuration file for the Sphinx documentation builder. 
+# +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information + +project = "Copernicus Marine toolbox" +copyright = "2024, Mercator Ocean International" +author = "Mercator Ocean International" + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = [ + "sphinx.ext.autodoc", + "sphinx_click", + "numpydoc", + "sphinx_copybutton", + "myst_nb", +] +numpydoc_show_class_members = False + +templates_path = ["_templates"] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + +pygments_style = "sphinx" +pygments_dark_style = "monokai" + +# -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output + +html_theme = "furo" +html_static_path = ["_static"] +html_favicon = "_static/favicon_cmems.ico" +html_css_files = ["css/custom.css"] + +# -- Options for the furo theme ----------------------------------------------- +# https://pradyunsg.me/furo/customisation/ + +html_logo = "_static/favicon_cmems.ico" + +html_theme_options = { + "light_css_variables": {"color-brand-primary": "#607fad"}, +} + +html_sidebars = { + "**": [ + "sidebar/brand.html", + "sidebar/search.html", + "sidebar/scroll-start.html", + "sidebar/navigation.html", + "sidebar/ethical-ads.html", + "sidebar/scroll-end.html", + "sidebar/github.html", + ] +} + +# -- Options for different image types ------------------------------------------- +# https://stackoverflow.com/questions/45969711/sphinx-doc-how-do-i-render-an-animated-gif-when-building-for-html-but-a-png-wh + +StandaloneHTMLBuilder.supported_image_types = [ + "image/svg+xml", + "image/gif", + "image/png", + "image/jpeg", +] + +# -- Options for myst_nb -------------------------------------------------- +# https://myst-nb.readthedocs.io/en/latest/configuration.html#config-intro + +nb_execution_mode = "off" diff --git a/doc/index.rst b/doc/index.rst new file mode 100644 index 00000000..a0aca458 --- /dev/null +++ b/doc/index.rst @@ -0,0 +1,25 @@ +.. Copernicus Marine toolbox documentation master file, created by + sphinx-quickstart on Wed Aug 21 14:04:51 2024. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Copernicus Marine toolbox documentation +======================================= + +The ``copernicusmarine`` Python library offers capabilities through both **command line interface (CLI)** and **Python API**: + +- **Metadata Information**: List and retrieve metadata information on all variables, datasets and products. +- **Subset Datasets**: Subset datasets to extract only the parts of interest, in preferred format, such as Analysis-Ready Cloud-Optimized (ARCO) Zarr or NetCDF file format. +- **Advanced Filters**: Apply simple or advanced filters to get multiple files, in original formats like NetCDF/GeoTIFF, via direct Marine Data Store connections. +- **No Quotas**: Enjoy no quotas, neither on volume size nor bandwidth. + + +.. 
toctree:: :maxdepth: 2 :caption: Contents: installation usage/usage command-line-interface python-interface response-types diff --git a/doc/installation.rst b/doc/installation.rst new file mode 100644 index 00000000..4d9b10cc --- /dev/null +++ b/doc/installation.rst @@ -0,0 +1,121 @@ +.. _installation-page: + +=================================================== +Installing Copernicus Marine toolbox +=================================================== + +There are several ways to install or use the Copernicus Marine toolbox: + +* via pip (see `PyPI repository `_) +* via mamba | conda (see `conda-forge channel `_) +* via docker (see `dockerhub repository `_) + +Alternatively, you can use a binary. + +.. note:: + + Requires Python ``>=3.9`` and ``<3.13``. + +.. note:: + + Note that the use of ``xarray<2024.7.0`` with ``numpy>=2.0.0`` leads to inconsistent results. See this issue: `xarray issue `_. + +Via pip +************** + +If you already have an environment (it is safer to clone it first), the package can be installed using the ``pip`` command from the Python Package Index: + +.. code-block:: bash + + python -m pip install copernicusmarine + +And to **upgrade the package** to the newest available version, run: + +.. code-block:: bash + + python -m pip install copernicusmarine --upgrade + + +Via mamba | conda (conda-forge channel) +********************************************** + +A ``conda`` package is available on `Anaconda `_. + +You can install it using ``mamba`` (or conda) through the ``conda-forge`` channel with the following command: + +.. code-block:: bash + + mamba install conda-forge::copernicusmarine --yes + +To upgrade the Toolbox with mamba (or conda): + +.. code-block:: bash + + mamba update --name copernicusmarine copernicusmarine --yes + +Or using ``conda``: + +.. code-block:: bash + + conda install -c conda-forge copernicusmarine + +Via docker +************** + +A Docker image is also available on the `copernicusmarine dockerhub repository `_. + +The first step is to pull the container image: + +.. code-block:: bash + + docker pull copernicusmarine/copernicusmarine:latest + +Then run it: + +.. code-block:: bash + + docker run -it --rm copernicusmarine/copernicusmarine --version + + +Create an account on Copernicus Marine website +*********************************************** + +To be able to download Copernicus Marine data, you need to have an account on the Copernicus Marine website. You can create an account on the `Copernicus Marine registration page `_. + + + +Use the Copernicus Marine toolbox binaries +*********************************************** + +On the `release page `_, you can access the binaries of the latest releases. + +To download the latest stable releases directly: + +- MacOS arm64: `copernicusmarine_macos-arm64 `_ +- MacOS x86_64: `copernicusmarine_macos-x86_64 `_ +- Linux: `copernicusmarine_linux `_ +- Windows: `copernicusmarine `_ + +Once downloaded for the specific platform, you can use the toolbox by running the binary as follows: + +On macOS or Linux: + +.. code-block:: bash + + ./copernicusmarine_macos-arm64.cli describe + +(``describe`` or any other command) + +You might have to update the permissions of the binary to be able to execute it on Linux: + +.. code-block:: bash + + chmod +rwx copernicusmarine_linux.cli + +And from Windows (cmd): + +..
code-block:: bash + + copernicusmarine.exe describe + +(``describe`` or any other command) diff --git a/doc/make.bat b/doc/make.bat new file mode 100644 index 00000000..32bb2452 --- /dev/null +++ b/doc/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "" goto help + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/doc/python-interface.rst b/doc/python-interface.rst new file mode 100644 index 00000000..dd7011ce --- /dev/null +++ b/doc/python-interface.rst @@ -0,0 +1,6 @@ +================= +Python interface +================= + +.. automodule:: copernicusmarine + :members: get, subset, describe, open_dataset, read_dataframe, login diff --git a/doc/response-types.rst b/doc/response-types.rst new file mode 100644 index 00000000..5e658eab --- /dev/null +++ b/doc/response-types.rst @@ -0,0 +1,65 @@ +.. _response-types: + +================= +Response types +================= + +The Copernicus Marine toolbox commands return information when downloading. +It can contain useful metadata for the user. + +For the :ref:`command line interface `, all the returned data will be in the form of a JSON sent to stdout. +You can easily save it locally by doing, for example: + +.. code-block:: bash + + copernicusmarine get -i cmems_mod_glo_phy-thetao_anfc_0.083deg_P1M-m > my_json.json + + +.. note:: + For the "get" and "subset" commands, you can get this metadata without + downloading anything by using the ``dry_run`` option (or ``--dry-run`` flag for the CLI). + +------------------- +Commands Response +------------------- + +.. autoclass:: copernicusmarine.ResponseGet() + :members: + :undoc-members: + :exclude-members: model_computed_fields, model_config, model_fields + :member-order: bysource + +.. autoclass:: copernicusmarine.ResponseSubset() + :members: + :undoc-members: + :exclude-members: model_computed_fields, model_config, model_fields + :member-order: bysource + +-------------- +Subtypes +-------------- + +.. autoclass:: copernicusmarine.FileGet() + :members: + :undoc-members: + :exclude-members: model_computed_fields, model_config, model_fields + :member-order: bysource + +.. autoclass:: copernicusmarine.DatasetCoordinatesExtent() + :members: + :undoc-members: + :exclude-members: model_computed_fields, model_config, model_fields + :member-order: bysource + + +.. autoclass:: copernicusmarine.GeographicalExtent() + :members: + :undoc-members: + :exclude-members: model_computed_fields, model_config, model_fields + :member-order: bysource + +..
autoclass:: copernicusmarine.TimeExtent() :members: :undoc-members: :exclude-members: model_computed_fields, model_config, model_fields :member-order: bysource diff --git a/doc/usage/describe-usage.rst b/doc/usage/describe-usage.rst new file mode 100644 index 00000000..06a03d34 --- /dev/null +++ b/doc/usage/describe-usage.rst @@ -0,0 +1,128 @@ +.. _describe-page: + +===================== +Command ``describe`` +===================== + +The ``describe`` command retrieves metadata information about all products and datasets, displaying it as a JSON output. + +**Usage:** + +.. code-block:: bash + + copernicusmarine describe + +Here, the first two products are shown: + +.. code-block:: json + + { + "products": [ + { + "title": "Antarctic Sea Ice Extent from Reanalysis", + "product_id": "ANTARCTIC_OMI_SI_extent", + "thumbnail_url": "https://catalogue.marine.copernicus.eu/documents/IMG/ANTARCTIC_OMI_SI_extent.png", + "digital_object_identifier": "10.48670/moi-00186", + "sources": [ + "Numerical models" + ], + "processing_level": null, + "production_center": "Mercator Oc\u00e9an International" + }, + { + "title": "Antarctic Monthly Sea Ice Extent from Observations Reprocessing", + "product_id": "ANTARCTIC_OMI_SI_extent_obs", + "thumbnail_url": "https://catalogue.marine.copernicus.eu/documents/IMG/ANTARCTIC_OMI_SI_extent_obs.png", + "digital_object_identifier": "10.48670/moi-00187", + "sources": [ + "Satellite observations" + ], + "processing_level": null, + "production_center": "MET Norway" + } + ] + } + + +By default, the command only shows the products. To include the datasets, you can use the ``--include-datasets`` option. + +**Example:** + +.. code-block:: bash + + copernicusmarine describe --include-datasets + +To save the JSON output to a file, you can use the following command: + +.. code-block:: bash + + copernicusmarine describe --include-datasets > all_datasets_copernicusmarine.json + +``--contains`` option +---------------------- + +You also have the option to filter the output by using the ``--contains`` option. It will perform a search on all the text fields of the output. + +**Example:** + +If you want, for example, the ``cmems_obs-ins_glo_phy-temp-sal_my_cora_irr`` dataset only, you can use the following command: + +.. code-block:: bash + + copernicusmarine describe --include-datasets --contains cmems_obs-ins_glo_phy-temp-sal_my_cora_irr + +The output will be something like this: + +..
code-block:: json + + { + "products": [ + { + "title": "Global Ocean- CORA- In-situ Observations Yearly Delivery in Delayed Mode", + "product_id": "INSITU_GLO_PHY_TS_DISCRETE_MY_013_001", + "thumbnail_url": "https://mdl-metadata.s3.waw3-1.cloudferro.com/metadata/thumbnails/INSITU_GLO_PHY_TS_DISCRETE_MY_013_001.jpg", + "digital_object_identifier": "10.17882/46219", + "sources": [ + "In-situ observations" + ], + "processing_level": "Level 2", + "production_center": "OceanScope (France)", + "datasets": [ + { + "dataset_id": "cmems_obs-ins_glo_phy-temp-sal_my_cora_irr", + "dataset_name": "cmems_obs-ins_glo_phy-temp-sal_my_cora_irr_202311", + "versions": [ + { + "label": "202311", + "parts": [ + { + "name": "default", + "services": [ + { + "service_type": { + "service_name": "original-files", + "short_name": "files" + }, + "service_format": null, + "uri": "https://s3.waw3-1.cloudferro.com/mdl-native-03/native/INSITU_GLO_PHY_TS_DISCRETE_MY_013_001/cmems_obs-ins_glo_phy-temp-sal_my_cora_irr_202311", + "variables": [] + } + ], + "retired_date": null, + "released_date": "2023-11-30T11:00:00.000Z" + } + ] + } + ] + } + ] + } + ] + } + +``--include-versions`` option +----------------------------- + +The describe command will show you only one version of a dataset, prioritising the most recent version and the ones that are not planned to be retired. +If you want to see all versions, you can use the ``--include-versions`` option. +In some cases, it gives access to the metadata of datasets that are about to be released or that will be retired soon. diff --git a/doc/usage/errors.rst b/doc/usage/errors.rst new file mode 100644 index 00000000..3d00c61b --- /dev/null +++ b/doc/usage/errors.rst @@ -0,0 +1,41 @@ +Common and custom errors +========================== + +Custom errors +------------- + +.. autoclass:: copernicusmarine.DatasetNotFound() + +.. autoclass:: copernicusmarine.DatasetVersionNotFound() + +.. autoclass:: copernicusmarine.DatasetVersionPartNotFound() + +.. autoclass:: copernicusmarine.ServiceNotHandled() + +.. autoclass:: copernicusmarine.OtherOptionsPassedWithCreateTemplate() + +.. autoclass:: copernicusmarine.CredentialsCannotBeNone() + +.. autoclass:: copernicusmarine.InvalidUsernameOrPassword() + +.. autoclass:: copernicusmarine.CouldNotConnectToAuthenticationSystem() + +.. autoclass:: copernicusmarine.MinimumLongitudeGreaterThanMaximumLongitude() + +.. autoclass:: copernicusmarine.VariableDoesNotExistInTheDataset() + +.. autoclass:: copernicusmarine.CoordinatesOutOfDatasetBounds() + +.. autoclass:: copernicusmarine.NetCDFCompressionNotAvailable() + +.. autoclass:: copernicusmarine.WrongDatetimeFormat() + +.. autoclass:: copernicusmarine.FormatNotSupported() + +.. autoclass:: copernicusmarine.ServiceNotSupported() + +.. autoclass:: copernicusmarine.ServiceDoesNotExistForCommand() + +.. autoclass:: copernicusmarine.ServiceNotAvailable() + +.. autoclass:: copernicusmarine.NoServiceAvailable() diff --git a/doc/usage/get-usage.rst b/doc/usage/get-usage.rst new file mode 100644 index 00000000..3575490f --- /dev/null +++ b/doc/usage/get-usage.rst @@ -0,0 +1,135 @@ +.. _get-page: + +=============== +Command ``get`` +=============== + +Download the dataset file(s) as originally produced, based on the dataset ID or the path to files. + +**Example:** + +.. code-block:: bash + + copernicusmarine get --dataset-id cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m + +**Returns:** + +..
code-block:: bash + + INFO - 2024-04-03T11:39:18Z - Dataset version was not specified, the latest one was selected: "202211" + INFO - 2024-04-03T11:39:18Z - Dataset part was not specified, the first one was selected: "default" + INFO - 2024-04-03T11:39:18Z - Service was not specified, the default one was selected: "original-files" + INFO - 2024-04-03T11:39:18Z - Downloading using service original-files... + INFO - 2024-04-03T11:39:19Z - You requested the download of the following files: + s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m_202211/CMEMS_v5r1_IBI_PHY_MY_NL_01yav_19930101_19931231_R20221101_RE01.nc - 8.83 MB - 2023-11-12T23:47:13Z + [...] + s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m_202211/CMEMS_v5r1_IBI_PHY_MY_NL_01yav_20120101_20121231_R20221101_RE01.nc - 8.62 MB - 2023-11-12T23:47:14Z + Printed 20 out of 29 files + + Total size of the download: 252.94 MB + Do you want to proceed with download? [Y/n]: + +**By default:** + +- After the header displays a summary of the request, a download confirmation is requested. To skip this confirmation, add the option ``--force-download``. +- Files are downloaded to the current directory while maintaining the original folder structure. To avoid this behavior, add ``--no-directories`` and specify a destination with ``-o/--output-directory``. +- Option ``--show-outputnames`` displays the full paths of the output files if required. +- Option ``--create-file-list`` creates a file containing the names of the targeted files instead of downloading them. You must input a file name, e.g., ``--create-file-list my_files.txt``. The format must be ``.txt`` or ``.csv``. + +If the user inputs a filename that ends in ``.txt``, it will contain only the full S3 paths to the targeted files, compatible with the ``--file-list`` option. + +**Example:** + +.. code-block:: bash + + copernicusmarine get --dataset-id cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m --filter "*2021*" --create-file-list selected_files_for_2021.txt + +The content of ``selected_files_for_2021.txt`` would be: + +.. code-block:: text + + s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m_202012/2021/CMEMS_v5r1_IBI_PHY_MY_PdE_01mav_20210101_20210131_R20230101_RE01.nc + s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m_202012/2021/CMEMS_v5r1_IBI_PHY_MY_PdE_01mav_20210201_20210228_R20230101_RE01.nc + [...] + +If the user inputs a filename that ends in ``.csv``, it will contain columns separated by a comma: ``filename``, ``size`` (in Bytes), ``last_modified_datetime``, and ``etag``. It is **not** directly compatible with the ``--file-list`` option and would require post-processing. + +**Example:** + +.. code-block:: bash + + copernicusmarine get --dataset-id cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m --filter "*2021*" --create-file-list selected_files_for_2021.csv + +The content of ``selected_files_for_2021.csv`` would be: + +.. code-block:: text + + filename,size,last_modified_datetime,etag + s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m_202012/2021/CMEMS_v5r1_IBI_PHY_MY_PdE_01mav_20210101_20210131_R20230101_RE01.nc,12295906,2023-11-12 23:47:05.466000+00:00,"e8a7e564f676a08bf601bcdeaebdc563" + [...] + +About sync option +--------------------- + +The ``--sync`` option downloads original files only if they do not exist or are not up to date.
About sync option +--------------------- + +The ``--sync`` option downloads original files only if they do not exist or are not up to date. The toolbox compares the destination folder with the source folder, and the option can be combined with filters. If set together with ``--overwrite-output-data``, the latter is ignored. The ``--sync-delete`` option works like ``--sync`` but also deletes any local files not found on the remote server. + +**Limitations:** + +- ``--sync`` is not compatible with ``--no-directories``. +- ``--sync`` only works if ``--dataset-version`` is specified (see the :ref:`dataset-version ` option). +- ``--sync`` functionality is not available for datasets with several parts (e.g., INSITU or static datasets). + +About filtering options +------------------------ + +The ``--filter`` option allows specifying a Unix shell-style wildcard pattern to select specific files. + +**Example** To download only files that contain "2000", "2001", or "2002": + +.. code-block:: bash + + copernicusmarine get --dataset-id cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m --filter "*01yav_200[0-2]*" + +Option ``--regex`` allows specifying a regular expression for more advanced file selection. + +**Example** To download only files that contain "2000", "2001", or "2002" using a regular expression: + +.. code-block:: bash + + copernicusmarine get -i cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m --regex ".*01yav_20(00|01|02).*.nc" + +About the file list option +--------------------------- + +The ``--file-list`` option allows specifying a list of files for advanced selection. The file can contain complete absolute paths or only a partial path defined by the user. + +By default, the ``get`` command lists all files on the remote server before selecting the requested ones. If all the paths listed in the file are found, the ``--file-list`` option downloads them directly and skips this listing step. + +**Example** of ``file_list.txt`` with paths that would be directly downloaded: + +.. code-block:: text + + # correct paths + > s3://mdl-native-01/native/INSITU_GLO_PHYBGCWAV_DISCRETE_MYNRT_013_030/cmems_obs-ins_glo_phybgcwav_mynrt_na_irr_202311/history/BO/AR_PR_BO_58JM.nc + > INSITU_GLO_PHYBGCWAV_DISCRETE_MYNRT_013_030/cmems_obs-ins_glo_phybgcwav_mynrt_na_irr_202311/history/BO/AR_PR_BO_58JM.nc + > cmems_obs-ins_glo_phybgcwav_mynrt_na_irr_202311/history/BO/AR_PR_BO_58JM.nc + > history/BO/AR_PR_BO_58JM.nc + > index_history.txt + # incorrect paths + # version is missing + > INSITU_GLO_PHYBGCWAV_DISCRETE_MYNRT_013_030/cmems_obs-ins_glo_phybgcwav_mynrt_na_irr/history/BO/AR_PR_BO_58JM.nc + # only the file name and not the path to the file + > AR_PR_BO_58JM.nc + # not the same dataset + > another_dataset/history/BO/AR_PR_BO_58JM.nc + + +**Example** of ``file_list.txt`` with absolute paths: + +.. code-block:: text + + s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m_202012/2021/CMEMS_v5r1_IBI_PHY_MY_PdE_01mav_20210101_20210131_R20230101_RE01.nc + s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m_202012/2021/CMEMS_v5r1_IBI_PHY_MY_PdE_01mav_20210201_20210228_R20230101_RE01.nc + s3://mdl-native-10/native/IBI_MULTIYEAR_PHY_005_002/cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m_202012/2021/CMEMS_v5r1_IBI_PHY_MY_PdE_01mav_20210301_20210331_R20230101_RE01.nc + +Note that a path to a file can include wildcards or regular expressions.
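+For example, once a ``file_list.txt`` such as the ones above has been prepared, it can be passed back to the ``get`` command: + +.. code-block:: bash + +    copernicusmarine get --dataset-id cmems_mod_ibi_phy_my_0.083deg-3D_P1M-m --file-list file_list.txt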
diff --git a/doc/usage/login-usage.rst b/doc/usage/login-usage.rst new file mode 100644 index 00000000..77887c65 --- /dev/null +++ b/doc/usage/login-usage.rst @@ -0,0 +1,38 @@ +.. _login-page: + +================== +Command ``login`` +================== + +The ``login`` command creates a configuration file called ``.copernicusmarine-credentials`` that grants access to all Copernicus Marine Data Store services. By default, this file is saved in the user's home directory. + +**Example:** + +.. code-block:: bash + + > copernicusmarine login + username : johndoe + password : + INFO - Configuration files stored in /Users/foo/.copernicusmarine + +If the ``.copernicusmarine-credentials`` file already exists, the system will ask for confirmation before overwriting it. You can also use the option ``--overwrite`` or ``--overwrite-configuration-file`` to skip this confirmation. + +You can also use the ``--skip-if-user-logged-in`` option to prevent overwriting the configuration file if the user is already logged in. + +New Copernicus Marine authentication system +------------------------------------------- + +A new Copernicus Marine authentication system will be released in the months following the release of the Copernicus Marine toolbox version 2.0.0. +From 2.0.0, the toolbox should be able to handle both the old and the new authentication systems. + +If you are blocking some domains, you will need to authorize the domain ``auth.marine.copernicus.eu`` to be able to connect once the old system is decommissioned. + +.. note:: + One limitation of the old system is that it goes through HTTP (via redirections) and not HTTPS. The new system will use HTTPS only. + +Access points migration and evolution +------------------------------------- + +If you still have configurations for legacy services (for example, files like ``~/motuclient/motuclient-python.ini``, ``~/.netrc``, or ``~/_netrc`` in your home directory), these will automatically be recognized by the ``get`` and ``subset`` commands without needing to run the ``login`` command. + +If your configuration files are stored in a different directory, you can point to them by using the ``--credentials-file`` option when running the ``get`` or ``subset`` commands. diff --git a/doc/usage/network-configuration.rst b/doc/usage/network-configuration.rst new file mode 100644 index 00000000..5c3cebc1 --- /dev/null +++ b/doc/usage/network-configuration.rst @@ -0,0 +1,73 @@ +Network configuration +====================== + +Disable SSL +----------- + +A global SSL context is used when making HTTP calls with the ``copernicusmarine`` toolbox. +Depending on your network configuration, this can lead to unexpected behavior. +You can set the ``COPERNICUSMARINE_DISABLE_SSL_CONTEXT`` environment variable to any value +to globally disable the usage of SSL in the toolbox: + +- on **UNIX** platforms: ``export COPERNICUSMARINE_DISABLE_SSL_CONTEXT=True`` +- on **Windows** platforms: ``set COPERNICUSMARINE_DISABLE_SSL_CONTEXT=True`` + +``trust_env`` for Python libraries +------------------------------------ + +To make HTTP calls, the Copernicus Marine toolbox uses the ``requests`` library. +By default, this library has ``trust_env`` set to ``True``. + +If you want to deactivate this, you can set ``COPERNICUSMARINE_TRUST_ENV=False`` (default ``True``). +This can be useful, for example, if you don't want the library to read your ``.netrc`` file, as it has been +reported that having a ``.netrc`` with the line "default login anonymous password user@site" is incompatible +with the S3 connection required by the toolbox.
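+As a sketch, on UNIX platforms this can also be set for a single invocation only: + +.. code-block:: bash + +    # ignore proxy environment variables and the ~/.netrc file for this call +    COPERNICUSMARINE_TRUST_ENV=False copernicusmarine describe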
+ +Using a custom certificate path +------------------------------- + +Some users have reported issues with SSL certificates. You can specify a custom certificate by setting the +``COPERNICUSMARINE_SET_SSL_CERTIFICATE_PATH`` environment variable to the path of your custom certificate. + +This can be useful, for example, if you want to use the system-wide certificate instead of the one created by your conda environment. + +Proxy +----- + +To use proxies, as described in the `requests documentation `_, +you can use two options: + +- set the ``HTTPS_PROXY`` variable. For example: ``HTTPS_PROXY="http://user:pass@some.proxy.com"``. + It should work even with ``COPERNICUSMARINE_TRUST_ENV=False``. +- use a ``.netrc`` file, but be aware that having the line "default login anonymous password user@site" is incompatible + with the S3 connection required by the toolbox. Also note that if ``COPERNICUSMARINE_TRUST_ENV=True`` (the default value) + and the ``NETRC`` environment variable is set to a specific location, the ``.netrc`` file will be read from that + location rather than from ``~/.netrc``. + +Number of concurrent requests +----------------------------- + +The toolbox makes many requests to STAC in order to parse the full Marine Data Store STAC catalog. +For that, it uses concurrent calls (a thread pool). The same mechanism is used when downloading files with the ``get`` command. +Making too many requests at the same time can be problematic; conversely, you may want to speed up downloads. + +To limit the number of simultaneous requests, use the ``max_concurrent_requests`` argument. +See :func:`~copernicusmarine.describe` and :func:`~copernicusmarine.get`. +The default value is ``15`` and the minimum value is ``1``. + +.. note:: + For the ``get`` command, you can set ``max_concurrent_requests`` to ``0`` if you don't want to use ``concurrent.futures.ThreadPoolExecutor`` at all; + the download will then go through ``boto3`` only. + +Connection timeout and retries +------------------------------- + +The toolbox uses the ``requests`` library to make HTTP calls. By default, the connection timeout is set to 60 seconds; +this corresponds to the ``timeout`` argument of ``requests``. Please refer to the `requests documentation on timeouts `_ +for more information. + +You can set the ``COPERNICUSMARINE_HTTPS_TIMEOUT`` environment variable to a custom value in seconds. +The default value is ``60``. Accepted values are positive floats. + +The toolbox also sets retries on HTTP connections by default. You can set the ``COPERNICUSMARINE_HTTPS_RETRIES`` environment variable to a custom value. +The default value is ``5``. Accepted values are integers.
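+As a sketch, both variables can be combined on UNIX platforms to accommodate a slow or unstable connection: + +.. code-block:: bash + +    # wait up to 120 seconds per request and retry up to 10 times +    export COPERNICUSMARINE_HTTPS_TIMEOUT=120 +    export COPERNICUSMARINE_HTTPS_RETRIES=10 +    copernicusmarine describe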
diff --git a/doc/usage/quickoverview.ipynb b/doc/usage/quickoverview.ipynb new file mode 100644 index 00000000..bffd4dcb --- /dev/null +++ b/doc/usage/quickoverview.ipynb @@ -0,0 +1,1856 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Quick Overview\n", + "\n", + "Most of the code presented here is for the Python interface. Transposing the examples to bash should be quite straightforward since commands, options and functionalities should be almost identical between the command line interface and the Python interface. " + ] + }, + { + "cell_type": "markdown", + "metadata": { + "jp-MarkdownHeadingCollapsed": true + }, + "source": [ + "## Introduction\n", + "The Copernicus Marine toolbox is a tool developed to connect users with marine data available at the [Copernicus Marine Data Store](https://data.marine.copernicus.eu/products). There, you can find products with data relevant to many different users.\n", + "\n", + "One way to access this data is through the more visual tool [My Ocean Pro](https://data.marine.copernicus.eu/viewer/expert).\n", + "\n", + "The Copernicus Marine toolbox allows access to this data programmatically and in an automated way, facilitating the download and distribution:\n", + "- [GitHub](https://github.com/mercator-ocean/copernicus-marine-toolbox)\n", + "- [Documentation](https://toolbox-docs.marine.copernicus.eu/)\n", + "- [Help Center](https://help.marine.copernicus.eu/en/)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The main functions are:\n", + "- **Login**: for authentication\n", + "- **Describe**: to get metadata from the Data Store\n", + "- **Get**: to download the native data\n", + "- **Subset**: to download specific (spatiotemporal) regions of interest\n", + "\n", + "These four commands are available from the terminal (command line interface) and/or from a Python API (from scripts to notebooks!)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "jp-MarkdownHeadingCollapsed": true + }, + "source": [ + "## Installation\n", + "\n", + "There are different ways to install the Copernicus Marine toolbox: using the **`pip`** command, **`mamba | conda`**, the Docker image or the binaries. You can find more information on the [installation page](installation-page) of the documentation." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Check the Installation\n", + "To verify that the toolbox has been installed correctly, we can import the package from Python (or call it from the CLI) and check that it works properly." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'1.3.3'" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import copernicusmarine\n", + "copernicusmarine.__version__" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Once the package is installed, we can start to explore everything the toolbox has to offer." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "jp-MarkdownHeadingCollapsed": true + }, + "source": [ + "## Copernicus Marine toolbox - Login\n", + "\n", + "You can obtain credentials for free by creating an account on the [Copernicus Marine website](https://data.marine.copernicus.eu/register?redirect=%2Fproducts).\n", + "\n", + "For more information, see the [page about login](login-page) of the documentation. You can also check the dedicated pages for the [command line interface](cli-login) or the {func}`Python interface `." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "One of the ways to **save the credentials** (so that the other functionalities can then be used without providing them every time) is the `login` function. Your credentials will be encoded and saved at `~/.copernicusmarine/.copernicusmarine-credentials`. You can change the folder using the `configuration_file_directory` argument." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "copernicusmarine.login(username='', password='')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can also set environment variables for your processes; they will be read automatically by `copernicusmarine.get` and `copernicusmarine.subset`."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# for Jupyter Notebook\n", + "%env COPERNICUSMARINE_SERVICE_USERNAME=\n", + "%env COPERNICUSMARINE_SERVICE_PASSWORD=\n", + "\n", + "# for Python script\n", + "import os\n", + "os.environ['COPERNICUSMARINE_SERVICE_USERNAME'] = ''\n", + "os.environ['COPERNICUSMARINE_SERVICE_PASSWORD'] = ''\n", + "\n", + "# for cli\n", + "!export COPERNICUSMARINE_SERVICE_USERNAME=\n", + "!export COPERNICUSMARINE_SERVICE_PASSWORD=" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "jp-MarkdownHeadingCollapsed": true + }, + "source": [ + "## Copernicus Marine toolbox - Describe\n", + "\n", + "To explore the catalogue of products and datasets available in Copernicus Marine service.\n", + "\n", + "For more information, see the [page about describe](describe-page) of the documentation. You can also check the dedicated pages for the [command line interface](cli-describe) or the {func}`Python interface `." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Fetching catalog: 100%|██████████| 2/2 [00:11<00:00, 5.85s/it]\n" + ] + }, + { + "data": { + "text/plain": [ + "{'title': 'Antarctic Sea Ice Extent from Reanalysis',\n", + " 'product_id': 'ANTARCTIC_OMI_SI_extent',\n", + " 'thumbnail_url': 'https://catalogue.marine.copernicus.eu/documents/IMG/ANTARCTIC_OMI_SI_extent.png',\n", + " 'digital_object_identifier': '10.48670/moi-00186',\n", + " 'sources': ['Numerical models'],\n", + " 'processing_level': None,\n", + " 'production_center': 'Mercator Océan International'}" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# by default, we only get information about the products\n", + "catalogue_products = copernicusmarine.describe()\n", + "catalogue_products[\"products\"][0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# We can get information about the datasets\n", + "catalogue_datasets = copernicusmarine.describe(include_datasets=True)\n", + "# catalogue_datasets[\"products\"][0][\"datasets\"][0] not shown because it is too long" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Fetching catalog: 100%|██████████| 2/2 [00:12<00:00, 6.34s/it]\n" + ] + }, + { + "data": { + "text/plain": [ + "{'products': [{'title': 'Atlantic-Iberian Biscay Irish- Ocean Biogeochemical Analysis and Forecast',\n", + " 'product_id': 'IBI_ANALYSISFORECAST_BGC_005_004',\n", + " 'thumbnail_url': 'https://mdl-metadata.s3.waw3-1.cloudferro.com/metadata/thumbnails/IBI_ANALYSISFORECAST_BGC_005_004.jpg',\n", + " 'description': 'The IBI-MFC provides a high-resolution biogeochemical analysis and forecast product covering the European waters, and more specifically the Iberia–Biscay–Ireland (IBI) area. The last 2 years before now (historic best estimates) as well as daily averaged forecasts with a horizon of 10 days (updated on a weekly basis) are available on the catalogue.\\nTo this aim, an online coupled physical-biogeochemical operational system is based on NEMO-PISCES at 1/36° and adapted to the IBI area, being Mercator-Ocean in charge of the model code development. PISCES is a model of intermediate complexity, with 24 prognostic variables. 
It simulates marine biological productivity of the lower trophic levels and describes the biogeochemical cycles of carbon and of the main nutrients (P, N, Si, Fe).\\nThe product provides daily and monthly averages of the main biogeochemical variables: chlorophyll, oxygen, nitrate, phosphate, silicate, iron, ammonium, net primary production, euphotic zone depth, phytoplankton carbon, pH, dissolved inorganic carbon, surface partial pressure of carbon dioxide, and zooplankton.\\n\\n**Product Citation**: \\nPlease refer to our Technical FAQ for citing [products.](http://marine.copernicus.eu/faq/cite-cmems-products-cmems-credit/?idpage=169)\\n\\n**DOI (Product)**: \\nhttps://doi.org/10.48670/moi-00026\\n\\n**References:**\\n\\n* Gutknecht, E. and Reffray, G. and Mignot, A. and Dabrowski, T. and Sotillo, M. G. Modelling the marine ecosystem of Iberia-Biscay-Ireland (IBI) European waters for CMEMS operational applications. Ocean Sci., 15, 1489–1516, 2019. https://doi.org/10.5194/os-15-1489-2019\\n',\n", + " 'digital_object_identifier': '10.48670/moi-00026',\n", + " 'sources': ['Numerical models'],\n", + " 'processing_level': 'Level 4',\n", + " 'production_center': 'NOLOGIN'},\n", + " {'title': 'Atlantic-Iberian Biscay Irish- Ocean Physics Analysis and Forecast',\n", + " 'product_id': 'IBI_ANALYSISFORECAST_PHY_005_001',\n", + " 'thumbnail_url': 'https://mdl-metadata.s3.waw3-1.cloudferro.com/metadata/thumbnails/IBI_ANALYSISFORECAST_PHY_005_001.jpg',\n", + " 'description': 'The IBI-MFC provides a high-resolution ocean analysis and forecast product (daily run by Nologin with the support of CESGA in terms of supercomputing resources), covering the European waters, and more specifically the Iberia–Biscay–Ireland (IBI) area. The last 2 years before now (historic best estimates) as well as forecasts of different temporal resolutions with a horizon of 5 days (updated on a daily basis) are available on the catalogue.\\nThe system is based on a eddy-resolving NEMO model application at 1/36º horizontal resolution, being Mercator-Ocean in charge of the model code development. The hydrodynamic forecast includes high frequency processes of paramount importance to characterize regional scale marine processes: tidal forcing, surges and high frequency atmospheric forcing, fresh water river discharge, wave forcing in forecast, etc. A weekly update of IBI downscaled analysis is also delivered as historic IBI best estimates.\\nThe product offers 3D daily and monthly ocean fields, as well as hourly mean and 15-minute instantaneous values for some surface variables. Daily and monthly averages of 3D Temperature, 3D Salinity, 3D Zonal and Meridional Velocity components, Mix Layer Depth, Sea Bottom Temperature and Sea Surface Height are provided. Additionally, hourly means of surface fields for variables such as Sea Surface Height, Mix Layer Depth, Surface Temperature and Currents, together with Barotropic Velocities are delivered. Finally, 15-minute instantaneous values of Sea Surface Height and Currents are also given.\\n\\n**Product Citation**: \\nPlease refer to our Technical FAQ for citing [products.](http://marine.copernicus.eu/faq/cite-cmems-products-cmems-credit/?idpage=169)\\n\\n**DOI (Product)**: \\nhttps://doi.org/10.48670/moi-00027\\n\\n**References:**\\n\\n* Sotillo, M.G.; Campuzano, F.; Guihou, K.; Lorente, P.; Olmedo, E.; Matulka, A.; Santos, F.; Amo-Baladrón, M.A.; Novellino, A. 
River Freshwater Contribution in Operational Ocean Models along the European Atlantic Façade: Impact of a New River Discharge Forcing Data on the CMEMS IBI Regional Model Solution. J. Mar. Sci. Eng. 2021, 9, 401. https://doi.org/10.3390/jmse9040401\\n* Mason, E. and Ruiz, S. and Bourdalle-Badie, R. and Reffray, G. and García-Sotillo, M. and Pascual, A. New insight into 3-D mesoscale eddy properties from CMEMS operational models in the western Mediterranean. Ocean Sci., 15, 1111–1131, 2019. https://doi.org/10.5194/os-15-1111-2019\\n* Lorente, P. and García-Sotillo, M. and Amo-Baladrón, A. and Aznar, R. and Levier, B. and Sánchez-Garrido, J. C. and Sammartino, S. and de Pascual-Collar, Á. and Reffray, G. and Toledano, C. and Álvarez-Fanjul, E. Skill assessment of global, regional, and coastal circulation forecast models: evaluating the benefits of dynamical downscaling in IBI (Iberia-Biscay-Ireland) surface waters. Ocean Sci., 15, 967–996, 2019. https://doi.org/10.5194/os-15-967-2019\\n* Aznar, R., Sotillo, M. G., Cailleau, S., Lorente, P., Levier, B., Amo-Baladrón, A., Reffray, G., and Alvarez Fanjul, E. Strengths and weaknesses of the CMEMS forecasted and reanalyzed solutions for the Iberia-Biscay-Ireland (IBI) waters. J. Mar. Syst., 159, 1–14, https://doi.org/10.1016/j.jmarsys.2016.02.007, 2016\\n* Sotillo, M. G., Cailleau, S., Lorente, P., Levier, B., Reffray, G., Amo-Baladrón, A., Benkiran, M., and Alvarez Fanjul, E.: The MyOcean IBI Ocean Forecast and Reanalysis Systems: operational products and roadmap to the future Copernicus Service, J. Oper. Oceanogr., 8, 63–79, https://doi.org/10.1080/1755876X.2015.1014663, 2015.\\n',\n", + " 'digital_object_identifier': '10.48670/moi-00027',\n", + " 'sources': ['Numerical models'],\n", + " 'processing_level': 'Level 4',\n", + " 'production_center': 'NOLOGIN'},\n", + " {'title': 'Atlantic-Iberian Biscay Irish- Ocean Wave Analysis and Forecast',\n", + " 'product_id': 'IBI_ANALYSISFORECAST_WAV_005_005',\n", + " 'thumbnail_url': 'https://mdl-metadata.s3.waw3-1.cloudferro.com/metadata/thumbnails/IBI_ANALYSISFORECAST_WAV_005_005.jpg',\n", + " 'description': 'The IBI-MFC provides a high-resolution wave analysis and forecast product (run twice a day by Nologin with the support of CESGA in terms of supercomputing resources), covering the European waters, and more specifically the Iberia–Biscay–Ireland (IBI) area. The last 2 years before now (historic best estimates), as well as hourly instantaneous forecasts with a horizon of up to 10 days (updated on a daily basis) are available on the catalogue.\\nThe IBI wave model system is based on the MFWAM model and runs on a grid of 5 km of horizontal resolution forced with the ECMWF hourly wind data. The system assimilates significant wave height (SWH) altimeter data and CFOSAT wave spectral data (supplied by Météo-France), and it is forced by currents provided by the IBI ocean circulation system. \\nThe product offers hourly instantaneous fields of different wave parameters, including Wave Height, Period and Direction for total spectrum; fields of Wind Wave (or wind sea), Primary Swell Wave and Secondary Swell for partitioned wave spectra; and the highest wave variables, such as maximum crest height and maximum crest-to-trough height. 
Additionally, the IBI wave system is set up to provide internally some key parameters adequate to be used as forcing in the IBI NEMO ocean model forecast run.\\n\\n**Product Citation**: \\nPlease refer to our Technical FAQ for citing [products.](http://marine.copernicus.eu/faq/cite-cmems-products-cmems-credit/?idpage=169)\\n\\n**DOI (Product)**: \\nhttps://doi.org/10.48670/moi-00025\\n\\n**References:**\\n\\n* Toledano, C.; Ghantous, M.; Lorente, P.; Dalphinet, A.; Aouf, L.; Sotillo, M.G. Impacts of an Altimetric Wave Data Assimilation Scheme and Currents-Wave Coupling in an Operational Wave System: The New Copernicus Marine IBI Wave Forecast Service. J. Mar. Sci. Eng. 2022, 10, 457. https://doi.org/10.3390/jmse10040457\\n',\n", + " 'digital_object_identifier': '10.48670/moi-00025',\n", + " 'sources': ['Numerical models'],\n", + " 'processing_level': 'Level 4',\n", + " 'production_center': 'NOLOGIN'},\n", + " {'title': 'Atlantic-Iberian Biscay Irish- Ocean BioGeoChemistry NON ASSIMILATIVE Hindcast',\n", + " 'product_id': 'IBI_MULTIYEAR_BGC_005_003',\n", + " 'thumbnail_url': 'https://mdl-metadata.s3.waw3-1.cloudferro.com/metadata/thumbnails/IBI_MULTIYEAR_BGC_005_003.jpg',\n", + " 'description': 'The IBI-MFC provides a biogeochemical reanalysis product for the Iberia-Biscay-Ireland (IBI) area starting in 01/01/1993 and being regularly updated on a yearly basis. The model system is run by Mercator-Ocean, being the product post-processed to the user’s format by Nologin with the support of CESGA in terms of supercomputing resources.\\nTo this aim, an application of the biogeochemical model PISCES is run simultaneously with the ocean physical IBI reanalysis, generating both products at the same 1/12° horizontal resolution. The PISCES model is able to simulate the first levels of the marine food web, from nutrients up to mesozooplankton and it has 24 state variables.\\nThe product provides daily, monthly and yearly averages of the main biogeochemical variables: chlorophyll, oxygen, nitrate, phosphate, silicate, iron, ammonium, net primary production, euphotic zone depth, phytoplankton carbon, pH, dissolved inorganic carbon and surface partial pressure of carbon dioxide. Additionally, climatological parameters (monthly mean and standard deviation) of these variables for the period 1993-2016 are delivered.\\n\\n**Product Citation**: \\nPlease refer to our Technical FAQ for citing [products.](http://marine.copernicus.eu/faq/cite-cmems-products-cmems-credit/?idpage=169)\\n\\n**DOI (Product)**: \\nhttps://doi.org/10.48670/moi-00028\\n\\n**References:**\\n\\n* Aznar, R., Sotillo, M. G., Cailleau, S., Lorente, P., Levier, B., Amo-Baladrón, A., Reffray, G., and Alvarez Fanjul, E. Strengths and weaknesses of the CMEMS forecasted and reanalyzed solutions for the Iberia-Biscay-Ireland (IBI) waters. J. Mar. Syst., 159, 1–14, https://doi.org/10.1016/j.jmarsys.2016.02.007, 2016\\n',\n", + " 'digital_object_identifier': '10.48670/moi-00028',\n", + " 'sources': ['Numerical models'],\n", + " 'processing_level': 'Level 4',\n", + " 'production_center': 'NOLOGIN'},\n", + " {'title': 'Atlantic-Iberian Biscay Irish- Ocean Physics Reanalysis',\n", + " 'product_id': 'IBI_MULTIYEAR_PHY_005_002',\n", + " 'thumbnail_url': 'https://mdl-metadata.s3.waw3-1.cloudferro.com/metadata/thumbnails/IBI_MULTIYEAR_PHY_005_002.jpg',\n", + " 'description': 'The IBI-MFC provides a ocean physical reanalysis product for the Iberia-Biscay-Ireland (IBI) area starting in 01/01/1993 and being regularly updated on a yearly basis. 
The model system is run by Mercator-Ocean, being the product post-processed to the user’s format by Nologin with the support of CESGA in terms of supercomputing resources. \\nThe IBI model numerical core is based on the NEMO v3.6 ocean general circulation model run at 1/12° horizontal resolution. Altimeter data, in situ temperature and salinity vertical profiles and satellite sea surface temperature are assimilated.\\nThe product offers 3D daily, monthly and yearly ocean fields, as well as hourly mean fields for surface variables. Daily, monthly and yearly averages of 3D Temperature, 3D Salinity, 3D Zonal and Meridional Velocity components, Mix Layer Depth, Sea Bottom Temperature and Sea Surface Height are provided. Additionally, hourly means of surface fields for variables such as Sea Surface Height, Mix Layer Depth, Surface Temperature and Currents, together with Barotropic Velocities are distributed. Additionally, climatological parameters (monthly mean and standard deviation) of these variables for the period 1993-2016 are delivered.\\n\\n**Product Citation**: \\nPlease refer to our Technical FAQ for citing [products.](http://marine.copernicus.eu/faq/cite-cmems-products-cmems-credit/?idpage=169)\\n\\n**DOI (Product)**: \\nhttps://doi.org/10.48670/moi-00029',\n", + " 'digital_object_identifier': '10.48670/moi-00029',\n", + " 'sources': ['Numerical models'],\n", + " 'processing_level': 'Level 4',\n", + " 'production_center': 'NOLOGIN'},\n", + " {'title': 'Atlantic -Iberian Biscay Irish- Ocean Wave Reanalysis',\n", + " 'product_id': 'IBI_MULTIYEAR_WAV_005_006',\n", + " 'thumbnail_url': 'https://mdl-metadata.s3.waw3-1.cloudferro.com/metadata/thumbnails/IBI_MULTIYEAR_WAV_005_006.jpg',\n", + " 'description': 'The IBI-MFC provides a high-resolution wave reanalysis product for the Iberia-Biscay-Ireland (IBI) area starting in 01/01/1993 and being regularly extended on a yearly basis. The model system is run by Nologin with the support of CESGA in terms of supercomputing resources. \\nThe Multi-Year model configuration is based on the MFWAM model developed by Météo-France (MF), covering the same region as the IBI-MFC Near Real Time (NRT) analysis and forecasting product, but with an enhanced horizontal resolution (1/36º instead of 1/20º). The system assimilates significant wave height (SWH) altimeter data and wave spectral data (Envisat and CFOSAT), supplied by MF. Both, the MY and the NRT products, are fed by ECMWF hourly winds. Specifically, the MY system is forced by the ERA5 reanalysis wind data. As boundary conditions, the NRT system uses the 2D wave spectra from the Copernicus Marine GLOBAL forecast system, whereas the MY system is nested to the GLOBAL reanalysis.\\nThe product offers hourly instantaneous fields of different wave parameters, including Wave Height, Period and Direction for total spectrum; fields of Wind Wave (or wind sea), Primary Swell Wave and Secondary Swell for partitioned wave spectra; and the highest wave variables, such as maximum crest height and maximum crest-to-trough height. 
Additionally, climatological parameters of significant wave height (VHM0) and zero -crossing wave period (VTM02) are delivered for the time interval 1993-2016.\\n\\n**Product Citation**: \\nPlease refer to our Technical FAQ for citing [products.](http://marine.copernicus.eu/faq/cite-cmems-products-cmems-credit/?idpage=169)\\n\\n**DOI (Product)**: \\nhttps://doi.org/10.48670/moi-00030',\n", + " 'digital_object_identifier': '10.48670/moi-00030',\n", + " 'sources': ['Numerical models'],\n", + " 'processing_level': 'Level 4',\n", + " 'production_center': 'NOLOGIN'},\n", + " {'title': 'Atlantic Iberian Biscay Irish Ocean- In-Situ Near Real Time Observations',\n", + " 'product_id': 'INSITU_IBI_PHYBGCWAV_DISCRETE_MYNRT_013_033',\n", + " 'thumbnail_url': 'https://mdl-metadata.s3.waw3-1.cloudferro.com/metadata/thumbnails/INSITU_IBI_PHYBGCWAV_DISCRETE_MYNRT_013_033.jpg',\n", + " 'description': 'IBI Seas - near real-time (NRT) in situ quality controlled observations, hourly updated and distributed by INSTAC within 24-48 hours from acquisition in average\\n\\n**DOI (product):** \\nhttps://doi.org/10.48670/moi-00043',\n", + " 'digital_object_identifier': '10.48670/moi-00043',\n", + " 'sources': ['In-situ observations'],\n", + " 'processing_level': 'Level 2',\n", + " 'production_center': 'Puertos del Estado (Spain)'},\n", + " {'title': 'Atlantic Iberian Biscay Mean Sea Level time series and trend from Observations Reprocessing',\n", + " 'product_id': 'OMI_CLIMATE_SL_IBI_area_averaged_anomalies',\n", + " 'thumbnail_url': 'https://catalogue.marine.copernicus.eu/documents/IMG/OMI_CLIMATE_SL_IBI_area_averaged_anomalies.png',\n", + " 'description': '**DEFINITION**\\n\\nThe ocean monitoring indicator on regional mean sea level is derived from the DUACS delayed-time (DT-2021 version, “my” (multi-year) dataset used when available, “myint” (multi-year interim) used after) sea level anomaly maps from satellite altimetry based on a stable number of altimeters (two) in the satellite constellation. These products are distributed by the Copernicus Climate Change Service and the Copernicus Marine Service (SEALEVEL_GLO_PHY_CLIMATE_L4_MY_008_057).\\nThe time series of area averaged anomalies correspond to the area average of the maps in the Irish-Biscay-Iberian (IBI) Sea weighted by the cosine of the latitude (to consider the changing area in each grid with latitude) and by the proportion of ocean in each grid (to consider the coastal areas). The time series are corrected from global TOPEX-A instrumental drift (WCRP Global Sea Level Budget Group, 2018) and regional mean GIA correction (weighted GIA mean of a 27 ensemble model following Spada et Melini, 2019). The time series are adjusted for seasonal annual and semi-annual signals and low-pass filtered at 6 months. Then, the trends/accelerations are estimated on the time series using ordinary least square fit.The trend uncertainty is provided in a 90% confidence interval. It is calculated as the weighted mean uncertainties in the region from Prandi et al., 2021. This estimate only considers errors related to the altimeter observation system (i.e., orbit determination errors, geophysical correction errors and inter-mission bias correction errors). The presence of the interannual signal can strongly influence the trend estimation considering to the altimeter period considered (Wang et al., 2021; Cazenave et al., 2014). 
The uncertainty linked to this effect is not considered.\\n\\n**CONTEXT **\\n\\nChange in mean sea level is an essential indicator of our evolving climate, as it reflects both the thermal expansion of the ocean in response to its warming and the increase in ocean mass due to the melting of ice sheets and glaciers (WCRP Global Sea Level Budget Group, 2018). At regional scale, sea level does not change homogenously. It is influenced by various other processes, with different spatial and temporal scales, such as local ocean dynamic, atmospheric forcing, Earth gravity and vertical land motion changes (IPCC WGI, 2021). The adverse effects of floods, storms and tropical cyclones, and the resulting losses and damage, have increased as a result of rising sea levels, increasing people and infrastructure vulnerability and food security risks, particularly in low-lying areas and island states (IPCC, 2022a). Adaptation and mitigation measures such as the restoration of mangroves and coastal wetlands, reduce the risks from sea level rise (IPCC, 2022b). \\nIn IBI region, the RMSL trend is modulated by decadal variations. As observed over the global ocean, the main actors of the long-term RMSL trend are associated with anthropogenic global/regional warming. Decadal variability is mainly linked to the strengthening or weakening of the Atlantic Meridional Overturning Circulation (AMOC) (e.g. Chafik et al., 2019). The latest is driven by the North Atlantic Oscillation (NAO) for decadal (20-30y) timescales (e.g. Delworth and Zeng, 2016). Along the European coast, the NAO also influences the along-slope winds dynamic which in return significantly contributes to the local sea level variability observed (Chafik et al., 2019).\\n\\n**KEY FINDINGS**\\n\\nOver the [1993/01/01, 2023/07/06] period, the area-averaged sea level in the IBI area rises at a rate of 4.00 \\uf0b1 0.80 mm/year with an acceleration of 0.14 \\uf0b1\\uf0200.06 mm/year2. This trend estimation is based on the altimeter measurements corrected from the Topex-A drift at the beginning of the time series (Legeais et al., 2020) and global GIA correction (Spada et Melini, 2019) to consider the ongoing movement of land. \\n\\n**DOI (product):** \\nhttps://doi.org/10.48670/moi-00252\\n\\n**References:**\\n\\n* Cazenave, A., Dieng, H.-B., Meyssignac, B., von Schuckmann, K., Decharme, B., and Berthier, E.: The rate of sea-level rise, Nat. Clim. Change, 4, 358–361, https://doi.org/10.1038/nclimate2159, 2014.\\n* Chafik, L., Nilsen, J. E. Ø., Dangendorf, S., Reverdin, G., and Frederikse, T.: North Atlantic Ocean Circulation and Decadal Sea Level Change During the Altimetry Era, Sci. Rep., 9, 1041, https://doi.org/10.1038/s41598-018-37603-6, 2019.\\n* Delworth, T. L. and Zeng, F.: The Impact of the North Atlantic Oscillation on Climate through Its Influence on the Atlantic Meridional Overturning Circulation, J. Clim., 29, 941–962, https://doi.org/10.1175/JCLI-D-15-0396.1, 2016.\\n* Horwath, M., Gutknecht, B. D., Cazenave, A., Palanisamy, H. K., Marti, F., Marzeion, B., Paul, F., Le Bris, R., Hogg, A. E., Otosaka, I., Shepherd, A., Döll, P., Cáceres, D., Müller Schmied, H., Johannessen, J. A., Nilsen, J. E. Ø., Raj, R. P., Forsberg, R., Sandberg Sørensen, L., Barletta, V. R., Simonsen, S. B., Knudsen, P., Andersen, O. B., Ranndal, H., Rose, S. K., Merchant, C. J., Macintosh, C. 
R., von Schuckmann, K., Novotny, K., Groh, A., Restano, M., and Benveniste, J.: Global sea-level budget and ocean-mass budget, with a focus on advanced data products and uncertainty characterisation, Earth Syst. Sci. Data, 14, 411–447, https://doi.org/10.5194/essd-14-411-2022, 2022.\\n* IPCC: AR6 Synthesis Report: Climate Change 2022, 2022a.\\n* IPCC: Summary for Policymakers [H.-O. Pörtner, D.C. Roberts, E.S. Poloczanska, K. Mintenbeck, M. Tignor, A. Alegría, M. Craig, S. Langsdorf, S. Löschke, V. Möller, A. Okem (eds.)]. In: Climate Change 2022: Impacts, Adaptation, and Vulnerability. Contribution of Working Group II to the Sixth Assessment Report of the Intergovernmental Panel on Climate Change [H.-O. Pörtner, D.C. Roberts, M. Tignor, E.S. Poloczanska, K. Mintenbeck, A. Alegría, M. Craig, S. Langsdorf, S. Löschke, V. Möller, A. Okem, B. Rama (eds.)], 2022b.\\n* IPCC: Summary for Policymakers. In: Climate Change 2022: Mitigation of Climate Change. Contribution of Working Group III to the Sixth Assessment Report of the Intergovernmental Panel on Climate Change [P.R. Shukla, J. Skea, R. Slade, A. Al Khourdajie, R. van Diemen, D. McCollum, M. Pathak, S. Some, P. Vyas, R. Fradera, M. Belkacemi, A. Hasija, G. Lisboa, S. Luz, J. Malley, (eds.)], , https://doi.org/10.1017/9781009157926.001, 2022c.\\n* IPCC WGI: Climate Change 2021: The Physical Science Basis. Contribution of Working Group I to the Sixth Assessment Report of the Intergovernmental Panel on Climate Change, 2021.\\n* IPCC WGII: Climate Change 2021: Impacts, Adaptation and Vulnerability; Summary for Policemakers. Contribution of Working Group II to the Sixth Assessment Report of the Intergovernmental Panel on Climate Change, 2021.\\n* Legeais, J. F., Llowel, W., Melet, A., and Meyssignac, B.: Evidence of the TOPEX-A altimeter instrumental anomaly and acceleration of the global mean sea level, Copernic. Mar. Serv. Ocean State Rep. Issue 4, 13, s77–s82, https://doi.org/10.1080/1755876X.2021.1946240, 2020.\\n* Peltier, W. R.: GLOBAL GLACIAL ISOSTASY AND THE SURFACE OF THE ICE-AGE EARTH: The ICE-5G (VM2) Model and GRACE, Annu. Rev. Earth Planet. Sci., 32, 111–149, https://doi.org/10.1146/annurev.earth.32.082503.144359, 2004.\\n* Prandi, P., Meyssignac, B., Ablain, M., Spada, G., Ribes, A., and Benveniste, J.: Local sea level trends, accelerations and uncertainties over 1993–2019, Sci. Data, 8, 1, https://doi.org/10.1038/s41597-020-00786-7, 2021.\\n* Wang, J., Church, J. A., Zhang, X., and Chen, X.: Reconciling global mean and regional sea level change in projections and observations, Nat. Commun., 12, 990, https://doi.org/10.1038/s41467-021-21265-6, 2021.\\n* WCRP Global Sea Level Budget Group: Global sea-level budget 1993–present, Earth Syst. Sci. Data, 10, 1551–1590, https://doi.org/10.5194/essd-10-1551-2018, 2018.\\n',\n", + " 'digital_object_identifier': '10.48670/moi-00252',\n", + " 'sources': ['Satellite observations'],\n", + " 'processing_level': None,\n", + " 'production_center': 'CLS (France)'},\n", + " {'title': 'Iberia Biscay Ireland sea level extreme variability mean and anomaly (observations)',\n", + " 'product_id': 'OMI_EXTREME_SL_IBI_slev_mean_and_anomaly_obs',\n", + " 'thumbnail_url': 'https://catalogue.marine.copernicus.eu/documents/IMG/OMI_EXTREME_SL_IBI_slev_mean_and_anomaly_obs.png',\n", + " 'description': '**DEFINITION**\\n\\nThe OMI_EXTREME_SL_IBI_slev_mean_and_anomaly_obs indicator is based on the computation of the 99th and the 1st percentiles from in situ data (observations). 
It is computed for the variable sea level measured by tide gauges along the coast. The use of percentiles instead of annual maximum and minimum values, makes this extremes study less affected by individual data measurement errors. The annual percentiles referred to annual mean sea level are temporally averaged and their spatial evolution is displayed in the dataset ibi_omi_sl_extreme_var_slev_mean_and_anomaly_obs, jointly with the anomaly in the target year. This study of extreme variability was first applied to sea level variable (Pérez Gómez et al 2016) and then extended to other essential variables, sea surface temperature and significant wave height (Pérez Gómez et al 2018).\\n\\n**CONTEXT**\\nSea level (SLEV) is one of the Essential Ocean Variables most affected by climate change. Global mean sea level rise has accelerated since the 1990’s (Abram et al., 2019, Legeais et al., 2020), due to the increase of ocean temperature and mass volume caused by land ice melting (WCRP, 2018). Basin scale oceanographic and meteorological features lead to regional variations of this trend that combined with changes in the frequency and intensity of storms could also rise extreme sea levels up to one metre by the end of the century (Vousdoukas et al., 2020). This will significantly increase coastal vulnerability to storms, with important consequences on the extent of flooding events, coastal erosion and damage to infrastructures caused by waves.\\nThe Iberian Biscay Ireland region shows positive sea level trend modulated by decadal-to-multidecadal variations driven by ocean dynamics and superposed to the long-term trend (Chafik et al., 2019).\\n\\n** KEY FINDINGS**\\nThe completeness index criteria is fulfilled by 55 stations in 2021, three more than those available in 2020 (52), recently added to the multi-year product INSITU_GLO_PHY_SSH_DISCRETE_MY_013_053. The mean 99th percentiles reflect the great tide spatial variability around the UK and the north of France. Minimum values are observed in the Irish coast (e.g.: 0.66 m above mean sea level in Arklow Harbour), South of England (e.g.: 0.70 m above mean sea level in Bournemouth), and the Canary Islands (e.g.: 0.96 m above mean sea level in Hierro). Maximum values are observed in the Bristol and English Channels (e.g.: 6.26 m and 5.17 m above mean sea level in Newport and St. Helier, respectively). The standard deviation reflects the south-north increase of storminess, ranging between 2 cm in the Canary Islands to 12 cm in Newport (Bristol Channel). Negative or close to zero anomalies of 2021 99th percentile are observed this year for most of the stations in the region, reaching up to -17.8 cm in Newport, or -15 cm in St.Helier (Jersey Island, Channel Islands).\\n\\n**DOI (product):** \\nhttps://doi.org/10.48670/moi-00253\\n\\n**References:**\\n\\n* Abram, N., Gattuso, J.-P., Prakash, A., Cheng, L., Chidichimo, M. P., Crate, S., Enomoto, H., Garschagen, M., Gruber, N., Harper, S., Holland, E., Kudela, R. M., Rice, J., Steffen, K., & von Schuckmann, K. (2019). Framing and Context of the Report. In H. O. Pörtner, D. C. Roberts, V. Masson-Delmotte, P. Zhai, M. Tignor, E. Poloczanska, K. Mintenbeck, A. Alegría, M. Nicolai, A. Okem, J. Petzold, B. Rama, & N. M. Weyer (Eds.), IPCC Special Report on the Ocean and Cryosphere in a Changing Climate (pp. 73–129). in press. https://www.ipcc.ch/srocc/\\n* Legeais J-F, W. Llowel, A. Melet and B. 
Meyssignac: Evidence of the TOPEX-A Altimeter Instrumental Anomaly and Acceleration of the Global Mean Sea Level, in Copernicus Marine Service Ocean State Report, Issue 4, Journal of Operational Oceanography, 2020, accepted.\\n* Pérez-Gómez B, Álvarez-Fanjul E, She J, Pérez-González I, Manzano F. 2016. Extreme sea level events, Section 4.4, p:300. In: Von Schuckmann K, Le Traon PY, Alvarez-Fanjul E, Axell L, Balmaseda M, Breivik LA, Brewin RJW, Bricaud C, Drevillon M, Drillet Y, Dubois C , Embury O, Etienne H, García-Sotillo M, Garric G, Gasparin F, Gutknecht E, Guinehut S, Hernandez F, Juza M, Karlson B, Korres G, Legeais JF, Levier B, Lien VS, Morrow R, Notarstefano G, Parent L, Pascual A, Pérez-Gómez B, Perruche C, Pinardi N, Pisano A, Poulain PM , Pujol IM, Raj RP, Raudsepp U, Roquet H, Samuelsen A, Sathyendranath S, She J, Simoncelli S, Solidoro C, Tinker J, Tintoré J, Viktorsson L, Ablain M, Almroth-Rosell E, Bonaduce A, Clementi E, Cossarini G, Dagneaux Q, Desportes C, Dye S, Fratianni C, Good S, Greiner E, Gourrion J, Hamon M, Holt J, Hyder P, Kennedy J, Manzano-Muñoz F, Melet A, Meyssignac B, Mulet S, Nardelli BB, O’Dea E, Olason E, Paulmier A, Pérez-González I, Reid R, Racault MF, Raitsos DE, Ramos A, Sykes P, Szekely T, Verbrugge N. 2016. The Copernicus Marine Environment Monitoring Service Ocean State Report, Journal of Operational Oceanography. 9 (sup2): 235-320. http://dx.doi.org/10.1080/1755876X.2016.1273446\\n* Pérez Gómez B, De Alfonso M, Zacharioudaki A, Pérez González I, Álvarez Fanjul E, Müller M, Marcos M, Manzano F, Korres G, Ravdas M, Tamm S. 2018. Sea level, SST and waves: extremes variability. In: Copernicus Marine Service Ocean State Report, Issue 2, Journal of Operational Oceanography, 11:sup1, Chap. 3.1, s79–s88, DOI: https://doi.org/10.1080/1755876X.2018.1489208.\\n* WCRP Global Sea Level Budget Group: Global sea-level budget 1993–present. 2018. Earth Syst. Sci. Data, 10, 1551-1590, https://doi.org/10.5194/essd-10-1551-2018.\\n* Vousdoukas MI, Mentaschi L, Hinkel J, et al. 2020. Economic motivation for raising coastal flood defenses in Europe. Nat Commun 11, 2119 (2020). 
https://doi.org/10.1038/s41467-020-15665-3.\\n',\n", + " 'digital_object_identifier': '10.48670/moi-00253',\n", + " 'sources': ['In-situ observations'],\n", + " 'processing_level': None,\n", + " 'production_center': 'Puertos del Estado (Spain)'}]}" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# We can also filter based on the strings found by the describe method\n", + "copernicusmarine.describe(include_description=True, contains=[\"Iberian Biscay\"])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here is an example of a more exhaustive search:" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Fetching catalog: 100%|██████████| 2/2 [00:11<00:00, 5.85s/it]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Products that offer the variables ['chl', 'o2']: 85\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Fetching catalog: 100%|██████████| 2/2 [00:11<00:00, 5.57s/it]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Products in the region ['Iberian Biscay']: 9\n", + "______________________\n", + "Product: IBI_ANALYSISFORECAST_BGC_005_004\n", + "Dataset: cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m\n", + "Spatial resolution : 0.027777777777777776\n", + "______________________\n", + "Product: IBI_ANALYSISFORECAST_BGC_005_004\n", + "Dataset: cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1M-m\n", + "Spatial resolution : 0.027777777777777776\n", + "______________________\n", + "Product: IBI_MULTIYEAR_BGC_005_003\n", + "Dataset: cmems_mod_ibi_bgc_my_0.083deg-3D-climatology_P1M-m\n", + "Spatial resolution : 0.08333333333333333\n", + "______________________\n", + "Product: IBI_MULTIYEAR_BGC_005_003\n", + "Dataset: cmems_mod_ibi_bgc_my_0.083deg-3D_P1D-m\n", + "Spatial resolution : 0.08333333333333333\n", + "______________________\n", + "Product: IBI_MULTIYEAR_BGC_005_003\n", + "Dataset: cmems_mod_ibi_bgc_my_0.083deg-3D_P1M-m\n", + "Spatial resolution : 0.08333333333333333\n", + "______________________\n", + "Product: IBI_MULTIYEAR_BGC_005_003\n", + "Dataset: cmems_mod_ibi_bgc_my_0.083deg-3D_P1Y-m\n", + "Spatial resolution : 0.08333333333333333\n" + ] + } + ], + "source": [ + "# For a little more advanced search...\n", + "var_name = [\"chl\", \"o2\"]\n", + "regions = [\"Iberian Biscay\"]\n", + "\n", + "# We find the products that offer the variables we are interested in:\n", + "describe_var = copernicusmarine.describe(contains=[var_name[0], var_name[1]])\n", + "prod_var = []\n", + "for product in describe_var['products']:\n", + " prod_var.append(product['product_id'])\n", + "print(f\"Products that offer the variables {var_name}: {len(prod_var)}\")\n", + "\n", + "# We save the products that offer the region we are interested in:\n", + "describe_loc = copernicusmarine.describe(contains=[regions[0]], include_all=True)\n", + "prod_loc = []\n", + "for product in describe_loc['products']:\n", + " prod_loc.append(product['product_id'])\n", + "print(f\"Products in the region {regions}: {len(prod_loc)}\")\n", + "\n", + "# And we search the intersection of both lists:\n", + "products = [prod_var, prod_loc]\n", + "final_selected_products = set.intersection(*map(set,products))\n", + "\n", + "pairs_dataset_step = {}\n", + "\n", + "for product in describe_loc['products']:\n", + " # We add a filter to clarify specific products, in this case the 
\"OMI\" (Ocean Monitoring Indicators)\n", + "    if product['product_id'] in final_selected_products and \"OMI_\" not in product[\"product_id\"]:\n", + "        for dataset in product['datasets']:\n", + "            for version in dataset['versions']:\n", + "                for part in version.get('parts', []):\n", + "                    for service in part['services'][:-1]:\n", + "                        # And we filter the datasets that can be subsetted:\n", + "                        if 'arco-' in service['service_type']['service_name'] and service['service_format'] and 'zarr' in service['service_format']:\n", + "                            for variable in service['variables']:\n", + "                                if variable['short_name'] in var_name and variable['coordinates'] != []:\n", + "                                    pairs_dataset_step[dataset['dataset_id']] = (product['product_id'], variable['coordinates'][2]['step'])\n", + "\n", + "for key, value in pairs_dataset_step.items():\n", + "    print(\"______________________\")\n", + "    print(f\"Product: {value[0]}\")\n", + "    print(f\"Dataset: {key}\")\n", + "    print(f\"Spatial resolution : {value[1]}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We see that Copernicus Marine has 2 high-resolution products (models) that cover the Bay of Biscay, both for chlorophyll and dissolved oxygen:\n", + "- [IBI_ANALYSISFORECAST_BGC_005_004](https://data.marine.copernicus.eu/product/IBI_ANALYSISFORECAST_BGC_005_004/description) with recent data and a few days of forecast\n", + "- [IBI_MULTIYEAR_BGC_005_003](https://data.marine.copernicus.eu/product/IBI_MULTIYEAR_BGC_005_003/description) with several years of data covering the last decades\n", + "\n", + "Both products offer daily (`P1D`) and monthly (`P1M`) data." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Copernicus Marine toolbox - Subset\n", + "\n", + "For more information, see the [page about subset](subset-page) of the documentation. You can also check the dedicated pages for the [command line interface](cli-subset) or the {func}`Python interface `.\n", + "\n", + "The subset is a powerful tool that allows you to benefit from the full power of the Copernicus Marine services. Indeed, not only do you have access to the whole catalogue, but you can also pinpoint the data that interests you thanks to the two services: \"arco-geo-series\" and \"arco-time-series\". They are respectively optimised for retrieving maps (short time span, wide area) and time series (long time span, small area). " + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO - 2024-10-18T15:50:02Z - Dataset version was not specified, the latest one was selected: \"202211\"\n", + "INFO - 2024-10-18T15:50:02Z - Dataset part was not specified, the first one was selected: \"default\"\n", + "INFO - 2024-10-18T15:50:03Z - Service was not specified, the default one was selected: \"arco-time-series\"\n", + "INFO - 2024-10-18T15:50:05Z - Downloading using service arco-time-series...\n", + "INFO - 2024-10-18T15:50:06Z - Estimated size of the dataset file is 61.855 MB\n", + "Estimated size of the data that needs to be downloaded to obtain the result: 2814 MB\n", + "This is a very rough estimate that is generally higher than the actual size of the data that needs to be downloaded.\n", + "INFO - 2024-10-18T15:50:06Z - Writing to local storage.
Please wait...\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "d062ca80f47149f481764a07113437fc", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/2802 [00:00` function or the {func}`read_dataframe ` function.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### General Dataset\n", + "We can view the data of the entire dataset:" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO - 2024-10-18T12:48:42Z - Dataset version was not specified, the latest one was selected: \"202211\"\n", + "INFO - 2024-10-18T12:48:42Z - Dataset part was not specified, the first one was selected: \"default\"\n", + "INFO - 2024-10-18T12:48:44Z - Service was not specified, the default one was selected: \"arco-geo-series\"\n" + ] + }, + { + "data": { + "text/html": [ + "<xarray.Dataset>\n",
+       "Dimensions:    (depth: 50, latitude: 1081, longitude: 865, time: 1303)\n",
+       "Coordinates:\n",
+       "  * depth      (depth) float32 0.494 1.541 2.646 ... 5.275e+03 5.728e+03\n",
+       "  * latitude   (latitude) float32 26.0 26.03 26.06 26.08 ... 55.94 55.97 56.0\n",
+       "  * longitude  (longitude) float32 -19.0 -18.97 -18.94 ... 4.944 4.972 5.0\n",
+       "  * time       (time) datetime64[ns] 2021-04-03 2021-04-04 ... 2024-10-26\n",
+       "Data variables: (12/14)\n",
+       "    chl        (time, depth, latitude, longitude) float32 ...\n",
+       "    dissic     (time, depth, latitude, longitude) float32 ...\n",
+       "    fe         (time, depth, latitude, longitude) float32 ...\n",
+       "    nh4        (time, depth, latitude, longitude) float32 ...\n",
+       "    no3        (time, depth, latitude, longitude) float32 ...\n",
+       "    nppv       (time, depth, latitude, longitude) float32 ...\n",
+       "    ...         ...\n",
+       "    phyc       (time, depth, latitude, longitude) float32 ...\n",
+       "    po4        (time, depth, latitude, longitude) float32 ...\n",
+       "    si         (time, depth, latitude, longitude) float32 ...\n",
+       "    spco2      (time, latitude, longitude) float32 ...\n",
+       "    zeu        (time, latitude, longitude) float32 ...\n",
+       "    zooc       (time, depth, latitude, longitude) float32 ...\n",
+       "Attributes:\n",
+       "    source:       NEMO3.6-PISCES3.6\n",
+       "    institution:  Nologin (Spain)\n",
+       "    title:        Biogeochemical 3D daily mean fields for the Iberia-Biscay-I...\n",
+       "    contact:      mailto: servicedesk.cmems@mercator-ocean.eu\n",
+       "    Conventions:  CF-1.0\n",
+       "    references:   http://marine.copernicus.eu/" + ], + "text/plain": [ + "<xarray.Dataset>\n", + "Dimensions:    (depth: 50, latitude: 1081, longitude: 865, time: 1303)\n", + "Coordinates:\n", + "  * depth      (depth) float32 0.494 1.541 2.646 ... 5.275e+03 5.728e+03\n", + "  * latitude   (latitude) float32 26.0 26.03 26.06 26.08 ... 55.94 55.97 56.0\n", + "  * longitude  (longitude) float32 -19.0 -18.97 -18.94 ... 4.944 4.972 5.0\n", + "  * time       (time) datetime64[ns] 2021-04-03 2021-04-04 ... 2024-10-26\n", + "Data variables: (12/14)\n", + "    chl        (time, depth, latitude, longitude) float32 ...\n", + "    dissic     (time, depth, latitude, longitude) float32 ...\n", + "    fe         (time, depth, latitude, longitude) float32 ...\n", + "    nh4        (time, depth, latitude, longitude) float32 ...\n", + "    no3        (time, depth, latitude, longitude) float32 ...\n", + "    nppv       (time, depth, latitude, longitude) float32 ...\n", + "    ...         ...\n", + "    phyc       (time, depth, latitude, longitude) float32 ...\n", + "    po4        (time, depth, latitude, longitude) float32 ...\n", + "    si         (time, depth, latitude, longitude) float32 ...\n", + "    spco2      (time, latitude, longitude) float32 ...\n", + "    zeu        (time, latitude, longitude) float32 ...\n", + "    zooc       (time, depth, latitude, longitude) float32 ...\n", + "Attributes:\n", + "    source:       NEMO3.6-PISCES3.6\n", + "    institution:  Nologin (Spain)\n", + "    title:        Biogeochemical 3D daily mean fields for the Iberia-Biscay-I...\n", + "    contact:      mailto: servicedesk.cmems@mercator-ocean.eu\n", + "    Conventions:  CF-1.0\n", + "    references:   http://marine.copernicus.eu/" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# We open the dataset to explore the data:\n", + "data = copernicusmarine.open_dataset(dataset_id=\"cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m\")\n", + "data" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The `open_dataset` function uses a lot of the subset functionalities to help you select the data you need and lazily open a dataset. If you want to process the data directly with xarray yourself, you can; see [xarray's documentation](https://docs.xarray.dev/en/stable/)." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO - 2024-10-18T16:08:46Z - Dataset version was not specified, the latest one was selected: \"202211\"\n", + "INFO - 2024-10-18T16:08:46Z - Dataset part was not specified, the first one was selected: \"default\"\n", + "INFO - 2024-10-18T16:08:47Z - Service was not specified, the default one was selected: \"arco-time-series\"\n" + ] + }, + { + "data": { + "text/html": [
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset> Size: 13MB\n",
+       "Dimensions:    (depth: 50, latitude: 37, longitude: 73, time: 6)\n",
+       "Coordinates:\n",
+       "  * depth      (depth) float32 200B 0.494 1.541 2.646 ... 5.275e+03 5.728e+03\n",
+       "  * latitude   (latitude) float32 148B 43.0 43.03 43.06 ... 43.94 43.97 44.0\n",
+       "  * longitude  (longitude) float32 292B -5.0 -4.972 -4.944 ... -3.028 -3.0\n",
+       "  * time       (time) datetime64[ns] 48B 2024-10-17 2024-10-18 ... 2024-10-22\n",
+       "Data variables:\n",
+       "    chl        (time, depth, latitude, longitude) float64 6MB ...\n",
+       "    o2         (time, depth, latitude, longitude) float64 6MB ...\n",
+       "Attributes:\n",
+       "    institution:  Nologin (Spain)\n",
+       "    contact:      mailto: servicedesk.cmems@mercator-ocean.eu\n",
+       "    Conventions:  CF-1.0\n",
+       "    title:        Biogeochemical 3D daily mean fields for the Iberia-Biscay-I...\n",
+       "    references:   http://marine.copernicus.eu/\n",
+       "    source:       NEMO3.6-PISCES3.6
" + ], + "text/plain": [ + " Size: 13MB\n", + "Dimensions: (depth: 50, latitude: 37, longitude: 73, time: 6)\n", + "Coordinates:\n", + " * depth (depth) float32 200B 0.494 1.541 2.646 ... 5.275e+03 5.728e+03\n", + " * latitude (latitude) float32 148B 43.0 43.03 43.06 ... 43.94 43.97 44.0\n", + " * longitude (longitude) float32 292B -5.0 -4.972 -4.944 ... -3.028 -3.0\n", + " * time (time) datetime64[ns] 48B 2024-10-17 2024-10-18 ... 2024-10-22\n", + "Data variables:\n", + " chl (time, depth, latitude, longitude) float64 6MB ...\n", + " o2 (time, depth, latitude, longitude) float64 6MB ...\n", + "Attributes:\n", + " institution: Nologin (Spain)\n", + " contact: mailto: servicedesk.cmems@mercator-ocean.eu\n", + " Conventions: CF-1.0\n", + " title: Biogeochemical 3D daily mean fields for the Iberia-Biscay-I...\n", + " references: http://marine.copernicus.eu/\n", + " source: NEMO3.6-PISCES3.6" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import datetime\n", + "# Now we can focus on the paraeteers that we are interested in: the region of Cantabria, with a specific dates.\n", + "response_bay = copernicusmarine.open_dataset(dataset_id=\"cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m\", \n", + " variables=[\"chl\", \"o2\"],\n", + " maximum_longitude=-3, # Sopela, to the right\n", + " minimum_longitude=-5, # Llanes, to the left\n", + " minimum_latitude=43, # 85km inside the sea\n", + " maximum_latitude=44, # Santander\n", + " start_datetime=datetime.datetime.now()-datetime.timedelta(days=2), \n", + " end_datetime=datetime.datetime.now()+datetime.timedelta(days=4))\n", + "response_bay" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array(['2024-10-17T00:00:00.000000000', '2024-10-18T00:00:00.000000000',\n", + " '2024-10-19T00:00:00.000000000', '2024-10-20T00:00:00.000000000',\n", + " '2024-10-21T00:00:00.000000000', '2024-10-22T00:00:00.000000000'],\n", + " dtype='datetime64[ns]')" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# I can not see all the values of the dataset without downloading it\n", + "# For the time dimension for example:\n", + "response_bay.time.values" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's use `xarray` and `matplotlib` to plot our data." 
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgo[... lengthy base64-encoded PNG of the matplotlib figure ...]
93Qo5E7awH/fqc4P0WlJmWFVc2MvgrifrrIMZnh3JkFpdeL5wEvl40DgX1EhyrAj5s9nFGZ61Pr9LltGMbY3X1OxvFWV7dufP6Iqep1hlQq10u93lKh65nslkwokTJ9CsWTOEhITYXX7hwoWYNWsWxo8fj969ewMA9uzZg7Fjx+LKlSu6SiK5Age+RERERETkPXiPb7kmTpyIDh064JlnnoHJZELfvn2xb98+1KpVC1999RX69etX7vrvv/8+PvjgA4wcOdIyb8iQIfjTn/6EN954o9oGvvypMxEREREREQEAPv30U3Tq1AkA8OWXX+LcuXNIS0vDpEmT8Nprr9ld/9KlS+jVq1eZ+b169cKlS5ec3l+9OPAlIiIiIiLvIVz8qOGuXLmC8PBwAMDWrVvxt7/9Da1bt8bo0aNx4sQJu+u3bNkSn3zySZn5GzduRKtWrZzeX734U2ciIiIiIiICAISFheHkyZNo3Lgxtm3bhg8++AAAcOPGDRh15HN488038eijj2L37t2We3z37t2L5ORkmwPiqsKBLxEREREReQ1XZl/2hKzOo0aNwvDhw9G4cWMoioKYmBgAwIEDBxAdHW13/YcffhgHDx7EwoUL8fnnnwMA2rZti4MHD6JLly6u7Hq5OPAlIiIiIiIiAMAbb7yB9u3bIyMjA3/729/g7+8PADAajXjllVfKXbewsBDPP/88Zs2ahY8//rgquqsbB77lUAwGKEolb4OWfw5QXqkXqEucyOVLnEWzVIqTyhxVmPy+aJVB0Citoq9EUtntdrSkgbeWLJCp3wPpmCkosEwqchmP22OnJryHGiVBSqnKE2mUMFI1p1FWRKu0kVZJM6FR9kzdNzuly3TErXbJjsqXONGtsufc27EUmtOpz83SdMkxLlxxXFQ1jRKFekobaS6jVdpI4yPYUh5OSH2Ry0fJ5QodLHmkxdHPYHei+bnuyDnFwW1z9TmF5ywXYlZnux555BHV39nZ2Xjqqafsrufr64vPPvsMs2bNclXXKozJrYiIiIiIiAgA8O6772Ljxo2Wv4cPH47Q0FDceeedOH78uN31H3zwQctPnN0Jr/gSEREREZH34BXfciUlJWH9+vUAgB07dmDHjh345ptv8Mknn2Dq1KnYvn17ueu3atUKc+fOxd69e9GtWzfUrl1b9fzLL7/ssr6XhwNfIiIiIiIiAgBkZmaiSZMmAICvvvoKw4cPx4ABAxAZGYkePXrYXX/lypWoV68eUlNTkZqaqnpOURQOfImIiIiIiFyNWZ3LFxISgoyMDDRp0gTbtm1DfHw8AEAIAZNJIzGB5Ny5c67uYoVw4EtEREREREQAgIceegiPP/44WrVqhd9//x33338/AODo0aNo2bKlQ20JUfxNgKIjwZ6rceBbDsXPH4rBT52hUs6qp+MbDxVbGZ5VWWTl7H1ukEFR3j4XZnKW6cpa6GB2SUczOFe0DW/KuKiZ1VXeryX7ptKZ0auC0Qgo5RzjJdulJ5OzFlWmV81M5nKWVgfjXsgZocv2UzN7uw7ax79zzgvVETvOOC+4Wo04p8jn4JL/3Fj+dVOqOJb/I6b1OafxeeW8DM+QlimbwV09T5EXlibtZHW/7TU15ztaZUFm4/NYO1O8Y8eIw/Fa0c+dSmSvdlYGZmZyriK8x7dcixYtQmRkJDIyMjBv3jzUqVMHAHDp0iW88MILutpYt24d/v73v+Onn34CALRu3RrTpk3Dk08+6bJ+28OBLxEREREReQ3+1Ll8vr6+mDp1apn5kyZN0rX+woULMWvWLIwfPx69e/cGAOzZswdjx47FlStXdLfjbBz4EhERERERkcrJkydx4cIFFBQUqOYPGTKk3PXef/99fPDBBxg5cqRqnT/96U944403OPAlIiIiIiJyOf7UuVxnz57FsGHDcOLECSiKUuY+XXsJri5duoRevXqVmd+rVy9cunTJ+R3WqQbcfEdERERERERVYcKECYiKisLly5dRq1Yt/Pjjj9i9eze6d++OlJQUu+u3bNkSn3zySZn5GzduRKtWrVzQY314xZeIiIiIiLwHr/iWa//+/fjuu+/QoEEDGAwGGAwG3HvvvUhISMDLL7+Mo0ePlrv+m2++iUcffRS7d++23OO7d+9eJCcn2xwQVxUOfMtTLwgw+kORMzHeyrdOFxZaJuUsrXCnLHxyVkKNLIeaGSNljmR4rkzGZkfpydxYRVmxvTUToypDcZF0/BSWHCd+flXcI8cpPj5QDD7qDK9yZtqSY17I97hUJnOtq1P624h7XXFeTZyV+VWjcbnByrfnYjXt3KHqb2FR8TxR8QziLqUo1kcp+fNM8/PP9mytTM66ltFTFaLk2NWV4V3O8Cw3LR//5iJptmPnAF0x6kAm5fKS6LsNB7JXl13VsfdXfh9r2jmAPJPJZELdunUBAA0aNMCvv/6KNm3aoFmzZkhPT7e7/sMPP4wDBw5g0aJF+PzzzwEAbdu2xcGDB9GlSxdXdr1cbvNT58TERCiKgokTJ1rmPf/882jRogUCAwPRsGFDDB06FGlpaeW2k5ubi/Hjx+POO+9EYGAg2rVrh6SkJBf3noiIiIiIagLFxY+arn379vjPf/4DAOjRowfmzZuHvXv3Yu7cuWjevLmuNrp164aPP/4YqampSE1Nxccff1ytg17ATa74Hjp0CMuXL0fHjh1V87t164a4uDg0bdoUf/zxB9544w0MGDAA586dg1HjquPkyZPx3Xff4eOPP0ZkZCS2b9+OF154AREREXYzkBEREREREXmz119/HXl5eQCAuXPn4oEHHsCf//xnhIaGYuPGjTbXycnJ0d1+UFCQU/rpqGof+Obm5iIuLg4rVqxAfHy86rnnnnvOMh0ZGYn4+Hh06tQJ58+fR4sWLWy2t2/fPjz11FPo16+fpY3ly5fj4MGDHPgSEREREXk73uNbrtjYWMt0y5YtkZaWhj/++AMhISGWzM63q1evnuZzpYQQUBTFblZoV6n2ge+LL76IwYMHIyYmpszAV5aXl4fVq1cjKioKTZo00VyuV69e2LJlC0aPHo2IiAikpKTgf//7HxYtWqS5Tn5+PvLzrffuOvKNBRHVHIx1Is/HOCcicr769euX+/zOnTurqCcVV60D3w0bNuDIkSM4dOiQ5jLLli3D9OnTkZeXhzZt2mDHjh3wKydZzvvvv4/nnnsOd955J3x8fGAwGLBixQr06dNHc52EhAS8+eabldoWInJ/jHUiz8c4JyJ7FFH8cFXbNdXo0aN1Lbdq1aoy8/r27evs7jhdtQ18MzIyMGHCBOzYsQMBAQGay8XFxeGvf/0rLl26hPnz52P48OHYu3ev5jrvv/8+fvjhB2zZsgXNmjXD7t278eKLLyIiIgIxMTE215k5cyYmT55s+TsnJ6fcq8pEVDMx1ok8H+OciKhi1qxZg2bNmqFLly4QlaleASA7OxsHDx7E5cuXYb4tw/3IkSMr1XZFVdvANzU1FZcvX0bXrl0t80wmE3bv3o0lS5YgPz8fRqMRwcHBCA4ORqtWrXDPPfcgJCQEmzdvxogRI8
q0efPmTbz66qvYvHkzBg8eDADo2LEjjh07hvnz52sOfP39/eHv719mvql+bSg+AYDJuuMN+dYBt3LTWtpEkUobocD2tLDxe3bFYE2sLaSEXYpUP0GYnVSOQ8e6qtfSIpVEcOg1HSh1UBOxBMFtx3iR+jgRogDVTSvW4ecLGPy0yxmVnLDlO1dsxTMA7TIljn6AuLAMj7uVNvL22PGY7S89Tqu5bJRmnJeSP2t9bf83SDO+ddAsYVTJ/0Q6k7POARVd1yXHfA0oVybzmLivqXiPr03jxo3DP//5T5w7dw6jRo3CE088YfcnzrZ8+eWXiIuLQ25uLoKCglT3/iqKUm0D32obifTv3x8nTpzAsWPHLI/u3bsjLi4Ox44ds5m1WQgBIYTq3h1ZYWEhCgsLYTCoN8toNJb5poGIiIiIiIiKLV26FJcuXcL06dPx5ZdfokmTJhg+fDi+/fZbh64AT5kyBaNHj0Zubi6ys7Nx9epVy+OPP/5w4RaUr9qu+NatWxft27dXzatduzZCQ0PRvn17nD17Fhs3bsSAAQPQsGFDXLx4EYmJiQgMDMSgQYMs60RHRyMhIQHDhg1DUFAQ+vbti2nTpiEwMBDNmjXDrl27sG7dOixcuLCqN5GIiIiIiNxRDb4y60r+/v4YMWIERowYgZ9//hlr1qzBCy+8gKKiIvz444+oU6eO3TZ++eUXvPzyy6hVq1YV9Fi/as/qrCUgIADff/89Fi9ejKtXryIsLAx9+vTBvn370KhRI8ty6enpuHbtmuXvDRs2YObMmYiLi8Mff/yBZs2a4e2338bYsWOrYzOIiIiIiIhqHIPBAEVRIIRwqARRbGwsDh8+jObNm7uwd45zq4FvSkqKZToiIgJbt261u87tl93Dw8OxevVqZ3eNiIiIiIg8ALM6a8vPz8emTZuwatUq7NmzBw888ACWLFmCgQMHlrmdVLZlyxbL9ODBgzFt2jScPHkSHTp0gK+vr2rZIUOGuKz/5XGrgS8RERERERFVvRdeeAEbNmxAkyZNMHr0aPzzn/9EgwYNdK374IMPlpk3d+7cMvMURXHo6rEzceBbjsK6fhA+flCkq8pKkfUbC0OhNcOz4ZY1e7NyU5rOlzI/35KScpVkgRaF1sy3qoyxcvYz+aq2Sc72LM13VjZDF2aPBWwf5E7LKuvkLLjMuOg41XtWemyXHMtCVM9JThd/v+KsznKGV5N0DBWV9F3OBivHpTwtZ7OW2hDSMalIcS8fZdqxXolj29byUqxoHeeViUtdsVPRGNWTHb4Ks7vyPGFV+l5UtgSGqyhGIxTFCEVOnilncldVVrBS7WMpLpVyrnzYosr2bCOBZ3EfpT8sr2Wd6fDnvo7KCq44B9gjt+3S80UV4rmghmFWZ5uSkpLQtGlTNG/eHLt27cKuXbtsLrdp06Yy82pCImEOfImIiIiIiLzcyJEjVaWHPA0HvkRERERE5DV4j69ta9asqdT63333HcaPH48ffvgBQUFBqueuXbuGXr164YMPPkCfPn0q9ToVVW11fImIiIiIiMgzLF68GGPGjCkz6AWA4OBgPP/881i0aFE19KwYB75EREREROQ9hIsfXuo///kPBg4cqPn8gAEDkJqaWoU9UuPAl4iIiIiIiColKyurTOkimY+PD3777bcq7NFtr19tr1wDCKMCYVRglm/ylvalwc/6vYHib826aKjtZ51fGGidL2V7NtwqzvasyvR885Z1usC6rCiwZoZWZ53UyPqq2oiqybBWmWyGlckoqSdrNFUPUZqRtOTwFaJIe+Hq5usLGH0h/KynRFVm9dJMhXLGQjlZqjy/UM7qLGWAlef7S5ni5X5oxb2c9l/KGl3hzO46llUl4daTSbkSr1VV7THrqpcTNi7F+Ej/DVJ91vtKs6X5JtvZmIXGcalIsaP5mSYdl0KO9ZJpeZ6uKg9OUmXxoud8xNi1/38irfO0xvvr7e8p7/F1jTvuuAP//e9/0bJlS5vPHz9+HI0bN67iXlnxii8RERERERFVyqBBgzBr1izcunWrzHM3b97EnDlz8MADD1RDz4rxii8REREREXkP1vF1iddffx2bNm1C69atMX78eLRp0wYAkJaWhqVLl8JkMuG1116rtv5x4EtERERERESVEhYWhn379mHcuHGYOXMmRMntGYqiIDY2FkuXLkVYWFi19Y8DXyIiIiIi8h684usyzZo1w9atW3H16lWcPn0aQgi0atUKISEh1d013uNLRERERERUnZYuXYrIyEgEBASgR48eOHjwoOay/fr1g6IoZR6DBw+2LPP000+Xeb68UkPOFhISgrvuugt33323Wwx6AV7xJSIiIiIiL+JuWZ03btyIyZMnIykpCT169MDixYsRGxuL9PR0NGrUqMzymzZtQoFU/eH3339Hp06d8Le//U213MCBA7F69WrL3/7+/o53zoNw4OsgzYNZunYupJTzQip5JHytB5s5sLhUgiHfWvrIcDPA+jq5N6zTBdayCkKj3Il2iQO5Y84tK+LqVPg1LdW+nvJLWmratjqk9LirotJaFSEUpfjhI8ertWyJZe9IcaYUWbdHyPNNUs0zuQyJSWMfy7ErlTxS8qX4LpTiXipnBKkcmnxusP06lXj/WUJIl8qcA6RGbM93cB940/vuMHk/aZQwUr3fvhr/VTJZl1HteaO0D6UShKrXkqnOH3LpsuJpzfiXY95GGSTADY4DNztunRKjleuAk5pxZDukzzLp/VUMLHPkajk5Oaq//f39NQeeCxcuxJgxYzBq1CgAQFJSEr7++musWrUKr7zySpnl69evr/p7w4YNqFWrVpmBr7+/P8LDwyuzGR6FP3UmIiIiIiLvIVz8ANCkSRMEBwdbHgkJCTa7UlBQgNTUVMTExFjmGQwGxMTEYP/+/bo2Z+XKlXjsscdQu3Zt1fyUlBQ0atQIbdq0wbhx4/D777/ras9T8YovERERERGRE2VkZCAoKMjyt9bV3itXrsBkMpXJdhwWFoa0tDS7r3Pw4EH897//xcqVK1XzBw4ciIceeghRUVE4c+YMXn31Vdx///3Yv38/jPKvUbwIB75EREREROQ1FCHUtwk6uW0ACAoKUg18XWXlypXo0KED7r77btX8xx57zDLdoUMHdOzYES1atEBKSgr69+/v8n65I/7UmYiIiIiIqBo0aNAARqMRWVlZqvlZWVl278/Ny8vDhg0b8Mwzz9h9nebNm6NBgwY4ffp0pfpbk3HgS0RERERE3qMK7vHVy8/PD926dUNycrJlntlsRnJyMnr27Fnuuv/617+Qn5+PJ554wu7rXLx4Eb///jsaN27sWAc9CH/qXA5DkRkGmFVZmmWKnB2vSM7KaM2UZyjUyJpXkt3RHGjN6iz8rLtD8bdmlzTckrK75t20Tt+UMvVJGZ6FlDlSkTNAamT2q2jGVjmrYHVlAdSV2dBOFkWHszy64L4IzR7ImTpLaL7XGvuxujM0lr6+cNFPipxBKSgoPg7krM4GadrPWDJPWsfH9nEgxz9MUkwXShmezdIy0rSiSFllpdeXs80qUqyrX
lfur40Mr6oM75XhRtm5HY5djXNBtZwD5Ay/cmxoZf7VopHRX1WCwE3PDVVNmEwQikmdPV3O5Czzkf57JO8To444kpaXP491XWqQdpXlPCH3V97HUlZnRaPig+qY0DoHax2LWmx8LmmzHStax55SidDSFcfOjt1qIp/vbRHyZ4ycNVzefI3s38z2XD0mT56Mp556Ct27d8fdd9+NxYsXIy8vz5LleeTIkbjjjjvKJMhauXIlHnzwQYSGhqrm5+bm4s0338TDDz+M8PBwnDlzBtOnT0fLli0RGxtbZdvlbjjwJSIiIiIir+FudXwfffRR/Pbbb5g9ezYyMzPRuXNnbNu2zZLw6sKFCzDc9oVHeno69uzZg+3bt5dpz2g04vjx41i7di2ys7MRERGBAQMG4K233vLqWr4c+BIREREREVWj8ePHY/z48TafS0lJKTOvTZs2mr+mCwwMxLfffuvM7nkEDnyJiIiIiMh7VOBeXIfaJrfE5FZERERERETk0XjFl4iIiIiIvIa73eNLVYMD33L4ZufDxwiYpWzLwkfKmCxnX5QyucrZW2GSMuWZy2bKU2WO9ZWyLvtbp02+gZZpg5TtWcmVpm/esjaan2+dLpDal7PHamVu1JGt0VZmP81MjFoZYO1kWi5eRM6kKb2ARkZFVZZDRWPdUlLbio6+aK3rNNJ7KrTes5JlFI0snbYy+RavoOMMXIlMvR6R6TH3BmAoUmVqNxRZ3xNhLo41rXOBmvV4UqQUrULObmpNwKrK6C2k5RV5eTmTrOrY1TjOpSywpRleNWNeD9V5wbHs8M7KvGyzPY3zgmbGU633ywUxbe+8ohnnjjLbzuqsOHpusNEfj4htG4T8GSlRxYic/EXK4C5/Zqt+M6fx+VOZrM6W87vUtur/EfIx7Cf9f6DQT5qWTjZanzNa+1nr/wPOyIyskXVYxdHPeplWH13x+W2P1vurpy8VjEFFa9/J5wX5nKmRHV6V7Vl+S0uOH0UogCNJvomqGQe+RERERETkPXiPr1fiPb5ERERERETk0XjFl4iIiIiIvAbv8fVOvOJLREREREREHo1XfImIiIiIyHvwHl+vxIFvOQyXr8Jg8IfB15otUZVdVYucHc9O1lNFIyujWcreLGd7NgdYd5lirGXtq680/5bU3wIpo6OcyVLK+qqV5U+LIzkRnZVdVZUhVV5GT0ZHe9ukY5srtbwWjUyWdrPvamRflDOLyxk75UzFKJKy/erJqqkjy7fmb3puO97dOfujuHkTQjHdlp3dGjtKQXGGVyVAiksf6fRp1DgmVOcC66QqM6ucbd2k49NSinXVsegrZ3WV+l4SC6XZnW/vl7CRbb4MH9sfFboiQUfGVrvnicqcI1yhMllaSyiKkz5+5fdXPja0Pnukc7+idRyUnid0xvbt3DXWDXXqwGDwg7hx0zJPzvAsx4gS4C9NB1in/awZk1XZnuX33kfO7G4lzDoyPEvzS5dXpPaE9NGtSP9nQJG0ohwvcr+KbGf91vpMgdBxjOqpSFD6mopq4+w2rVreqLF9qqzZGstrMdn5DNR6j1Rt2K6yoIo/jQTTujK7V/RcZtDYd3KWZnm+vB3S+6s+fsu+X4oAcBNENQYHvkRERERE5FV4L6734T2+RERERERE5NHcZuCbmJgIRVEwceJEy7znn38eLVq0QGBgIBo2bIihQ4ciLS3NblunTp3CkCFDEBwcjNq1a+Ouu+7ChQsXXNh7IiIiIiKqEYRw7YPcklsMfA8dOoTly5ejY8eOqvndunXD6tWrcerUKXz77bcQQmDAgAEwlXPf4ZkzZ3DvvfciOjoaKSkpOH78OGbNmoUA6R4dIiIiIiIi8h7Vfo9vbm4u4uLisGLFCsTHx6uee+655yzTkZGRiI+PR6dOnXD+/Hm0aNHCZnuvvfYaBg0ahHnz5lnmaS1bKj8/H/lSgoucnJyKbAoRuTnGOpHnY5wTkT2s4+udqv2K74svvojBgwcjJiam3OXy8vKwevVqREVFoUmTJjaXMZvN+Prrr9G6dWvExsaiUaNG6NGjBz7//PNy205ISEBwcLDlodU+EdVsjHUiz8c4JyIiWyp8xff777/H8uXLcebMGXz66ae444478NFHHyEqKgr33nuvrjY2bNiAI0eO4NChQ5rLLFu2DNOnT0deXh7atGmDHTt2wE8uJyC5fPkycnNzkZiYiPj4eLz77rvYtm0bHnroIezcuRN9+/a1ud7MmTMxefJky985OTnFH5QCxb/Tl8uAaFE0SuxopdRXbMyXSw0otsseCINUPsHPuvtMUvp5g1QKScmXyprIZY5u3rJOq1L6y2VWHPjKSkq5X6nSQzI9ZYP0lBjQat9eG3r2qa39COjb7orSKj0hl+LRKlUklz/SalOmVapCLomgUc5BlM4v+VcR5movcaIZ6z4+gOKjLvkjb9fN4noNilTWR5HLnPnI5TV0lNSQjwmt917PPUJyO3If5Nct6adc4ghyuSONMlgOxX9FaJ0zbC1TmXOBK7ZDq8SIan/4lJ3nqMocJ0IjRuXzoVbJI3PxZ6xmuRNbcS4tU92xrhXnIiwUwugPJeeG5TnlhnVayJ+LUilAIZcFVJ0DNM4H8v9RpFhUfGyX4bFXRlC79JDGeV7rmNf6TJPX1SotpoPNrdDYNkXP56jGOU3IffSR29FTVk6jfFzpMiaNEkNF8vEvLWOS48n+564qLs3232tdJY8q+H819fyKlT9S9PTPXbGOr1eq0Bnus88+Q2xsLAIDA3H06FHLT4quXbuGd955R1cbGRkZmDBhAtavX1/u/bdxcXE4evQodu3ahdatW2P48OG4deuWzWXNJSedoUOHYtKkSejcuTNeeeUVPPDAA0hKStJ8DX9/fwQFBakeROR5GOtEno9xTkREtlRo4BsfH4+kpCSsWLECvtK3nL1798aRI0d0tZGamorLly+ja9eu8PHxgY+PD3bt2oX33nsPPj4+lgRWwcHBaNWqFfr06YNPP/0UaWlp2Lx5s802GzRoAB8fH7Rr1041v23btszqTEREREREUMyufZB7qtBPndPT09GnT58y84ODg5Gdna2rjf79++PEiROqeaNGjUJ0dDRmzJgBo42fpwohIIRQJa2Q+fn54a677kJ6erpq/v/+9z80a9ZMV7+IiIiIiIjIs1Ro4BseHo7Tp08jMjJSNX/Pnj1o3ry5rjbq1q2L9u3bq+bVrl0boaGhaN++Pc6ePYuNGzdiwIABaNiwIS5evIjExEQEBgZi0KBBlnWio6ORkJCAYcOGAQCmTZuGRx99FH369MF9992Hbdu24csvv0RKSkpFNpWIiIiIiDwJ7/H1ShX6qfOYMWMwYcIEHDhwAIqi4Ndff8X69esxdepUjBs3zikdCwgIwPfff49BgwahZcuWePTRR1G3bl3s27cPjRo1siyXnp6Oa9euWf4eNmwYkpKSMG/ePHTo0AEffvghPvvsM90Jt4iIiIiIiMizVOiK7yuvvAKz2Yz+/fvjxo0b6NOnD/z9/TF16lS89NJLFe6MfFU2IiICW7dutbuOsJHR
cvTo0Rg9enSF+1GqoHkjmH0CNOtxCTnFnZyh0Ucja570m39DUfEfclZBRStDo0wjE6QIsH6HYQ6QMjwXShmeb1kzTRoC/K1t3rL+dFwpLLLOL5KmK8peRuXbmTRSgWpkrNTKeKjKEiuvW5qRU5UV0lj2eQCQMnYKo/ReS9m0ha+UZVtqU/hK0xrHiapfOrJRKiXbLd87onnMyMuY7d9sInw0MmZqUIqsbRrkzOE3C6TpkiR0JZmSDeYC4IrdpqvHneGA0V+VBR35NrIga2XtlDN+qm7ukY4no45sz3qytMrHvFYGafmYLjmORaA15lVZYqWYV7QyvBfpiEs9x7NMaxlb87VeR2sZrey1el7fR2OfSe+pKl6kLPqq84Gt9rUyqsrkLKo6MnsrhXJWZWld6fwtx6s6O61GxlZ7+08rY3RJ9mODuQDILL+J6nArvDZ8fALgE2xNqGnMq2OZVm5K57J8KcO7nNVZT7Z1OYN6kfReFtnOZKz5bjuSMVcrG7GqPY0Y8dGqrKBRoUG1jI1YM9rOtCx/jsr/N1DFivxZK38WGW3/30erX6rXkrtbKGV1trEv5bZVz8uxpeMzVdWmns9UueJCgUbsyp9J8v/PSteV5zmazV6rEoX8fzhD2f2qmPOB6469lLtgHV/vVKGBr6IoeO211zBt2jScPn0aubm5aNeuHerUqWN/ZSIiIiIiIqIqVOE6vkBxMqnbMygTERERERG5LSH0/YKjom2TW9I98H3ooYd0N7pp06YKdYaIiIiIiIjI2XQPfIODgy3TQghs3rwZwcHB6N69O4DiurzZ2dkODZCJiIiIiIiqEu/x9U66B76rV6+2TM+YMQPDhw9HUlKSpd6uyWTCCy+8gKCgIOf3koiIiIiIiKiCKnSP76pVq7Bnzx7LoBcAjEYjJk+ejF69euHvf/+70zpYnfLuCICPb4AqK68w2J+Wyd/6GKSEfMYCUTLPuoAxX8qUWyhnzdTI/KxBSBkVzVKWUcVfyvYcaM1Easi3ZnuWMwiqsoXK2Z5L+yBnnNSTkVqVBbfI9nytTIQG21kcFUU6hOVsrIHWrJ3CT8q6GlC8jDnQus1Fta3rmf2k905KZmiS5pv8FWm+1HU/Oeuk7c1QfQsoJ6qVkyVKb7tROg4sbUptm6XMlYYi6ViSjjX5+NGiyhTtWMJK1XbI/fXJK94Q3+vF2VFNRbfcNqvzzfDa8PEN0Hy+dL/J76WhwFTmeUA7i7eiyswsZwiVFpfjSI57KRZV2XpNttu0mf1dzkQsPy9lJZbpySisorW8ngyocrZQeXlz+X3Rk23d7GedNgVKsS5l35cz8avP99Y/igKluJfOAbfqW6fNtt9KCzm2DdIp0CAlEDYWyMvI+1eaLyfZlne7PC0fq/JrFcnHnu3j0ELrfKzqixwTxY0UFd1yy6zOQik+15oC5ONDivtg2+cAOS7l90x1DsjXiNEijc9JmZwFWV7GaOOA0qjsoEXRyoiuJwu0TE+m5tI41nheqDIEy52Ul5E+g+UKGfIyqv8PQFperk4gta+VkdrG8S/HvxxPRYFa/8nTaFq13db56v83yhmkrfON+dLn6C3rfjLesh5jpbFWPF08X84GrarsIO13zQzaGudM1XvqU3YfFBXdAn5BzcQ6vl6pQnV8i4qKkJaWVmZ+WloazA6meSciIiIiIiJypQpd8R01ahSeeeYZnDlzBnfffTcA4MCBA0hMTMSoUaOc2kEiIiIiIiJn4T2+3qlCA9/58+cjPDwcCxYswKVLlwAAjRs3xrRp0zBlyhSndpCIiIiIiIioMio08DUYDJg+fTqmT5+OnJwcAGBSKyIiIiIicn+s4+uVKjTwlXHAS0RERERERO6sQgPfqKgoKOVkEzx79myFO0REREREROQqvMfXO1Vo4Dtx4kTV34WFhTh69Ci2bduGadOmOaNfbsHnhhk+vmZ1Sn0NqhI3/tJ8VZkMKaV8SSp4xc92iQy5/ISqnJFcSkEuaaGjZI2cxFsJlOYWWQ8DpVBKkS+XvTCVLXOkKtMgl1uRSx8VSZ2Uyx/J5UtUZRKkaR9rSn3hKx2qqrT7UnkSqVSJXK7CFGBts6BO8XRBkHXezQbWpuVyJEJ6SbnEgbxvTAHye2Sd/9Mrk1BRUUsWWKZ9cq39LC15IpcdkUtk+eZKZVVuSQ1KX1KZpW2S25HLJ+gpZ6RVTkUu6VRYq3g6v17xixYVGoDD9tuuDoV1jcXlHBz4sDKYpOPTYPs9VpX6kEk/g1LFuqq0ke1SZ0ZVGYuyJS0AjbIWWuVDVCVApIUMcukK2+uqaJRocpStch/CVyonJpUWK6wtnwukc6kUx/J5NS/COl+OdbOv7R2vmKVyRiHSe+pjXf78qOk217Un8oP5lmljnnWbfG5KJU5M1mnf69Z15ZJH8rlJFd9SnRX5PKEqnVRo+/xlmadRfk3RKHNUut+LCt2zuoPZzwCzrwEwSNut+pyTpuUSMFJcqN4S+bMrQP7vlPU/Aer3UMcJRlWuqOw8mdAo06MuU6WxfUXyZ7a08/WUwdE6l5S+T3KZHNV6GtshLS+XFDT5ahQekZYvDJTPB1KZsVDpfCBVqZJLEKqaLIkL+bygSCF/6q2Kf6Y3/8dC6+tI8e2bK72WXOqswLqMz03r9vldl0qIFtgudWZt0Dpp9rV9bpTLLMnll7RKNNkqY2YqqPQPR4mqVIWO2AkTJticv3TpUhw+7Kb/syUiIiIiImIdX69UoTq+Wu6//3589tlnzmySiIiIiIiIqFKc+huFTz/9FPXr13dmk0RERERERE7De3y9U4UGvl26dFEltxJCIDMzE7/99huWLVvmtM4RERERERERVVaFBr5Dhw5VDXwNBgMaNmyIfv36ITo62mmdIyIiIiIiciqzKH64qm1ySxUa+L7xxhtO7oZ72r7+5QrVKe7ywiLLtH+O9eD3zZMyI9sICjlDo5ypU86wJ2c/lJLK3raQ7dnqF5MWkTKXGgqst30bC+VMztIyJV96GGBNeSi/jCoDrL90iBntZ2M2yxlm5UyPPnKGRilLc13rdFEtaXnpvTm2pOLZGKvDufFTyn2+wxTr8SVnhZSzcOeHSFkv61qXUWeOlNdVbM43qpaxti/nbNXK8Fx6PBeVZBA3aWU4dgP/XvOSw7He+SXrfjDmW+cbpEy5Wu+NnAlVzgKvIp0EigLlDL1yZk85q7M1jgxF1tSlpZlc5XOOVjZYrawPcmZWOdMr9CTvldqU41vOwixnFFVlIC1ZprCOdV5+sLU9k5SdPm1OzYrz8+Om2l2m7WzrMSafZAtrWafN0vEjZ6RVZWCVE1LfkJbPt/25IWwcB0Y5M7TGdOnP+4Rwz1j/9v9V7DP9njhrpn3/HOtB73Nd3nj5GJYrFVgn9XzGq2jFqSPkShByBusi63lEVbVBlRHadpO2jo/iFUqysGt1W5UBWj4HSpmZ61r7JWdplj+7Dq6drPEC7unsBPv9bZVo+/PELGWhLgiSK3/YeG+k992kUaFC9X8A+fA1255WkU/9Jf9vNDnjGCWqQhVKbmU0GnH58uU
y83///XcYjUYbaxAREREREbkB4eIHuaUKDXyFRi26/Px8+PlpFEkjIiIiIiIiqgYO/dT5vffeAwAoioIPP/wQderUsTxnMpmwe/du3uNLRERERERuS4ELszq7pllyAocGvosWFd+DIIRAUlKS6mfNfn5+iIyMRFJSknN7SERERERERFQJDg18z507BwC47777sGnTJoSEhLikU0RERERERC4hRPHDVW2TW6pQVuedO3c6ux8e5egy+9lF+90/D4A6+6HwlzIeGuXsfXKWRfsZFzXTqMtZPk1ydkc566PUqNB4rZIsfuYAa9pAOcuj2VfO3mw7i2uRlMXRJG23KiuhnzxtfULP++vJTiywbn/bWdZMkD43re+Rf7aUTfy69KZK+6lIyogrv9fGW9Zpv1ytY0k+ZqyzbR2T5pKzjCj0rA+CY+/bPw57jFxomdYVuxJV9k0pXm1lzgbU2T+FlBnZ8lpm222oXlOV7d32QiYfjYy1UuZ1OQt7UaCUhb227f7KGUjlTM0n3/buWD8117r9LRZYjyXfXNtZwc0+0jEmZ2DVytqukT3Wcj6Q25AzwwbY/iFf6bFmyvesH/r9sL78TPsAcN+Ady3TqsznchjJ8aXVkOqJ4hVU2aDlzCxaGXW1MjlrxLQwytUUNPqlJyOMufzXMQVYGy+qZW1wz2f2M5x7up9escZ61BJrFnHjDev7FHDFurwcj+aSLO/yvpM/P+RM7nL8y+cIk634B9RZ+X3LTpuk/y8Q1QS6B76TJ0/GW2+9hdq1a2Py5PJTsy9cuLDc54mIiIiIiKqDIlx4j69nfc/vUXRndT569CgKC4uLfh05cgRHjx7VfBAREREREZE+S5cuRWRkJAICAtCjRw8cPHhQc9k1a9ZAURTVIyAgQLWMEAKzZ89G48aNERgYiJiYGPz000+u3gy3pvuKr/zz5pSUFFf0hYiIiIiIyLVcWW+3Au1u3LgRkydPRlJSEnr06IHFixcjNjYW6enpaNSokc11goKCkJ6ebvlbUdS3P8ybNw/vvfce1q5di6ioKMyaNQuxsbE4efJkmUGyt6hQHd/Ro0fj+vXrZebn5eVh9OjRle4UERERERGRN1i4cCHGjBmDUaNGoV27dkhKSkKtWrWwatUqzXUURUF4eLjlERYWZnlOCIHFixfj9ddfx9ChQ9GxY0esW7cOv/76Kz7//PMq2CL3VKGB79q1a3Hz5s0y82/evIl169ZVulNERERERESuoAjh0gcA5OTkqB75+fk2+1JQUIDU1FTExMRY5hkMBsTExGD//v2a25Cbm4tmzZqhSZMmGDp0KH788UfLc+fOnUNmZqaqzeDgYPTo0aPcNj2dQwPfnJwcXLt2DUIIXL9+XbUzr169iq1bt2pejiciIiIiIvIGTZo0QXBwsOWRkJBgc7krV67AZDKprtgCQFhYGDIzM22u06ZNG6xatQpffPEFPv74Y5jNZvTq1QsXL14EAMt6jrTpDRwqZ1SvXj3LDdStW7cu87yiKHjzzTed1jlPlvLN9HKf7/q8tUyNscB6s4D/NWsdFEVKG2eQUterSiboKE8ilxwqqiOVGwiwzr8VUvY7Er/r1r7IJW3k8kRyyn25dJOqtJF0m8HxRd5dvsRRp96y/361ect6LNX+VXpCOhyKalmn/XKs07551n0sl9GQy2soJtvlMizHZMnzhkL5IPUOB9aVnwEfADpOsu4fuQSFb571vfSRy8NolBlTzPIy0nxR+rxG/Bvl0ji2Y1er/JLqmJBKaRTWktqU4luelkv1kH1nptg/llp/+pZluiDXWpNEFEnl42pLJeauWKcNhdZ2SveTVqkkIZU18c2x7mu53Iq32bl9ht1lOr9kjXX5MzPwijXAjAXSZ3xpKSK5bJlWuSGZFNNmqbQZpPv/TKr50qSeWNe4ZGJr3X0b7ZeCIrVz4+2/Z4/uH2uZPp7ZGABQcNMamOZc67R/pvVk7nfN2oa8v+RyZvn1rbFeVNe6kH8ja12kyNA/ip/Py8cZu711U2ZYSnC5pG0AGRkZCAoKssz29/fXWMFxPXv2RM+ePS1/9+rVC23btsXy5cvx1ltvlbOmd3No4Ltz504IIfCXv/wFn332GerXr295zs/PD82aNUNERITTO0lERERERFRTBAUFqQa+Who0aACj0YisrCzV/KysLISHh+t6LV9fX3Tp0gWnT58GAMt6WVlZaNy4sarNzp0769wCz+PQwLdv374Ain833qRJExgMFbpFmIiIiIiIqFrI9+K6om1H+Pn5oVu3bkhOTsaDDz4IADCbzUhOTsb48eN1tWEymXDixAkMGjQIABAVFYXw8HAkJydbBro5OTk4cOAAxo0b51D/PEmFRq7NmjWDwWDAjRs3kJaWhuPHj6seFZGYmAhFUTBx4kTLvOeffx4tWrRAYGAgGjZsiKFDhyItLU13m2PHjoWiKFi8eHGF+kRERERERORKkydPxooVK7B27VqcOnUK48aNQ15eHkaNGgUAGDlyJGbOnGlZfu7cudi+fTvOnj2LI0eO4IknnsDPP/+MZ599FgAsY6r4+Hhs2bIFJ06cwMiRIxEREWEZXHsjh674lvrtt98watQofPPNNzafN5kcu5fv0KFDWL58OTp27Kia361bN8TFxaFp06b4448/8MYbb2DAgAE4d+4cjMbyb3TZvHkzfvjhB/70moiIiIiIrNysju+jjz6K3377DbNnz0ZmZiY6d+6Mbdu2WZJTXbhwQfVL26tXr2LMmDHIzMxESEgIunXrhn379qFdu3aWZaZPn468vDw899xzyM7Oxr333ott27Z5bQ1foIID34kTJyI7OxsHDhxAv379sHnzZmRlZSE+Ph4LFixwqK3c3FzExcVhxYoViI+PVz333HPPWaYjIyMRHx+PTp064fz582jRooVmm7/88gteeuklfPvttxg8eLBjG0dERERERFSFxo8fr/nT5pSUFNXfixYtwqJFi2wuW0pRFMydOxdz5851VhdrvAoNfL/77jt88cUX6N69OwwGA5o1a4a//vWvCAoKQkJCgkODzRdffBGDBw9GTExMmYGvLC8vD6tXr0ZUVBSaNGmiuZzZbMaTTz6JadOm4U9/+pOuPuTn56tqa+Xk5JSzdNU4spwZT8k50mc5dixFLptvmTbmW79d9L0hZROWftShyBlBbXzLuWtrcQbznJwcBAfPcagvzuaOsc5M5uQs/3tklkPLd936umX6jywpAUtJIBsDramHa9eyxk2T4GzL9NnfQy3T/xlWHN85OTkIXv6qQ31xJneMcwA49j5jnZxjY88k3csuT+9rmV5/8W7LdNa1upbpfCkLfGBda+y0Cf3dMj047IRl+oU2OwGUxDr098WtCKGqkuD0tsktVege37y8PEu93pCQEPz2228AgA4dOuDIkSO629mwYQOOHDmiWdcKAJYtW4Y6deqgTp06+Oabb7Bjxw74+flpLv/uu+/Cx8cHL7/8su5+JCQkqOpslTewJqKai7FO5PkY50REZEuFBr5t2rRBeno6AKBTp05Yvnw5fvnlFyQlJa
lSZpcnIyMDEyZMwPr168v9rXlcXByOHj2KXbt2oXXr1hg+fDhu3bplc9nU1FT84x//wJo1a6DIl6HsmDlzJq5du2Z5ZGRk6F6XiGoOxjqR52OcE5E9inDtg9xThX7qPGHCBFy6dAkAMGfOHAwcOBAff/wx/Pz8sHbtWl1tpKam4vLly+jatatlnslkwu7du7FkyRLk5+fDaDRavrFt1aoV7rnnHoSEhGDz5s0YMWJEmTa///57XL58GU2bNlW1OWXKFCxevBjnz5+32Rd/f3+nFpUmIvfEWCfyfIxzIiKypUID3yeeeMIy3a1bN/z8889IS0tD06ZN0aBBA11t9O/fHydOnFDNGzVqFKKjozFjxgybWZuFEBBCqO7dkT355JOIiYlRzYuNjcWTTz5pSQdORERERERejPf4eiXdA9/JkyfrbnThwoV2l6lbty7at2+vmle7dm2Ehoaiffv2OHv2LDZu3IgBAwagYcOGuHjxIhITExEYGGgpzgwA0dHRSEhIwLBhwxAaGorQ0FBVm76+vggPD0ebNm1095+IiIiIiIg8h+6B79GjR3Ut58i9teUJCAjA999/j8WLF+Pq1asICwtDnz59sG/fPktiLQBIT0/HtWvXnPKaRAScf2GqzfltNlnT4ac/NLuqukNELnJkkO1KCi8cKf5V17KuH1dld4jIBZ5vs0uats7P+MWak6fJHZeqsktuQTEXP1zVNrkn3QPfnTt3urIfANQ1qiIiIrB161a76wg7PyfQuq+XiIiIiIiIvEOF7vElIiIiIiKqkXiPr1fiwJeIiIiIiLyHKHm4qm1ySxWq40tERERERERUU/CKLxEREREReQ1FCCgu+kmyq9qlyuMVXyIiIiIiIvJovOJLRLqwhBGRd2AZIyLP540ljFSY3Mor8YovEREREREReTRe8SUiIiIiIu8hAJhd2Da5JV7xJSIiIiIiIo/GK75EREREROQ1mNXZO/GKLxEREREREXk0XvElIiIiIiLvIeDCrM6uaZYqj1d8iYiIiIiIyKPxii8REREREXkP1vH1SrziS0RERERERB6NV3yJiIiIiMh7mAEoLmyb3BKv+BIREREREZFH4xVfIiIiIiLyGqzj6514xZeIiIiIiIg8Gq/4EhERERGR92BWZ6/EK75ERERERETk0XjFl4iIiIiIvAev+HolXvElIiIiIiIij8YrvkRERERE5D14xdcr8YovEREREREReTRe8SUiIiIiIu9hBqC4sG1yS7ziS0RERERERB6NV3yJiIiIiMhrKEJAcdG9uK5qlyqPV3yJiIiIiIjIo/GKLxEREREReQ9mdfZKvOJLREREREREHo1XfImIiIiIyHuYBaC46MqsmVd83RWv+BIREREREZFH4xVfIiIiIiLyHrzH1yvxii8RERERERF5NLcZ+CYmJkJRFEycONEy7/nnn0eLFi0QGBiIhg0bYujQoUhLS9Nso7CwEDNmzECHDh1Qu3ZtREREYOTIkfj111+rYAuIiIiIiMj9CetVX2c/wCu+7sotBr6HDh3C8uXL0bFjR9X8bt26YfXq1Th16hS+/fZbCCEwYMAAmEwmm+3cuHEDR44cwaxZs3DkyBFs2rQJ6enpGDJkSFVsBhEREREREbmhar/HNzc3F3FxcVixYgXi4+NVzz333HOW6cjISMTHx6NTp044f/48WrRoUaat4OBg7NixQzVvyZIluPvuu3HhwgU0bdrUZh/y8/ORn59v+TsnJ6cym0REboqxTuT5GOdEZBfv8fVK1X7F98UXX8TgwYMRExNT7nJ5eXlYvXo1oqKi0KRJE93tX7t2DYqioF69eprLJCQkIDg42PJwpH0iqjkY60Sej3FORES2VOvAd8OGDThy5AgSEhI0l1m2bBnq1KmDOnXq4JtvvsGOHTvg5+enq/1bt25hxowZGDFiBIKCgjSXmzlzJq5du2Z5ZGRkOLwtROT+GOtEno9xTkR2mYVrH+SWqu2nzhkZGZgwYQJ27NiBgIAAzeXi4uLw17/+FZcuXcL8+fMxfPhw7N27t9x1gOJEV8OHD4cQAh988EG5y/r7+8Pf379C20FENQdjncjzMc6JiMiWahv4pqam4vLly+jatatlnslkwu7du7FkyRLk5+fDaDRafqrUqlUr3HPPPQgJCcHmzZsxYsQIzbZLB70///wzvvvuu3Kv9hIRERERkRcR5uKHq9omt1RtP3Xu378/Tpw4gWPHjlke3bt3R1xcHI4dOwaj0VhmHSEEhBCqpBW3Kx30/vTTT/j3v/+N0NBQV24GERERERFRpSxduhSRkZEICAhAjx49cPDgQc1lV6xYgT//+c8ICQlBSEgIYmJiyiz/9NNPQ1EU1WPgwIGu3gy3Vm0D37p166J9+/aqR+3atREaGor27dvj7NmzSEhIQGpqKi5cuIB9+/bhb3/7GwIDAzFo0CBLO9HR0di8eTOA4kHvI488gsOHD2P9+vUwmUzIzMxEZmYmCgoKqmtTiYiIiIjIXbiqhm8Fs0Vv3LgRkydPxpw5c3DkyBF06tQJsbGxuHz5ss3lU1JSMGLECOzcuRP79+9HkyZNMGDAAPzyyy+q5QYOHIhLly5ZHv/85z8r9HZ5imovZ6QlICAA33//PRYvXoyrV68iLCwMffr0wb59+9CoUSPLcunp6bh27RoA4JdffsGWLVsAAJ07d1a1t3PnTvTr16+quk9ERERERF7q9lJq5eUfWLhwIcaMGYNRo0YBAJKSkvD1119j1apVeOWVV8osv379etXfH374IT777DMkJydj5MiRqtcMDw+v7KZ4DLca+KakpFimIyIisHXrVrvrCOlblcjISNXfREREREREKmYBwEVjhpKszreXUpszZw7eeOONMosXFBQgNTUVM2fOtMwzGAyIiYnB/v37db3kjRs3UFhYiPr166vmp6SkoFGjRggJCcFf/vIXxMfHe/VtoG418CUiIiIiIqrpMjIyVAl2ta72XrlyBSaTCWFhYar5YWFhSEtL0/VaM2bMQEREBGJiYizzBg4ciIceeghRUVE4c+YMXn31Vdx///3Yv3+/zVxK3oADXyIiIiIi8h4VvBdXd9sAgoKCqqSyTGJiIjZs2ICUlBRVudfHHnvMMt2hQwd07NgRLVq0QEpKCvr37+/yfrmjaktuRURERERE5M0aNGgAo9GIrKws1fysrCy79+fOnz8fiYmJ2L59Ozp27Fjuss2bN0eDBg1w+vTpSve5puLAl4iIiIiIvIeAC7M6O9YVPz8/dOvWDcnJyZZ5ZrMZycnJ6Nmzp+Z68+bNw1tvvYVt27ahe/fudl/n4sWL+P3339G4cWPHOuhBOPAlIiIiIiKqJpMnT8aKFSuwdu1anDp1CuPGjUNeXp4ly/PIkSNVya/effddzJo1C6tWrUJkZKSlfGtubi4AIDc3F9OmTcMPP/yA8+fPIzk5GUOHDkXLli0RGxtbLdvoDniPLxEREREReY8quMfXEY8++ih+++03zJ49G5mZmejcuTO2bdtmSXh14cIFGAzW65UffPABCgoK8Mgjj6jaKc0cb
TQacfz4caxduxbZ2dmIiIjAgAED8NZbb2km2fIGHPgSERERERFVo/Hjx2P8+PE2n5NLvgLA+fPny20rMDAQ3377rZN65jk48CUiIiIiIu9hNgMwu7Btcke8x5eIiIiIiIg8Gq/4EhERERGR93Cze3ypavCKLxEREREREXk0XvElIiIiIiLvwSu+XolXfImIiIiIiMij8YovERERERF5D7MA4KIrs2Ze8XVXvOJLREREREREHo1XfImIiIiIyGsIYYYQrqm366p2qfJ4xZeIiIiIiIg8Gq/4EhERERGR9xDCdffiMquz2+IVXyIiIiIiIvJovOJLRERERETeQ7gwqzOv+LotXvElIiIiIiIij8YrvkRERERE5D3MZkBxUfZlZnV2W7ziS0RERERERB6NV3yJiIiIiMh78B5fr8QrvkREREREROTReMWXiIiIiIi8hjCbIVx0j6/gPb5ui1d8iYiIiIiIyKPxii8REREREXkP3uPrlXjFl4iIiIiIiDwar/gSEREREZH3MAtA4RVfb8MrvkREREREROTReMWXiIiIiIi8hxAAXJR9mVd83Rav+BIREREREZFH4xVfIiIiIiLyGsIsIFx0j6/gFV+35TZXfBMTE6EoCiZOnGiZ9/zzz6NFixYIDAxEw4YNMXToUKSlpZXbjhACs2fPRuPGjREYGIiYmBj89NNPLu49ERERERERuSu3GPgeOnQIy5cvR8eOHVXzu3XrhtWrV+PUqVP49ttvIYTAgAEDYDKZNNuaN28e3nvvPSQlJeHAgQOoXbs2YmNjcevWLVdvBhERERERuTthdu2D3FK1D3xzc3MRFxeHFStWICQkRPXcc889hz59+iAyMhJdu3ZFfHw8MjIycP78eZttCSGwePFivP766xg6dCg6duyIdevW4ddff8Xnn3/u+o0hIiIiIiIit1PtA98XX3wRgwcPRkxMTLnL5eXlYfXq1YiKikKTJk1sLnPu3DlkZmaq2goODkaPHj2wf/9+zbbz8/ORk5OjehCR52GsE3k+xjkR2SPMwqUPck/VOvDdsGEDjhw5goSEBM1lli1bhjp16qBOnTr45ptvsGPHDvj5+dlcNjMzEwAQFhammh8WFmZ5zpaEhAQEBwdbHloDayKq2RjrRJ6PcU5ERLZU28A3IyMDEyZMwPr16xEQEKC5XFxcHI4ePYpdu3ahdevWGD58uNPv1505cyauXbtmeWRkZDi1fSJyD4x1Is/HOCciu3iPr1eqtnJGqampuHz5Mrp27WqZZzKZsHv3bixZsgT5+fkwGo2Wb2xbtWqFe+65ByEhIdi8eTNGjBhRps3w8HAAQFZWFho3bmyZn5WVhc6dO2v2xd/fH/7+/pa/S9OQ8+dRRM5TGk/VmeafsU7ketUd64xzoqpR3bFeGUUoBFzU7SIUuqZhqrRqG/j2798fJ06cUM0bNWoUoqOjMWPGDBiNxjLrCCEghEB+fr7NNqOiohAeHo7k5GTLQDcnJwcHDhzAuHHjdPft+vXrAMCfRxG5wPXr1xEcHFzd3QDAWCdyJXeJdcY5kWu5S6zr4efnh/DwcOzJ3OrS1wkPD9e8NZOqT7UNfOvWrYv27dur5tWuXRuhoaFo3749zp49i40bN2LAgAFo2LAhLl68iMTERAQGBmLQoEGWdaKjo5GQkIBhw4ZZ6gDHx8ejVatWiIqKwqxZsxAREYEHH3xQd98iIiJw8uRJtGvXDhkZGQgKCnLWZlernJwcNGnShNvk5jx5m06ePImIiIjq7o4FY73m4DbVDO4Y6xEREcjIyIAQAk2bNvXI95vb5N48bZtKt+fChQtQFMVtYl2PgIAAnDt3DgUFBS59HT8/v3Jv5aTqUW0DX3sCAgLw/fffY/Hixbh69SrCwsLQp08f7Nu3D40aNbIsl56ejmvXrln+nj59OvLy8vDcc88hOzsb9957L7Zt2+bQwWcwGHDHHXcAAIKCgjziJCXjNtUMnrhNd9xxBwyGak8mb8FYr3m4TTWDO8W6wWDAnXfeaflZpie+39ymmsHTtik4OLhGbk9AQAAHpV7KrQa+KSkplumIiAhs3Wr/Zwi331egKArmzp2LuXPnOrt7REREREREVAO5x9exRERERERERC7Cga8Gf39/zJkzR5UZsqbjNtUM3Kaq5c59qyhuU83Abapa7ty3iuI21Qyetk2etj3kPRRRE3OQExEREREREenEK75ERERERETk0TjwJSIiIiIiIo/GgS8RERERERF5NA58iYiIiIiIyKNx4CuJjIyEoiiqR2JiYrnr3Lp1Cy+++CJCQ0NRp04dPPzww8jKyqqiHuuXn5+Pzp07Q1EUHDt2rNxl+/XrV+Z9GDt2bNV01AGObJO776chQ4agadOmCAgIQOPGjfHkk0/i119/LXcdd99PFdmmqtpPjPVi7n4MlWKsu+9+cuc4Bxjrpdz5GCrFOHfvfeTusU6kBwe+t5k7dy4uXbpkebz00kvlLj9p0iR8+eWX+Ne//oVdu3bh119/xUMPPVRFvdVv+vTpiIiI0L38mDFjVO/DvHnzXNi7inFkm9x9P91333345JNPkJ6ejs8++wxnzpzBI488Ync9d95PFdmmqtxPjPVi7nwMlWKsu+9+cvc4Bxjrpdz1GCrFOHfvfVQTYp3ILkEWzZo1E4sWLdK9fHZ2tvD19RX/+te/LPNOnTolAIj9+/e7oIcVs3XrVhEdHS1+/PFHAUAcPXq03OX79u0rJkyYUCV9qyhHtqmm7CfZF198IRRFEQUFBZrL1IT9JLO3TVW5nxjrxWrCMcRYrxn7qZQ7xbkQjPVS7n4MMc7dfx/dzt1inUgPXvG9TWJiIkJDQ9GlSxf8/e9/R1FRkeayqampKCwsRExMjGVedHQ0mjZtiv3791dFd+3KysrCmDFj8NFHH6FWrVq611u/fj0aNGiA9u3bY+bMmbhx44YLe+kYR7epJuwn2R9//IH169ejV69e8PX1LXdZd95PMj3bVNX7ibFezJ2PIca6lTvvp1LuGOcAY72Uux5DjHMrd91Ht3PXWCeyx6e6O+BOXn75ZXTt2hX169fHvn37MHPmTFy6dAkLFy60uXxmZib8/PxQr1491fywsDBkZmZWQY/LJ4TA008/jbFjx6J79+44f/68rvUef/xxNGvWDBERETh+/DhmzJiB9PR0bNq0ybUd1qEi2+Tu+6nUjBkzsGTJEty4cQP33HMPvvrqq3KXd+f9VMqRbarK/cRYL+bOxxBj3cqd9xPgvnEOMNZLuesxxDi3ctd9JHPnWCfSpTovN1eFGTNmCADlPk6dOmVz3ZUrVwofHx9x69Ytm8+vX79e+Pn5lZl/1113ienTpzt1O2R6t+kf//iH6N27tygqKhJCCHHu3DldP4m6XXJysgAgTp8+7YKtKebKbXL3/VTqt99+E+np6WL79u2id+/eYtCgQcJsNut+PXfaT6Uc2abK7ifGOmPd3fdTKXePdXeO84r0T8ZYL+YuxxDjXJs7nY9LVXWsEzmbx1/xnTJlCp5++ulyl2nevLnN+T169EBRURHOnz+PNm3a
lHk+PDwcBQUFyM7OVn2jlZWVhfDw8Mp0u1x6t+m7777D/v374e/vr3que/fuiIuLw9q1a3W9Xo8ePQAAp0+fRosWLSrUZ3tcuU3uvp9KNWjQAA0aNEDr1q3Rtm1bNGnSBD/88AN69uyp6/XcaT+VcmSbKrufGOuMdXffT6XcPdbdOc4r0j8ZY72YuxxDjHNt7nQ+LlXVsU7kdNU98nZnH3/8sTAYDOKPP/6w+XzpjfuffvqpZV5aWpoA3OPG/Z9//lmcOHHC8vj2228FAPHpp5+KjIwM3e3s2bNHABD/+c9/XNhbfSqyTe6+n2z5+eefBQCxc+dO3eu4036yxd42Ved+YqwXc6djiLGuzZ320+3cOc6FYKyXcpdjiHGuzV32kRZ3j3UiWzjwLbFv3z6xaNEicezYMXHmzBnx8ccfi4YNG4qRI0dalrl48aJo06aNOHDggGXe2LFjRdOmTcV3330nDh8+LHr27Cl69uxZHZtgl62fEN2+TadPnxZz584Vhw8fFufOnRNffPGFaN68uejTp0819bp8erZJCPfeTz/88IN4//33xdGjR8X58+dFcnKy6NWrl2jRooXl53g1bT9VZJuEqJr9xFivGcfQ7Rjr7ref3DnOhWCs14Rj6HaMc/fcR+4e60R6ceBbIjU1VfTo0UMEBweLgIAA0bZtW/HOO++o7gMqPSHL327dvHlTvPDCCyIkJETUqlVLDBs2TFy6dKkatsA+Wx8ot2/ThQsXRJ8+fUT9+vWFv7+/aNmypZg2bZq4du1a9XTaDj3bJIR776fjx4+L++67z/KeR0ZGirFjx4qLFy9alqlp+6ki2yRE1ewnxvpOIYT7H0O3Y6y7335y5zgXgrFeE46h2zHO3XMfuXusE+mlCCGEM386TUREREREROROWMeXiIiIiIiIPBoHvkREREREROTROPAlIiIiIiIij8aBLxEREREREXk0DnyJiIiIiIjIo3HgS0RERERERB6NA18iIiIiIiLyaBz4EhERERERkUfjwJecol+/fpg4caLHvObTTz+NBx980CVtE9VUjHMi78BYJyJP5FPdHSCqqE2bNsHX19fyd2RkJCZOnFjlH9ZE5DqMcyLvwFgnIlfjwJdqrPr161d3F4jIxRjnRN6BsU5ErsafOpPTXb16FSNHjkRISAhq1aqF+++/Hz/99JPl+TVr1qBevXr49ttv0bZtW9SpUwcDBw7EpUuXLMsUFRXh5ZdfRr169RAaGooZM2bgqaeeUv1USf5ZVL9+/fDzzz9j0qRJUBQFiqIAAN544w107txZ1b/FixcjMjLS8rfJZMLkyZMtrzV9+nQIIVTrmM1mJCQkICoqCoGBgejUqRM+/fRT57xhRDUQ45zIOzDWichTcOBLTvf000/j8OHD2LJlC/bv3w8hBAYNGoTCwkLLMjdu3MD8+fPx0UcfYffu3bhw4QKmTp1qef7dd9/F+vXrsXr1auzduxc5OTn4/PPPNV9z06ZNuPPOOzF37lxcunRJ9YFrz4IFC7BmzRqsWrUKe/bswR9//IHNmzerlklISMC6deuQlJSEH3/8EZMmTcITTzyBXbt26X9jiDwI45zIOzDWichjCCIn6Nu3r5gwYYL43//+JwCIvXv3Wp67cuWKCAwMFJ988okQQojVq1cLAOL06dOWZZYuXSrCwsIsf4eFhYm///3vlr+LiopE06ZNxdChQ8u8ZqlmzZqJRYsWqfo1Z84c0alTJ9W8RYsWiWbNmln+bty4sZg3b57l78LCQnHnnXdaXuvWrVuiVq1aYt++fap2nnnmGTFixIhy3xciT8I4J/IOjHUi8kS8x5ec6tSpU/Dx8UGPHj0s80JDQ9GmTRucOnXKMq9WrVpo0aKF5e/GjRvj8uXLAIBr164hKysLd999t+V5o9GIbt26wWw2O7W/165dw6VLl1T99fHxQffu3S0/jTp9+jRu3LiBv/71r6p1CwoK0KVLF6f2h6gmYJwTeQfGOhF5Eg58qVrImRsBQFGUMvfgOIPBYCjTrvzzLD1yc3MBAF9//TXuuOMO1XP+/v6V6yCRB2OcE3kHxjoR1QS8x5ecqm3btigqKsKBAwcs837//Xekp6ejXbt2utoIDg5GWFgYDh06ZJlnMplw5MiRctfz8/ODyWRSzWvYsCEyMzNVH5THjh1TvVbjxo1V/S0qKkJqaqrl73bt2sHf3x8XLlxAy5YtVY8mTZro2iYiT8I4J/IOjHUi8iS84ktO1apVKwwdOhRjxozB8uXLUbduXbzyyiu44447MHToUN3tvPTSS0hISEDLli0RHR2N999/H1evXrVkdrQlMjISu3fvxmOPPQZ/f380aNAA/fr1w2+//YZ58+bhkUcewbZt2/DNN98gKCjIst6ECROQmJiIVq1aITo6GgsXLkR2drbl+bp162Lq1KmYNGkSzGYz7r33Xly7dg179+5FUFAQnnrqqQq9V0Q1FeOcyDsw1onIk/CKLznd6tWr0a1bNzzwwAPo2bMnhBDYunVrmZ9ClWfGjBkYMWIERo4ciZ49e6JOnTqIjY1FQECA5jpz587F+fPn0aJFCzRs2BBA8bfVy5Ytw9KlS9GpUyccPHhQlWkSAKZMmYInn3wSTz31FHr27Im6deti2LBhqmXeeustzJo1CwkJCWjbti0GDhyIr7/+GlFRUQ68M0Seg3FO5B0Y60TkKRThipswiJzMbDajbdu2GD58ON56663q7g4RuQDjnMg7MNaJqDrwp87kln7++Wds374dffv2RX5+PpYsWYJz587h8ccfr+6uEZGTMM6JvANjnYjcAX/qTG7JYDBgzZo1uOuuu9C7d2+cOHEC//73v9G2bdvq7hoROQnjnMg7MNaJyB3wp85ERERERETk0XjFl4iIiIiIiDwaB75ERERERETk0TjwJSIiIiIiIo/GgS8RERERERF5NA58iYiIiIiIyKNx4EtEREREREQejQNfIiIiIiIi8mgc+BIREREREZFH+/9XsYpcUMTc+AAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import matplotlib.pyplot as plt\n", + "import xarray\n", + "\n", + "response_bay[\"chl\"].isel(depth=10).plot(col=\"time\", col_wrap=3)\n", + "plt.suptitle(\"Temporal evolution at sea surface\", fontsize=20, y=1.2)\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Copernicus Marine toolbox - Get\n", + "\n", + "To download the original data (or native data), you can use the **`get`** command.\n", + "\n", + "For more information, see the [page about get](get-page) of the documentation. You can also check the dedicated pages for the [command line interface](cli-get) or the {func}`Python interface `.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO - 2024-10-18T14:48:09Z - Dataset version was not specified, the latest one was selected: \"202211\"\n", + "INFO - 2024-10-18T14:48:09Z - Dataset part was not specified, the first one was selected: \"default\"\n", + "INFO - 2024-10-18T14:48:09Z - Service was not specified, the default one was selected: \"original-files\"\n", + "INFO - 2024-10-18T14:48:09Z - Downloading using service original-files...\n", + "INFO - 2024-10-18T14:48:10Z - Listing files on remote server...\n", + "1it [00:00, 1.09it/s]\n", + "INFO - 2024-10-18T14:48:11Z - You requested the download of the following files:\n", + "s3://mdl-native-10/native/IBI_ANALYSISFORECAST_BGC_005_004/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_202211/2022/10/CMEMS_v7r1_IBI_BIO_NRT_NL_01dav_20221001_20221001_R20221017_AN04.nc - 82.48 MB - 2023-11-12T14:01:32.377000Z\n", + "s3://mdl-native-10/native/IBI_ANALYSISFORECAST_BGC_005_004/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_202211/2022/10/CMEMS_v7r1_IBI_BIO_NRT_NL_01dav_20221002_20221002_R20221017_AN05.nc - 82.77 MB - 2023-11-12T14:01:38.666000Z\n", + "s3://mdl-native-10/native/IBI_ANALYSISFORECAST_BGC_005_004/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_202211/2022/10/CMEMS_v7r1_IBI_BIO_NRT_NL_01dav_20221003_20221003_R20221017_AN06.nc - 82.81 MB - 2023-11-12T14:01:37.923000Z\n", + "s3://mdl-native-10/native/IBI_ANALYSISFORECAST_BGC_005_004/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_202211/2022/10/CMEMS_v7r1_IBI_BIO_NRT_NL_01dav_20221004_20221004_R20221017_AN07.nc - 82.64 MB - 2023-11-12T14:01:40.222000Z\n", + "s3://mdl-native-10/native/IBI_ANALYSISFORECAST_BGC_005_004/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_202211/2022/10/CMEMS_v7r1_IBI_BIO_NRT_NL_01dav_20221005_20221005_R20221024_AN01.nc - 82.31 MB - 2023-11-12T14:01:38.078000Z\n", + "s3://mdl-native-10/native/IBI_ANALYSISFORECAST_BGC_005_004/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_202211/2022/10/CMEMS_v7r1_IBI_BIO_NRT_NL_01dav_20221006_20221006_R20221024_AN02.nc - 82.45 MB - 2023-11-12T14:01:48.759000Z\n", + "s3://mdl-native-10/native/IBI_ANALYSISFORECAST_BGC_005_004/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_202211/2022/10/CMEMS_v7r1_IBI_BIO_NRT_NL_01dav_20221007_20221007_R20221024_AN03.nc - 82.41 MB - 2023-11-12T14:01:43.316000Z\n", + "s3://mdl-native-10/native/IBI_ANALYSISFORECAST_BGC_005_004/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_202211/2022/10/CMEMS_v7r1_IBI_BIO_NRT_NL_01dav_20221008_20221008_R20221024_AN04.nc - 82.48 MB - 2023-11-12T14:01:46.301000Z\n", + "s3://mdl-native-10/native/IBI_ANALYSISFORECAST_BGC_005_004/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_202211/2022/10/CMEMS_v7r1_IBI_BIO_NRT_NL_01dav_20221009_20221009_R20221024_AN05.nc - 82.24 
MB - 2023-11-12T14:01:47.611000Z\n", + "s3://mdl-native-10/native/IBI_ANALYSISFORECAST_BGC_005_004/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_202211/2022/10/CMEMS_v7r1_IBI_BIO_NRT_NL_01dav_20221010_20221010_R20221024_AN06.nc - 82.04 MB - 2023-11-12T14:01:49.426000Z\n", + "s3://mdl-native-10/native/IBI_ANALYSISFORECAST_BGC_005_004/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_202211/2022/10/CMEMS_v7r1_IBI_BIO_NRT_NL_01dav_20221011_20221011_R20221024_AN07.nc - 82.18 MB - 2023-11-12T14:01:53.948000Z\n", + "s3://mdl-native-10/native/IBI_ANALYSISFORECAST_BGC_005_004/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_202211/2022/10/CMEMS_v7r1_IBI_BIO_NRT_NL_01dav_20221012_20221012_R20221031_AN01.nc - 82.30 MB - 2023-11-12T14:01:59.761000Z\n", + "s3://mdl-native-10/native/IBI_ANALYSISFORECAST_BGC_005_004/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_202211/2022/10/CMEMS_v7r1_IBI_BIO_NRT_NL_01dav_20221013_20221013_R20221031_AN02.nc - 82.36 MB - 2023-11-12T14:01:59.417000Z\n", + "s3://mdl-native-10/native/IBI_ANALYSISFORECAST_BGC_005_004/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_202211/2022/10/CMEMS_v7r1_IBI_BIO_NRT_NL_01dav_20221014_20221014_R20221031_AN03.nc - 82.37 MB - 2023-11-12T14:01:58.859000Z\n", + "s3://mdl-native-10/native/IBI_ANALYSISFORECAST_BGC_005_004/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_202211/2022/10/CMEMS_v7r1_IBI_BIO_NRT_NL_01dav_20221015_20221015_R20221031_AN04.nc - 82.11 MB - 2023-11-12T14:02:01.737000Z\n", + "s3://mdl-native-10/native/IBI_ANALYSISFORECAST_BGC_005_004/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_202211/2022/10/CMEMS_v7r1_IBI_BIO_NRT_NL_01dav_20221016_20221016_R20221031_AN05.nc - 82.03 MB - 2023-11-12T14:02:11.336000Z\n", + "s3://mdl-native-10/native/IBI_ANALYSISFORECAST_BGC_005_004/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_202211/2022/10/CMEMS_v7r1_IBI_BIO_NRT_NL_01dav_20221017_20221017_R20221031_AN06.nc - 81.82 MB - 2023-11-12T14:02:12.479000Z\n", + "s3://mdl-native-10/native/IBI_ANALYSISFORECAST_BGC_005_004/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_202211/2022/10/CMEMS_v7r1_IBI_BIO_NRT_NL_01dav_20221018_20221018_R20221031_AN07.nc - 81.89 MB - 2023-11-12T14:02:07.042000Z\n", + "s3://mdl-native-10/native/IBI_ANALYSISFORECAST_BGC_005_004/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_202211/2022/10/CMEMS_v7r1_IBI_BIO_NRT_NL_01dav_20221019_20221019_R20221107_AN01.nc - 81.66 MB - 2023-11-12T14:02:09.998000Z\n", + "s3://mdl-native-10/native/IBI_ANALYSISFORECAST_BGC_005_004/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_202211/2022/10/CMEMS_v7r1_IBI_BIO_NRT_NL_01dav_20221020_20221020_R20221107_AN02.nc - 81.50 MB - 2023-11-12T14:02:13.928000Z\n", + "Printed 20 out of 757 files\n", + "\n", + "Total size of the download: 65.60 GB\n", + "\n", + "\n", + "Do you want to proceed with download? 
[Y/n]:"
+ ]
+ }
+ ],
+ "source": [
+ "# Download all the files from a dataset\n",
+ "copernicusmarine.get(dataset_id=\"cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m\") "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "INFO - 2024-10-18T15:19:27Z - Dataset version was not specified, the latest one was selected: \"202211\"\n",
+ "INFO - 2024-10-18T15:19:27Z - Dataset part was not specified, the first one was selected: \"default\"\n",
+ "INFO - 2024-10-18T15:19:27Z - Service was not specified, the default one was selected: \"original-files\"\n",
+ "INFO - 2024-10-18T15:19:27Z - Downloading using service original-files...\n",
+ "INFO - 2024-10-18T15:19:27Z - Listing files on remote server...\n",
+ "1it [00:00, 1.90it/s]\n",
+ "Downloading files: 100%|██████████| 2/2 [00:05<00:00, 2.75s/it]"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "data/IBI_ANALYSISFORECAST_BGC_005_004/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_202211/2022/10/CMEMS_v7r1_IBI_BIO_NRT_NL_01dav_20221001_20221001_R20221017_AN04.nc\n",
+ "data/IBI_ANALYSISFORECAST_BGC_005_004/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_202211/2022/10/CMEMS_v7r1_IBI_BIO_NRT_NL_01dav_20221003_20221003_R20221017_AN06.nc\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "\n"
+ ]
+ }
+ ],
+ "source": [
+ "# You can combine the filter and regex arguments (they are combined as an \"OR\" condition)\n",
+ "response = copernicusmarine.get(dataset_id=\"cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m\", \n",
+ "                      filter=\"*01dav_20221001_20221001_R20221017_AN04*\", \n",
+ "                      regex=\"01dav_20221003_20221003_R20221017_AN06\", \n",
+ "                      force_download=True, # skip the confirmation prompt\n",
+ "                      output_directory=\"data\", # we can specify the output directory\n",
+ "                      overwrite_output_data=True, # if files already exist, they will be overwritten\n",
+ "                      )\n",
+ "\n",
+ "for file_metadata in response.files:\n",
+ "    print(file_metadata.output) "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Now, if you have already downloaded this data and want to check whether it is up to date, you can use the `sync` option."
" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO - 2024-10-18T15:20:33Z - You forced selection of dataset version \"202211\"\n", + "INFO - 2024-10-18T15:20:33Z - Dataset part was not specified, the first one was selected: \"default\"\n", + "INFO - 2024-10-18T15:20:33Z - Service was not specified, the default one was selected: \"original-files\"\n", + "INFO - 2024-10-18T15:20:33Z - Downloading using service original-files...\n", + "INFO - 2024-10-18T15:20:33Z - Listing files on remote server...\n", + "1it [00:00, 1.91it/s]\n", + "INFO - 2024-10-18T15:20:33Z - No data to download\n" + ] + } + ], + "source": [ + "response = copernicusmarine.get(\n", + " dataset_id=\"cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m\", \n", + " filter=\"*01dav_20221001_20221001_R20221017_AN04*\", \n", + " regex=\"01dav_20221003_20221003_R20221017_AN06\", \n", + " dataset_version=\"202211\", \n", + " force_download=True,\n", + " output_directory=\"data\",\n", + " sync=True,\n", + " sync_delete=True, # delete the files that are not in the server\n", + " max_concurrent_requests=0, # not in parallel\n", + " # can be useful to be sure to not overload the process \n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.18" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/doc/usage/shared-options.rst b/doc/usage/shared-options.rst new file mode 100644 index 00000000..0317b7b1 --- /dev/null +++ b/doc/usage/shared-options.rst @@ -0,0 +1,213 @@ +Shared options +========================= + +Both ``subset`` and ``get`` (and also some concern other options) commands provide these options: + +Option ``--overwrite-output-data`` +********************************** + +When specified, existing files will be overwritten. If not, and the files already exist at the destination, new files will be created with a unique index once the download is accepted (or if ``--force-download`` is provided). + +Option ``--create-template`` +********************************* + +This option creates a file in your current directory containing the request parameters. If specified, no other action will be performed. The file created will depend on the command used: + +- ``subset`` + + **Example:** + + .. code-block:: bash + + copernicusmarine subset --create-template + + **Returns:** + + .. code-block:: text + + INFO - 2024-04-04T14:38:09Z - Template created at: subset_template.json + +- ``get`` + + **Example:** + + .. code-block:: bash + + copernicusmarine get --create-template + + **Returns:** + + .. code-block:: text + + INFO - 2024-04-04T14:38:09Z - Template created at: get_template.json + +Option ``--request-file`` +********************************* + +This option allows you to specify request parameters in a provided ``.json`` file, which is useful for batch processing. You can use the following templates or create new ones with the ``--create-template`` option. + +- Template for ``subset`` data request: + + .. 
+  .. code-block:: text
+
+    {
+      "dataset_id": "cmems_mod_glo_phy_myint_0.083deg_P1M-m",
+      "start_datetime": "2023-10-07",
+      "end_datetime": "2023-10-12",
+      "minimum_longitude": -85,
+      "maximum_longitude": -10,
+      "minimum_latitude": 35,
+      "maximum_latitude": 43,
+      "minimum_depth": 1,
+      "maximum_depth": 10,
+      "variables": [
+        "so",
+        "thetao"
+      ],
+      "output_directory": "copernicusmarine_data",
+      "force_service": false,
+      "force_download": false,
+      "request_file": false,
+      "motu_api_request": false,
+      "overwrite_output_data": false
+    }
+
+  **Example:**
+
+  .. code-block:: bash
+
+    copernicusmarine subset --request-file subset_template.json
+
+- Template for ``get`` data request:
+
+  .. code-block:: text
+
+    {
+      "dataset_id": "cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m",
+      "dataset_version": null,
+      "dataset_part": null,
+      "username": null,
+      "password": null,
+      "no_directories": false,
+      "filter": "*01yav_200[0-2]*",
+      "regex": null,
+      "output_directory": "copernicusmarine_data",
+      "show_outputnames": true,
+      "force_download": false,
+      "file_list": null,
+      "sync": false,
+      "sync_delete": false,
+      "index_parts": false,
+      "disable_progress_bar": false,
+      "overwrite_output_data": false,
+      "log_level": "INFO"
+    }
+
+  **Example:**
+
+  .. code-block:: bash
+
+    copernicusmarine get --request-file get_template.json
+
+Option ``--credentials-file``
+*********************************
+
+You can use the ``--credentials-file`` option to specify a credentials file. The file can be either ``.copernicusmarine-credentials``, ``motuclient-python.ini``, ``.netrc``, or ``_netrc``.
+
+.. _dataset version:
+
+Option ``--dataset-version``
+*********************************
+
+This option allows you to fetch a specific version of the dataset, which is particularly useful to maintain operational consistency when updates affect the chosen dataset.
+The version of a dataset can be found through the ``describe`` command.
+
+**Example:**
+
+.. code:: bash
+
+    copernicusmarine get -i cmems_mod_ibi_phy_my_0.083deg-3D_P1D-m --dataset-version 202012
+
+**Returns:**
+
+.. code:: text
+
+    INFO - 2024-10-07T08:53:18Z - You forced selection of dataset version "202012"
+
+Option ``--dataset-part``
+*********************************
+
+Use this option to fetch a specific part of the chosen dataset version.
+The parts of the dataset can be found through the ``describe`` command.
+
+**Example:**
+
+.. code:: bash
+
+    copernicusmarine get -i cmems_obs-ins_blk_phybgcwav_mynrt_na_irr --dataset-part history
+
+**Returns:**
+
+.. code:: text
+
+    INFO - 2024-10-07T08:53:18Z - You forced selection of dataset part "history"
+
+Option ``--dry-run``
+*********************************
+
+This option allows you to simulate the command without actually executing it.
+It is useful for checking the command's behavior before running it, and for accessing some metadata about the request.
+When used, the toolbox will, by default, output the response of the command.
+
+**Example:**
+
+.. code:: bash
+
+    copernicusmarine subset -i cmems_mod_ibi_phy_my_0.083deg-3D_P1D-m -v thetao --dry-run --force-download
+
+**Returns:**
+
+.. code:: text
+
+    INFO - 2024-10-07T08:47:46Z - Dataset version was not specified, the latest one was selected: "202012"
+    INFO - 2024-10-07T08:47:46Z - Dataset part was not specified, the first one was selected: "default"
+    INFO - 2024-10-07T08:47:47Z - Service was not specified, the default one was selected: "arco-geo-series"
+    INFO - 2024-10-07T08:47:48Z - Downloading using service arco-geo-series...
+ INFO - 2024-10-07T08:47:50Z - Estimated size of the dataset file is 210828.202 MB + Estimated size of the data that needs to be downloaded to obtain the result: 210888 MB + This a very rough estimation and usually its higher than the actual size of the data that needs to be downloaded. + { + "output": "cmems_mod_ibi_phy_my_0.083deg-3D_P1D-m_thetao_19.00W-5.00E_26.00N-56.00N_0.51-5698.06m_1993-01-01-2021-12-28.nc", + "size": 210828.20248091602, + "data_needed": 210887.9328244275, + "coodinates_extent": { + "longitude": { + "minimum": -19.0, + "maximum": 4.999999046325684 + }, + "latitude": { + "minimum": 26.0, + "maximum": 56.0 + }, + "time": { + "minimum": "1993-01-01T00:00:00Z", + "maximum": "2021-12-28T00:00:00Z" + }, + "depth": { + "minimum": 0.5057600140571594, + "maximum": 5698.060546875 + } + } + } + +See :ref:`Response types documentation ` for more information about the response you can expect. + +Option ``--log-level`` +********************************* + +Set the level of detail printed to the console by the command, based on the standard logging library. Available values are: ``[DEBUG|INFO|WARN|ERROR|CRITICAL|QUIET]``. + +All logs of the library are by default logged in stderr, except the output of the ``describe`` command and the output of the ``--show-outputnames`` option, which are sent to stdout. + +.. note:: + For versions <=1.2.4, all logs are sent to stdout by default. diff --git a/doc/usage/subset-usage.rst b/doc/usage/subset-usage.rst new file mode 100644 index 00000000..8b36aa3f --- /dev/null +++ b/doc/usage/subset-usage.rst @@ -0,0 +1,112 @@ +.. _subset-page: + +=================== +Command ``subset`` +=================== + +The ``subset`` command allows you to remotely subset a dataset based on variable names, geographical parameters, and time ranges. + +**Example:** + +.. code-block:: bash + + copernicusmarine subset --dataset-id cmems_mod_ibi_phy_my_0.083deg-3D_P1D-m --variable thetao --variable so --start-datetime 2021-01-01 --end-datetime 2021-01-03 --minimum-longitude 0.0 --maximum-longitude 0.1 --minimum-latitude 28.0 --maximum-latitude 28.1 + +**Returns:** + +.. code-block:: bash + + INFO - 2024-04-03T10:18:18Z - Size: 3kB + Dimensions: (depth: 50, latitude: 2, longitude: 1, time: 3) + Coordinates: + * depth (depth) float32 200B 0.5058 1.556 2.668 ... 5.292e+03 5.698e+03 + * latitude (latitude) float32 8B 28.0 28.08 + * longitude (longitude) float32 4B 0.08333 + * time (time) datetime64[ns] 24B 2021-01-01 2021-01-02 2021-01-03 + Data variables: + thetao (time, depth, latitude, longitude) float32 1kB dask.array + so (time, depth, latitude, longitude) float32 1kB dask.array + Attributes: (12/20) + Conventions: CF-1.0 + bulletin_date: 2020-12-01 + references: http://marine.copernicus.eu + copernicusmarine_version: 1.1.0 + INFO - 2024-04-03T10:18:18Z - Estimated size of the dataset file is 0.002 MB. + Estimated size of the data that needs to be downloaded to obtain the result: 207 MB + This a very rough estimation and usually its higher than the actual size of the data that needs to be downloaded. + + Do you want to proceed with download? [Y/n]: + +By default, a summary of the dataset subset is displayed, and a download confirmation is prompted. You can skip this confirmation by using the ``--force-download`` option. 
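+
+The same request can also be run through the Python interface. The snippet below is a minimal sketch rather than a verbatim excerpt from the toolbox documentation: it assumes ``copernicusmarine`` is importable and that ``copernicusmarine.subset`` accepts the keyword equivalents of the CLI flags above (``variables`` as a list, ``force_download`` to skip the confirmation prompt), as the quick overview notebook does for ``get``:
+
+.. code-block:: python
+
+    import copernicusmarine
+
+    # Same request as the CLI example above; force_download skips the prompt.
+    copernicusmarine.subset(
+        dataset_id="cmems_mod_ibi_phy_my_0.083deg-3D_P1D-m",
+        variables=["thetao", "so"],
+        start_datetime="2021-01-01",
+        end_datetime="2021-01-03",
+        minimum_longitude=0.0,
+        maximum_longitude=0.1,
+        minimum_latitude=28.0,
+        maximum_latitude=28.1,
+        force_download=True,
+    )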
+
+Additional options
+------------------
+
+About longitude range
+""""""""""""""""""""""
+
+The ``--minimum-longitude`` and ``--maximum-longitude`` options work as follows:
+
+- If the result of ``--maximum-longitude`` minus ``--minimum-longitude`` is greater than or equal to 360, the entire dataset will be returned.
+- If the requested longitude range:
+
+  * **Does not cross** the antemeridian, the dataset between -180 and 180 is returned.
+  * **Crosses** the antemeridian, the dataset between 0 and 360 is returned.
+
+Note that any longitudes can be requested. The system applies a modulus operation to bring the result between -180° and 360°. For example, a request for [530, 560] will return data for longitudes [170, 200].
+
+About ``--netcdf-compression-level`` option
+""""""""""""""""""""""""""""""""""""""""""""
+
+If writing data to a NetCDF file (the default format), the ``--netcdf-compression-level`` option can be set to compress the downloaded file. This reduces file size but increases writing time. Without this option, the file is written faster but with a larger size. For the Zarr format ('.zarr' extension), the default compression of the Copernicus Marine Data Store is applied, making the download fast and compressed without using ``--netcdf-compression-level``.
+
+Default NetCDF compression settings for xarray:
+
+.. code-block:: text
+
+    {'zlib': True, 'complevel': 1, 'contiguous': False, 'shuffle': True}
+
+Set ``--netcdf-compression-level`` to a custom compression level between 0 (no compression, the default) and 9 (maximum compression).
+
+About ``--netcdf3-compatible`` option
+""""""""""""""""""""""""""""""""""""""""
+
+The ``--netcdf3-compatible`` option enables compatibility with the netCDF3 format.
+This uses the ``format="NETCDF3_CLASSIC"`` setting in the xarray `to_netcdf` method. (cf. `xarray documentation `_.)
+
+About ``--coordinates-selection-method`` option
+""""""""""""""""""""""""""""""""""""""""""""""""""
+
+The ``--coordinates-selection-method`` option lets you specify how the requested interval selects data points:
+
+- **inside** (default): Returns points strictly within the requested area.
+- **strict-inside**: Returns points strictly within the requested area. Fails with an error if the requested area is outside the dataset.
+- **nearest**: Returns the points closest to the requested interval.
+- **outside**: Returns all points covering the requested area.
+
+This applies to all dimensions: longitude, latitude, time, and depth.
+
+**Example of longitude requests:**
+Imagine a dataset with longitudes from 0.0 to 36.5, with steps of 0.5.
+
+- ``--coordinates-selection-method`` = **inside**, with requested interval = [0.4, 35.9]:
+
+  - Returns longitudes within the range: [0.5, 35.5]
+
+- ``--coordinates-selection-method`` = **strict-inside**, with requested interval = [0.4, 35.9]:
+
+  - Returns longitudes within the range: [0.5, 35.5]
+
+- ``--coordinates-selection-method`` = **strict-inside**, with requested interval = [0.0, 40]:
+
+  - Returns an error, see :class:`copernicusmarine.CoordinatesOutOfDatasetBounds`.
+
+- ``--coordinates-selection-method`` = **outside**, with requested interval = [0.4, 35.9]:
+
+  - Returns longitudes within the range: [0.0, 36.0]
+
+- ``--coordinates-selection-method`` = **nearest**, with requested interval = [0.4, 35.9]:
+
+  - Returns longitudes within the range: [0.5, 36.0]
+
+If you request a single point, the nearest point in that dimension will be returned.
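+
+For instance, a single-point request through the Python interface might look like the sketch below (the values are illustrative, and it is assumed that the ``coordinates_selection_method`` keyword mirrors the CLI option):
+
+.. code-block:: python
+
+    import copernicusmarine
+
+    # minimum == maximum on both axes requests a single point;
+    # "nearest" then returns the closest grid point in each dimension.
+    copernicusmarine.subset(
+        dataset_id="cmems_mod_ibi_phy_my_0.083deg-3D_P1D-m",
+        variables=["thetao"],
+        minimum_longitude=0.07,
+        maximum_longitude=0.07,
+        minimum_latitude=28.0,
+        maximum_latitude=28.0,
+        coordinates_selection_method="nearest",
+    )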
diff --git a/doc/usage/usage.rst b/doc/usage/usage.rst new file mode 100644 index 00000000..1f82fcc6 --- /dev/null +++ b/doc/usage/usage.rst @@ -0,0 +1,13 @@ +Usage and technical details +============================ + +.. toctree:: + + quickoverview + login-usage + describe-usage + subset-usage + get-usage + shared-options + network-configuration + errors diff --git a/poetry.lock b/poetry.lock index da7370c1..3d0a48df 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,115 +1,16 @@ # This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] -name = "aiohttp" -version = "3.9.5" -description = "Async http client/server framework (asyncio)" +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"}, - {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"}, - {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"}, - {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"}, - {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"}, - {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"}, - {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"}, - {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"}, - {file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"}, - {file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"}, - {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"}, - {file = 
"aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"}, - {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"}, - {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"}, - {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"}, + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] -[package.dependencies] -aiosignal = ">=1.1.2" -async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} -attrs = ">=17.3.0" -frozenlist = ">=1.1.1" -multidict = ">=4.5,<7.0" -yarl = ">=1.0,<2.0" - -[package.extras] -speedups = ["Brotli", "aiodns", "brotlicffi"] - -[[package]] -name = "aiosignal" -version = "1.3.1" -description = "aiosignal: a list of registered asynchronous callbacks" -optional = false -python-versions = ">=3.7" -files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, -] - -[package.dependencies] -frozenlist = ">=1.1.0" - [[package]] name = "asciitree" version = "0.3.3" @@ -138,49 +39,19 @@ six = ">=1.12.0" astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] -[[package]] -name = "async-timeout" -version = "4.0.3" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = 
"sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, -] - -[[package]] -name = "attrs" -version = "23.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] - [[package]] name = "boto3" -version = "1.34.139" +version = "1.35.48" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.34.139-py3-none-any.whl", hash = "sha256:98b2a12bcb30e679fa9f60fc74145a39db5ec2ca7b7c763f42896e3bd9b3a38d"}, - {file = "boto3-1.34.139.tar.gz", hash = "sha256:32b99f0d76ec81fdca287ace2c9744a2eb8b92cb62bf4d26d52a4f516b63a6bf"}, + {file = "boto3-1.35.48-py3-none-any.whl", hash = "sha256:60889bb6e21f0af662ac9404e00125d3b8a5808f190e89462e5ddf73604adfc1"}, + {file = "boto3-1.35.48.tar.gz", hash = "sha256:5007a5cdd09e4db9309adf2ee090455a34ae639bd10a68a1fefca72cd77070fc"}, ] [package.dependencies] -botocore = ">=1.34.139,<1.35.0" +botocore = ">=1.35.48,<1.36.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -189,13 +60,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.139" +version = "1.35.48" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.34.139-py3-none-any.whl", hash = "sha256:dd1e085d4caa2a4c1b7d83e3bc51416111c8238a35d498e9d3b04f3b63b086ba"}, - {file = "botocore-1.34.139.tar.gz", hash = "sha256:df023d8cf8999d574214dad4645cb90f9d2ccd1494f6ee2b57b1ab7522f6be77"}, + {file = "botocore-1.35.48-py3-none-any.whl", hash = "sha256:34fa25fd717208b05745e60f271a39636108fa87a3512fbca18e7e6f787a3239"}, + {file = "botocore-1.35.48.tar.gz", hash = "sha256:3e766cc251053c9ef98542fdf225381ed58531769c3811a6282bd7247f7e2bdf"}, ] [package.dependencies] @@ -207,33 +78,17 @@ urllib3 = [ ] [package.extras] -crt = ["awscrt (==0.20.11)"] - -[[package]] -name = "cachier" -version = "3.0.0" -description = "Persistent, stale-free, local and cross-machine caching for Python functions." -optional = false -python-versions = "*" -files = [ - {file = "cachier-3.0.0-py3-none-any.whl", hash = "sha256:627455077d3619a27b1c63368d49533ffe6807a1b19fe6944e4c8f3ac895d1bc"}, - {file = "cachier-3.0.0.tar.gz", hash = "sha256:f756ebeabf8b397f75fbbb9a5841972d4c85a81b7313e2812b3448e80d74c66d"}, -] - -[package.dependencies] -portalocker = ">=2.3.2" -setuptools = ">=67.6.0" -watchdog = ">=2.3.1" +crt = ["awscrt (==0.22.0)"] [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] @@ -247,139 +102,118 @@ files = [ {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, ] -[[package]] -name = "cftime" -version = "1.6.3" -description = "Time-handling functionality from netcdf4-python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "cftime-1.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b62d42546fa5c914dfea5b15a9aaed2087ea1211cc36d08c374502ef95892038"}, - {file = "cftime-1.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb6dd70b2ccabfe1a14b7fbb0bbdce0418e71697094373c0d573c880790fa291"}, - {file = "cftime-1.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9878bfd8c1c3f24184ecbd528f739ba46ebaceaf1c8a24d348d7befb117a285"}, - {file = "cftime-1.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:3cf6e216a4c06f9a628cdf8e9c9d5e8097fb3eb02dd087dd14ab3b18478a7271"}, - {file = "cftime-1.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d2c01456d9d7b46aa710a41d1c711a50d5ea259aff4a987d0e973d1093bc922"}, - {file = "cftime-1.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:80eb1170ce1639016f55760847f4aadd04b0312496c5bac2797e930914bba48d"}, - {file = "cftime-1.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d87dadd0824262bdd7493babd2a44447da0a22175ded8ae9e060a3aebec7c5d7"}, - {file = "cftime-1.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:0a38eb9f5c733a23e1714bd3ef2762ed5acee34f127670f8fb4ad6464946f6b3"}, - {file = "cftime-1.6.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2d113a01ab924445e61d65c26bbd95bc08e4a22878d3b947064bba056c884c4a"}, - {file = "cftime-1.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f11685663a6af97418908060492a07663c16d42519c139ca03c2ffb1377fd25"}, - {file = "cftime-1.6.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a98abb1d46d118e52b0611ce668a0b714b407be26177ef0581ecf5e95f894725"}, - {file = "cftime-1.6.3-cp312-cp312-win_amd64.whl", hash = "sha256:4d6fbd5f41b322cfa7b0ac3aaadeceb4450100a164b5bccbbb9e7c5048489a88"}, - {file = "cftime-1.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bedb577bc8b8f3f10f5336c0792e5dae88605781890f50f36b45bb46907968e8"}, - {file = "cftime-1.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:022dabf1610cdd04a693e730fa8f71d307059717f29dba921e7486e553412bb4"}, - {file = "cftime-1.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbf782ab4ac0605bdec2b941952c897595613203942b7f8c2fccd17efa5147df"}, - {file = "cftime-1.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:9eb177a02db7cd84aa6962278e4bd2d3106a545de82e6aacd9404f1e153661db"}, - {file = "cftime-1.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b86be8c2f254147be4ba88f12099466dde457a4a3a21de6c69d52a7224c13ae"}, - {file = "cftime-1.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:523b9a6bf03f5e36407979e248381d0fcab2d225b915bbde77d00c6dde192b90"}, - {file = 
"cftime-1.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a14d2c7d22fd2a6dfa6ad563283b6d6679f1df95e0ed8d14b8f284dad402887"}, - {file = "cftime-1.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:d9b00c2844c7a1701d8ede5336b6321dfee256ceab81a34a1aff0483d56891a6"}, - {file = "cftime-1.6.3.tar.gz", hash = "sha256:d0a6b29f72a13f08e008b9becff247cc75c84acb213332ede18879c5b6aa4dfd"}, -] - -[package.dependencies] -numpy = [ - {version = ">1.13.3", markers = "python_version < \"3.12.0.rc1\""}, - {version = ">=1.26.0b1", markers = "python_version >= \"3.12.0.rc1\""}, -] - [[package]] name = "charset-normalizer" -version = "3.3.2" +version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = 
"charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = 
"charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = 
"charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, ] [[package]] @@ -398,13 +232,13 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "cloudpickle" -version = "3.0.0" +version = "3.1.0" description = "Pickler class to extend the standard pickle.Pickler functionality" optional = false python-versions = ">=3.8" files = [ - {file = "cloudpickle-3.0.0-py3-none-any.whl", hash = "sha256:246ee7d0c295602a036e86369c77fecda4ab17b506496730f2f576d9016fd9c7"}, - {file = "cloudpickle-3.0.0.tar.gz", hash = "sha256:996d9a482c6fb4f33c1a35335cf8afd065d2a56e973270364840712d9131a882"}, + {file = "cloudpickle-3.1.0-py3-none-any.whl", hash = "sha256:fe11acda67f61aaaec473e3afe030feb131d78a43461b718185363384f1ba12e"}, + {file = "cloudpickle-3.1.0.tar.gz", hash = "sha256:81a929b6e3c7335c863c771d673d105f02efdb89dfaba0c90495d1c64796601b"}, ] [[package]] @@ -420,13 +254,13 @@ files = [ [[package]] name = "dask" -version = "2024.4.2" +version = "2024.8.0" description = "Parallel PyData with Task Scheduling" optional = false python-versions = ">=3.9" files = [ - {file = "dask-2024.4.2-py3-none-any.whl", hash = "sha256:56fbe92472e3b323ab7beaf2dc8437d48066ac21aa9c2c17ac40d2b6f7b4c414"}, - {file = "dask-2024.4.2.tar.gz", hash = "sha256:3d7a516468d96e72581b84c7bb00172366f30d24c689ea4e9bd1334ab6d98f8a"}, + {file = "dask-2024.8.0-py3-none-any.whl", hash = "sha256:250ea3df30d4a25958290eec4f252850091c6cfaed82d098179c3b25bba18309"}, + {file = "dask-2024.8.0.tar.gz", hash = "sha256:f1fec39373d2f101bc045529ad4e9b30e34e6eb33b7aa0fa7073aec7b1bf9eee"}, ] [package.dependencies] @@ -435,16 +269,16 @@ cloudpickle = ">=1.5.0" fsspec = ">=2021.09.0" importlib-metadata = {version = ">=4.13.0", markers = "python_version < \"3.12\""} packaging = ">=20.0" -partd = ">=1.2.0" +partd = ">=1.4.0" pyyaml = ">=5.3.1" toolz = ">=0.10.0" [package.extras] array = ["numpy (>=1.21)"] complete = ["dask[array,dataframe,diagnostics,distributed]", "lz4 (>=4.3.2)", "pyarrow (>=7.0)", "pyarrow-hotfix"] -dataframe = ["dask-expr (>=1.0,<1.1)", "dask[array]", "pandas (>=1.3)"] +dataframe = ["dask-expr (>=1.1,<1.2)", "dask[array]", "pandas (>=2.0)"] diagnostics = ["bokeh (>=2.4.2)", "jinja2 (>=2.10.3)"] -distributed = ["distributed (==2024.4.2)"] +distributed = ["distributed (==2024.8.0)"] test = ["pandas[test]", "pre-commit", "pytest", "pytest-cov", "pytest-rerunfailures", "pytest-timeout", "pytest-xdist"] [[package]] @@ 
-460,24 +294,24 @@ files = [ [[package]] name = "distlib" -version = "0.3.8" +version = "0.3.9" description = "Distribution utilities" optional = false python-versions = "*" files = [ - {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, - {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, + {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, + {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, ] [[package]] name = "exceptiongroup" -version = "1.2.1" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -485,13 +319,13 @@ test = ["pytest (>=6)"] [[package]] name = "executing" -version = "2.0.1" +version = "2.1.0" description = "Get the currently executing AST node of a frame, and other information" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, - {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, + {file = "executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf"}, + {file = "executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab"}, ] [package.extras] @@ -510,115 +344,29 @@ files = [ [[package]] name = "filelock" -version = "3.13.4" +version = "3.16.1" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.4-py3-none-any.whl", hash = "sha256:404e5e9253aa60ad457cae1be07c0f0ca90a63931200a47d9b6a6af84fd7b45f"}, - {file = "filelock-3.13.4.tar.gz", hash = "sha256:d13f466618bfde72bd2c18255e269f72542c6e70e7bac83a0232d6b1cc5c8cf4"}, + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] -typing = ["typing-extensions (>=4.8)"] - -[[package]] -name = "frozenlist" -version = "1.4.1" -description = "A list-like structure which implements collections.abc.MutableSequence" -optional = false -python-versions = ">=3.8" -files = [ - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, - {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, - {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, - {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, - {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, - {file = 
"frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, - {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, - {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, - {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, - {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, - {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, - {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, - {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, - {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, -] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "fsspec" -version = "2024.3.1" +version = "2024.10.0" description = "File-system specification" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2024.3.1-py3-none-any.whl", hash = "sha256:918d18d41bf73f0e2b261824baeb1b124bcf771767e3a26425cd7dec3332f512"}, - {file = "fsspec-2024.3.1.tar.gz", hash = "sha256:f39780e282d7d117ffb42bb96992f8a90795e4d0fb0f661a70ca39fe9c43ded9"}, + {file = "fsspec-2024.10.0-py3-none-any.whl", hash = 
"sha256:03b9a6785766a4de40368b88906366755e2819e758b83705c88cd7cb5fe81871"}, + {file = "fsspec-2024.10.0.tar.gz", hash = "sha256:eda2d8a4116d4f2429db8550f2457da57279247dd930bb12f821b58391359493"}, ] [package.extras] @@ -626,7 +374,8 @@ abfs = ["adlfs"] adl = ["adlfs"] arrow = ["pyarrow (>=1)"] dask = ["dask", "distributed"] -devel = ["pytest", "pytest-cov"] +dev = ["pre-commit", "ruff"] +doc = ["numpydoc", "sphinx", "sphinx-design", "sphinx-rtd-theme", "yarl"] dropbox = ["dropbox", "dropboxdrivefs", "requests"] full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] fuse = ["fusepy"] @@ -643,17 +392,76 @@ s3 = ["s3fs"] sftp = ["paramiko"] smb = ["smbprotocol"] ssh = ["paramiko"] +test = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "numpy", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "requests"] +test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask-expr", "dask[dataframe,test]", "moto[server] (>4,<5)", "pytest-timeout", "xarray"] +test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard"] tqdm = ["tqdm"] +[[package]] +name = "h5netcdf" +version = "1.4.0" +description = "netCDF4 via h5py" +optional = false +python-versions = ">=3.9" +files = [ + {file = "h5netcdf-1.4.0-py3-none-any.whl", hash = "sha256:d1bb96fce5dcf42908903c9798beeef70ac84e97159eb381f1b151459313f228"}, + {file = "h5netcdf-1.4.0.tar.gz", hash = "sha256:e959c3b5bd3ca7965ce5f4383a4e038ffcb55034c63d791829bd33a5ac38a962"}, +] + +[package.dependencies] +h5py = "*" +packaging = "*" + +[package.extras] +test = ["netCDF4", "pytest"] + +[[package]] +name = "h5py" +version = "3.12.1" +description = "Read and write HDF5 files from Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "h5py-3.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f0f1a382cbf494679c07b4371f90c70391dedb027d517ac94fa2c05299dacda"}, + {file = "h5py-3.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cb65f619dfbdd15e662423e8d257780f9a66677eae5b4b3fc9dca70b5fd2d2a3"}, + {file = "h5py-3.12.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b15d8dbd912c97541312c0e07438864d27dbca857c5ad634de68110c6beb1c2"}, + {file = "h5py-3.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59685fe40d8c1fbbee088c88cd4da415a2f8bee5c270337dc5a1c4aa634e3307"}, + {file = "h5py-3.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:577d618d6b6dea3da07d13cc903ef9634cde5596b13e832476dd861aaf651f3e"}, + {file = "h5py-3.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ccd9006d92232727d23f784795191bfd02294a4f2ba68708825cb1da39511a93"}, + {file = "h5py-3.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ad8a76557880aed5234cfe7279805f4ab5ce16b17954606cca90d578d3e713ef"}, + {file = "h5py-3.12.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1473348139b885393125126258ae2d70753ef7e9cec8e7848434f385ae72069e"}, + {file = "h5py-3.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:018a4597f35092ae3fb28ee851fdc756d2b88c96336b8480e124ce1ac6fb9166"}, + {file = "h5py-3.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:3fdf95092d60e8130ba6ae0ef7a9bd4ade8edbe3569c13ebbaf39baefffc5ba4"}, + {file = "h5py-3.12.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:06a903a4e4e9e3ebbc8b548959c3c2552ca2d70dac14fcfa650d9261c66939ed"}, + {file = "h5py-3.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b3b8f3b48717e46c6a790e3128d39c61ab595ae0a7237f06dfad6a3b51d5351"}, + {file = "h5py-3.12.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:050a4f2c9126054515169c49cb900949814987f0c7ae74c341b0c9f9b5056834"}, + {file = "h5py-3.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c4b41d1019322a5afc5082864dfd6359f8935ecd37c11ac0029be78c5d112c9"}, + {file = "h5py-3.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:e4d51919110a030913201422fb07987db4338eba5ec8c5a15d6fab8e03d443fc"}, + {file = "h5py-3.12.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:513171e90ed92236fc2ca363ce7a2fc6f2827375efcbb0cc7fbdd7fe11fecafc"}, + {file = "h5py-3.12.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:59400f88343b79655a242068a9c900001a34b63e3afb040bd7cdf717e440f653"}, + {file = "h5py-3.12.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3e465aee0ec353949f0f46bf6c6f9790a2006af896cee7c178a8c3e5090aa32"}, + {file = "h5py-3.12.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba51c0c5e029bb5420a343586ff79d56e7455d496d18a30309616fdbeed1068f"}, + {file = "h5py-3.12.1-cp313-cp313-win_amd64.whl", hash = "sha256:52ab036c6c97055b85b2a242cb540ff9590bacfda0c03dd0cf0661b311f522f8"}, + {file = "h5py-3.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d2b8dd64f127d8b324f5d2cd1c0fd6f68af69084e9e47d27efeb9e28e685af3e"}, + {file = "h5py-3.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4532c7e97fbef3d029735db8b6f5bf01222d9ece41e309b20d63cfaae2fb5c4d"}, + {file = "h5py-3.12.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fdf6d7936fa824acfa27305fe2d9f39968e539d831c5bae0e0d83ed521ad1ac"}, + {file = "h5py-3.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84342bffd1f82d4f036433e7039e241a243531a1d3acd7341b35ae58cdab05bf"}, + {file = "h5py-3.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:62be1fc0ef195891949b2c627ec06bc8e837ff62d5b911b6e42e38e0f20a897d"}, + {file = "h5py-3.12.1.tar.gz", hash = "sha256:326d70b53d31baa61f00b8aa5f95c2fcb9621a3ee8365d770c551a13dbbcbfdf"}, +] + +[package.dependencies] +numpy = ">=1.19.3" + [[package]] name = "identify" -version = "2.5.36" +version = "2.6.1" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.36-py2.py3-none-any.whl", hash = "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa"}, - {file = "identify-2.5.36.tar.gz", hash = "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"}, + {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"}, + {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"}, ] [package.extras] @@ -661,33 +469,40 @@ license = ["ukkonen"] [[package]] 
name = "idna" -version = "3.7" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "importlib-metadata" -version = "7.1.0" +version = "8.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, - {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, ] [package.dependencies] -zipp = ">=0.5" +zipp = ">=3.20" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] [[package]] name = "ipython" @@ -769,165 +584,149 @@ files = [ [[package]] name = "lxml" -version = "5.2.1" +version = "5.3.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
optional = false python-versions = ">=3.6" files = [ - {file = "lxml-5.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1f7785f4f789fdb522729ae465adcaa099e2a3441519df750ebdccc481d961a1"}, - {file = "lxml-5.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6cc6ee342fb7fa2471bd9b6d6fdfc78925a697bf5c2bcd0a302e98b0d35bfad3"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:794f04eec78f1d0e35d9e0c36cbbb22e42d370dda1609fb03bcd7aeb458c6377"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817d420c60a5183953c783b0547d9eb43b7b344a2c46f69513d5952a78cddf3"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2213afee476546a7f37c7a9b4ad4d74b1e112a6fafffc9185d6d21f043128c81"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b070bbe8d3f0f6147689bed981d19bbb33070225373338df755a46893528104a"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e02c5175f63effbd7c5e590399c118d5db6183bbfe8e0d118bdb5c2d1b48d937"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:3dc773b2861b37b41a6136e0b72a1a44689a9c4c101e0cddb6b854016acc0aa8"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:d7520db34088c96cc0e0a3ad51a4fd5b401f279ee112aa2b7f8f976d8582606d"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:bcbf4af004f98793a95355980764b3d80d47117678118a44a80b721c9913436a"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2b44bec7adf3e9305ce6cbfa47a4395667e744097faed97abb4728748ba7d47"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1c5bb205e9212d0ebddf946bc07e73fa245c864a5f90f341d11ce7b0b854475d"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2c9d147f754b1b0e723e6afb7ba1566ecb162fe4ea657f53d2139bbf894d050a"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3545039fa4779be2df51d6395e91a810f57122290864918b172d5dc7ca5bb433"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a91481dbcddf1736c98a80b122afa0f7296eeb80b72344d7f45dc9f781551f56"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2ddfe41ddc81f29a4c44c8ce239eda5ade4e7fc305fb7311759dd6229a080052"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a7baf9ffc238e4bf401299f50e971a45bfcc10a785522541a6e3179c83eabf0a"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:31e9a882013c2f6bd2f2c974241bf4ba68c85eba943648ce88936d23209a2e01"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0a15438253b34e6362b2dc41475e7f80de76320f335e70c5528b7148cac253a1"}, - {file = "lxml-5.2.1-cp310-cp310-win32.whl", hash = "sha256:6992030d43b916407c9aa52e9673612ff39a575523c5f4cf72cdef75365709a5"}, - {file = "lxml-5.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:da052e7962ea2d5e5ef5bc0355d55007407087392cf465b7ad84ce5f3e25fe0f"}, - {file = "lxml-5.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:70ac664a48aa64e5e635ae5566f5227f2ab7f66a3990d67566d9907edcbbf867"}, - {file = "lxml-5.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1ae67b4e737cddc96c99461d2f75d218bdf7a0c3d3ad5604d1f5e7464a2f9ffe"}, - {file = 
"lxml-5.2.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f18a5a84e16886898e51ab4b1d43acb3083c39b14c8caeb3589aabff0ee0b270"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6f2c8372b98208ce609c9e1d707f6918cc118fea4e2c754c9f0812c04ca116d"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:394ed3924d7a01b5bd9a0d9d946136e1c2f7b3dc337196d99e61740ed4bc6fe1"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d077bc40a1fe984e1a9931e801e42959a1e6598edc8a3223b061d30fbd26bbc"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:764b521b75701f60683500d8621841bec41a65eb739b8466000c6fdbc256c240"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:3a6b45da02336895da82b9d472cd274b22dc27a5cea1d4b793874eead23dd14f"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:5ea7b6766ac2dfe4bcac8b8595107665a18ef01f8c8343f00710b85096d1b53a"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:e196a4ff48310ba62e53a8e0f97ca2bca83cdd2fe2934d8b5cb0df0a841b193a"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:200e63525948e325d6a13a76ba2911f927ad399ef64f57898cf7c74e69b71095"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dae0ed02f6b075426accbf6b2863c3d0a7eacc1b41fb40f2251d931e50188dad"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:ab31a88a651039a07a3ae327d68ebdd8bc589b16938c09ef3f32a4b809dc96ef"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:df2e6f546c4df14bc81f9498bbc007fbb87669f1bb707c6138878c46b06f6510"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5dd1537e7cc06efd81371f5d1a992bd5ab156b2b4f88834ca852de4a8ea523fa"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9b9ec9c9978b708d488bec36b9e4c94d88fd12ccac3e62134a9d17ddba910ea9"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8e77c69d5892cb5ba71703c4057091e31ccf534bd7f129307a4d084d90d014b8"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a8d5c70e04aac1eda5c829a26d1f75c6e5286c74743133d9f742cda8e53b9c2f"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c94e75445b00319c1fad60f3c98b09cd63fe1134a8a953dcd48989ef42318534"}, - {file = "lxml-5.2.1-cp311-cp311-win32.whl", hash = "sha256:4951e4f7a5680a2db62f7f4ab2f84617674d36d2d76a729b9a8be4b59b3659be"}, - {file = "lxml-5.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:5c670c0406bdc845b474b680b9a5456c561c65cf366f8db5a60154088c92d102"}, - {file = "lxml-5.2.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:abc25c3cab9ec7fcd299b9bcb3b8d4a1231877e425c650fa1c7576c5107ab851"}, - {file = "lxml-5.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6935bbf153f9a965f1e07c2649c0849d29832487c52bb4a5c5066031d8b44fd5"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d793bebb202a6000390a5390078e945bbb49855c29c7e4d56a85901326c3b5d9"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afd5562927cdef7c4f5550374acbc117fd4ecc05b5007bdfa57cc5355864e0a4"}, - {file = 
"lxml-5.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e7259016bc4345a31af861fdce942b77c99049d6c2107ca07dc2bba2435c1d9"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:530e7c04f72002d2f334d5257c8a51bf409db0316feee7c87e4385043be136af"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59689a75ba8d7ffca577aefd017d08d659d86ad4585ccc73e43edbfc7476781a"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f9737bf36262046213a28e789cc82d82c6ef19c85a0cf05e75c670a33342ac2c"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:3a74c4f27167cb95c1d4af1c0b59e88b7f3e0182138db2501c353555f7ec57f4"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:68a2610dbe138fa8c5826b3f6d98a7cfc29707b850ddcc3e21910a6fe51f6ca0"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f0a1bc63a465b6d72569a9bba9f2ef0334c4e03958e043da1920299100bc7c08"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c2d35a1d047efd68027817b32ab1586c1169e60ca02c65d428ae815b593e65d4"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:79bd05260359170f78b181b59ce871673ed01ba048deef4bf49a36ab3e72e80b"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:865bad62df277c04beed9478fe665b9ef63eb28fe026d5dedcb89b537d2e2ea6"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:44f6c7caff88d988db017b9b0e4ab04934f11e3e72d478031efc7edcac6c622f"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:71e97313406ccf55d32cc98a533ee05c61e15d11b99215b237346171c179c0b0"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:057cdc6b86ab732cf361f8b4d8af87cf195a1f6dc5b0ff3de2dced242c2015e0"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f3bbbc998d42f8e561f347e798b85513ba4da324c2b3f9b7969e9c45b10f6169"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:491755202eb21a5e350dae00c6d9a17247769c64dcf62d8c788b5c135e179dc4"}, - {file = "lxml-5.2.1-cp312-cp312-win32.whl", hash = "sha256:8de8f9d6caa7f25b204fc861718815d41cbcf27ee8f028c89c882a0cf4ae4134"}, - {file = "lxml-5.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:f2a9efc53d5b714b8df2b4b3e992accf8ce5bbdfe544d74d5c6766c9e1146a3a"}, - {file = "lxml-5.2.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:70a9768e1b9d79edca17890175ba915654ee1725975d69ab64813dd785a2bd5c"}, - {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c38d7b9a690b090de999835f0443d8aa93ce5f2064035dfc48f27f02b4afc3d0"}, - {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5670fb70a828663cc37552a2a85bf2ac38475572b0e9b91283dc09efb52c41d1"}, - {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:958244ad566c3ffc385f47dddde4145088a0ab893504b54b52c041987a8c1863"}, - {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6241d4eee5f89453307c2f2bfa03b50362052ca0af1efecf9fef9a41a22bb4f"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2a66bf12fbd4666dd023b6f51223aed3d9f3b40fef06ce404cb75bafd3d89536"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = 
"sha256:9123716666e25b7b71c4e1789ec829ed18663152008b58544d95b008ed9e21e9"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:0c3f67e2aeda739d1cc0b1102c9a9129f7dc83901226cc24dd72ba275ced4218"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5d5792e9b3fb8d16a19f46aa8208987cfeafe082363ee2745ea8b643d9cc5b45"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:88e22fc0a6684337d25c994381ed8a1580a6f5ebebd5ad41f89f663ff4ec2885"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:21c2e6b09565ba5b45ae161b438e033a86ad1736b8c838c766146eff8ceffff9"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_s390x.whl", hash = "sha256:afbbdb120d1e78d2ba8064a68058001b871154cc57787031b645c9142b937a62"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:627402ad8dea044dde2eccde4370560a2b750ef894c9578e1d4f8ffd54000461"}, - {file = "lxml-5.2.1-cp36-cp36m-win32.whl", hash = "sha256:e89580a581bf478d8dcb97d9cd011d567768e8bc4095f8557b21c4d4c5fea7d0"}, - {file = "lxml-5.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:59565f10607c244bc4c05c0c5fa0c190c990996e0c719d05deec7030c2aa8289"}, - {file = "lxml-5.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:857500f88b17a6479202ff5fe5f580fc3404922cd02ab3716197adf1ef628029"}, - {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56c22432809085b3f3ae04e6e7bdd36883d7258fcd90e53ba7b2e463efc7a6af"}, - {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a55ee573116ba208932e2d1a037cc4b10d2c1cb264ced2184d00b18ce585b2c0"}, - {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:6cf58416653c5901e12624e4013708b6e11142956e7f35e7a83f1ab02f3fe456"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:64c2baa7774bc22dd4474248ba16fe1a7f611c13ac6123408694d4cc93d66dbd"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:74b28c6334cca4dd704e8004cba1955af0b778cf449142e581e404bd211fb619"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7221d49259aa1e5a8f00d3d28b1e0b76031655ca74bb287123ef56c3db92f213"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3dbe858ee582cbb2c6294dc85f55b5f19c918c2597855e950f34b660f1a5ede6"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:04ab5415bf6c86e0518d57240a96c4d1fcfc3cb370bb2ac2a732b67f579e5a04"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:6ab833e4735a7e5533711a6ea2df26459b96f9eec36d23f74cafe03631647c41"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f443cdef978430887ed55112b491f670bba6462cea7a7742ff8f14b7abb98d75"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9e2addd2d1866fe112bc6f80117bcc6bc25191c5ed1bfbcf9f1386a884252ae8"}, - {file = "lxml-5.2.1-cp37-cp37m-win32.whl", hash = "sha256:f51969bac61441fd31f028d7b3b45962f3ecebf691a510495e5d2cd8c8092dbd"}, - {file = "lxml-5.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b0b58fbfa1bf7367dde8a557994e3b1637294be6cf2169810375caf8571a085c"}, - {file = "lxml-5.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:804f74efe22b6a227306dd890eecc4f8c59ff25ca35f1f14e7482bbce96ef10b"}, - {file = "lxml-5.2.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:08802f0c56ed150cc6885ae0788a321b73505d2263ee56dad84d200cab11c07a"}, - {file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f8c09ed18ecb4ebf23e02b8e7a22a05d6411911e6fabef3a36e4f371f4f2585"}, - {file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3d30321949861404323c50aebeb1943461a67cd51d4200ab02babc58bd06a86"}, - {file = "lxml-5.2.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:b560e3aa4b1d49e0e6c847d72665384db35b2f5d45f8e6a5c0072e0283430533"}, - {file = "lxml-5.2.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:058a1308914f20784c9f4674036527e7c04f7be6fb60f5d61353545aa7fcb739"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:adfb84ca6b87e06bc6b146dc7da7623395db1e31621c4785ad0658c5028b37d7"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:417d14450f06d51f363e41cace6488519038f940676ce9664b34ebf5653433a5"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a2dfe7e2473f9b59496247aad6e23b405ddf2e12ef0765677b0081c02d6c2c0b"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bf2e2458345d9bffb0d9ec16557d8858c9c88d2d11fed53998512504cd9df49b"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:58278b29cb89f3e43ff3e0c756abbd1518f3ee6adad9e35b51fb101c1c1daaec"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:64641a6068a16201366476731301441ce93457eb8452056f570133a6ceb15fca"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:78bfa756eab503673991bdcf464917ef7845a964903d3302c5f68417ecdc948c"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:11a04306fcba10cd9637e669fd73aa274c1c09ca64af79c041aa820ea992b637"}, - {file = "lxml-5.2.1-cp38-cp38-win32.whl", hash = "sha256:66bc5eb8a323ed9894f8fa0ee6cb3e3fb2403d99aee635078fd19a8bc7a5a5da"}, - {file = "lxml-5.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:9676bfc686fa6a3fa10cd4ae6b76cae8be26eb5ec6811d2a325636c460da1806"}, - {file = "lxml-5.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cf22b41fdae514ee2f1691b6c3cdeae666d8b7fa9434de445f12bbeee0cf48dd"}, - {file = "lxml-5.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ec42088248c596dbd61d4ae8a5b004f97a4d91a9fd286f632e42e60b706718d7"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd53553ddad4a9c2f1f022756ae64abe16da1feb497edf4d9f87f99ec7cf86bd"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feaa45c0eae424d3e90d78823f3828e7dc42a42f21ed420db98da2c4ecf0a2cb"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddc678fb4c7e30cf830a2b5a8d869538bc55b28d6c68544d09c7d0d8f17694dc"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:853e074d4931dbcba7480d4dcab23d5c56bd9607f92825ab80ee2bd916edea53"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc4691d60512798304acb9207987e7b2b7c44627ea88b9d77489bbe3e6cc3bd4"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:beb72935a941965c52990f3a32d7f07ce869fe21c6af8b34bf6a277b33a345d3"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:6588c459c5627fefa30139be4d2e28a2c2a1d0d1c265aad2ba1935a7863a4913"}, - {file = 
"lxml-5.2.1-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:588008b8497667f1ddca7c99f2f85ce8511f8f7871b4a06ceede68ab62dff64b"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6787b643356111dfd4032b5bffe26d2f8331556ecb79e15dacb9275da02866e"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7c17b64b0a6ef4e5affae6a3724010a7a66bda48a62cfe0674dabd46642e8b54"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:27aa20d45c2e0b8cd05da6d4759649170e8dfc4f4e5ef33a34d06f2d79075d57"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d4f2cc7060dc3646632d7f15fe68e2fa98f58e35dd5666cd525f3b35d3fed7f8"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff46d772d5f6f73564979cd77a4fffe55c916a05f3cb70e7c9c0590059fb29ef"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:96323338e6c14e958d775700ec8a88346014a85e5de73ac7967db0367582049b"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:52421b41ac99e9d91934e4d0d0fe7da9f02bfa7536bb4431b4c05c906c8c6919"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:7a7efd5b6d3e30d81ec68ab8a88252d7c7c6f13aaa875009fe3097eb4e30b84c"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ed777c1e8c99b63037b91f9d73a6aad20fd035d77ac84afcc205225f8f41188"}, - {file = "lxml-5.2.1-cp39-cp39-win32.whl", hash = "sha256:644df54d729ef810dcd0f7732e50e5ad1bd0a135278ed8d6bcb06f33b6b6f708"}, - {file = "lxml-5.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:9ca66b8e90daca431b7ca1408cae085d025326570e57749695d6a01454790e95"}, - {file = "lxml-5.2.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9b0ff53900566bc6325ecde9181d89afadc59c5ffa39bddf084aaedfe3b06a11"}, - {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd6037392f2d57793ab98d9e26798f44b8b4da2f2464388588f48ac52c489ea1"}, - {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b9c07e7a45bb64e21df4b6aa623cb8ba214dfb47d2027d90eac197329bb5e94"}, - {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3249cc2989d9090eeac5467e50e9ec2d40704fea9ab72f36b034ea34ee65ca98"}, - {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f42038016852ae51b4088b2862126535cc4fc85802bfe30dea3500fdfaf1864e"}, - {file = "lxml-5.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:533658f8fbf056b70e434dff7e7aa611bcacb33e01f75de7f821810e48d1bb66"}, - {file = "lxml-5.2.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:622020d4521e22fb371e15f580d153134bfb68d6a429d1342a25f051ec72df1c"}, - {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efa7b51824aa0ee957ccd5a741c73e6851de55f40d807f08069eb4c5a26b2baa"}, - {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c6ad0fbf105f6bcc9300c00010a2ffa44ea6f555df1a2ad95c88f5656104817"}, - {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e233db59c8f76630c512ab4a4daf5a5986da5c3d5b44b8e9fc742f2a24dbd460"}, - {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a014510830df1475176466b6087fc0c08b47a36714823e58d8b8d7709132a96"}, - {file = "lxml-5.2.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:d38c8f50ecf57f0463399569aa388b232cf1a2ffb8f0a9a5412d0db57e054860"}, - {file = 
"lxml-5.2.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5aea8212fb823e006b995c4dda533edcf98a893d941f173f6c9506126188860d"}, - {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff097ae562e637409b429a7ac958a20aab237a0378c42dabaa1e3abf2f896e5f"}, - {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f5d65c39f16717a47c36c756af0fb36144069c4718824b7533f803ecdf91138"}, - {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3d0c3dd24bb4605439bf91068598d00c6370684f8de4a67c2992683f6c309d6b"}, - {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e32be23d538753a8adb6c85bd539f5fd3b15cb987404327c569dfc5fd8366e85"}, - {file = "lxml-5.2.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cc518cea79fd1e2f6c90baafa28906d4309d24f3a63e801d855e7424c5b34144"}, - {file = "lxml-5.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a0af35bd8ebf84888373630f73f24e86bf016642fb8576fba49d3d6b560b7cbc"}, - {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8aca2e3a72f37bfc7b14ba96d4056244001ddcc18382bd0daa087fd2e68a354"}, - {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ca1e8188b26a819387b29c3895c47a5e618708fe6f787f3b1a471de2c4a94d9"}, - {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c8ba129e6d3b0136a0f50345b2cb3db53f6bda5dd8c7f5d83fbccba97fb5dcb5"}, - {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e998e304036198b4f6914e6a1e2b6f925208a20e2042563d9734881150c6c246"}, - {file = "lxml-5.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d3be9b2076112e51b323bdf6d5a7f8a798de55fb8d95fcb64bd179460cdc0704"}, - {file = "lxml-5.2.1.tar.gz", hash = "sha256:3f7765e69bbce0906a7c74d5fe46d2c7a7596147318dbc08e4a2431f3060e306"}, + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"}, + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:501d0d7e26b4d261fca8132854d845e4988097611ba2531408ec91cf3fd9d20a"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66442c2546446944437df74379e9cf9e9db353e61301d1a0e26482f43f0dd8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e41506fec7a7f9405b14aa2d5c8abbb4dbbd09d88f9496958b6d00cb4d45330"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7d4a670107d75dfe5ad080bed6c341d18c4442f9378c9f58e5851e86eb79965"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41ce1f1e2c7755abfc7e759dc34d7d05fd221723ff822947132dc934d122fe22"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:44264ecae91b30e5633013fb66f6ddd05c006d3e0e884f75ce0b4755b3e3847b"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:3c174dc350d3ec52deb77f2faf05c439331d6ed5e702fc247ccb4e6b62d884b7"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:2dfab5fa6a28a0b60a20638dc48e6343c02ea9933e3279ccb132f555a62323d8"}, 
+ {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b1c8c20847b9f34e98080da785bb2336ea982e7f913eed5809e5a3c872900f32"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2c86bf781b12ba417f64f3422cfc302523ac9cd1d8ae8c0f92a1c66e56ef2e86"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c162b216070f280fa7da844531169be0baf9ccb17263cf5a8bf876fcd3117fa5"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:36aef61a1678cb778097b4a6eeae96a69875d51d1e8f4d4b491ab3cfb54b5a03"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f65e5120863c2b266dbcc927b306c5b78e502c71edf3295dfcb9501ec96e5fc7"}, + {file = "lxml-5.3.0-cp310-cp310-win32.whl", hash = "sha256:ef0c1fe22171dd7c7c27147f2e9c3e86f8bdf473fed75f16b0c2e84a5030ce80"}, + {file = "lxml-5.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:052d99051e77a4f3e8482c65014cf6372e61b0a6f4fe9edb98503bb5364cfee3"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be"}, + {file = "lxml-5.3.0-cp311-cp311-win32.whl", hash = "sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9"}, + {file = "lxml-5.3.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d"}, + {file = "lxml-5.3.0-cp312-cp312-win32.whl", hash = "sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30"}, + {file = "lxml-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002"}, + {file = 
"lxml-5.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b"}, + {file = "lxml-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957"}, + {file = "lxml-5.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d"}, + {file = "lxml-5.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8f0de2d390af441fe8b2c12626d103540b5d850d585b18fcada58d972b74a74e"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1afe0a8c353746e610bd9031a630a95bcfb1a720684c3f2b36c4710a0a96528f"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56b9861a71575f5795bde89256e7467ece3d339c9b43141dbdd54544566b3b94"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:9fb81d2824dff4f2e297a276297e9031f46d2682cafc484f49de182aa5e5df99"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2c226a06ecb8cdef28845ae976da407917542c5e6e75dcac7cc33eb04aaeb237"}, + {file = "lxml-5.3.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:7d3d1ca42870cdb6d0d29939630dbe48fa511c203724820fc0fd507b2fb46577"}, + {file = "lxml-5.3.0-cp36-cp36m-win32.whl", hash = "sha256:094cb601ba9f55296774c2d57ad68730daa0b13dc260e1f941b4d13678239e70"}, + {file = "lxml-5.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:eafa2c8658f4e560b098fe9fc54539f86528651f61849b22111a9b107d18910c"}, + {file = "lxml-5.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cb83f8a875b3d9b458cada4f880fa498646874ba4011dc974e071a0a84a1b033"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25f1b69d41656b05885aa185f5fdf822cb01a586d1b32739633679699f220391"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23e0553b8055600b3bf4a00b255ec5c92e1e4aebf8c2c09334f8368e8bd174d6"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9ada35dd21dc6c039259596b358caab6b13f4db4d4a7f8665764d616daf9cc1d"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:81b4e48da4c69313192d8c8d4311e5d818b8be1afe68ee20f6385d0e96fc9512"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:2bc9fd5ca4729af796f9f59cd8ff160fe06a474da40aca03fcc79655ddee1a8b"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07da23d7ee08577760f0a71d67a861019103e4812c87e2fab26b039054594cc5"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ea2e2f6f801696ad7de8aec061044d6c8c0dd4037608c7cab38a9a4d316bfb11"}, + {file = "lxml-5.3.0-cp37-cp37m-win32.whl", hash = "sha256:5c54afdcbb0182d06836cc3d1be921e540be3ebdf8b8a51ee3ef987537455f84"}, + {file = "lxml-5.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f2901429da1e645ce548bf9171784c0f74f0718c3f6150ce166be39e4dd66c3e"}, + {file = "lxml-5.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c56a1d43b2f9ee4786e4658c7903f05da35b923fb53c11025712562d5cc02753"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ee8c39582d2652dcd516d1b879451500f8db3fe3607ce45d7c5957ab2596040"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdf3a3059611f7585a78ee10399a15566356116a4288380921a4b598d807a22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:146173654d79eb1fc97498b4280c1d3e1e5d58c398fa530905c9ea50ea849b22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0a7056921edbdd7560746f4221dca89bb7a3fe457d3d74267995253f46343f15"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:9e4b47ac0f5e749cfc618efdf4726269441014ae1d5583e047b452a32e221920"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f914c03e6a31deb632e2daa881fe198461f4d06e57ac3d0e05bbcab8eae01945"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:213261f168c5e1d9b7535a67e68b1f59f92398dd17a56d934550837143f79c42"}, + {file = "lxml-5.3.0-cp38-cp38-win32.whl", hash = "sha256:218c1b2e17a710e363855594230f44060e2025b05c80d1f0661258142b2add2e"}, + {file = "lxml-5.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:315f9542011b2c4e1d280e4a20ddcca1761993dda3afc7a73b01235f8641e903"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1ffc23010330c2ab67fac02781df60998ca8fe759e8efde6f8b756a20599c5de"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2b3778cb38212f52fac9fe913017deea2fdf4eb1a4f8e4cfc6b009a13a6d3fcc"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b0c7a688944891086ba192e21c5229dea54382f4836a209ff8d0a660fac06be"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:747a3d3e98e24597981ca0be0fd922aebd471fa99d0043a3842d00cdcad7ad6a"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86a6b24b19eaebc448dc56b87c4865527855145d851f9fc3891673ff97950540"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b11a5d918a6216e521c715b02749240fb07ae5a1fefd4b7bf12f833bc8b4fe70"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68b87753c784d6acb8a25b05cb526c3406913c9d988d51f80adecc2b0775d6aa"}, + 
{file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:109fa6fede314cc50eed29e6e56c540075e63d922455346f11e4d7a036d2b8cf"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:02ced472497b8362c8e902ade23e3300479f4f43e45f4105c85ef43b8db85229"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:6b038cc86b285e4f9fea2ba5ee76e89f21ed1ea898e287dc277a25884f3a7dfe"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:7437237c6a66b7ca341e868cda48be24b8701862757426852c9b3186de1da8a2"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7f41026c1d64043a36fda21d64c5026762d53a77043e73e94b71f0521939cc71"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:482c2f67761868f0108b1743098640fbb2a28a8e15bf3f47ada9fa59d9fe08c3"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1483fd3358963cc5c1c9b122c80606a3a79ee0875bcac0204149fa09d6ff2727"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dec2d1130a9cda5b904696cec33b2cfb451304ba9081eeda7f90f724097300a"}, + {file = "lxml-5.3.0-cp39-cp39-win32.whl", hash = "sha256:a0eabd0a81625049c5df745209dc7fcef6e2aea7793e5f003ba363610aa0a3ff"}, + {file = "lxml-5.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:89e043f1d9d341c52bf2af6d02e6adde62e0a46e6755d5eb60dc6e4f0b8aeca2"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7b1cd427cb0d5f7393c31b7496419da594fe600e6fdc4b105a54f82405e6626c"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51806cfe0279e06ed8500ce19479d757db42a30fd509940b1701be9c86a5ff9a"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee70d08fd60c9565ba8190f41a46a54096afa0eeb8f76bd66f2c25d3b1b83005"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8dc2c0395bea8254d8daebc76dcf8eb3a95ec2a46fa6fae5eaccee366bfe02ce"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6ba0d3dcac281aad8a0e5b14c7ed6f9fa89c8612b47939fc94f80b16e2e9bc83"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6e91cf736959057f7aac7adfc83481e03615a8e8dd5758aa1d95ea69e8931dba"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:94d6c3782907b5e40e21cadf94b13b0842ac421192f26b84c45f13f3c9d5dc27"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c300306673aa0f3ed5ed9372b21867690a17dba38c68c44b287437c362ce486b"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d9b952e07aed35fe2e1a7ad26e929595412db48535921c5013edc8aa4a35ce"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:01220dca0d066d1349bd6a1726856a78f7929f3878f7e2ee83c296c69495309e"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2d9b8d9177afaef80c53c0a9e30fa252ff3036fb1c6494d427c066a4ce6a282f"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:20094fc3f21ea0a8669dc4c61ed7fa8263bd37d97d93b90f28fc613371e7a875"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ace2c2326a319a0bb8a8b0e5b570c764962e95818de9f259ce814ee666603f19"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:92e67a0be1639c251d21e35fe74df6bcc40cba445c2cda7c4a967656733249e2"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5350b55f9fecddc51385463a4f67a5da829bc741e38cf689f38ec9023f54ab"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c1fefd7e3d00921c44dc9ca80a775af49698bbfd92ea84498e56acffd4c5469"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:71a8dd38fbd2f2319136d4ae855a7078c69c9a38ae06e0c17c73fd70fc6caad8"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:97acf1e1fd66ab53dacd2c35b319d7e548380c2e9e8c54525c6e76d21b1ae3b1"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:68934b242c51eb02907c5b81d138cb977b2129a0a75a8f8b60b01cb8586c7b21"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b710bc2b8292966b23a6a0121f7a6c51d45d2347edcc75f016ac123b8054d3f2"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18feb4b93302091b1541221196a2155aa296c363fd233814fa11e181adebc52f"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3eb44520c4724c2e1a57c0af33a379eee41792595023f367ba3952a2d96c2aab"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:609251a0ca4770e5a8768ff902aa02bf636339c5a93f9349b48eb1f606f7f3e9"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:516f491c834eb320d6c843156440fe7fc0d50b33e44387fcec5b02f0bc118a4c"}, + {file = "lxml-5.3.0.tar.gz", hash = "sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f"}, ] [package.extras] @@ -935,7 +734,7 @@ cssselect = ["cssselect (>=0.7)"] html-clean = ["lxml-html-clean"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=3.0.10)"] +source = ["Cython (>=3.0.11)"] [[package]] name = "matplotlib-inline" @@ -951,174 +750,17 @@ files = [ [package.dependencies] traitlets = "*" -[[package]] -name = "multidict" -version = "6.0.5" -description = "multidict implementation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, - {file = 
"multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, - {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, - {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, - {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, - {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, - {file = 
"multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, - {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, - {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, - {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, - {file = 
"multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, - {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, - {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, - {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, - {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, - {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, - {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, - {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, - {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, -] - -[[package]] -name = "nest-asyncio" -version = "1.6.0" -description = "Patch asyncio to allow nested event loops" -optional = false -python-versions = ">=3.5" -files = [ - {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, - {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, -] - -[[package]] -name = "netcdf4" -version = "1.6.5" -description = "Provides an object-oriented python interface to the netCDF version 4 library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "netCDF4-1.6.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d23b97cbde2bf413fadc4697c5c255a0436511c02f811e127e0fb12f5b882a4c"}, - {file = "netCDF4-1.6.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e5edfed673005f47f8d2fbea9c72c382b085dd358ac3c20ca743a563ed7b90e"}, - {file = "netCDF4-1.6.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10d2ac9ae1308ca837d86c6dc304ec455a85bdba0f2175e222844a54589168dc"}, - {file = "netCDF4-1.6.5-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a63a2be2f80977ac23bb0aa736c565011fd4639097ce0922e01b0dc38015df2"}, - {file = "netCDF4-1.6.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aaceea2097d292bad398d9f9b4fe403efa7b1568fcfa6faba9b67b1630027f9"}, - {file = "netCDF4-1.6.5-cp310-cp310-win_amd64.whl", hash = 
"sha256:111357d9e12eb79e8d58bfd91bc6b230d35b17a0ebd8c546d17416e8ceebea49"}, - {file = "netCDF4-1.6.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c5fede0b34c0a02a1b9e84116bfb3fcd2f80124a651d4836e72b785d10e2f15"}, - {file = "netCDF4-1.6.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3de5512b9270aa6472e4f3aa2bf895a7364c1d4f8667ce3b82e8232197d4fec8"}, - {file = "netCDF4-1.6.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b20971a164431f6eca1d24df8aa153db15c2c1b9630e83ccc5cf004e8ac8151d"}, - {file = "netCDF4-1.6.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad1101d538077152b866782e44458356981526bf2ea9cc07930bf28b589c82a7"}, - {file = "netCDF4-1.6.5-cp311-cp311-win_amd64.whl", hash = "sha256:de4dc973fae9e2bbdf42e094125e423a4c25393172a61958314969b055a38889"}, - {file = "netCDF4-1.6.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:19e16c63cdd7c0dbffe284a4a65f226ba1026f476f35cbedd099b4792b395f69"}, - {file = "netCDF4-1.6.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b994afce2ca4073f6b757385a6c0ffec25ecaae2b8821535b303c7cdbf6de42b"}, - {file = "netCDF4-1.6.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0187646e3348e7a8cd654617dda65517df138042c94c2fcc6682ff7c8c6654dc"}, - {file = "netCDF4-1.6.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1ab5dabac27d25fcc82c52dc29a74a6585e865208cce35f4e285df83d3df0b2"}, - {file = "netCDF4-1.6.5-cp312-cp312-win_amd64.whl", hash = "sha256:081e9043ac6160989f60570928eabe803c88ce7df1d3f79f2345dc48f68ef752"}, - {file = "netCDF4-1.6.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b47b22dda5b25ba6291f97634d7ac67b0a843f8ae5c9d9d5813c15364f66d0a"}, - {file = "netCDF4-1.6.5-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4609dd62d14798c9524327287091875449d68588c128abb768fc0c76c4a28165"}, - {file = "netCDF4-1.6.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2455e9d35fde067e6a6bdc24aa9d44962235a071cec49904d1589e298c23dcd3"}, - {file = "netCDF4-1.6.5-cp38-cp38-win_amd64.whl", hash = "sha256:2c210794d96431d92b5992e46ad8a9f97237bf6d6956f8816978a03dc0fa18c3"}, - {file = "netCDF4-1.6.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:18255b8b283d32d3900092f29c67e53aa25bd8f0dfe7adde59fe782d865a381c"}, - {file = "netCDF4-1.6.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:53050562bac84738bbd121fbbee9593d074579f5d6fdaafcb981abeb5c964225"}, - {file = "netCDF4-1.6.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:938c062382406bca9198b16adddd87c09b00521766b138cdfd11c95546eefeb8"}, - {file = "netCDF4-1.6.5-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a8300451d7542d3c4ff1dcccf5fb1c7d44bdd1dc08ec77dab04416caf13cb1f"}, - {file = "netCDF4-1.6.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a27db2701feef31201c9b20b04a9579196edc20dfc339ca423c7b81e462d6e14"}, - {file = "netCDF4-1.6.5-cp39-cp39-win_amd64.whl", hash = "sha256:574d7742ab321e5f9f33b5b1296c4ad4e5c469152c17d4fc453d5070e413e596"}, - {file = "netCDF4-1.6.5.tar.gz", hash = "sha256:824881d0aacfde5bd982d6adedd8574259c85553781e7b83e0ce82b890bfa0ef"}, -] - -[package.dependencies] -certifi = "*" -cftime = "*" -numpy = "*" - -[package.extras] -tests = ["Cython", "packaging", "pytest"] - [[package]] name = "nodeenv" -version = "1.8.0" +version = "1.9.1" description = "Node.js virtual environment builder" optional = false -python-versions = 
">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, - {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] -[package.dependencies] -setuptools = "*" - [[package]] name = "numcodecs" version = "0.12.1" @@ -1161,96 +803,118 @@ zfpy = ["zfpy (>=1.0.0)"] [[package]] name = "numpy" -version = "1.26.4" +version = "2.0.2" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = 
"numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b"}, + {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd"}, + {file = 
"numpy-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318"}, + {file = "numpy-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8"}, + {file = "numpy-2.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326"}, + {file = "numpy-2.0.2-cp310-cp310-win32.whl", hash = "sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97"}, + {file = "numpy-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a"}, + {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669"}, + {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951"}, + {file = "numpy-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9"}, + {file = "numpy-2.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15"}, + {file = "numpy-2.0.2-cp311-cp311-win32.whl", hash = "sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4"}, + {file = "numpy-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c"}, + {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692"}, + {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a"}, + {file = "numpy-2.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c"}, + {file = "numpy-2.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded"}, + {file = "numpy-2.0.2-cp312-cp312-win32.whl", hash = "sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5"}, + {file = "numpy-2.0.2-cp312-cp312-win_amd64.whl", 
hash = "sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729"}, + {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1"}, + {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd"}, + {file = "numpy-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d"}, + {file = "numpy-2.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d"}, + {file = "numpy-2.0.2-cp39-cp39-win32.whl", hash = "sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa"}, + {file = "numpy-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385"}, + {file = "numpy-2.0.2.tar.gz", hash = "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"}, ] [[package]] name = "packaging" -version = "24.0" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] name = "pandas" -version = "2.2.2" +version = "2.2.3" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, - {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, - {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, - {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, - {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, - {file = 
"pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, - {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, - {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, + {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, + {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, + {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, + {file = 
"pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, + {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, + {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, + {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, ] [package.dependencies] @@ -1305,13 +969,13 @@ testing = ["docopt", "pytest"] [[package]] name = "partd" -version = "1.4.1" +version = "1.4.2" description = "Appendable key-value storage" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "partd-1.4.1-py3-none-any.whl", hash = "sha256:27e766663d36c161e2827aa3e28541c992f0b9527d3cca047e13fb3acdb989e6"}, - {file = "partd-1.4.1.tar.gz", 
hash = "sha256:56c25dd49e6fea5727e731203c466c6e092f308d8f0024e199d02f6aa2167f67"}, + {file = "partd-1.4.2-py3-none-any.whl", hash = "sha256:978e4ac767ec4ba5b86c6eaa52e5a2a3bc748a2ca839e8cc798f1cc6ce6efb0f"}, + {file = "partd-1.4.2.tar.gz", hash = "sha256:d022c33afbdc8405c226621b015e8067888173d85f7f5ecebb3cafed9a20f02c"}, ] [package.dependencies] @@ -1319,7 +983,106 @@ locket = "*" toolz = "*" [package.extras] -complete = ["blosc", "numpy (>=1.9.0)", "pandas (>=0.19.0)", "pyzmq"] +complete = ["blosc", "numpy (>=1.20.0)", "pandas (>=1.3)", "pyzmq"] + +[[package]] +name = "pendulum" +version = "3.0.0" +description = "Python datetimes made easy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pendulum-3.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2cf9e53ef11668e07f73190c805dbdf07a1939c3298b78d5a9203a86775d1bfd"}, + {file = "pendulum-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb551b9b5e6059377889d2d878d940fd0bbb80ae4810543db18e6f77b02c5ef6"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c58227ac260d5b01fc1025176d7b31858c9f62595737f350d22124a9a3ad82d"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60fb6f415fea93a11c52578eaa10594568a6716602be8430b167eb0d730f3332"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b69f6b4dbcb86f2c2fe696ba991e67347bcf87fe601362a1aba6431454b46bde"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138afa9c373ee450ede206db5a5e9004fd3011b3c6bbe1e57015395cd076a09f"}, + {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:83d9031f39c6da9677164241fd0d37fbfc9dc8ade7043b5d6d62f56e81af8ad2"}, + {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c2308af4033fa534f089595bcd40a95a39988ce4059ccd3dc6acb9ef14ca44a"}, + {file = "pendulum-3.0.0-cp310-none-win_amd64.whl", hash = "sha256:9a59637cdb8462bdf2dbcb9d389518c0263799189d773ad5c11db6b13064fa79"}, + {file = "pendulum-3.0.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3725245c0352c95d6ca297193192020d1b0c0f83d5ee6bb09964edc2b5a2d508"}, + {file = "pendulum-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6c035f03a3e565ed132927e2c1b691de0dbf4eb53b02a5a3c5a97e1a64e17bec"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597e66e63cbd68dd6d58ac46cb7a92363d2088d37ccde2dae4332ef23e95cd00"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99a0f8172e19f3f0c0e4ace0ad1595134d5243cf75985dc2233e8f9e8de263ca"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77d8839e20f54706aed425bec82a83b4aec74db07f26acd039905d1237a5e1d4"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afde30e8146292b059020fbc8b6f8fd4a60ae7c5e6f0afef937bbb24880bdf01"}, + {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:660434a6fcf6303c4efd36713ca9212c753140107ee169a3fc6c49c4711c2a05"}, + {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dee9e5a48c6999dc1106eb7eea3e3a50e98a50651b72c08a87ee2154e544b33e"}, + {file = "pendulum-3.0.0-cp311-none-win_amd64.whl", hash = "sha256:d4cdecde90aec2d67cebe4042fd2a87a4441cc02152ed7ed8fb3ebb110b94ec4"}, + {file = 
"pendulum-3.0.0-cp311-none-win_arm64.whl", hash = "sha256:773c3bc4ddda2dda9f1b9d51fe06762f9200f3293d75c4660c19b2614b991d83"}, + {file = "pendulum-3.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:409e64e41418c49f973d43a28afe5df1df4f1dd87c41c7c90f1a63f61ae0f1f7"}, + {file = "pendulum-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a38ad2121c5ec7c4c190c7334e789c3b4624798859156b138fcc4d92295835dc"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fde4d0b2024b9785f66b7f30ed59281bd60d63d9213cda0eb0910ead777f6d37"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2c5675769fb6d4c11238132962939b960fcb365436b6d623c5864287faa319"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8af95e03e066826f0f4c65811cbee1b3123d4a45a1c3a2b4fc23c4b0dff893b5"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2165a8f33cb15e06c67070b8afc87a62b85c5a273e3aaa6bc9d15c93a4920d6f"}, + {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ad5e65b874b5e56bd942546ea7ba9dd1d6a25121db1c517700f1c9de91b28518"}, + {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17fe4b2c844bbf5f0ece69cfd959fa02957c61317b2161763950d88fed8e13b9"}, + {file = "pendulum-3.0.0-cp312-none-win_amd64.whl", hash = "sha256:78f8f4e7efe5066aca24a7a57511b9c2119f5c2b5eb81c46ff9222ce11e0a7a5"}, + {file = "pendulum-3.0.0-cp312-none-win_arm64.whl", hash = "sha256:28f49d8d1e32aae9c284a90b6bb3873eee15ec6e1d9042edd611b22a94ac462f"}, + {file = "pendulum-3.0.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d4e2512f4e1a4670284a153b214db9719eb5d14ac55ada5b76cbdb8c5c00399d"}, + {file = "pendulum-3.0.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:3d897eb50883cc58d9b92f6405245f84b9286cd2de6e8694cb9ea5cb15195a32"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e169cc2ca419517f397811bbe4589cf3cd13fca6dc38bb352ba15ea90739ebb"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17c3084a4524ebefd9255513692f7e7360e23c8853dc6f10c64cc184e1217ab"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:826d6e258052715f64d05ae0fc9040c0151e6a87aae7c109ba9a0ed930ce4000"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2aae97087872ef152a0c40e06100b3665d8cb86b59bc8471ca7c26132fccd0f"}, + {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac65eeec2250d03106b5e81284ad47f0d417ca299a45e89ccc69e36130ca8bc7"}, + {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a5346d08f3f4a6e9e672187faa179c7bf9227897081d7121866358af369f44f9"}, + {file = "pendulum-3.0.0-cp37-none-win_amd64.whl", hash = "sha256:235d64e87946d8f95c796af34818c76e0f88c94d624c268693c85b723b698aa9"}, + {file = "pendulum-3.0.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6a881d9c2a7f85bc9adafcfe671df5207f51f5715ae61f5d838b77a1356e8b7b"}, + {file = "pendulum-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7762d2076b9b1cb718a6631ad6c16c23fc3fac76cbb8c454e81e80be98daa34"}, + {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e8e36a8130819d97a479a0e7bf379b66b3b1b520e5dc46bd7eb14634338df8c"}, + {file = 
"pendulum-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dc843253ac373358ffc0711960e2dd5b94ab67530a3e204d85c6e8cb2c5fa10"}, + {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a78ad3635d609ceb1e97d6aedef6a6a6f93433ddb2312888e668365908c7120"}, + {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a137e9e0d1f751e60e67d11fc67781a572db76b2296f7b4d44554761049d6"}, + {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c95984037987f4a457bb760455d9ca80467be792236b69d0084f228a8ada0162"}, + {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29c6e578fe0f893766c0d286adbf0b3c726a4e2341eba0917ec79c50274ec16"}, + {file = "pendulum-3.0.0-cp38-none-win_amd64.whl", hash = "sha256:deaba8e16dbfcb3d7a6b5fabdd5a38b7c982809567479987b9c89572df62e027"}, + {file = "pendulum-3.0.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b11aceea5b20b4b5382962b321dbc354af0defe35daa84e9ff3aae3c230df694"}, + {file = "pendulum-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a90d4d504e82ad236afac9adca4d6a19e4865f717034fc69bafb112c320dcc8f"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:825799c6b66e3734227756fa746cc34b3549c48693325b8b9f823cb7d21b19ac"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad769e98dc07972e24afe0cff8d365cb6f0ebc7e65620aa1976fcfbcadc4c6f3"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6fc26907eb5fb8cc6188cc620bc2075a6c534d981a2f045daa5f79dfe50d512"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c717eab1b6d898c00a3e0fa7781d615b5c5136bbd40abe82be100bb06df7a56"}, + {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3ddd1d66d1a714ce43acfe337190be055cdc221d911fc886d5a3aae28e14b76d"}, + {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:822172853d7a9cf6da95d7b66a16c7160cb99ae6df55d44373888181d7a06edc"}, + {file = "pendulum-3.0.0-cp39-none-win_amd64.whl", hash = "sha256:840de1b49cf1ec54c225a2a6f4f0784d50bd47f68e41dc005b7f67c7d5b5f3ae"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b1f74d1e6ffe5d01d6023870e2ce5c2191486928823196f8575dcc786e107b1"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:729e9f93756a2cdfa77d0fc82068346e9731c7e884097160603872686e570f07"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e586acc0b450cd21cbf0db6bae386237011b75260a3adceddc4be15334689a9a"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22e7944ffc1f0099a79ff468ee9630c73f8c7835cd76fdb57ef7320e6a409df4"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fa30af36bd8e50686846bdace37cf6707bdd044e5cb6e1109acbad3277232e04"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:440215347b11914ae707981b9a57ab9c7b6983ab0babde07063c6ee75c0dc6e7"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:314c4038dc5e6a52991570f50edb2f08c339debdf8cea68ac355b32c4174e820"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:5acb1d386337415f74f4d1955c4ce8d0201978c162927d07df8eb0692b2d8533"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a789e12fbdefaffb7b8ac67f9d8f22ba17a3050ceaaa635cd1cc4645773a4b1e"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:860aa9b8a888e5913bd70d819306749e5eb488e6b99cd6c47beb701b22bdecf5"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5ebc65ea033ef0281368217fbf59f5cb05b338ac4dd23d60959c7afcd79a60a0"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9fef18ab0386ef6a9ac7bad7e43ded42c83ff7ad412f950633854f90d59afa8"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c134ba2f0571d0b68b83f6972e2307a55a5a849e7dac8505c715c531d2a8795"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:385680812e7e18af200bb9b4a49777418c32422d05ad5a8eb85144c4a285907b"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eec91cd87c59fb32ec49eb722f375bd58f4be790cae11c1b70fac3ee4f00da0"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4386bffeca23c4b69ad50a36211f75b35a4deb6210bdca112ac3043deb7e494a"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dfbcf1661d7146d7698da4b86e7f04814221081e9fe154183e34f4c5f5fa3bf8"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:04a1094a5aa1daa34a6b57c865b25f691848c61583fb22722a4df5699f6bf74c"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5b0ec85b9045bd49dd3a3493a5e7ddfd31c36a2a60da387c419fa04abcaecb23"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0a15b90129765b705eb2039062a6daf4d22c4e28d1a54fa260892e8c3ae6e157"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:bb8f6d7acd67a67d6fedd361ad2958ff0539445ef51cbe8cd288db4306503cd0"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd69b15374bef7e4b4440612915315cc42e8575fcda2a3d7586a0d88192d0c88"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc00f8110db6898360c53c812872662e077eaf9c75515d53ecc65d886eec209a"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:83a44e8b40655d0ba565a5c3d1365d27e3e6778ae2a05b69124db9e471255c4a"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1a3604e9fbc06b788041b2a8b78f75c243021e0f512447806a6d37ee5214905d"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:92c307ae7accebd06cbae4729f0ba9fa724df5f7d91a0964b1b972a22baa482b"}, + {file = "pendulum-3.0.0.tar.gz", hash = "sha256:5d034998dea404ec31fae27af6b22cff1708f830a1ed7353be4d1019bb9f584e"}, +] + +[package.dependencies] +python-dateutil = ">=2.6" +tzdata = ">=2020.1" + +[package.extras] +test = ["time-machine (>=2.6.0)"] [[package]] name = "pexpect" @@ -1337,37 +1100,19 @@ ptyprocess = ">=0.5" [[package]] name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] - -[[package]] -name = "portalocker" -version = "2.8.2" -description = "Wraps the portalocker recipe for easy usage" +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "portalocker-2.8.2-py3-none-any.whl", hash = "sha256:cfb86acc09b9aa7c3b43594e19be1345b9d16af3feb08bf92f23d4dce513a28e"}, - {file = "portalocker-2.8.2.tar.gz", hash = "sha256:2b035aa7828e46c58e9b31390ee1f169b98e1066ab10b9a6a861fe7e25ee4f33"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] -[package.dependencies] -pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} - [package.extras] -docs = ["sphinx (>=1.7.1)"] -redis = ["redis"] -tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pre-commit" @@ -1389,13 +1134,13 @@ virtualenv = ">=20.10.0" [[package]] name = "prompt-toolkit" -version = "3.0.43" +version = "3.0.48" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, - {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, + {file = "prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e"}, + {file = "prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90"}, ] [package.dependencies] @@ -1414,42 +1159,162 @@ files = [ [[package]] name = "pure-eval" -version = "0.2.2" +version = "0.2.3" description = "Safely evaluate AST nodes without side effects" optional = false python-versions = "*" files = [ - {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, - {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, + {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, + {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, ] [package.extras] tests = ["pytest"] +[[package]] +name 
= "pydantic" +version = "2.9.2" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, + {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.23.4" +typing-extensions = {version = ">=4.6.1", markers = "python_version < \"3.13\""} + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] + +[[package]] +name = "pydantic-core" +version = "2.23.4" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, + {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, + {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, + {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, + {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, + {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, + {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, + {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, + {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, + {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, + {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = 
"sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, + {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, + {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, + {file = 
"pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, + {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + [[package]] name = "pygments" -version = "2.17.2" +version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, - {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, ] [package.extras] -plugins = ["importlib-metadata"] windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pystac" -version = "1.10.0" +version = "1.10.1" description = "Python library for working with the SpatioTemporal Asset Catalog (STAC) specification" optional = false python-versions = ">=3.9" files = [ - {file = "pystac-1.10.0-py3-none-any.whl", hash = "sha256:2d1eb969abc7e13e2bdb4bb5ae1a68780da1e06f30f66fcf0d4143f51eb03f38"}, - {file = "pystac-1.10.0.tar.gz", hash = "sha256:e2762a700953ae9bab914137116cea31e08378f6c7024d805d651009a6341e20"}, + {file = "pystac-1.10.1-py3-none-any.whl", hash = "sha256:a7c31b3dacc44dfc955d9da8c7351c7b5b99100254b36301a1e312709b51bf2f"}, + {file = "pystac-1.10.1.tar.gz", hash = "sha256:4617fe5315a79785f79b616b8ac248ba3d4d561457c8300b34573309715808cd"}, ] [package.dependencies] @@ -1480,107 +1345,86 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2024.1" +version = "2024.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, -] - -[[package]] -name = "pywin32" -version = "306" -description = "Python for Window Extensions" -optional = false -python-versions = "*" -files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = 
"sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, ] [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = 
"PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] 
[[package]] name = "requests" -version = "2.31.0" +version = "2.32.3" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -1595,13 +1439,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "s3transfer" -version = "0.10.2" +version = "0.10.3" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">=3.8" files = [ - {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"}, - {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"}, + {file = "s3transfer-0.10.3-py3-none-any.whl", hash = "sha256:263ed587a5803c6c708d3ce44dc4dfedaab4c1a32e8329bab818933d79ddcf5d"}, + {file = "s3transfer-0.10.3.tar.gz", hash = "sha256:4f50ed74ab84d474ce614475e0b8d5047ff080810aac5d01ea25231cfc944b0c"}, ] [package.dependencies] @@ -1623,19 +1467,23 @@ files = [ [[package]] name = "setuptools" -version = "69.5.1" +version = "75.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, - {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, + {file = "setuptools-75.2.0-py3-none-any.whl", hash = "sha256:a7fcb66f68b4d9e8e66b42f9876150a3371558f98fa32222ffaa5bced76406f8"}, + {file = "setuptools-75.2.0.tar.gz", hash = "sha256:753bb6ebf1f465a1912e19ed1d41f403a79173a9acf66a42e7e6aec45c3c16ec"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", 
"packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] [[package]] name = "six" @@ -1669,24 +1517,24 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] [[package]] name = "toolz" -version = "0.12.1" +version = "1.0.0" description = "List processing tools and functional utilities" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "toolz-0.12.1-py3-none-any.whl", hash = "sha256:d22731364c07d72eea0a0ad45bafb2c2937ab6fd38a3507bf55eae8744aa7d85"}, - {file = "toolz-0.12.1.tar.gz", hash = "sha256:ecca342664893f177a13dac0e6b41cbd8ac25a358e5f215316d43e2100224f4d"}, + {file = "toolz-1.0.0-py3-none-any.whl", hash = "sha256:292c8f1c4e7516bf9086f8850935c799a874039c8bcf959d47b600e4c44a6236"}, + {file = "toolz-1.0.0.tar.gz", hash = "sha256:2c86e3d9a04798ac556793bced838816296a2f085017664e4995cb40a1047a02"}, ] [[package]] name = "tqdm" -version = "4.66.2" +version = "4.66.5" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, - {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, + {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, + {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, ] [package.dependencies] @@ -1740,35 +1588,35 @@ files = [ [[package]] name = "typing-extensions" -version = "4.11.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, - {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] name = "tzdata" -version = "2024.1" +version = "2024.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = 
"sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, + {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, + {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, ] [[package]] name = "urllib3" -version = "1.26.18" +version = "1.26.20" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, - {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, + {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, + {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, ] [package.extras] @@ -1778,13 +1626,13 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.25.3" +version = "20.27.0" description = "Virtual Python Environment builder" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "virtualenv-20.25.3-py3-none-any.whl", hash = "sha256:8aac4332f2ea6ef519c648d0bc48a5b1d324994753519919bddbb1aff25a104e"}, - {file = "virtualenv-20.25.3.tar.gz", hash = "sha256:7bb554bbdfeaacc3349fa614ea5bff6ac300fc7c335e9facf3a3bcfc703f45be"}, + {file = "virtualenv-20.27.0-py3-none-any.whl", hash = "sha256:44a72c29cceb0ee08f300b314848c86e57bf8d1f13107a5e671fb9274138d655"}, + {file = "virtualenv-20.27.0.tar.gz", hash = "sha256:2ca56a68ed615b8fe4326d11a0dca5dfbe8fd68510fb6c6349163bed3c15f2b2"}, ] [package.dependencies] @@ -1796,47 +1644,6 @@ platformdirs = ">=3.9.1,<5" docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] -[[package]] -name = "watchdog" -version = "4.0.0" -description = "Filesystem events monitoring" -optional = false -python-versions = ">=3.8" -files = [ - {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, - {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, - {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, - {file = 
"watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, - {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, - {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, - {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, - {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, - {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, - {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, - {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, -] - -[package.extras] -watchmedo = ["PyYAML (>=3.10)"] - [[package]] name = "wcwidth" version = "0.2.13" @@ -1850,140 
+1657,37 @@ files = [ [[package]] name = "xarray" -version = "2024.3.0" +version = "2024.7.0" description = "N-D labeled arrays and datasets in Python" optional = false python-versions = ">=3.9" files = [ - {file = "xarray-2024.3.0-py3-none-any.whl", hash = "sha256:ca2bc4da2bf2e7879e15862a7a7c3fc76ad19f6a08931d030220cef39a29118d"}, - {file = "xarray-2024.3.0.tar.gz", hash = "sha256:5c1db19efdde61db7faedad8fc944f4e29698fb6fbd578d352668b63598bd1d8"}, + {file = "xarray-2024.7.0-py3-none-any.whl", hash = "sha256:1b0fd51ec408474aa1f4a355d75c00cc1c02bd425d97b2c2e551fd21810e7f64"}, + {file = "xarray-2024.7.0.tar.gz", hash = "sha256:4cae512d121a8522d41e66d942fb06c526bc1fd32c2c181d5fe62fe65b671638"}, ] [package.dependencies] numpy = ">=1.23" -packaging = ">=22" -pandas = ">=1.5" +packaging = ">=23.1" +pandas = ">=2.0" [package.extras] accel = ["bottleneck", "flox", "numbagg", "opt-einsum", "scipy"] complete = ["xarray[accel,dev,io,parallel,viz]"] -dev = ["hypothesis", "pre-commit", "pytest", "pytest-cov", "pytest-env", "pytest-timeout", "pytest-xdist", "ruff", "xarray[complete]"] +dev = ["hypothesis", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-env", "pytest-timeout", "pytest-xdist", "ruff", "xarray[complete]"] io = ["cftime", "fsspec", "h5netcdf", "netCDF4", "pooch", "pydap", "scipy", "zarr"] parallel = ["dask[complete]"] viz = ["matplotlib", "nc-time-axis", "seaborn"] -[[package]] -name = "yarl" -version = "1.9.4" -description = "Yet another URL library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, - 
{file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, - {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, - {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, - {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, - {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, - {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, - {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, - {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, - {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, - {file = 
"yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, - {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, - {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, - {file = 
"yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, - {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, - {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, - {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, - {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, -] - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" - [[package]] name = "zarr" -version = "2.17.2" +version = "2.18.2" description = "An implementation of chunked, compressed, N-dimensional arrays for Python" optional = false python-versions = ">=3.9" files = [ - {file = "zarr-2.17.2-py3-none-any.whl", hash = "sha256:70d7cc07c24280c380ef80644151d136b7503b0d83c9f214e8000ddc0f57f69b"}, - {file = "zarr-2.17.2.tar.gz", hash = "sha256:2cbaa6cb4e342d45152d4a7a4b2013c337fcd3a8e7bc98253560180de60552ce"}, + {file = "zarr-2.18.2-py3-none-any.whl", hash = "sha256:a638754902f97efa99b406083fdc807a0e2ccf12a949117389d2a4ba9b05df38"}, + {file = "zarr-2.18.2.tar.gz", hash = "sha256:9bb393b8a0a38fb121dbb913b047d75db28de9890f6d644a217a73cf4ae74f47"}, ] [package.dependencies] @@ -1998,20 +1702,24 @@ jupyter = ["ipytree (>=0.2.2)", "ipywidgets (>=8.0.0)", "notebook"] [[package]] name = "zipp" -version = "3.18.1" +version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, - {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, + {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, + {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.13" -content-hash = "df93116d25a101d3f319a1e497b4436a09702719616a853437524aa68acf47bd" +content-hash = "dcdf7c4a55b8fd967c0600fda6a75c4c0fac9aaa0228ac45d17150deddc32c36" diff --git a/pyproject.toml b/pyproject.toml index 
8eef1b50..7a676ec0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,20 +11,19 @@ license = "EUPL-1.2" python = ">=3.9,<3.13" click = ">=8.0.4" requests = ">=2.27.1" -aiohttp = ">=3.9.4,<3.10.0" setuptools = ">=68.2.2" -cachier = ">=2.2.1" xarray = ">=2023.4.0" tqdm = ">=4.65.0" zarr = ">=2.13.3" dask = ">=2022" -netCDF4 = ">=1.5.4" -boto3 = ">=1.25" +boto3 = ">=1.26" semver = ">=3.0.2" -nest-asyncio = ">=1.5.8" pystac = ">=1.8.3" lxml = ">=4.9.0" -numpy = ">=1.23.0,<2.0.0" +numpy = ">=1.23.0" +pendulum = "^3.0.0" +pydantic = "^2.9.1" +h5netcdf = "^1.4.0" [tool.poetry.dev-dependencies] pre-commit = "^2.20.0" diff --git a/tests/__snapshots__/test_cf_compliance.ambr b/tests/__snapshots__/test_cf_compliance.ambr new file mode 100644 index 00000000..d430df21 --- /dev/null +++ b/tests/__snapshots__/test_cf_compliance.ambr @@ -0,0 +1,31 @@ +# serializer version: 1 +# name: TestCFCompliance.test_subset_open + 'cmems_mod_nws_bgc-pft_my_7km-3D-pico_P1M-m' +# --- +# name: TestCFCompliance.test_subset_open.1 + 160 +# --- +# name: TestCFCompliance.test_subset_open.2 + 160 +# --- +# name: TestCFCompliance.test_subset_open.3 + list([ + ]) +# --- +# name: TestCFCompliance.test_subset_with_warns + 'cmems_obs-sst_med_phy-sst_nrt_diurnal-oi-0.0625deg_PT1H-m' +# --- +# name: TestCFCompliance.test_subset_with_warns.1 + 135 +# --- +# name: TestCFCompliance.test_subset_with_warns.2 + 136 +# --- +# name: TestCFCompliance.test_subset_with_warns.3 + list([ + '§2.6 Attributes', + list([ + '§2.6.1 Conventions global attribute does not contain "CF-1.6"', + ]), + ]) +# --- diff --git a/tests/__snapshots__/test_command_line_interface.ambr b/tests/__snapshots__/test_command_line_interface.ambr new file mode 100644 index 00000000..793ddafa --- /dev/null +++ b/tests/__snapshots__/test_command_line_interface.ambr @@ -0,0 +1,4811 @@ +# serializer version: 1 +# name: TestCommandLineInterface.test_describe_including_datasets + list([ + dict({ + 'service_format': None, + 'service_type': dict({ + 'service_name': 'original-files', + 'short_name': 'files', + }), + 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-native-14/native/GLOBAL_MULTIYEAR_PHY_ENS_001_031/cmems_mod_glo_phy-all_my_0.25deg_P1D-m_202311', + 'variables': list([ + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'mlotst_cglo', + 'standard_name': 'ocean_mixed_layer_thickness_defined_by_sigma_theta', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'mlotst_glor', + 'standard_name': 'ocean_mixed_layer_thickness_defined_by_sigma_theta', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'mlotst_oras', + 'standard_name': 'ocean_mixed_layer_thickness_defined_by_sigma_theta', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'siconc_cglo', + 'standard_name': 'sea_ice_area_fraction', + 'units': '1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'siconc_glor', + 'standard_name': 'sea_ice_area_fraction', + 'units': '1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'siconc_oras', + 'standard_name': 'sea_ice_area_fraction', + 'units': '1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 
'short_name': 'sithick_cglo', + 'standard_name': 'sea_ice_thickness', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'sithick_glor', + 'standard_name': 'sea_ice_thickness', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'sithick_oras', + 'standard_name': 'sea_ice_thickness', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'so_cglo', + 'standard_name': 'sea_water_salinity', + 'units': '1e-3', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'so_glor', + 'standard_name': 'sea_water_salinity', + 'units': '1e-3', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'so_oras', + 'standard_name': 'sea_water_salinity', + 'units': '1e-3', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'thetao_cglo', + 'standard_name': 'sea_water_potential_temperature', + 'units': 'degrees_C', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'thetao_glor', + 'standard_name': 'sea_water_potential_temperature', + 'units': 'degrees_C', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'thetao_oras', + 'standard_name': 'sea_water_potential_temperature', + 'units': 'degrees_C', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'uo_cglo', + 'standard_name': 'eastward_sea_water_velocity', + 'units': 'm s-1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'uo_glor', + 'standard_name': 'eastward_sea_water_velocity', + 'units': 'm s-1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'uo_oras', + 'standard_name': 'eastward_sea_water_velocity', + 'units': 'm s-1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'vo_cglo', + 'standard_name': 'northward_sea_water_velocity', + 'units': 'm s-1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'vo_glor', + 'standard_name': 'northward_sea_water_velocity', + 'units': 'm s-1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'vo_oras', + 'standard_name': 'northward_sea_water_velocity', + 'units': 'm s-1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'zos_cglo', + 'standard_name': 'sea_surface_height_above_geoid', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'zos_glor', + 'standard_name': 'sea_surface_height_above_geoid', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + ]), + 'short_name': 'zos_oras', + 'standard_name': 'sea_surface_height_above_geoid', + 'units': 'm', + }), + ]), + }), + dict({ + 'service_format': 'zarr', + 'service_type': dict({ + 'service_name': 'arco-geo-series', + 'short_name': 'geoseries', 
+ }), + 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-arco-time-066/arco/GLOBAL_MULTIYEAR_PHY_ENS_001_031/cmems_mod_glo_phy-all_my_0.25deg_P1D-m_202311/timeChunked.zarr', + 'variables': list([ + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'mlotst_cglo', + 'standard_name': 'ocean_mixed_layer_thickness_defined_by_sigma_theta', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'mlotst_glor', + 'standard_name': 'ocean_mixed_layer_thickness_defined_by_sigma_theta', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'mlotst_oras', + 'standard_name': 'ocean_mixed_layer_thickness_defined_by_sigma_theta', + 'units': 'm', + }), + dict({ + 'bbox': list([ 
+ -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'siconc_cglo', + 'standard_name': 'sea_ice_area_fraction', + 'units': '1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'siconc_glor', + 'standard_name': 'sea_ice_area_fraction', + 'units': '1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'siconc_oras', + 'standard_name': 'sea_ice_area_fraction', + 'units': '1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 
'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'sithick_cglo', + 'standard_name': 'sea_ice_thickness', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'sithick_glor', + 'standard_name': 'sea_ice_thickness', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'sithick_oras', + 'standard_name': 'sea_ice_thickness', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 
3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'so_cglo', + 'standard_name': 'sea_water_salinity', + 'units': '1e-3', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 
147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'so_glor', + 'standard_name': 'sea_water_salinity', + 'units': '1e-3', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 
'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'so_oras', + 'standard_name': 'sea_water_salinity', + 'units': '1e-3', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 
'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'thetao_cglo', + 'standard_name': 'sea_water_potential_temperature', + 'units': 'degrees_C', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'thetao_glor', + 'standard_name': 'sea_water_potential_temperature', + 'units': 'degrees_C', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 
5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'thetao_oras', + 'standard_name': 'sea_water_potential_temperature', + 'units': 'degrees_C', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 
565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'uo_cglo', + 'standard_name': 'eastward_sea_water_velocity', + 'units': 'm s-1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, 
+ 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'uo_glor', + 'standard_name': 'eastward_sea_water_velocity', + 'units': 'm s-1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': 
-180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'uo_oras', + 'standard_name': 'eastward_sea_water_velocity', + 'units': 'm s-1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'vo_cglo', + 'standard_name': 'northward_sea_water_velocity', + 'units': 'm s-1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 
'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'vo_glor', + 'standard_name': 'northward_sea_water_velocity', + 'units': 'm s-1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 
1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'vo_oras', + 'standard_name': 'northward_sea_water_velocity', + 'units': 'm s-1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'zos_cglo', + 'standard_name': 'sea_surface_height_above_geoid', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + 
}), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'zos_glor', + 'standard_name': 'sea_surface_height_above_geoid', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 681, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1440, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'zos_oras', + 'standard_name': 'sea_surface_height_above_geoid', + 'units': 'm', + }), + ]), + }), + dict({ + 'service_format': 'zarr', + 'service_type': dict({ + 'service_name': 'arco-time-series', + 'short_name': 'timeseries', + }), + 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-arco-geo-066/arco/GLOBAL_MULTIYEAR_PHY_ENS_001_031/cmems_mod_glo_phy-all_my_0.25deg_P1D-m_202311/geoChunked.zarr', + 'variables': list([ + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'mlotst_cglo', + 'standard_name': 'ocean_mixed_layer_thickness_defined_by_sigma_theta', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': 
-80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'mlotst_glor', + 'standard_name': 'ocean_mixed_layer_thickness_defined_by_sigma_theta', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'mlotst_oras', + 'standard_name': 'ocean_mixed_layer_thickness_defined_by_sigma_theta', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'siconc_cglo', + 'standard_name': 'sea_ice_area_fraction', + 'units': '1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 
'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'siconc_glor', + 'standard_name': 'sea_ice_area_fraction', + 'units': '1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'siconc_oras', + 'standard_name': 'sea_ice_area_fraction', + 'units': '1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'sithick_cglo', + 'standard_name': 'sea_ice_thickness', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, 
+ 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'sithick_glor', + 'standard_name': 'sea_ice_thickness', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'sithick_oras', + 'standard_name': 'sea_ice_thickness', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 
'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'so_cglo', + 'standard_name': 'sea_water_salinity', + 'units': '1e-3', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'so_glor', + 'standard_name': 'sea_water_salinity', + 'units': '1e-3', + }), + 
dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'so_oras', + 'standard_name': 'sea_water_salinity', + 'units': '1e-3', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 
2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'thetao_cglo', + 'standard_name': 'sea_water_potential_temperature', + 'units': 'degrees_C', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 
120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'thetao_glor', + 'standard_name': 'sea_water_potential_temperature', + 'units': 'degrees_C', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 
'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'thetao_oras', + 'standard_name': 'sea_water_potential_temperature', + 'units': 'degrees_C', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 
'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'uo_cglo', + 'standard_name': 'eastward_sea_water_velocity', + 'units': 'm s-1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'uo_glor', + 'standard_name': 'eastward_sea_water_velocity', + 'units': 'm s-1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 
5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'uo_oras', + 'standard_name': 'eastward_sea_water_velocity', + 'units': 'm s-1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 
508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'vo_cglo', + 'standard_name': 'northward_sea_water_velocity', + 'units': 'm s-1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 
9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'vo_glor', + 'standard_name': 'northward_sea_water_velocity', + 'units': 'm s-1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1, + 'coordinate_id': 'depth', + 'maximum_value': None, + 'minimum_value': None, + 'step': None, + 'units': 'm', + 'values': list([ + 5902.0576171875, + 5698.060546875, + 5494.5751953125, + 5291.68310546875, + 5089.478515625, + 4888.06982421875, + 4687.5810546875, + 4488.15478515625, + 4289.95263671875, + 4093.15869140625, + 3897.98193359375, + 3704.65673828125, + 3513.445556640625, + 3324.640869140625, + 3138.56494140625, + 2955.5703125, + 2776.039306640625, + 2600.38037109375, + 2429.025146484375, + 2262.421630859375, + 2101.026611328125, + 1945.2955322265625, + 1795.6707763671875, + 1652.5684814453125, + 1516.3636474609375, + 1387.376953125, + 1265.8614501953125, + 1151.9912109375, + 1045.854248046875, + 947.4478759765625, + 856.678955078125, + 773.3682861328125, + 697.2586669921875, + 628.0260009765625, + 565.2922973632812, + 508.639892578125, + 457.6256103515625, + 411.7938537597656, + 370.6884765625, + 333.8628234863281, + 300.88751220703125, + 271.35638427734375, + 244.890625, + 221.14117431640625, + 199.7899627685547, + 180.5499267578125, + 163.16445922851562, + 147.40625, + 133.07582092285156, + 120, + 108.03028106689453, + 97.04131317138672, + 86.92942810058594, + 77.61116027832031, + 69.02168273925781, + 61.11283874511719, + 53.85063552856445, + 47.21189498901367, + 41.180023193359375, + 35.740203857421875, + 30.874561309814453, + 26.558300018310547, + 22.75761604309082, + 19.42980194091797, + 16.52532196044922, + 13.99103832244873, + 11.773679733276367, + 9.822750091552734, + 8.09251880645752, + 6.543033599853516, + 5.140361309051514, + 3.8562798500061035, + 2.6676816940307617, + 1.5558552742004395, + 0.5057600140571594, + ]), + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, 
+ 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'vo_oras', + 'standard_name': 'northward_sea_water_velocity', + 'units': 'm s-1', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'zos_cglo', + 'standard_name': 'sea_surface_height_above_geoid', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 725846400000, + 'step': 86400000, + 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'values': None, + }), + ]), + 'short_name': 'zos_glor', + 'standard_name': 'sea_surface_height_above_geoid', + 'units': 'm', + }), + dict({ + 'bbox': list([ + -180, + -80, + 179.75, + 90, + ]), + 'coordinates': list([ + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 16, + 'coordinate_id': 'latitude', + 'maximum_value': 90, + 'minimum_value': -80, + 'step': 0.25, + 'units': 'degrees_north', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 32, + 'coordinate_id': 'longitude', + 'maximum_value': 179.75, + 'minimum_value': -180, + 'step': 0.25, + 'units': 'degrees_east', + 'values': None, + }), + dict({ + 'chunk_geometric_factor': None, + 'chunk_reference_coordinate': None, + 'chunk_type': None, + 'chunking_length': 1826, + 'coordinate_id': 'time', + 'maximum_value': 1703980800000, + 'minimum_value': 
725846400000,
+ 'step': 86400000,
+ 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)',
+ 'values': None,
+ }),
+ ]),
+ 'short_name': 'zos_oras',
+ 'standard_name': 'sea_surface_height_above_geoid',
+ 'units': 'm',
+ }),
+ ]),
+ }),
+ ])
+# ---
diff --git a/tests/__snapshots__/test_describe_released_date.ambr b/tests/__snapshots__/test_describe_released_date.ambr
index 9f1cad13..5f2d9af9 100644
--- a/tests/__snapshots__/test_describe_released_date.ambr
+++ b/tests/__snapshots__/test_describe_released_date.ambr
@@ -6,7 +6,7 @@
 'datasets': list([
 dict({
 'dataset_id': 'cmems_mod_glo_phy-cur_anfc_0.083deg_P1D-m',
- 'dataset_name': 'cmems_mod_glo_phy-cur_anfc_0.083deg_P1D-m',
+ 'dataset_name': 'daily mean fields from Global Ocean Physics Analysis and Forecast updated Daily',
 'versions': list([
 dict({
 'label': '202211',
@@ -73,7 +73,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 1,
- 'coordinates_id': 'depth',
+ 'coordinate_id': 'depth',
 'maximum_value': None,
 'minimum_value': None,
 'step': None,
@@ -136,7 +136,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 512,
- 'coordinates_id': 'latitude',
+ 'coordinate_id': 'latitude',
 'maximum_value': 90,
 'minimum_value': -80,
 'step': 0.08333333333333333,
@@ -148,7 +148,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 2048,
- 'coordinates_id': 'longitude',
+ 'coordinate_id': 'longitude',
 'maximum_value': 179.91668701171875,
 'minimum_value': -180,
 'step': 0.08333333804392655,
@@ -160,7 +160,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 1,
- 'coordinates_id': 'time',
+ 'coordinate_id': 'time',
 'maximum_value': 1714608000000,
 'minimum_value': 1604188800000,
 'step': 86400000,
@@ -185,7 +185,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 1,
- 'coordinates_id': 'depth',
+ 'coordinate_id': 'depth',
 'maximum_value': None,
 'minimum_value': None,
 'step': None,
@@ -248,7 +248,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 512,
- 'coordinates_id': 'latitude',
+ 'coordinate_id': 'latitude',
 'maximum_value': 90,
 'minimum_value': -80,
 'step': 0.08333333333333333,
@@ -260,7 +260,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 2048,
- 'coordinates_id': 'longitude',
+ 'coordinate_id': 'longitude',
 'maximum_value': 179.91668701171875,
 'minimum_value': -180,
 'step': 0.08333333804392655,
@@ -272,7 +272,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 1,
- 'coordinates_id': 'time',
+ 'coordinate_id': 'time',
 'maximum_value': 1714608000000,
 'minimum_value': 1604188800000,
 'step': 86400000,
@@ -307,7 +307,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 2,
- 'coordinates_id': 'depth',
+ 'coordinate_id': 'depth',
 'maximum_value': None,
 'minimum_value': None,
 'step': None,
@@ -370,7 +370,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 32,
- 'coordinates_id': 'latitude',
+ 'coordinate_id': 'latitude',
 'maximum_value': 90,
 'minimum_value': -80,
 'step': 0.08333333333333333,
@@ -382,7 +382,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 64,
- 'coordinates_id': 'longitude',
+ 'coordinate_id': 'longitude',
 'maximum_value': 179.91668701171875,
 'minimum_value': -180,
 'step': 0.08333333804392655,
@@ -394,7 +394,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 177,
- 'coordinates_id': 'time',
+ 'coordinate_id': 'time',
 'maximum_value': 1714608000000,
 'minimum_value': 1604188800000,
 'step': 86400000,
@@ -419,7 +419,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 2,
- 'coordinates_id': 'depth',
+ 'coordinate_id': 'depth',
 'maximum_value': None,
 'minimum_value': None,
 'step': None,
@@ -482,7 +482,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 32,
- 'coordinates_id': 'latitude',
+ 'coordinate_id': 'latitude',
 'maximum_value': 90,
 'minimum_value': -80,
 'step': 0.08333333333333333,
@@ -494,7 +494,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 64,
- 'coordinates_id': 'longitude',
+ 'coordinate_id': 'longitude',
 'maximum_value': 179.91668701171875,
 'minimum_value': -180,
 'step': 0.08333333804392655,
@@ -506,7 +506,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 177,
- 'coordinates_id': 'time',
+ 'coordinate_id': 'time',
 'maximum_value': 1714608000000,
 'minimum_value': 1604188800000,
 'step': 86400000,
@@ -589,7 +589,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 1,
- 'coordinates_id': 'depth',
+ 'coordinate_id': 'depth',
 'maximum_value': None,
 'minimum_value': None,
 'step': None,
@@ -652,7 +652,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 512,
- 'coordinates_id': 'latitude',
+ 'coordinate_id': 'latitude',
 'maximum_value': 90,
 'minimum_value': -80,
 'step': 0.08333333333333333,
@@ -664,7 +664,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 2048,
- 'coordinates_id': 'longitude',
+ 'coordinate_id': 'longitude',
 'maximum_value': 179.91668701171875,
 'minimum_value': -180,
 'step': 0.08333333804392655,
@@ -676,7 +676,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 1,
- 'coordinates_id': 'time',
+ 'coordinate_id': 'time',
 'maximum_value': 1714608000000,
 'minimum_value': 1604188800000,
 'step': 86400000,
@@ -701,7 +701,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 1,
- 'coordinates_id': 'depth',
+ 'coordinate_id': 'depth',
 'maximum_value': None,
 'minimum_value': None,
 'step': None,
@@ -764,7 +764,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 512,
- 'coordinates_id': 'latitude',
+ 'coordinate_id': 'latitude',
 'maximum_value': 90,
 'minimum_value': -80,
 'step': 0.08333333333333333,
@@ -776,7 +776,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 2048,
- 'coordinates_id': 'longitude',
+ 'coordinate_id': 'longitude',
 'maximum_value': 179.91668701171875,
 'minimum_value': -180,
 'step': 0.08333333804392655,
@@ -788,7 +788,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 1,
- 'coordinates_id': 'time',
+ 'coordinate_id': 'time',
 'maximum_value': 1714608000000,
 'minimum_value': 1604188800000,
 'step': 86400000,
@@ -823,7 +823,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 2,
- 'coordinates_id': 'depth',
+ 'coordinate_id': 'depth',
 'maximum_value': None,
 'minimum_value': None,
 'step': None,
@@ -886,7 +886,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 32,
- 'coordinates_id': 'latitude',
+ 'coordinate_id': 'latitude',
 'maximum_value': 90,
 'minimum_value': -80,
 'step': 0.08333333333333333,
@@ -898,7 +898,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 64,
- 'coordinates_id': 'longitude',
+ 'coordinate_id': 'longitude',
 'maximum_value': 179.91668701171875,
 'minimum_value': -180,
 'step': 0.08333333804392655,
@@ -910,7 +910,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 177,
- 'coordinates_id': 'time',
+ 'coordinate_id': 'time',
 'maximum_value': 1714608000000,
 'minimum_value': 1604188800000,
 'step': 86400000,
@@ -935,7 +935,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 2,
- 'coordinates_id': 'depth',
+ 'coordinate_id': 'depth',
 'maximum_value': None,
 'minimum_value': None,
 'step': None,
@@ -998,7 +998,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 32,
- 'coordinates_id': 'latitude',
+ 'coordinate_id': 'latitude',
 'maximum_value': 90,
 'minimum_value': -80,
 'step': 0.08333333333333333,
@@ -1010,7 +1010,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 64,
- 'coordinates_id': 'longitude',
+ 'coordinate_id': 'longitude',
 'maximum_value': 179.91668701171875,
 'minimum_value': -180,
 'step': 0.08333333804392655,
@@ -1022,7 +1022,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 177,
- 'coordinates_id': 'time',
+ 'coordinate_id': 'time',
 'maximum_value': 1714608000000,
 'minimum_value': 1604188800000,
 'step': 86400000,
@@ -1098,7 +1098,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 1,
- 'coordinates_id': 'depth',
+ 'coordinate_id': 'depth',
 'maximum_value': None,
 'minimum_value': None,
 'step': None,
@@ -1161,7 +1161,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 512,
- 'coordinates_id': 'latitude',
+ 'coordinate_id': 'latitude',
 'maximum_value': 90,
 'minimum_value': -80,
 'step': 0.08333333333333333,
@@ -1173,7 +1173,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 2048,
- 'coordinates_id': 'longitude',
+ 'coordinate_id': 'longitude',
 'maximum_value': 179.91668701171875,
 'minimum_value': -180,
 'step': 0.08333333804392655,
@@ -1185,7 +1185,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 1,
- 'coordinates_id': 'time',
+ 'coordinate_id': 'time',
 'maximum_value': 1714608000000,
 'minimum_value': 1604188800000,
 'step': 86400000,
@@ -1220,7 +1220,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 2,
- 'coordinates_id': 'depth',
+ 'coordinate_id': 'depth',
 'maximum_value': None,
 'minimum_value': None,
 'step': None,
@@ -1283,7 +1283,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 32,
- 'coordinates_id': 'latitude',
+ 'coordinate_id': 'latitude',
 'maximum_value': 90,
 'minimum_value': -80,
 'step': 0.08333333333333333,
@@ -1295,7 +1295,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 64,
- 'coordinates_id': 'longitude',
+ 'coordinate_id': 'longitude',
 'maximum_value': 179.91668701171875,
 'minimum_value': -180,
 'step': 0.08333333804392655,
@@ -1307,7 +1307,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 177,
- 'coordinates_id': 'time',
+ 'coordinate_id': 'time',
 'maximum_value': 1714608000000,
 'minimum_value': 1604188800000,
 'step': 86400000,
@@ -1437,7 +1437,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 4,
- 'coordinates_id': 'time',
+ 'coordinate_id': 'time',
 'maximum_value': 1693440000000,
 'minimum_value': 725846400000,
 'step': 86400000,
@@ -1449,7 +1449,7 @@
 'chunk_reference_coordinate': None,
 'chunk_type': None,
 'chunking_length': 2,
- 'coordinates_id': 'depth',
+ 'coordinate_id': 
'depth', 'maximum_value': None, 'minimum_value': None, 'step': None, @@ -1486,7 +1486,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 375, - 'coordinates_id': 'latitude', + 'coordinate_id': 'latitude', 'maximum_value': 65.00125122070312, 'minimum_value': 40.06666946411133, 'step': 0.06667000469676951, @@ -1498,7 +1498,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 297, - 'coordinates_id': 'longitude', + 'coordinate_id': 'longitude', 'maximum_value': 12.999670028686523, 'minimum_value': -19.88888931274414, 'step': 0.11110999133135821, @@ -1533,7 +1533,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 2156, - 'coordinates_id': 'time', + 'coordinate_id': 'time', 'maximum_value': 1693440000000, 'minimum_value': 725846400000, 'step': 86400000, @@ -1545,7 +1545,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 1, - 'coordinates_id': 'depth', + 'coordinate_id': 'depth', 'maximum_value': None, 'minimum_value': None, 'step': None, @@ -1582,7 +1582,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 16, - 'coordinates_id': 'latitude', + 'coordinate_id': 'latitude', 'maximum_value': 65.00125122070312, 'minimum_value': 40.06666946411133, 'step': 0.06667000469676951, @@ -1594,7 +1594,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 16, - 'coordinates_id': 'longitude', + 'coordinate_id': 'longitude', 'maximum_value': 12.999670028686523, 'minimum_value': -19.88888931274414, 'step': 0.11110999133135821, @@ -1670,7 +1670,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 4, - 'coordinates_id': 'time', + 'coordinate_id': 'time', 'maximum_value': None, 'minimum_value': None, 'step': None, @@ -2051,7 +2051,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 2, - 'coordinates_id': 'depth', + 'coordinate_id': 'depth', 'maximum_value': None, 'minimum_value': None, 'step': None, @@ -2088,7 +2088,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 375, - 'coordinates_id': 'latitude', + 'coordinate_id': 'latitude', 'maximum_value': 65.00125122070312, 'minimum_value': 40.06666946411133, 'step': 0.06667000469676951, @@ -2100,7 +2100,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 297, - 'coordinates_id': 'longitude', + 'coordinate_id': 'longitude', 'maximum_value': 12.999670028686523, 'minimum_value': -19.88888931274414, 'step': 0.11110999133135821, @@ -2135,7 +2135,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 72, - 'coordinates_id': 'time', + 'coordinate_id': 'time', 'maximum_value': None, 'minimum_value': None, 'step': None, @@ -2516,7 +2516,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 2, - 'coordinates_id': 'depth', + 'coordinate_id': 'depth', 'maximum_value': None, 'minimum_value': None, 'step': None, @@ -2553,7 +2553,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 64, - 'coordinates_id': 'latitude', + 'coordinate_id': 'latitude', 'maximum_value': 65.00125122070312, 'minimum_value': 40.06666946411133, 'step': 0.06667000469676951, @@ -2565,7 +2565,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 64, - 'coordinates_id': 'longitude', + 'coordinate_id': 'longitude', 'maximum_value': 12.999670028686523, 'minimum_value': -19.88888931274414, 'step': 0.11110999133135821, @@ -2606,7 +2606,7 @@ 
'datasets': list([ dict({ 'dataset_id': 'cmems_mod_glo_phy-cur_anfc_0.083deg_P1D-m', - 'dataset_name': 'cmems_mod_glo_phy-cur_anfc_0.083deg_P1D-m', + 'dataset_name': 'daily mean fields from Global Ocean Physics Analysis and Forecast updated Daily', 'versions': list([ dict({ 'label': '202211', @@ -2673,7 +2673,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 1, - 'coordinates_id': 'depth', + 'coordinate_id': 'depth', 'maximum_value': None, 'minimum_value': None, 'step': None, @@ -2736,7 +2736,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 512, - 'coordinates_id': 'latitude', + 'coordinate_id': 'latitude', 'maximum_value': 90, 'minimum_value': -80, 'step': 0.08333333333333333, @@ -2748,7 +2748,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 2048, - 'coordinates_id': 'longitude', + 'coordinate_id': 'longitude', 'maximum_value': 179.91668701171875, 'minimum_value': -180, 'step': 0.08333333804392655, @@ -2760,7 +2760,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 1, - 'coordinates_id': 'time', + 'coordinate_id': 'time', 'maximum_value': 1714608000000, 'minimum_value': 1604188800000, 'step': 86400000, @@ -2785,7 +2785,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 1, - 'coordinates_id': 'depth', + 'coordinate_id': 'depth', 'maximum_value': None, 'minimum_value': None, 'step': None, @@ -2848,7 +2848,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 512, - 'coordinates_id': 'latitude', + 'coordinate_id': 'latitude', 'maximum_value': 90, 'minimum_value': -80, 'step': 0.08333333333333333, @@ -2860,7 +2860,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 2048, - 'coordinates_id': 'longitude', + 'coordinate_id': 'longitude', 'maximum_value': 179.91668701171875, 'minimum_value': -180, 'step': 0.08333333804392655, @@ -2872,7 +2872,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 1, - 'coordinates_id': 'time', + 'coordinate_id': 'time', 'maximum_value': 1714608000000, 'minimum_value': 1604188800000, 'step': 86400000, @@ -2907,7 +2907,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 2, - 'coordinates_id': 'depth', + 'coordinate_id': 'depth', 'maximum_value': None, 'minimum_value': None, 'step': None, @@ -2970,7 +2970,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 32, - 'coordinates_id': 'latitude', + 'coordinate_id': 'latitude', 'maximum_value': 90, 'minimum_value': -80, 'step': 0.08333333333333333, @@ -2982,7 +2982,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 64, - 'coordinates_id': 'longitude', + 'coordinate_id': 'longitude', 'maximum_value': 179.91668701171875, 'minimum_value': -180, 'step': 0.08333333804392655, @@ -2994,7 +2994,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 177, - 'coordinates_id': 'time', + 'coordinate_id': 'time', 'maximum_value': 1714608000000, 'minimum_value': 1604188800000, 'step': 86400000, @@ -3019,7 +3019,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 2, - 'coordinates_id': 'depth', + 'coordinate_id': 'depth', 'maximum_value': None, 'minimum_value': None, 'step': None, @@ -3082,7 +3082,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 32, - 'coordinates_id': 'latitude', + 'coordinate_id': 'latitude', 'maximum_value': 90, 
'minimum_value': -80, 'step': 0.08333333333333333, @@ -3094,7 +3094,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 64, - 'coordinates_id': 'longitude', + 'coordinate_id': 'longitude', 'maximum_value': 179.91668701171875, 'minimum_value': -180, 'step': 0.08333333804392655, @@ -3106,7 +3106,7 @@ 'chunk_reference_coordinate': None, 'chunk_type': None, 'chunking_length': 177, - 'coordinates_id': 'time', + 'coordinate_id': 'time', 'maximum_value': 1714608000000, 'minimum_value': 1604188800000, 'step': 86400000, diff --git a/tests/__snapshots__/test_help_command_interface.ambr b/tests/__snapshots__/test_help_command_interface.ambr index 7de4ae35..629e69a7 100644 --- a/tests/__snapshots__/test_help_command_interface.ambr +++ b/tests/__snapshots__/test_help_command_interface.ambr @@ -8,7 +8,7 @@ ' -h, --help Show this message and exit.', '', 'Commands:', - ' describe Print Copernicus Marine catalog as JSON.', + ' describe Print Copernicus Marine catalogue as JSON.', ' get Download originally produced data files.', ' login Create a configuration file with your Copernicus Marine', ' credentials.', @@ -20,88 +20,77 @@ list([ "CompletedProcess(args=['copernicusmarine', 'describe', '--help'], returncode=0, stdout=b'Usage: copernicusmarine describe [OPTIONS]", '', - ' Print Copernicus Marine catalog as JSON.', + ' Retrieve and parse the metadata information from the Copernicus Marine', + ' catalogue.', '', - ' The default display contains information on the products, and more data can', - ' be displayed using the --include- flags.', - '', - ' The --contains option allows the user to specify one or several strings to', - ' filter through the catalogue display. The search is performed recursively on', - ' all attributes of the catalogue, and the tokens only need to be contained in', - ' one of the attributes (i.e. not exact match).', + ' Returns JSON A dictionary containing the retrieved metadata information.', '', 'Options:', ' --include-description Include product description in output.', ' --include-datasets Include product dataset details in output.', ' --include-keywords Include product keyword details in output.', - ' --include-versions, --include-all-versions', - ' Include dataset versions in output. By', + ' --include-versions Include dataset versions in output. By', ' default, shows only the default version.', ' -a, --include-all Include all the possible data in output:', ' description, datasets, keywords, and', ' versions.', ' -c, --contains TEXT Filter catalogue output. Returns products', ' with attributes matching a string token.', - ' --overwrite-metadata-cache Force to refresh the catalogue by', - ' overwriting the local cache.', - ' \\x08 NOTE: This argument is mutually exclusive', - ' with arguments: [no_metadata_cache].', - ' --no-metadata-cache Bypass the use of cache. \\x08', - ' NOTE: This argument is mutually exclusive', - ' with arguments: [overwrite_metadata_cache].', + ' --max-concurrent-requests INTEGER', + ' Maximum number of concurrent requests (>=1).', + ' Default 15. 
The command uses a thread pool', + ' executor to manage concurrent requests.', ' --disable-progress-bar Flag to hide progress bar.', ' --log-level [DEBUG|INFO|WARN|ERROR|CRITICAL|QUIET]', ' Set the details printed to console by the', ' command (based on standard logging library).', ' -h, --help Show this message and exit.', '', - ' Examples:', '', - ' copernicusmarine describe --contains METOFFICE-GLO-SST-L4-NRT-OBS-SST-V2 --include-datasets', + ' Examples:', + '', + '', + ' copernicusmarine describe --contains METOFFICE-GLO-SST-L4-NRT-OBS-SST-V2 --include-datasets', '', - ' copernicusmarine describe -c METOFFICE-GLO-SST-L4-NRT-OBS-SST-V2', + '', + ' copernicusmarine describe -c METOFFICE-GLO-SST-L4-NRT-OBS-SST-V2 ', + '', + ' ', "', stderr=b'')", ]) # --- # name: TestHelpCommandLineInterface.test_help_from_get_is_as_expected list([ - "CompletedProcess(args=['copernicusmarine', 'get', '--help'], returncode=0, stdout=b'Usage: copernicusmarine get [OPTIONS]", + 'CompletedProcess(args=[\'copernicusmarine\', \'get\', \'--help\'], returncode=0, stdout=b"Usage: copernicusmarine get [OPTIONS]', '', ' Download originally produced data files.', '', - ' Either one of --dataset-id or --dataset-url is required (can be found via', - ' the "describe" command). The function fetches the files recursively if a', - ' folder path is passed as URL. When provided a datasetID, all the files in', - ' the corresponding folder will be downloaded if none of the --filter or', - ' --regex options is specified.', + ' The datasetID is required (either as an argument or in a request file) and', + ' can be found via the ``describe`` command. See :ref:`describe `.', + '', + ' Returns JSON A list of files that were downloaded and some metadata.', '', 'Options:', - ' -u, --dataset-url TEXT URL to the data files.', - ' -i, --dataset-id TEXT The datasetID.', - ' --dataset-version, --force-dataset-version TEXT', - ' Force the selection of a specific dataset', + ' -i, --dataset-id TEXT The datasetID, required either as an', + ' argument or in the request_file option.', + ' --dataset-version TEXT Force the selection of a specific dataset', ' version.', - ' --dataset-part, --force-dataset-part TEXT', - ' Force the selection of a specific dataset', + ' --dataset-part TEXT Force the selection of a specific dataset', ' part.', - ' --username TEXT If not set, search for environment variable', - ' COPERNICUSMARINE_SERVICE_USERNAME, or else', - ' look for configuration files, or else ask', - ' for user input.', - ' --password TEXT If not set, search for environment variable', - ' COPERNICUSMARINE_SERVICE_PASSWORD, or else', - ' look for configuration files, or else ask', - ' for user input.', - ' -nd, --no-directories Option to not recreate folder hierarchy in', - ' ouput directory. \\x08 NOTE:', - ' This argument is mutually exclusive with', - ' arguments: [sync].', + ' --username TEXT The username for authentication.', + ' --password TEXT The password for authentication.', + ' -nd, --no-directories If True, downloaded files will not be', + ' organized into directories.', + ' \\x08 NOTE: This argument is mutually exclusive', + ' with arguments: [sync].', ' --show-outputnames Option to display the names of the output', ' files before download.', - ' -o, --output-directory PATH The destination directory for the downloaded', + ' -o, --output-directory PATH The destination folder for the downloaded', ' files. Default is the current directory.', ' --credentials-file PATH Path to a credentials file if not in its', - ' default directory. 
Accepts', + ' default directory', + ' (``$HOME/.copernicusmarine``). Accepts', ' .copernicusmarine-credentials / .netrc or', ' _netrc / motuclient-python.ini files.', ' --force-download Flag to skip confirmation before download.', @@ -110,48 +99,38 @@ ' on destination, then it will be overwritten', ' instead of creating new one with unique', ' index.', - ' -s, --service, --force-service TEXT', - ' Force download through one of the available', - ' services using the service name among', - " [\\'original-files\\'] or its short name among", - " [\\'files\\'].", - ' --create-template Option to create a file get_template.json in', - ' your current directory containing CLI', - ' arguments. If specified, no other action', - ' will be performed.', - ' --request-file PATH Option to pass a file containing CLI', - ' arguments. The file MUST follow the', - " structure of dataclass \\'GetRequest\\'. For", - ' more information please refer to the README.', - ' --overwrite-metadata-cache Force to refresh the catalogue by', - ' overwriting the local cache.', - ' \\x08 NOTE: This argument is mutually exclusive', - ' with arguments: [no_metadata_cache].', - ' --no-metadata-cache Bypass the use of cache. \\x08', - ' NOTE: This argument is mutually exclusive', - ' with arguments: [overwrite_metadata_cache].', + ' --create-template Option to create a file', + ' _template.json in your current', + ' directory containing the arguments. If', + ' specified, no other action will be', + ' performed.', + ' --request-file PATH Option to pass a file containing the', + ' arguments. For more information please refer', + ' to the documentation or use option', + ' ``--create-template`` from the command line', + ' interface for an example template.', ' --filter, --filter-with-globbing-pattern TEXT', ' A pattern that must match the absolute paths', ' of the files to download.', ' --regex, --filter-with-regular-expression TEXT', ' The regular expression that must match the', ' absolute paths of the files to download.', - ' --file-list PATH Path to a .txt file containing a list of', + " --file-list PATH Path to a '.txt' file containing a list of", ' file paths, line by line, that will be', ' downloaded directly. These files must be', - ' from the specified dataset using the', - ' --dataset-id. If no files can be found, the', - ' Toolbox will list all files on the remote', - ' server and attempt to find a match.', + ' from the same dataset as the one specified', + ' dataset with the datasetID option. If no', + ' files can be found, the Toolbox will list', + ' all files on the remote server and attempt', + ' to find a match.', ' --create-file-list TEXT Option to only create a file containing the', ' names of the targeted files instead of', - ' downloading them. It writes the file in the', - ' directory specified with the --output-', - ' directory option (default to current', - ' directory). The file name specified should', - " end with \\'.txt\\' or \\'.csv\\' If specified, no", - ' other action will be performed. Please find', - ' more information in the README.', + ' downloading them. It writes the file to the', + ' specified output directory (default to', + ' current directory). The file name specified', + " should end with '.txt' or '.csv'. If", + ' specified, no other action will be', + ' performed.', ' --sync Option to synchronize the local directory', ' with the remote directory. 
See the', ' documentation for more details.', @@ -163,196 +142,223 @@ ' is mutually exclusive with arguments: [no-', ' directories].', ' --index-parts Option to get the index files of an INSITU', - ' dataset. Temporary option.', + ' dataset.', + ' --dry-run If True, runs query without downloading', + ' data.', + ' --max-concurrent-requests INTEGER', + ' Maximum number of concurrent requests.', + ' Default 15. The command uses a thread pool', + ' executor to manage concurrent requests. If', + ' set to 0, no parallel executions are used.', ' --disable-progress-bar Flag to hide progress bar.', ' --log-level [DEBUG|INFO|WARN|ERROR|CRITICAL|QUIET]', ' Set the details printed to console by the', ' command (based on standard logging library).', ' -h, --help Show this message and exit.', '', - ' Examples:', '', - ' copernicusmarine get -nd -o data_folder --dataset-id cmems_mod_nws_bgc-pft_myint_7km-3D-diato_P1M-m', - "', stderr=b'')", + ' Example to download all the files from a given dataset:', + '', + '', + ' copernicusmarine get -i cmems_mod_nws_bgc-pft_myint_7km-3D-diato_P1M-m ', + '', + ' ', + '", stderr=b\'\')', ]) # --- # name: TestHelpCommandLineInterface.test_help_from_login_is_as_expected list([ "CompletedProcess(args=['copernicusmarine', 'login', '--help'], returncode=0, stdout=b'Usage: copernicusmarine login [OPTIONS]", '', - ' Create a configuration file with your Copernicus Marine credentials.', + ' Create a configuration file with your Copernicus Marine credentials under', + ' the ``$HOME/.copernicusmarine`` directory (overwritable with the', + ' ``overwrite_configuration_file`` option).', '', - ' Create a configuration file under the $HOME/.copernicusmarine directory', - ' (overwritable with option --credentials-file).', + ' Returns Exit code 0 exit code if the login was successfully completed, 1', + ' otherwise.', '', 'Options:', - ' --username TEXT If not set, search for environment variable', - ' COPERNICUSMARINE_SERVICE_USERNAME, or else', - ' ask for user input.', - ' --password TEXT If not set, search for environment variable', - ' COPERNICUSMARINE_SERVICE_PASSWORD, or else', - ' ask for user input.', + ' --username TEXT The username for authentication.', + ' --password TEXT The password for authentication.', ' --configuration-file-directory PATH', ' Path to the directory where the', ' configuration file is stored.', ' -overwrite, --overwrite-configuration-file', ' Flag to skip confirmation before overwriting', ' configuration file.', - ' --skip-if-user-logged-in Flag to skip the logging process if the user', - ' is already logged in.', + ' --check-credentials-valid Flag to check if the credentials are valid.', + ' No other action will be performed. The', + ' validity will be check in this order: 1.', + ' Check if the credentials are valid with the', + ' provided username and password. 2. Check if', + ' the credentials are valid in the environment', + ' variables. 3. Check if the credentials are', + ' valid in the configuration file. 
When any is', + ' found (valid or not valid), will return', + ' immediately.', ' --log-level [DEBUG|INFO|WARN|ERROR|CRITICAL|QUIET]', ' Set the details printed to console by the', ' command (based on standard logging library).', ' -h, --help Show this message and exit.', '', - ' Examples:', '', - ' COPERNICUSMARINE_SERVICE_USERNAME= COPERNICUSMARINE_SERVICE_PASSWORD= copernicusmarine login', + ' Examples:', '', - ' copernicusmarine login --username --password ', + ' Using environment variables:', '', - ' copernicusmarine login', - ' > Username: [USER-INPUT]', - ' > Password: [USER-INPUT]', + '', + ' COPERNICUSMARINE_SERVICE_USERNAME= COPERNICUSMARINE_SERVICE_PASSWORD= copernicusmarine login', + '', + ' Using command line arguments:', + '', + '', + ' copernicusmarine login --username --password ', + '', + ' Using directly user input:', + '', + '', + ' copernicusmarine login', + ' > Username: [USER-INPUT]', + ' > Password: [USER-INPUT]', + ' ', "', stderr=b'')", ]) # --- # name: TestHelpCommandLineInterface.test_help_from_subset_is_as_expected list([ - "CompletedProcess(args=['copernicusmarine', 'subset', '--help'], returncode=0, stdout=b'Usage: copernicusmarine subset [OPTIONS]", + 'CompletedProcess(args=[\'copernicusmarine\', \'subset\', \'--help\'], returncode=0, stdout=b"Usage: copernicusmarine subset [OPTIONS]', + '', + ' Extracts a subset of data from a specified dataset using given parameters.', '', - ' Download subsets of datasets as NetCDF files or Zarr stores.', + ' The datasetID is required and can be found via the ``describe`` command. See', + ' :ref:`describe `.', '', - ' Either one of --dataset-id or --dataset-url is required (can be found via', - ' the "describe" command). The argument values passed individually through the', - ' CLI take precedence over the values from the --motu-api-request option,', - ' which takes precedence over the ones from the --request-file option.', + ' Returns JSON A description of the downloaded data and its destination.', '', 'Options:', - ' -u, --dataset-url TEXT The full dataset URL.', - ' -i, --dataset-id TEXT The datasetID.', - ' --dataset-version, --force-dataset-version TEXT', - ' Force the selection of a specific dataset', + ' -i, --dataset-id TEXT The datasetID, required either as an', + ' argument or in the request_file option.', + ' --dataset-version TEXT Force the selection of a specific dataset', ' version.', - ' --dataset-part, --force-dataset-part TEXT', - ' Force the selection of a specific dataset', + ' --dataset-part TEXT Force the selection of a specific dataset', ' part.', - ' --username TEXT If not set, search for environment variable', - ' COPERNICUSMARINE_SERVICE_USERNAME, or else', - ' look for configuration files, or else ask', - ' for user input.', - ' --password TEXT If not set, search for environment variable', - ' COPERNICUSMARINE_SERVICE_PASSWORD, or else', - ' look for configuration files, or else ask', - ' for user input.', + ' --username TEXT The username for authentication.', + ' --password TEXT The password for authentication.', ' -v, --variable TEXT Specify dataset variable. Can be used', ' multiple times.', - ' -x, --minimum-longitude, --minimal-longitude FLOAT', - ' Minimum longitude for the subset. The value', - ' will be reduced to the interval [-180; 360[.', - ' -X, --maximum-longitude, --maximal-longitude FLOAT', - ' Maximum longitude for the subset. 
The value', - ' will be reduced to the interval [-180; 360[.', - ' -y, --minimum-latitude, --minimal-latitude FLOAT RANGE', + ' -x, --minimum-longitude FLOAT Minimum longitude for the subset. The value', + ' will be transposed to the interval [-180;', + ' 360[.', + ' -X, --maximum-longitude FLOAT Maximum longitude for the subset. The value', + ' will be transposed to the interval [-180;', + ' 360[.', + ' -y, --minimum-latitude FLOAT RANGE', ' Minimum latitude for the subset. Requires a', - ' float within this range: [-90<=x<=90]', - ' -Y, --maximum-latitude, --maximal-latitude FLOAT RANGE', + ' float from -90 degrees to +90. [-90<=x<=90]', + ' -Y, --maximum-latitude FLOAT RANGE', ' Maximum latitude for the subset. Requires a', - ' float within this range: [-90<=x<=90]', - ' -z, --minimum-depth, --minimal-depth FLOAT RANGE', + ' float from -90 degrees to +90. [-90<=x<=90]', + ' -z, --minimum-depth FLOAT RANGE', ' Minimum depth for the subset. Requires a', - ' float within this range: [x>=0]', - ' -Z, --maximum-depth, --maximal-depth FLOAT RANGE', + ' positive float (or 0). [x>=0]', + ' -Z, --maximum-depth FLOAT RANGE', ' Maximum depth for the subset. Requires a', - ' float within this range: [x>=0]', - ' --vertical-dimension-as-originally-produced BOOLEAN', + ' positive float (or 0). [x>=0]', + ' -V, --vertical-dimension-output [depth|elevation]', ' Consolidate the vertical dimension (the', - ' z-axis) as it is in the dataset originally', - ' produced, named `depth` with descending', - ' positive values. [default: True]', - ' -t, --start-datetime [%Y|%Y-%m-%d|%Y-%m-%dT%H:%M:%S|%Y-%m-%d %H:%M:%S|%Y-%m-%dT%H:%M:%S.%fZ]', - ' The start datetime of the temporal subset.', - ' Caution: encapsulate date with " " to ensure', - ' valid expression for format "%Y-%m-%d', - ' %H:%M:%S".', - ' -T, --end-datetime [%Y|%Y-%m-%d|%Y-%m-%dT%H:%M:%S|%Y-%m-%d %H:%M:%S|%Y-%m-%dT%H:%M:%S.%fZ]', - ' The end datetime of the temporal subset.', - ' Caution: encapsulate date with " " to ensure', - ' valid expression for format "%Y-%m-%d', - ' %H:%M:%S".', - ' --subset-method [nearest|strict]', - ' The subset method when requesting the', - ' dataset. If strict, you can only request', - ' dimension strictly inside the dataset.', + ' z-axis) as requested: depth with descending', + ' positive values, elevation with ascending', + ' positive values. Default is depth.', + ' -t, --start-datetime TEXT The start datetime of the temporal subset.', + ' Supports common format parsed by pendulum (h', + ' ttps://pendulum.eustace.io/docs/#parsing).Ca', + ' ution: encapsulate date with \\xe2\\x80\\x9c \\xe2\\x80\\x9c to ensure', + ' valid expression for format \\xe2\\x80\\x9c%Y-%m-%d', + ' %H:%M:%S\\xe2\\x80\\x9d.', + ' -T, --end-datetime TEXT The end datetime of the temporal subset.', + ' Supports common format parsed by pendulum (h', + ' ttps://pendulum.eustace.io/docs/#parsing).Ca', + ' ution: encapsulate date with \\xe2\\x80\\x9c \\xe2\\x80\\x9c to ensure', + ' valid expression for format \\xe2\\x80\\x9c%Y-%m-%d', + ' %H:%M:%S\\xe2\\x80\\x9d.', + ' --coordinates-selection-method [inside|strict-inside|nearest|outside]', + ' If ``inside``, the selection retrieved will', + ' be inside the requested range. If ``strict-', + ' inside``, the selection retrieved will be', + ' inside the requested range, and an error', + " will be raised if the values don't exist. If", + ' ``nearest``, the extremes closest to the', + ' requested values will be returned. 
If', + ' ``outside``, the extremes will be taken to', + ' contain all the requested interval. The', + ' methods ``inside``, ``nearest`` and', + ' ``outside`` will display a warning if the', + ' request is out of bounds.', ' -o, --output-directory PATH The destination folder for the downloaded', ' files. Default is the current directory.', ' --credentials-file PATH Path to a credentials file if not in its', - ' default directory. Accepts', + ' default directory', + ' (``$HOME/.copernicusmarine``). Accepts', ' .copernicusmarine-credentials / .netrc or', ' _netrc / motuclient-python.ini files.', - ' -f, --output-filename TEXT Concatenate the downloaded data in the given', - ' file name (under the output directory).', + ' -f, --output-filename TEXT Save the downloaded data with the given file', + ' name (under the output directory).', ' --file-format [netcdf|zarr] Format of the downloaded dataset. Default to', - ' NetCDF (.nc).', + " NetCDF '.nc'.", ' --force-download Flag to skip confirmation before download.', ' --overwrite-output-data, --overwrite', ' If specified and if the file already exists', ' on destination, then it will be overwritten', ' instead of creating new one with unique', ' index.', - ' -s, --service, --force-service TEXT', - ' Force download through one of the available', + ' -s, --service TEXT Force download through one of the available', ' services using the service name among', - " [\\'arco-geo-series\\', \\'arco-time-series\\',", - " \\'omi-arco\\', \\'static-arco\\'] or its short name", - " among [\\'geoseries\\', \\'timeseries\\', \\'omi-", - " arco\\', \\'static-arco\\'].", - ' --create-template Option to create a file subset_template.json', - ' in your current directory containing CLI', - ' arguments. If specified, no other action', - ' will be performed.', - ' --request-file PATH Option to pass a file containing CLI', - ' arguments. The file MUST follow the', - " structure of dataclass \\'SubsetRequest\\'. For", - ' more information please refer to the README.', + " ['arco-geo-series', 'arco-time-series',", + " 'omi-arco', 'static-arco'] or its short name", + " among ['arco-geo-series', 'arco-time-", + " series', 'omi-arco', 'static-arco'].", + ' --create-template Option to create a file', + ' _template.json in your current', + ' directory containing the arguments. If', + ' specified, no other action will be', + ' performed.', + ' --request-file PATH Option to pass a file containing the', + ' arguments. For more information please refer', + ' to the documentation or use option', + ' ``--create-template`` from the command line', + ' interface for an example template.', ' --motu-api-request TEXT Option to pass a complete MOTU API request', ' as a string. Caution, user has to replace', - ' double quotes " with single quotes \\\' in the', + " double quotes \\xe2\\x80\\x9c with single quotes ' in the", ' request.', - ' --overwrite-metadata-cache Force to refresh the catalogue by', - ' overwriting the local cache.', - ' \\x08 NOTE: This argument is mutually exclusive', - ' with arguments: [no_metadata_cache].', - ' --no-metadata-cache Bypass the use of cache. 
\\x08', - ' NOTE: This argument is mutually exclusive', - ' with arguments: [overwrite_metadata_cache].', + ' --dry-run If True, runs query without downloading', + ' data.', ' --disable-progress-bar Flag to hide progress bar.', ' --log-level [DEBUG|INFO|WARN|ERROR|CRITICAL|QUIET]', ' Set the details printed to console by the', ' command (based on standard logging library).', - ' --netcdf-compression-enabled Enable compression level 1 to the NetCDF', - ' output file. Use --netcdf-compression-level', - ' option to customize the compression level', ' --netcdf-compression-level INTEGER RANGE', ' Specify a compression level to apply on the', ' NetCDF output file. A value of 0 means no', ' compression, and 9 is the highest level of', - ' compression available [0<=x<=9]', - ' --netcdf3-compatible Enable downloading the dataset in a netCDF 3', + ' compression available. If used as a flag,', + ' the assigned value will be 1. [0<=x<=9]', + ' --netcdf3-compatible Enable downloading the dataset in a netCDF3', ' compatible format.', ' -h, --help Show this message and exit.', '', - ' Examples:', '', - ' copernicusmarine subset', - ' --dataset-id cmems_mod_glo_phy-thetao_anfc_0.083deg_PT6H-i', - ' --variable thetao', - ' --start-datetime 2022-01-01T00:00:00 --end-datetime 2022-12-31T23:59:59', - ' --minimum-longitude -6.17 --maximum-longitude -5.08', - ' --minimum-latitude 35.75 --maximum-latitude 36.30', - ' --minimum-depth 0.0 --maximum-depth 5.0', + ' Examples:', '', - ' copernicusmarine subset -i cmems_mod_glo_phy-thetao_anfc_0.083deg_PT6H-i -v thetao -t 2022-01-01T00:00:00 -T 2022-12-31T23:59:59 -x -6.17 -X -5.08 -y 35.75 -Y 36.30 -z 0.0 -Z 5.0', - "', stderr=b'')", + '', + ' copernicusmarine subset --dataset-id cmems_mod_ibi_phy_my_0.083deg-3D_P1D-m --variable thetao --variable so --start-datetime 2021-01-01 --end-datetime 2021-01-03 --minimum-longitude 0.0 --maximum-longitude 0.1 --minimum-latitude 28.0 --maximum-latitude 28.1 --minimum-depth 1 --maximum-depth 2', + '', + ' Equivalent to:', + '', + '', + ' copernicusmarine subset -i cmems_mod_ibi_phy_my_0.083deg-3D_P1D-m -v thetao -v so -t 2021-01-01 -T 2021-01-03 -x 0.0 -X 0.1 -y 28.0 -Y 28.1 -z 1 -Z 2 ', + '', + ' ', + '", stderr=b\'\')', ]) # --- diff --git a/tests/resources/mock_stac_catalog/marine_data_store_stac_metadata_mock.py b/tests/resources/mock_stac_catalog/marine_data_store_stac_metadata_mock.py index ba1ab2c9..70ae66ed 100644 --- a/tests/resources/mock_stac_catalog/marine_data_store_stac_metadata_mock.py +++ b/tests/resources/mock_stac_catalog/marine_data_store_stac_metadata_mock.py @@ -1,7 +1,7 @@ from typing import Optional from copernicusmarine.catalogue_parser.catalogue_parser import ( - MARINE_DATA_STORE_STAC_BASE_URL, + MARINE_DATA_STORE_STAC_URL, ) from tests.resources.mock_stac_catalog.mock_catalog import MOCK_STAC_CATALOG from tests.resources.mock_stac_catalog.mock_dataset_GLO_glo_phy_cur import ( @@ -22,27 +22,28 @@ from tests.resources.mock_stac_catalog.mock_dataset_NWSHELF_P1M_m_202012 import ( MOCK_DATASET_NWSHELF_P1M_M_202012, ) +from tests.resources.mock_stac_catalog.mock_mds_version import MOCK_MDS_VERSION from tests.resources.mock_stac_catalog.mock_product_GLO import MOCK_PRODUCT_GLO from tests.resources.mock_stac_catalog.mock_product_NWSHELF import ( MOCK_PRODUCT_NWSHELF, ) -BASE_URL = MARINE_DATA_STORE_STAC_BASE_URL +BASE_URL = MARINE_DATA_STORE_STAC_URL -def mocked_stac_aiohttp_get(*args, **kwargs): +def mocked_stac_requests_get(*args, **kwargs): class MockResponse: def __init__(self, json_data: Optional[dict], 
status_code: int):
             self.json_data = json_data
             self.status_code = status_code
 
-        async def json(self) -> Optional[dict]:
+        def json(self) -> Optional[dict]:
             return self.json_data
 
-        async def __aenter__(self):
+        def __enter__(self):
             return self
 
-        async def __aexit__(self, exc_type, exc_val, exc_tb):
+        def __exit__(self, exc_type, exc_val, exc_tb):
             pass
 
     if args[0] == f"{BASE_URL}/catalog.stac.json":
@@ -93,4 +94,9 @@ async def __aexit__(self, exc_type, exc_val, exc_tb):
             f"dataset.stac.json"
         ):
             return MockResponse(MOCK_DATASET_IN_PREP, 200)
+    elif (
+        args[0]
+        == "https://s3.waw3-1.cloudferro.com/mdl-metadata/mdsVersions.json"
+    ):
+        return MockResponse(MOCK_MDS_VERSION, 200)
     return MockResponse(None, 404)
diff --git a/tests/resources/mock_stac_catalog/mock_mds_version.py b/tests/resources/mock_stac_catalog/mock_mds_version.py
new file mode 100644
index 00000000..600c949d
--- /dev/null
+++ b/tests/resources/mock_stac_catalog/mock_mds_version.py
@@ -0,0 +1,21 @@
+MOCK_MDS_VERSION = {
+    "systemVersions": {
+        "mds": "1.0.0",
+        "mds/serverlessNative": "1.0.0",
+        "mds/serverlessArco": "1.0.0",
+        "mds/serverlessArco/meta": "1.0.0",
+        "mds/serverlessArco/data": "1.0.0",
+        "mds/serverlessArco/dense": "1.0.0",
+        "mds/serverlessArco/sparse": "1.0.0",
+        "mds/serverlessArco/static": "1.0.0",
+        "mds/serverlessArco/omis": "1.0.0",
+        "mds/wmts": "1.0.0",
+        "mds/opendap": "1.0.0",
+    },
+    "clientVersions": {
+        "mds": ">=1.2.2",
+        "mds/serverlessNative": ">=1.2.2",
+        "mds/serverlessArco": ">=1.2.2",
+        "mds/serverlessArco/meta": ">=1.2.2",
+    },
+}
diff --git a/tests/test_cf_compliance.py b/tests/test_cf_compliance.py
new file mode 100644
index 00000000..0b222708
--- /dev/null
+++ b/tests/test_cf_compliance.py
@@ -0,0 +1,76 @@
+import json
+
+import xarray
+
+from copernicusmarine import subset
+from tests.test_utils import execute_in_terminal
+
+
+class TestCFCompliance:
+    def test_subset_open(self, tmp_path, snapshot):
+        dataset_id = "cmems_mod_nws_bgc-pft_my_7km-3D-pico_P1M-m"
+        self.if_I_subset_a_dataset(dataset_id, tmp_path, "output_1.nc", "pico")
+        self.then_it_is_cf_compliant(
+            dataset_id, tmp_path, snapshot, "output_1"
+        )
+
+    def test_subset_with_warns(self, tmp_path, snapshot):
+        dataset_id = (
+            "cmems_obs-sst_med_phy-sst_nrt_diurnal-oi-0.0625deg_PT1H-m"
+        )
+        self.if_I_subset_a_dataset(
+            dataset_id,
+            tmp_path,
+            "output_2.nc",
+            "analysed_sst",
+        )
+        self.then_it_is_cf_compliant(
+            dataset_id, tmp_path, snapshot, "output_2"
+        )
+
+    def if_I_subset_a_dataset(
+        self, dataset_id, tmp_path, output_filename, variable
+    ):
+        subset(
+            dataset_id=dataset_id,
+            variables=[variable],
+            output_directory=tmp_path,
+            output_filename=output_filename,
+            start_datetime="2022-01-01T00:00:00",
+            end_datetime="2022-01-05T00:00:00",
+            force_download=True,
+        )
+        assert (tmp_path / output_filename).exists()
+
+    def then_it_is_cf_compliant(
+        self, dataset_id, tmp_path, snapshot, output_filename
+    ):
+        dataset_id = dataset_id
+        dataset = xarray.open_dataset(f"{tmp_path}/{output_filename}.nc")
+        CF_convention = dataset.attrs["Conventions"][-3:]
+        if CF_convention < "1.6":
+            CF_convention = "1.6"
+        command = [
+            "compliance-checker",
+            f"--test=cf:{CF_convention}",
+            f"{tmp_path}/{output_filename}.nc",
+            "-f",
+            "json",
+            "-o",
+            f"{tmp_path}/{output_filename}_checked.json",
+        ]
+        execute_in_terminal(command)
+
+        f = open(f"{tmp_path}/{output_filename}_checked.json")
+        data = json.load(f)
+
+        list_msgs = []
+        for diccionari in data[f"cf:{CF_convention}"]["all_priorities"]:
+            if len(diccionari["msgs"]) > 0:
+                list_msgs.append(diccionari["name"])
+                list_msgs.append(diccionari["msgs"])
+
+        assert dataset_id == snapshot
+        assert data[f"cf:{CF_convention}"]["scored_points"] == snapshot
+        assert data[f"cf:{CF_convention}"]["possible_points"] == snapshot
+        assert list_msgs == snapshot
diff --git a/tests/test_command_line_interface.py b/tests/test_command_line_interface.py
index 743fc606..38ad44a0 100644
--- a/tests/test_command_line_interface.py
+++ b/tests/test_command_line_interface.py
@@ -5,16 +5,14 @@
 import os
 import pathlib
 import re
-import shutil
 from dataclasses import dataclass
 from json import loads
 from pathlib import Path
-from typing import List, Optional, Union
+from typing import List, Literal, Optional, Union
 
-import pytest
 import xarray
 
-from copernicusmarine.catalogue_parser.catalogue_parser import (
+from copernicusmarine.catalogue_parser.models import (
     PART_DEFAULT,
     REGEX_PATTERN_DATE_YYYYMM,
     VERSION_DEFAULT,
@@ -36,17 +34,6 @@ def get_all_files_in_folder_tree(folder: str) -> list[str]:
     return downloaded_files
 
 
-def get_environment_without_crendentials():
-    environment_without_crendentials = os.environ.copy()
-    environment_without_crendentials.pop(
-        "COPERNICUSMARINE_SERVICE_USERNAME", None
-    )
-    environment_without_crendentials.pop(
-        "COPERNICUSMARINE_SERVICE_PASSWORD", None
-    )
-    return environment_without_crendentials
-
-
 def get_file_size(filepath):
     file_path = Path(filepath)
     file_stats = file_path.stat()
@@ -54,22 +41,19 @@ def get_file_size(filepath):
 class TestCommandLineInterface:
-    @pytest.mark.order(1)
-    def test_describe_overwrite_metadata_cache(self):
-        self.when_I_run_copernicus_marine_describe_with_overwrite_cache()
-        self.then_stdout_can_be_load_as_json()
-
     def test_describe_default(self):
         self.when_I_run_copernicus_marine_describe_with_default_arguments()
+        self.then_stdout_can_be_load_as_json()
         self.then_I_can_read_the_default_json()
         self.and_there_are_no_warnings_about_backend_versions()
 
-    def test_describe_including_datasets(self):
+    def test_describe_including_datasets(self, snapshot):
         self.when_I_run_copernicus_marine_describe_including_datasets()
         self.then_I_can_read_it_does_not_contain_weird_symbols()
         self.then_I_can_read_the_json_including_datasets()
         self.then_omi_services_are_not_in_the_catalog()
         self.then_products_from_marine_data_store_catalog_are_available()
+        self.then_datasets_variables_are_correct(snapshot)
         self.then_all_dataset_parts_are_filled()
 
     def test_describe_contains_option(self):
@@ -80,17 +64,9 @@ def test_describe_with_staging_flag(self):
         self.when_I_use_staging_environment_in_debug_logging_level()
         self.then_I_check_that_the_urls_contains_only_dta()
 
-    def when_I_run_copernicus_marine_describe_with_overwrite_cache(self):
-        command = [
-            "copernicusmarine",
-            "describe",
-            "--overwrite-metadata-cache",
-        ]
-        self.output = execute_in_terminal(command)
-
     def when_I_run_copernicus_marine_describe_with_default_arguments(self):
         command = ["copernicusmarine", "describe"]
-        self.output = execute_in_terminal(command)
+        self.output = execute_in_terminal(command, timeout_second=30)
 
     def then_stdout_can_be_load_as_json(self):
         loads(self.output.stdout.decode("utf-8"))
@@ -164,6 +140,38 @@ def then_products_from_marine_data_store_catalog_are_available(self):
         map(lambda x: x in expected_services, expected_dataset_services)
     )
 
+    def then_datasets_variables_are_correct(self, snapshot):
+        expected_product_id = "GLOBAL_MULTIYEAR_PHY_ENS_001_031"
+        expected_dataset_id = "cmems_mod_glo_phy-all_my_0.25deg_P1D-m"
+        wanted_services = [
+            "original-files",
+            "arco-geo-series",
+            "arco-time-series",
+        ]
+        json_result = loads(self.output.stdout)
+        expected_product = list(
+            filter(
+                lambda product: product["product_id"] == expected_product_id,
+                json_result["products"],
+            )
+        )
+        product = expected_product[0]
+        product_datasets = product["datasets"]
+        expected_dataset = list(
+            filter(
+                lambda product: product["dataset_id"] == expected_dataset_id,
+                product_datasets,
+            )
+        )
+        dataset = expected_dataset[0]
+        wanted_services_in_dataset = list(
+            filter(
+                lambda x: x["service_type"]["service_name"] in wanted_services,
+                dataset["versions"][0]["parts"][0]["services"],
+            )
+        )
+        assert snapshot == wanted_services_in_dataset
+
     def then_all_dataset_parts_are_filled(self):
         expected_product_id = "BALTICSEA_ANALYSISFORECAST_BGC_003_007"
         expected_dataset_id = "cmems_mod_bal_bgc_anfc_static"
@@ -222,7 +230,7 @@ def when_I_run_copernicus_marine_describe_with_contains_option(self):
             "--contains",
             f"{filter_token}",
         ]
-        self.output = execute_in_terminal(command)
+        self.output = execute_in_terminal(command, timeout_second=30)
 
     def then_I_can_read_the_filtered_json(self):
         json_result = loads(self.output.stdout)
@@ -261,7 +269,7 @@ def when_I_run_copernicus_marine_describe_including_datasets(self):
             "describe",
             "--include-datasets",
         ]
-        self.output = execute_in_terminal(command)
+        self.output = execute_in_terminal(command, timeout_second=30)
 
     def then_I_can_read_it_does_not_contain_weird_symbols(self):
         assert b"__" not in self.output.stdout
@@ -385,7 +393,6 @@ def when_I_use_staging_environment_in_debug_logging_level(self):
             "--staging",
             "--log-level",
             "DEBUG",
-            "--no-metadata-cache",
         ]
         self.output = execute_in_terminal(command)
 
@@ -454,11 +461,6 @@ def _test_subset_functionnalities(
         self.check_default_subset_request(
             subset_service_to_test.subpath, tmp_path
         )
-        self.check_subset_request_with_dataseturl(
-            subset_service_to_test.subpath,
-            subset_service_to_test.dataset_url,
-            tmp_path,
-        )
         self.check_subset_request_with_dataset_not_in_catalog()
         self.check_subset_request_with_no_subsetting()
 
@@ -476,25 +478,6 @@ def check_default_subset_request(self, function_name, tmp_path):
         self.output = execute_in_terminal(command)
         assert self.output.returncode == 0
 
-    def check_subset_request_with_dataseturl(
-        self, function_name, dataset_url, tmp_path
-    ):
-        folder = pathlib.Path(tmp_path, function_name)
-        if not folder.is_dir():
-            pathlib.Path.mkdir(folder, parents=True)
-
-        self.base_request_dict.pop("--dataset-id")
-        self.base_request_dict["--dataset-url"] = f"{dataset_url}"
-
-        command = [
-            "copernicusmarine",
-            "subset",
-            "--force-download",
-        ] + self.flatten_request_dict(self.base_request_dict)
-
-        self.output = execute_in_terminal(command)
-        assert self.output.returncode == 0
-
     def check_subset_request_with_dataset_not_in_catalog(self):
         self.base_request_dict["--dataset-id"] = "FAKE_ID"
         self.base_request_dict.pop("--dataset-url")
@@ -532,39 +515,7 @@ def check_subset_request_with_no_subsetting(self):
             b"copernicusmarine get --dataset-id "
             + bytes(dataset_id, "utf-8")
         ) in self.output.stderr
 
-    def test_if_dataset_coordinate_valid_minmax_attributes_are_setted(
-        self, tmp_path
-    ):
-        self.base_request_dict = {
-            "--dataset-id": "cmems_mod_glo_phy-so_anfc_0.083deg_P1D-m",
-            "--variable": "so",
-            "--start-datetime": "2024-01-01",
-            "--end-datetime": "2024-01-02",
-            "--minimum-latitude": "0.0",
-            "--maximum-latitude": "0.1",
-            "--minimum-longitude": "0.2",
-            "--maximum-longitude": "0.3",
-            "--minimum-depth": "0.0",
-            "--maximum-depth": "5.0",
-            "-f": "self.output.nc",
-            "--output-directory": tmp_path,
-        }
-
-        self.check_default_subset_request(self.GEOSERIES.subpath, tmp_path)
-
-        dataset_path = pathlib.Path(tmp_path) / "self.output.nc"
-        dataset = xarray.open_dataset(dataset_path)
-
-        assert dataset.latitude.attrs["valid_min"] >= 0
-        assert dataset.latitude.attrs["valid_max"] <= 0.1
-        assert dataset.longitude.attrs["valid_min"] >= 0.2
-        assert dataset.longitude.attrs["valid_max"] <= 0.3
-        assert dataset.depth.attrs["valid_min"] >= 0
-        assert dataset.depth.attrs["valid_max"] <= 5
-        assert dataset.time.attrs["valid_min"] == 648672
-        assert dataset.time.attrs["valid_max"] == 648696
-
-    def test_retention_period_works(self):
+    def test_retention_period_works(self, tmp_path):
         self.command = [
             "copernicusmarine",
             "subset",
@@ -583,6 +534,36 @@
             "--maximum-latitude",
             "48.13780081656672",
             "--force-download",
+            "--output-directory",
+            tmp_path,
+        ]
+
+        self.output = execute_in_terminal(self.command)
+        assert (
+            b"time (time) datetime64[ns] 2023" not in self.output.stderr
+        )
+
+    def test_retention_period_works_when_only_values_in_metadata(
+        self, tmp_path
+    ):
+        self.command = [
+            "copernicusmarine",
+            "subset",
+            "--dataset-id",
+            "cmems_obs-oc_atl_bgc-pp_nrt_l4-multi-1km_P1M",
+            "--variable",
+            "PP",
+            "--minimum-longitude",
+            "-36.29005445972566",
+            "--maximum-longitude",
+            "-35.14832052107781",
+            "--minimum-latitude",
+            "47.122926204435295",
+            "--maximum-latitude",
+            "48.13780081656672",
+            "--force-download",
+            "--output-directory",
+            tmp_path,
         ]
 
         self.output = execute_in_terminal(self.command)
@@ -594,30 +575,19 @@
     # Test on get requests #
     # -------------------------#
 
-    @dataclass(frozen=True)
-    class GetServiceToTest:
-        name: str
-
-    FILES = GetServiceToTest("files")
-
     def test_get_original_files_functionnality(self, tmp_path):
-        self._test_get_functionalities(self.FILES, tmp_path)
+        self._test_get_functionalities(tmp_path)
 
-    def _test_get_functionalities(
-        self, get_service_to_test: GetServiceToTest, tmp_path
-    ):
+    def _test_get_functionalities(self, tmp_path):
         self.base_get_request_dict: dict[str, Optional[Union[str, Path]]] = {
             "--dataset-id": "cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m",
             "--output-directory": str(tmp_path),
             "--no-directories": None,
-            "--service": get_service_to_test.name,
         }
 
-        self.check_default_get_request(get_service_to_test, tmp_path)
+        self.check_default_get_request(tmp_path)
 
-    def check_default_get_request(
-        self, get_service_to_test: GetServiceToTest, tmp_path
-    ):
-        folder = pathlib.Path(tmp_path, get_service_to_test.name)
+    def check_default_get_request(self, tmp_path):
+        folder = pathlib.Path(tmp_path, "files")
         if not folder.is_dir():
             pathlib.Path.mkdir(folder, parents=True)
 
@@ -639,8 +609,6 @@ def test_get_download_s3_without_regex(self, tmp_path):
             "get",
             "-i",
             f"{dataset_id}",
-            "--service",
-            f"{self.FILES.name}",
             "--force-download",
             "--output-directory",
             f"{tmp_path}",
@@ -659,8 +627,6 @@ def test_get_download_s3_with_regex(self, tmp_path):
             "get",
             "-i",
             f"{dataset_id}",
-            "--service",
-            f"{self.FILES.name}",
            "--regex",
            f"{regex}",
            "--force-download",
@@ -684,8 +650,6 @@ def test_files_to_download_are_displayed(self, tmp_path):
             "get",
             "-i",
             f"{dataset_id}",
-            "--service",
-            f"{self.FILES.name}",
             "--regex",
             f"{regex}",
             "--output-directory",
@@ -708,8 +672,6 @@ def test_downloaded_files_are_not_displayed_with_force_download_option(
             "get",
             "-i",
             f"{dataset_id}",
-            "--service",
-            f"{self.FILES.name}",
             "--regex",
             f"{regex}",
             "--force-download",
@@ -723,74 +685,58 @@ def test_downloaded_files_are_not_displayed_with_force_download_option(
             not in self.output.stderr
         )
 
-    def test_subset_output_file_as_netcdf(self, tmp_path):
+    def test_get_download_with_dry_run_option(self, tmp_path):
         dataset_id = "cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m"
-        output_filename = "test_subset_output_file_as_netcdf.nc"
-
         command = [
             "copernicusmarine",
-            "subset",
-            "--dataset-id",
+            "get",
+            "-i",
             f"{dataset_id}",
-            "--variable",
-            "thetao",
-            "--minimum-longitude",
-            "-9.9",
-            "--maximum-longitude",
-            "-9.6",
-            "--minimum-latitude",
-            "33.96",
-            "--maximum-latitude",
-            "34.2",
-            "--minimum-depth",
-            "0.5",
-            "--maximum-depth",
-            "1.6",
-            "-o",
+            "--output-directory",
            f"{tmp_path}",
-            "-f",
-            f"{output_filename}",
-            "--service",
-            f"{self.GEOSERIES.name}",
+            "--dry-run",
            "--force-download",
        ]
 
        self.output = execute_in_terminal(command)
-        is_file = pathlib.Path(tmp_path, output_filename).is_file()
+        # weirdly add \n at the end of the output
+        returned_value = loads(self.output.stdout[:-1])
        assert self.output.returncode == 0
-        assert is_file
-
-    def test_process_is_stopped_when_credentials_are_invalid(self):
-        dataset_id = "cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m"
-
+        assert len(returned_value["files"]) != 0
+        for get_file in returned_value["files"]:
+            assert get_file["output"] is not None
+            assert get_file["size"] is not None
+            assert get_file["url"] is not None
+            assert get_file["last_modified"] is not None
+            assert str(tmp_path) in get_file["output"]
+            assert not os.path.exists(get_file["output"])
+
+    def test_subset_with_dry_run_option(self, tmp_path):
         command = [
             "copernicusmarine",
             "subset",
-            "--username",
-            "toto",
-            "--password",
-            "tutu",
             "--dataset-id",
-            f"{dataset_id}",
+            "cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m",
             "--variable",
             "thetao",
+            "--minimum-longitude",
+            "-9.9",
+            "--maximum-longitude",
+            "-9.6",
             "--force-download",
+            "--dry-run",
+            "-o",
+            f"{tmp_path}",
         ]
-
         self.output = execute_in_terminal(command)
+        assert self.output.returncode == 0
+        returned_value = loads(self.output.stdout[:-1])
+        assert str(tmp_path) in returned_value["output"]
+        assert not os.path.exists(returned_value["output"])
 
-        assert self.output.returncode == 1
-        assert b"Invalid username or password" in self.output.stderr
-
-    def test_login_is_prompt_when_configuration_file_doest_not_exist(
-        self, tmp_path
-    ):
+    def test_subset_output_file_as_netcdf(self, tmp_path):
         dataset_id = "cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m"
-        credentials_file = Path(tmp_path, "i_do_not_exist")
-
-        environment_without_crendentials = (
-            get_environment_without_crendentials()
-        )
+        output_filename = "test_subset_output_file_as_netcdf.nc"
 
         command = [
             "copernicusmarine",
@@ -811,65 +757,19 @@ def test_login_is_prompt_when_configuration_file_doest_not_exist(
             "0.5",
             "--maximum-depth",
             "1.6",
-            "--credentials-file",
-            f"{credentials_file}",
-        ]
-
-        self.output = execute_in_terminal(
-            command, env=environment_without_crendentials
-        )
-        assert self.output.returncode == 1
-        assert b"username:" in self.output.stdout
-
-    def test_login_command(self, tmp_path):
-        self.check_credentials_username_specified_password_prompt(tmp_path)
-
-    def check_credentials_username_specified_password_prompt(self, tmp_path):
-        assert os.getenv("COPERNICUSMARINE_SERVICE_USERNAME") is not None
-        assert os.getenv("COPERNICUSMARINE_SERVICE_PASSWORD") is not None
-
-        environment_without_crendentials = (
-            get_environment_without_crendentials()
-        )
-
-        command = [
-            "copernicusmarine",
-            "subset",
-            "--dataset-id",
-            "cmems_mod_glo_phy-thetao_anfc_0.083deg_P1D-m",
-            "--variable",
-            "thetao",
-            "--start-datetime",
-            "2023-04-26 00:00:00",
-            "--end-datetime",
-            "2023-04-28 23:59:59",
-            "--minimum-longitude",
-            "-9.8",
-            "--maximum-longitude",
-            "-4.8",
-            "--minimum-latitude",
-            "33.9",
-            "--maximum-latitude",
-            "38.0",
-            "--minimum-depth",
-            "9.573",
-            "--maximum-depth",
-            "11.4",
-            "--username",
-            f"{os.getenv('COPERNICUSMARINE_SERVICE_USERNAME')}",
-            "--force-download",
             "-o",
             f"{tmp_path}",
+            "-f",
+            f"{output_filename}",
+            "--service",
+            f"{self.GEOSERIES.name}",
+            "--force-download",
         ]
-        password = os.getenv("COPERNICUSMARINE_SERVICE_PASSWORD")
-        assert password is not None
-        self.output = execute_in_terminal(
-            command,
-            env=environment_without_crendentials,
-            input=bytes(password, "utf-8"),
-        )
-        assert self.output.returncode == 0, self.output.stderr
-        shutil.rmtree(Path(tmp_path))
+
+        self.output = execute_in_terminal(command)
+        is_file = pathlib.Path(tmp_path, output_filename).is_file()
+        assert self.output.returncode == 0
+        assert is_file
 
     def test_get_download_s3_with_wildcard_filter(self, tmp_path):
         filter = "*_200[123]*.nc"
@@ -879,8 +779,6 @@ def test_get_download_s3_with_wildcard_filter(self, tmp_path):
             "get",
             "-i",
             f"{dataset_id}",
-            "--service",
-            f"{self.FILES.name}",
             "--filter",
             f"{filter}",
             "--force-download",
@@ -905,8 +803,6 @@ def test_get_download_s3_with_wildcard_filter_and_regex(self, tmp_path):
             "get",
             "-i",
             f"{dataset_id}",
-            "--service",
-            f"{self.FILES.name}",
             "--filter",
             f"{filter}",
             "--regex",
@@ -943,34 +839,8 @@ def test_get_download_no_files(self):
         assert b"No data to download" in self.output.stderr
         assert self.output.returncode == 0
 
-    def test_login(self, tmp_path):
-        non_existing_directory = Path(tmp_path, "i_dont_exist")
-        command = [
-            "copernicusmarine",
-            "login",
-            "--overwrite-configuration-file",
-            "--configuration-file-directory",
-            f"{non_existing_directory}",
-            "--username",
-            f"{os.getenv('COPERNICUSMARINE_SERVICE_USERNAME')}",
-            "--password",
-            f"{os.getenv('COPERNICUSMARINE_SERVICE_PASSWORD')}",
-        ]
-
-        self.output = execute_in_terminal(command)
-        assert self.output.returncode == 0
-        assert non_existing_directory.is_dir()
-
-        command_with_skip = [
-            "copernicusmarine",
-            "login",
-            "--configuration-file-directory",
-            f"{non_existing_directory}",
-            "--skip-if-user-logged-in",
-        ]
-        output_with_skip = execute_in_terminal(command_with_skip)
-        assert output_with_skip.returncode == 0
-
+    # TODO: separate tests for each service
+    # SUBSET, GET, DESCRIBE
     def test_subset_error_when_forced_service_does_not_exist(self):
         self.when_I_run_copernicus_marine_subset_forcing_a_service_not_available()
         self.then_I_got_a_clear_output_with_available_service_for_subset()
@@ -998,50 +868,10 @@ def then_I_got_a_clear_output_with_available_service_for_subset(self):
             b"'arco-time-series', 'timeseries', 'omi-arco', 'static-arco']"
         ) in self.output.stderr
 
-    def test_mutual_exclusivity_of_cache_options_for_describe(self):
-        self.when_I_run_copernicus_marine_command_with_both_cache_options(
-            "describe"
-        )
-        self.then_I_got_an_error_regarding_mutual_exclusivity()
-
-    def test_mutual_exclusivity_of_cache_options_for_get(self):
-        self.when_I_run_copernicus_marine_command_with_both_cache_options(
-            "get"
-        )
-        self.then_I_got_an_error_regarding_mutual_exclusivity()
-
-    def test_mutual_exclusivity_of_cache_options_for_subset(self):
-        self.when_I_run_copernicus_marine_command_with_both_cache_options(
-            "subset"
-        )
-        self.then_I_got_an_error_regarding_mutual_exclusivity()
-
-    def when_I_run_copernicus_marine_command_with_both_cache_options(
-        self, command_option
-    ):
-        command = [
-            "copernicusmarine",
-            f"{command_option}",
-            "--overwrite-metadata-cache",
-            "--no-metadata-cache",
-        ]
-        self.output = execute_in_terminal(command)
-
-    def then_I_got_an_error_regarding_mutual_exclusivity(self):
-        assert self.output.returncode == 2
-        assert self.output.stdout == b""
-        assert self.output.stderr == (
-            b"Error: Illegal usage: `overwrite-metadata-cache` is mutually "
-            b"exclusive with arguments `no-metadata-cache`.\n"
-        )
-
-    def test_describe_without_using_cache(self):
-        command = ["copernicusmarine", "describe", "--no-metadata-cache"]
-        self.output = execute_in_terminal(command=command, timeout_second=30)
-        assert self.output.returncode == 0
-
     def when_I_request_subset_dataset_with_zarr_service(
-        self, output_path, vertical_dimension_as_originally_produced
+        self,
+        output_path,
+        vertical_dimension_output: Literal["depth", "elevation"] = "depth",
     ):
         command = [
             "copernicusmarine",
@@ -1061,13 +891,13 @@
             "-Y",
             "40",
             "-z",
-            "0",
+            "1",
             "-Z",
             "10",
             "-v",
             "thetao",
-            "--vertical-dimension-as-originally-produced",
-            f"{vertical_dimension_as_originally_produced}",
+            "--vertical-dimension-output",
+            f"{vertical_dimension_output}",
             "--service",
             "arco-time-series",
             "-o",
@@ -1098,24 +928,27 @@ def then_I_have_correct_attribute_value(
     ):
         filepath = pathlib.Path(output_path, "data.zarr")
         dataset = xarray.open_dataset(filepath, engine="zarr")
+        assert dataset[dimention_name].attrs["standard_name"] == dimention_name
         assert dataset[dimention_name].attrs["positive"] == attribute_value
 
     def test_conversion_between_elevation_and_depth(self, tmp_path):
-        self.when_I_request_subset_dataset_with_zarr_service(tmp_path, True)
+        self.when_I_request_subset_dataset_with_zarr_service(tmp_path, "depth")
         self.then_I_have_correct_sign_for_depth_coordinates_values(
             tmp_path, "positive"
         )
         self.then_I_have_correct_attribute_value(tmp_path, "depth", "down")
 
     def test_force_no_conversion_between_elevation_and_depth(self, tmp_path):
-        self.when_I_request_subset_dataset_with_zarr_service(tmp_path, False)
+        self.when_I_request_subset_dataset_with_zarr_service(
+            tmp_path, "elevation"
+        )
         self.then_I_have_correct_sign_for_depth_coordinates_values(
             tmp_path, "negative"
        )
         self.then_I_have_correct_attribute_value(tmp_path, "elevation", "up")
 
     def when_I_run_copernicus_marine_command_using_no_directories_option(
-        self, tmp_path, force_service: GetServiceToTest, output_directory=None
+        self, tmp_path, output_directory=None
     ):
         download_folder = (
             tmp_path
@@ -1135,8 +968,6 @@ def when_I_run_copernicus_marine_command_using_no_directories_option(
             "--force-download",
             "--output-directory",
             f"{download_folder}",
-            "--service",
-            f"{force_service.name}",
             "--no-directories",
         ]
 
@@ -1166,11 +997,11 @@ def then_files_are_created_without_tree_folder(
 
     def test_no_directories_option_original_files(self, tmp_path):
         self.when_I_run_copernicus_marine_command_using_no_directories_option(
-            tmp_path, force_service=self.FILES
+            tmp_path
        )
         self.then_files_are_created_without_tree_folder(tmp_path)
         self.when_I_run_copernicus_marine_command_using_no_directories_option(
-            tmp_path, force_service=self.FILES, output_directory="test"
+            tmp_path, output_directory="test"
        )
         self.then_files_are_created_without_tree_folder(
             tmp_path, output_directory="test"
@@ -1182,8 +1013,6 @@ def test_default_prompt_for_get_command(self, tmp_path):
             "get",
             "-i",
             "cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m",
- "--service", - f"{self.FILES.name}", "-nd", "--filter", "*20120101_20121231_R20221101_RE01*", @@ -1244,40 +1073,6 @@ def then_I_can_see_the_arco_geo_series_service_is_choosen(self): in self.output.stderr ) - def test_subset_with_dataset_id_and_url(self): - command = [ - "copernicusmarine", - "subset", - "-i", - "cmems_mod_arc_bgc_anfc_ecosmo_P1M-m", - "-u", - "https://nrt.cmems-du.eu/thredds/dodsC/METOFFICE-GLO-SST-L4-NRT-OBS-SST-V2", - "--variable", - "thetao", - ] - - self.output = execute_in_terminal(command) - - assert self.output.returncode == 1 - assert ( - b"Must specify only one of 'dataset_url' or 'dataset_id' options" - ) in self.output.stderr - - def test_no_traceback_is_printed_on_dataset_url_error(self): - command = [ - "copernicusmarine", - "get", - "--dataset-url", - "https://s3.waw3-1.cloudferro.com/mdl-arco-time-013/arco/" - "GLOBAL_ANALYSISFORECAST_PHY_XXXXXXX/" - "cmems_mod_glo_phy_anfc_0.083deg_P1D-m/2023", - ] - - self.output = execute_in_terminal(command) - - assert self.output.returncode == 1 - assert b"Traceback" not in self.output.stderr - def test_get_2023_08_original_files(self): command = [ "copernicusmarine", @@ -1325,20 +1120,6 @@ def test_subset_with_chunking(self, tmp_path): assert self.output.returncode == 0 - def test_dataset_url_suffix_path_are_used_as_filter(self): - command = [ - "copernicusmarine", - "get", - "--dataset-url", - "https://s3.waw3-1.cloudferro.com/mdl-native-14/native/" - "GLOBAL_ANALYSISFORECAST_PHY_001_024/" - "cmems_mod_glo_phy_anfc_0.083deg_P1D-m_202406/2023/11", - ] - - self.output = execute_in_terminal(command) - - assert b"Printed 20 out of 30 files" in self.output.stderr - def test_short_option_for_copernicus_marine_command_helper(self): short_option_command = [ "copernicusmarine", @@ -1375,7 +1156,11 @@ def test_short_option_for_copernicus_marine_subcommand_helper(self): self.short_option_output.stderr == self.long_option_output.stderr ) - def test_subset_template_creation(self): + def test_subset_create_template(self): + self.when_created_is_created() + self.and_it_runs_correctly() + + def when_created_is_created(self): command = ["copernicusmarine", "subset", "--create-template"] self.output = execute_in_terminal(command) @@ -1386,6 +1171,19 @@ def test_subset_template_creation(self): ) assert Path("subset_template.json").is_file() + def and_it_runs_correctly(self): + command = [ + "copernicusmarine", + "subset", + "--force-download", + "--request-file", + "./subset_template.json", + ] + + self.output = execute_in_terminal(command) + + assert self.output.returncode == 0 + def test_get_template_creation(self): command = ["copernicusmarine", "get", "--create-template"] @@ -1459,7 +1257,7 @@ def then_I_can_read_copernicusmarine_version_in_the_dataset_attributes( def test_copernicusmarine_version_in_dataset_attributes_with_arco( self, tmp_path ): - self.when_I_request_subset_dataset_with_zarr_service(tmp_path, True) + self.when_I_request_subset_dataset_with_zarr_service(tmp_path) self.then_I_can_read_copernicusmarine_version_in_the_dataset_attributes( tmp_path / "data.zarr" ) @@ -1628,7 +1426,7 @@ def then_I_can_read_dataset_size(self): def test_dataset_size_is_displayed_when_downloading_with_arco_service( self, tmp_path ): - self.when_I_request_subset_dataset_with_zarr_service(tmp_path, True) + self.when_I_request_subset_dataset_with_zarr_service(tmp_path) self.then_I_can_read_dataset_size() def test_dataset_has_always_every_dimensions(self, tmp_path): @@ -1655,9 +1453,9 @@ def test_dataset_has_always_every_dimensions(self, 
tmp_path): "-T", "2023-11-20 00:00:00", "-z", - "0", + "0.5", "-Z", - "0", + "0.5", "-o", f"{tmp_path}", "-f", @@ -1676,66 +1474,13 @@ def test_dataset_has_always_every_dimensions(self, tmp_path): == 4 ) - def when_I_request_a_dataset_with_subset_method_option( - self, subset_method - ): - command = [ - "copernicusmarine", - "subset", - "-i", - "med-hcmr-wav-rean-h", - "-x", - "-19", - "-X", - "-17", - "-y", - "38.007", - "-Y", - "38.028", - "-t", - "1993-01-01T00:00:00", - "-T", - "1993-01-01T06:00:00", - "-v", - "VHM0", - "--force-download", - "--subset-method", - f"{subset_method}", - ] - - self.output = execute_in_terminal(command) - - def then_I_can_read_an_error_in_stdout(self): - assert self.output.returncode == 1 - assert b"ERROR" in self.output.stderr - assert ( - b"Some or all of your subset selection [-19.0, -17.0] for " - b"the longitude dimension exceed the dataset coordinates" - ) in self.output.stderr - - def then_I_can_read_a_warning_in_stdout(self): - assert self.output.returncode == 0 - assert b"WARNING" in self.output.stderr - assert ( - b"Some or all of your subset selection [-19.0, -17.0] for " - b"the longitude dimension exceed the dataset coordinates" - ) in self.output.stderr - - def test_subset_strict_method(self): - self.when_I_request_a_dataset_with_subset_method_option("strict") - self.then_I_can_read_an_error_in_stdout() - - def test_subset_nearest_method(self): - self.when_I_request_a_dataset_with_subset_method_option("nearest") - self.then_I_can_read_a_warning_in_stdout() - def test_netcdf_compression_option(self, tmp_path): filename_without_option = "without_option.nc" filename_with_option = "with_option.nc" filename_zarr_without_option = "filename_without_option.zarr" filename_zarr_with_option = "filename_with_option.zarr" - netcdf_compression_option = "--netcdf-compression-enabled" + netcdf_compression_option = "--netcdf-compression-level" base_command = [ "copernicusmarine", @@ -1871,7 +1616,6 @@ def test_subset_dataset_part_option(self, tmp_path): assert self.output.returncode == 0 def test_netcdf_compression_level(self, tmp_path): - netcdf_compression_enabled_option = "--netcdf-compression-enabled" forced_comp_level = 4 base_command = [ @@ -1904,14 +1648,10 @@ def test_netcdf_compression_level(self, tmp_path): f"{forced_comp_level}", ] - output_without_netcdf_compression_enabled = execute_in_terminal( - base_command - ) output_with_netcdf_compression_enabled = execute_in_terminal( - base_command + [netcdf_compression_enabled_option] + base_command ) - assert output_without_netcdf_compression_enabled.returncode != 0 assert output_with_netcdf_compression_enabled.returncode == 0 filepath = Path(tmp_path / "data.nc") @@ -1923,49 +1663,77 @@ def test_netcdf_compression_level(self, tmp_path): assert dataset.uo.encoding["contiguous"] is False assert dataset.uo.encoding["shuffle"] is True - def test_that_cache_folder_isnt_created_when_no_metadata_cache_option_was_provided( - self, tmp_path + def test_subset_approximation_of_data_that_needs_to_be_downloaded( + self, ): - dataset_id = "cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m" - output_filename = "test_subset_output_file_as_netcdf.nc" - cache_directory = f"{tmp_path}" - - os.environ["COPERNICUSMARINE_CACHE_DIRECTORY"] = cache_directory - command = [ "copernicusmarine", "subset", - "--dataset-id", - f"{dataset_id}", - "--variable", + "-i", + "cmems_mod_glo_phy-thetao_anfc_0.083deg_P1D-m", + "-v", "thetao", - "--minimum-longitude", - "-9.9", - "--maximum-longitude", - "-9.6", - "--minimum-latitude", - "33.96", - 
"--maximum-latitude", - "34.2", - "--minimum-depth", - "0.5", - "--maximum-depth", - "1.6", - "-o", - f"{tmp_path}", - "-f", - f"{output_filename}", - "--force-download", + "-x", + "-100.0", + "-X", + "-70.0", + "-y", + "-80.0", + "-Y", + "-65.0", + "-t", + "2023-03-20", + "-T", + "2023-03-20", ] + self.output = execute_in_terminal(command, input=b"n") + assert ( + b"Estimated size of the data that needs" + b" to be downloaded to obtain the result: 200 MB" + in self.output.stderr + ) - execute_in_terminal(command + ["--no-metadata-cache"]) - cache_path = Path(tmp_path) / Path(".copernicusmarine") / Path("cache") - assert cache_path.is_dir() is False - - execute_in_terminal(command) - assert cache_path.is_dir() is True - - del os.environ["COPERNICUSMARINE_CACHE_DIRECTORY"] + def test_subset_approximation_of_big_data_that_needs_to_be_downloaded( + self, + ): + command = [ + "copernicusmarine", + "subset", + "-i", + "cmems_mod_glo_phy-all_my_0.25deg_P1D-m", + "-v", + "thetao_oras", + "-v", + "uo_oras", + "-v", + "vo_oras", + "-v", + "so_oras", + "-v", + "zos_oras", + "-x", + "50", + "-X", + "110", + "-y", + "-10.0", + "-Y", + "30.0", + "-t", + "2010-03-01T00:00:00", + "-T", + "2010-06-30T00:00:00", + "-z", + "0.5057600140571594", + "-Z", + "500", + ] + self.output = execute_in_terminal(command, input=b"n") + assert ( + b"Estimated size of the data that needs" + b" to be downloaded to obtain the result: 71692 MB" + in self.output.stderr + ) def test_file_list_filter(self, tmp_path): dataset_id = "cmems_obs-sl_glo_phy-ssh_nrt_allsat-l4-duacs-0.25deg_P1D" @@ -1974,8 +1742,6 @@ def test_file_list_filter(self, tmp_path): "get", "-i", f"{dataset_id}", - "--service", - f"{self.FILES.name}", "--file-list", "./tests/resources/file_list_examples/file_list_example.txt", "--force-download", @@ -2008,11 +1774,10 @@ def test_get_download_file_list(self, tmp_path): "get", "-i", f"{dataset_id}", - "--service", - "files", "--regex", f"{regex}", - "--download-file-list", + "--create-file-list", + "files_to_download.txt", "--output-directory", f"{tmp_path}", ] @@ -2047,8 +1812,6 @@ def test_last_modified_date_is_set_with_s3(self, tmp_path): "--force-download", "--filter", "*2022053112000*", - "--force-service", - "original-files", "--output-directory", f"{tmp_path}", "--no-directories", @@ -2106,3 +1869,251 @@ def test_netcdf3_option(self, tmp_path): ) assert output_netcdf_format.returncode == 0 assert output_netcdf_format.stdout == b"classic\n" + + def test_that_requested_interval_fully_included_with_coords_sel_method_outside( + self, tmp_path + ): + output_filename = "output.nc" + min_longitude = 0.01 + max_longitude = 1.55 + min_latitude = 0.01 + max_latitude = 1.1 + min_depth = 30.5 + max_depth = 50.0 + start_datetime = "2023-12-01T01:00:00" + end_datetime = "2023-12-12T01:00:00" + command = [ + "copernicusmarine", + "subset", + "--dataset-id", + "cmems_mod_glo_phy-thetao_anfc_0.083deg_P1D-m", + "--variable", + "thetao", + "--minimum-longitude", + f"{min_longitude}", + "--maximum-longitude", + f"{max_longitude}", + "--minimum-latitude", + f"{min_latitude}", + "--maximum-latitude", + f"{max_latitude}", + "--start-datetime", + f"{start_datetime}", + "--end-datetime", + f"{end_datetime}", + "--minimum-depth", + f"{min_depth}", + "--maximum-depth", + f"{max_depth}", + "--coordinates-selection-method", + "outside", + "-o", + f"{tmp_path}", + "-f", + f"{output_filename}", + "--force-download", + ] + output = execute_in_terminal(command) + assert output.returncode == 0 + + dataset = 
xarray.open_dataset(Path(tmp_path, output_filename)) + assert dataset.longitude.values.min() <= min_longitude + assert dataset.longitude.values.max() >= max_longitude + assert dataset.latitude.values.min() <= min_latitude + assert dataset.latitude.values.max() >= max_latitude + assert dataset.depth.values.min() <= min_depth + assert dataset.depth.values.max() >= max_depth + assert datetime.datetime.strptime( + str(dataset.time.values.min()), "%Y-%m-%dT%H:%M:%S.000%f" + ) <= datetime.datetime.strptime(start_datetime, "%Y-%m-%dT%H:%M:%S") + assert datetime.datetime.strptime( + str(dataset.time.values.max()), "%Y-%m-%dT%H:%M:%S.000%f" + ) >= datetime.datetime.strptime(end_datetime, "%Y-%m-%dT%H:%M:%S") + + def test_that_requested_interval_is_correct_with_coords_sel_method_inside( + self, tmp_path + ): + output_filename = "output.nc" + min_longitude = 0.01 + max_longitude = 1.567 + min_latitude = 0.013 + max_latitude = 1.123 + min_depth = 30.554 + max_depth = 50.023 + start_datetime = "2023-12-01T01:00:23" + end_datetime = "2023-12-12T01:10:03" + command = [ + "copernicusmarine", + "subset", + "--dataset-id", + "cmems_mod_glo_phy-thetao_anfc_0.083deg_P1D-m", + "--variable", + "thetao", + "--minimum-longitude", + f"{min_longitude}", + "--maximum-longitude", + f"{max_longitude}", + "--minimum-latitude", + f"{min_latitude}", + "--maximum-latitude", + f"{max_latitude}", + "--start-datetime", + f"{start_datetime}", + "--end-datetime", + f"{end_datetime}", + "--minimum-depth", + f"{min_depth}", + "--maximum-depth", + f"{max_depth}", + "--coordinates-selection-method", + "inside", + "-o", + f"{tmp_path}", + "-f", + f"{output_filename}", + "--force-download", + ] + output = execute_in_terminal(command) + assert output.returncode == 0 + + dataset = xarray.open_dataset(Path(tmp_path, output_filename)) + assert dataset.longitude.values.min() >= min_longitude + assert dataset.longitude.values.max() <= max_longitude + assert dataset.latitude.values.min() >= min_latitude + assert dataset.latitude.values.max() <= max_latitude + assert dataset.depth.values.min() >= min_depth + assert dataset.depth.values.max() <= max_depth + assert datetime.datetime.strptime( + str(dataset.time.values.min()), "%Y-%m-%dT%H:%M:%S.000%f" + ) >= datetime.datetime.strptime(start_datetime, "%Y-%m-%dT%H:%M:%S") + assert datetime.datetime.strptime( + str(dataset.time.values.max()), "%Y-%m-%dT%H:%M:%S.000%f" + ) <= datetime.datetime.strptime(end_datetime, "%Y-%m-%dT%H:%M:%S") + + def test_that_requested_interval_is_correct_with_coords_sel_method_nearest( + self, tmp_path + ): + output_filename = "output.nc" + min_longitude = 0.08 + max_longitude = 1.567 + min_latitude = 0.013 + max_latitude = 1.123 + min_depth = 30.554 + max_depth = 50.023 + start_datetime = "2023-01-01T00:00:00" + end_datetime = "2023-01-03T23:04:00" + command = [ + "copernicusmarine", + "subset", + "--dataset-id", + "cmems_mod_glo_phy-thetao_anfc_0.083deg_P1D-m", + "--variable", + "thetao", + "--minimum-longitude", + f"{min_longitude}", + "--maximum-longitude", + f"{max_longitude}", + "--minimum-latitude", + f"{min_latitude}", + "--maximum-latitude", + f"{max_latitude}", + "--start-datetime", + f"{start_datetime}", + "--end-datetime", + f"{end_datetime}", + "--minimum-depth", + f"{min_depth}", + "--maximum-depth", + f"{max_depth}", + "--coordinates-selection-method", + "nearest", + "-o", + f"{tmp_path}", + "-f", + f"{output_filename}", + "--force-download", + ] + output = execute_in_terminal(command) + assert output.returncode == 0 + + dataset = 
xarray.open_dataset(Path(tmp_path, output_filename)) + + assert dataset.longitude.values.min() == 0.083343505859375 + assert dataset.longitude.max().values == 1.583343505859375 + assert dataset.latitude.values.min() == 0.0 + assert dataset.latitude.values.max() == 1.0833358764648438 + assert dataset.depth.values.min() == 29.444730758666992 + assert dataset.depth.values.max() == 47.37369155883789 + assert datetime.datetime.strptime( + str(dataset.time.values.min()), "%Y-%m-%dT%H:%M:%S.000%f" + ) == datetime.datetime.strptime("2023-01-01", "%Y-%m-%d") + assert datetime.datetime.strptime( + str(dataset.time.values.max()), "%Y-%m-%dT%H:%M:%S.000%f" + ) == datetime.datetime.strptime("2023-01-04", "%Y-%m-%d") + + def test_coordinates_selection_method_outside_w_elevation(self, tmp_path): + """dataset characteristics: + * depth (depth) float32 500B 1.018 3.166 5.465 ... 4.062e+03 4.153e+03 + * latitude (latitude) float32 2kB 30.19 30.23 30.27 ... 45.9 45.94 45.98 + * longitude (longitude) float32 4kB -5.542 -5.5 -5.458 ... 36.21 36.25 36.29 + * time (time) datetime64[ns] 14kB 2020-01-01 2020-01-02 ... 2024-09-13 + """ + output_filename = "output.nc" + min_longitude = -6 + max_longitude = -5 + min_latitude = 40 + max_latitude = 50 + min_depth = 1.1 + max_depth = 2.3 + start_datetime = "2023-01-01T00:00:00" + end_datetime = "2023-01-03T23:04:00" + command = [ + "copernicusmarine", + "subset", + "--dataset-id", + "cmems_mod_med_bgc-bio_anfc_4.2km_P1D-m", + "--variable", + "nppv", + "--minimum-longitude", + f"{min_longitude}", + "--maximum-longitude", + f"{max_longitude}", + "--minimum-latitude", + f"{min_latitude}", + "--maximum-latitude", + f"{max_latitude}", + "--start-datetime", + f"{start_datetime}", + "--end-datetime", + f"{end_datetime}", + "--minimum-depth", + f"{min_depth}", + "--maximum-depth", + f"{max_depth}", + "--coordinates-selection-method", + "outside", + "--vertical-dimension-output", + "elevation", + "-o", + f"{tmp_path}", + "-f", + f"{output_filename}", + "--force-download", + ] + output = execute_in_terminal(command) + assert output.returncode == 0 + + dataset = xarray.open_dataset(Path(tmp_path, output_filename)) + + assert dataset.longitude.values.min() <= -5.5416 # dataset limit + assert dataset.longitude.max().values >= -5.0 # our limit + assert dataset.latitude.values.min() <= 40 # our limit + assert dataset.latitude.values.max() >= 45.9791 # dataset limit + assert dataset.elevation.values.max() >= -1.01823665 # dataset limit + assert dataset.elevation.values.min() <= -2.3 # our limit + assert datetime.datetime.strptime( + str(dataset.time.values.min()), "%Y-%m-%dT%H:%M:%S.000%f" + ) <= datetime.datetime.strptime("2023-01-01", "%Y-%m-%d") + assert datetime.datetime.strptime( + str(dataset.time.values.max()), "%Y-%m-%dT%H:%M:%S.000%f" + ) >= datetime.datetime.strptime("2023-01-03", "%Y-%m-%d") diff --git a/tests/test_command_line_interface_nearest_layer_subset.py b/tests/test_command_line_interface_nearest_layer_subset.py index 8a104f91..026d6fb7 100644 --- a/tests/test_command_line_interface_nearest_layer_subset.py +++ b/tests/test_command_line_interface_nearest_layer_subset.py @@ -35,7 +35,10 @@ def _nearest_layer_subset( SUBSET_NEAREST_LAYER_OPTIONS["requested_depth"], ) if same_depth - else (0.0, 50.0) + else ( + 0.0, + 50.0, + ) ) minimum_datetime, maximum_datetime = ( ( @@ -109,9 +112,9 @@ def test_subset_same_depth_surface_zarr(self, tmp_path): "--variable", "thetao", "--minimum-depth", - "0", + "0.5", "--maximum-depth", - "0", + "0.5", "--start-datetime", "2023-04-26 
00:00:00", "--end-datetime", @@ -189,9 +192,9 @@ def test_subset_same_depth_with_vertical_dimension_as_originally_produced( "--maximum-latitude", "34.2", "--minimum-depth", - "0", + "0.5", "--maximum-depth", - "0", + "0.5", "--output-directory", f"{tmp_path}", "--output-filename", @@ -222,12 +225,12 @@ def test_subset_same_datetime_zarr(self, tmp_path): same_datetime=True, ) self.output = execute_in_terminal(command) + assert self.output.returncode == 0 dataset = xarray.open_dataset(pathlib.Path(tmp_path, output_filename)) min_datetime = dataset.time.values.min() max_datetime = dataset.time.values.max() - assert self.output.returncode == 0 assert dataset.time.size == 1 assert ( min_datetime == SUBSET_NEAREST_LAYER_OPTIONS["expected_datetime"] diff --git a/tests/test_dataset_part_selection.py b/tests/test_dataset_part_selection.py index 48a021de..1020fcd6 100644 --- a/tests/test_dataset_part_selection.py +++ b/tests/test_dataset_part_selection.py @@ -3,23 +3,6 @@ class TestDatasetPartSelection: - def test_get_when_force_files_no_part_raises_error(self): - command = [ - "copernicusmarine", - "get", - "--dataset-id", - "cmems_obs-ins_arc_phybgcwav_mynrt_na_irr", - "--service", - "files", - ] - - self.output = execute_in_terminal(command) - - assert ( - b"When dataset has multiple parts and using 'files' service" - in self.output.stderr - ) - def test_get_when_dataset_part_is_specified(self): command = [ "copernicusmarine", @@ -28,8 +11,6 @@ def test_get_when_dataset_part_is_specified(self): "cmems_obs-ins_arc_phybgcwav_mynrt_na_irr", "--dataset-part", "history", - "--service", - "files", ] self.output = execute_in_terminal(command) @@ -51,8 +32,6 @@ def test_get_when_dataset_specified_part_does_not_exist(self): "cmems_obs-ins_arc_phybgcwav_mynrt_na_irr", "--dataset-part", "default", - "--service", - "files", ] self.output = execute_in_terminal(command) @@ -68,7 +47,6 @@ def test_dataset_part_is_specifiable_in_python_with_get(self, caplog): copernicusmarine.get( dataset_id="cmems_obs-ins_arc_phybgcwav_mynrt_na_irr", dataset_part="history", - service="files", ) except OSError: pass diff --git a/tests/test_dataset_version_selection.py b/tests/test_dataset_version_selection.py index 93b4109a..0fb4075f 100644 --- a/tests/test_dataset_version_selection.py +++ b/tests/test_dataset_version_selection.py @@ -89,8 +89,6 @@ def test_get_when_dataset_specified_version_does_not_exist_with_forced_service( "CERSAT-GLO-SEAICE_6DAYS_DRIFT_QUICKSCAT_RAN-OBS_FULL_TIME_SERIE", "--dataset-version", "default", - "--service", - "files", ] self.output = execute_in_terminal(command) diff --git a/tests/test_deprecated_options.py b/tests/test_deprecated_options.py index 3ebf42b9..cdc8b582 100644 --- a/tests/test_deprecated_options.py +++ b/tests/test_deprecated_options.py @@ -1,89 +1,2 @@ -from copernicusmarine import describe, open_dataset -from tests.test_utils import execute_in_terminal - - class TestDeprecatedOptions: - def test_get_command_line_works_with_deprecated_options( - self, - ): - command = [ - "copernicusmarine", - "get", - "--dataset-id", - "cmems_obs-ins_glo_phybgcwav_mynrt_na_irr", - "--force-service", - "files", - "--force-dataset-version", - "202311", - "--force-dataset-part", - "latest", - ] - self.output = execute_in_terminal(command) - assert b"Downloading" in self.output.stderr - - def test_subset_command_line_works_with_deprecated_options( - self, - ): - command = [ - "copernicusmarine", - "subset", - "--dataset-id", - "cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m", - "--variable", - "thetao", - 
"--minimal-longitude", - "-9.9", - "--maximal-longitude", - "-9.6", - "--minimal-latitude", - "33.96", - "--maximal-latitude", - "34.2", - "--force-service", - "arco-time-series", - "--force-dataset-version", - "202211", - "--force-dataset-part", - "default", - ] - self.output = execute_in_terminal(command) - assert b"Downloading" in self.output.stderr - - def test_get_python_works_and_shows_preferred_options_over_deprecated( - self, - ): - dataset = open_dataset( - dataset_id="cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m", - variables=["thetao"], - minimal_longitude=-9.9, - maximal_longitude=-9.6, - minimal_latitude=33.96, - maximal_latitude=34.2, - force_service="arco-time-series", - force_dataset_version="202211", - force_dataset_part="default", - ) - assert dataset - - def test_describe_include_all_dataset_versions_deprecated(self): - command = [ - "copernicusmarine", - "describe", - "--contains", - "METOFFICE-GLO-SST-L4-NRT-OBS-SST-V2", - "--include-all-versions", - ] - self.output = execute_in_terminal(command) - assert b"WARNING" in self.output.stderr - assert ( - b"'--include-all-versions' has been deprecated, " - b"use '--include-versions' instead" - ) in self.output.stderr - assert self.output.returncode == 0 - - def test_describe_include_all_dataset_versions_python_interface(self): - describe_result = describe( - contains=["lkshdflkhsdlfksdflhh"], - include_all_versions=True, - ) - assert describe_result == {} + pass diff --git a/tests/test_describe_released_date.py b/tests/test_describe_released_date.py index b20044f5..2f507cb9 100644 --- a/tests/test_describe_released_date.py +++ b/tests/test_describe_released_date.py @@ -2,14 +2,14 @@ from copernicusmarine import describe from tests.resources.mock_stac_catalog.marine_data_store_stac_metadata_mock import ( - mocked_stac_aiohttp_get, + mocked_stac_requests_get, ) class TestDescribe: @mock.patch( - "aiohttp.ClientSession.get", - side_effect=mocked_stac_aiohttp_get, + "requests.Session.get", + side_effect=mocked_stac_requests_get, ) def when_I_describe_the_marine_data_store( self, @@ -17,7 +17,6 @@ def when_I_describe_the_marine_data_store( include_versions=False, ): return describe( - no_metadata_cache=True, include_versions=include_versions, include_datasets=True, ) diff --git a/tests/test_documentation.py b/tests/test_documentation.py new file mode 100644 index 00000000..27a4cb6e --- /dev/null +++ b/tests/test_documentation.py @@ -0,0 +1,137 @@ +from numpydoc.docscrape import FunctionDoc + +import copernicusmarine +from copernicusmarine.core_functions import documentation_utils + +LIST_OF_EXCEPTIONS = ["username", "password"] + + +class TestDocumentation: + def test_subset(self): + text_subset = FunctionDoc(copernicusmarine.subset) + + for i in range(len(text_subset["Parameters"])): + name_of_variable = text_subset["Parameters"][i].name + if name_of_variable in ["start_datetime", "end_datetime"]: + assert text_subset["Parameters"][i].desc == [ + documentation_utils.SUBSET[ + name_of_variable.upper() + "_HELP" + ] + ] + continue + if name_of_variable == "variables": + assert text_subset["Parameters"][i].desc == [ + "List of variable names to extract." 
+ ] + continue + if name_of_variable == "netcdf_compression_level": + assert text_subset["Parameters"][i].desc == [ + documentation_utils.SUBSET[ + name_of_variable.upper() + "_HELP" + ] + ] + continue + if name_of_variable in LIST_OF_EXCEPTIONS: + continue + assert text_subset["Parameters"][i].desc == [ + documentation_utils.SUBSET[name_of_variable.upper() + "_HELP"] + ] + + def test_get(self): + text_get = FunctionDoc(copernicusmarine.get) + + for i in range(len(text_get["Parameters"])): + name_of_variable = text_get["Parameters"][i].name + if name_of_variable in LIST_OF_EXCEPTIONS: + continue + assert text_get["Parameters"][i].desc == [ + documentation_utils.GET[name_of_variable.upper() + "_HELP"] + ] + + def test_login(self): + text_login = FunctionDoc(copernicusmarine.login) + + for i in range(len(text_login["Parameters"])): + name_of_variable = text_login["Parameters"][i].name + if len(text_login["Parameters"][i].desc) > 1: + assert ( + " ".join(text_login["Parameters"][i].desc) + == documentation_utils.LOGIN[ + name_of_variable.upper() + "_HELP" + ] + ) + continue + if name_of_variable in LIST_OF_EXCEPTIONS: + continue + assert text_login["Parameters"][i].desc == [ + documentation_utils.LOGIN[name_of_variable.upper() + "_HELP"] + ] + + def test_describe(self): + text_describe = FunctionDoc(copernicusmarine.describe) + + for i in range(len(text_describe["Parameters"])): + name_of_variable = text_describe["Parameters"][i].name + assert text_describe["Parameters"][i].desc == [ + documentation_utils.DESCRIBE[ + name_of_variable.upper() + "_HELP" + ] + ] + + def test_open_dataset(self): + text_open_dataset = FunctionDoc(copernicusmarine.open_dataset) + + for i in range(len(text_open_dataset["Parameters"])): + name_of_variable = text_open_dataset["Parameters"][i].name + if name_of_variable in ["start_datetime", "end_datetime"]: + assert text_open_dataset["Parameters"][i].desc == [ + documentation_utils.SUBSET[ + name_of_variable.upper() + "_HELP" + ] + ] + continue + if name_of_variable == "variables": + assert text_open_dataset["Parameters"][i].desc == [ + "List of variable names to extract." + ] + continue + if name_of_variable == "dataset_id": + assert text_open_dataset["Parameters"][i].desc == [ + "The datasetID, required." + ] + continue + if name_of_variable in LIST_OF_EXCEPTIONS: + continue + assert text_open_dataset["Parameters"][i].desc == [ + documentation_utils.SUBSET[name_of_variable.upper() + "_HELP"] + ] + + def test_read_dataframe(self): + text_read_dataframe = FunctionDoc(copernicusmarine.read_dataframe) + + for i in range(len(text_read_dataframe["Parameters"])): + name_of_variable = text_read_dataframe["Parameters"][i].name + if name_of_variable in ["start_datetime", "end_datetime"]: + assert text_read_dataframe["Parameters"][i].desc == [ + documentation_utils.SUBSET[ + name_of_variable.upper() + "_HELP" + ] + ] + continue + if name_of_variable == "variables": + assert text_read_dataframe["Parameters"][i].desc == [ + "List of variable names to extract." + ] + continue + if name_of_variable == "dataset_id": + assert text_read_dataframe["Parameters"][i].desc == [ + "The datasetID, required." 
+ ] + continue + if name_of_variable in LIST_OF_EXCEPTIONS: + continue + print(name_of_variable) + + assert text_read_dataframe["Parameters"][i].desc == [ + documentation_utils.SUBSET[name_of_variable.upper() + "_HELP"] + ] diff --git a/tests/test_get_create_file_list.py b/tests/test_get_create_file_list.py index 5f3908a2..56bdbbbd 100644 --- a/tests/test_get_create_file_list.py +++ b/tests/test_get_create_file_list.py @@ -5,20 +5,6 @@ class TestGetCreateFileList: - def test_get_download_file_list_is_deprecated(self): - self.command = [ - "copernicusmarine", - "get", - "--dataset-id", - "cmems_mod_arc_bgc_my_ecosmo_P1D-m", - "--download-file-list", - ] - self.output = execute_in_terminal(self.command) - assert ( - b"'--download-file-list' has been deprecated, " - b"use '--create-file-list' instead" in self.output.stderr - ) - def test_get_create_file_list_without_extension_raises(self): self.command = [ "copernicusmarine", @@ -30,7 +16,7 @@ def test_get_create_file_list_without_extension_raises(self): ] self.output = execute_in_terminal(self.command) assert ( - b"Assertion error: Download file list must be a .txt or .csv file." + b"Assertion error: Download file list must be a '.txt' or '.csv' file." in self.output.stderr ) @@ -72,7 +58,10 @@ def test_get_create_file_list_without_extension_raises_python(self): create_file_list="hello", ) except AssertionError as e: - assert str(e) == "Download file list must be a .txt or .csv file. " + assert ( + str(e) + == "Download file list must be a '.txt' or '.csv' file. " + ) def test_get_create_file_list_python(self): get( diff --git a/tests/test_get_direct_download.py b/tests/test_get_direct_download.py index be1d6cb3..1fa7e45f 100644 --- a/tests/test_get_direct_download.py +++ b/tests/test_get_direct_download.py @@ -142,7 +142,8 @@ def test_get_direct_download_file_list_python(self, tmp_path): overwrite_output_data=True, output_directory=tmp_path, ) - assert set(get_result) == { + result_paths = [result.output for result in get_result.files] + assert set(result_paths) == { pathlib.Path( f"{tmp_path}/" f"INSITU_GLO_PHYBGCWAV_DISCRETE_MYNRT_013_030/" @@ -156,7 +157,7 @@ def test_get_direct_download_file_list_python(self, tmp_path): f"history/BO/AR_PR_BO_58US.nc" ), } - for file_path in get_result: + for file_path in result_paths: assert os.path.exists(file_path) def _assert_insitu_file_exists_locally( diff --git a/tests/test_get_index_files_insitu.py b/tests/test_get_index_files_insitu.py index 25770d8a..12c5df4d 100644 --- a/tests/test_get_index_files_insitu.py +++ b/tests/test_get_index_files_insitu.py @@ -53,4 +53,9 @@ def test_get_index_insitu_files_python(self): force_download=True, ) assert get_result is not None - assert all(map(lambda x: x.exists(), get_result)) + assert all( + map( + lambda x: x.exists(), + [result.output for result in get_result.files], + ) + ) diff --git a/tests/test_login.py b/tests/test_login.py new file mode 100644 index 00000000..7344e3cc --- /dev/null +++ b/tests/test_login.py @@ -0,0 +1,346 @@ +import os +import shutil +from pathlib import Path + +from copernicusmarine import login +from tests.test_utils import execute_in_terminal + + +def get_environment_without_crendentials(): + environment_without_crendentials = os.environ.copy() + environment_without_crendentials.pop( + "COPERNICUSMARINE_SERVICE_USERNAME", None + ) + environment_without_crendentials.pop( + "COPERNICUSMARINE_SERVICE_PASSWORD", None + ) + return environment_without_crendentials + + +class TestLogin: + def 
test_process_is_stopped_when_credentials_are_invalid(self): + dataset_id = "cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m" + + command = [ + "copernicusmarine", + "subset", + "--username", + "toto", + "--password", + "tutu", + "--dataset-id", + f"{dataset_id}", + "--variable", + "thetao", + "--force-download", + ] + + self.output = execute_in_terminal(command) + + assert self.output.returncode == 1 + assert b"Invalid username or password" in self.output.stderr + + def test_login_is_prompted_when_configuration_file_does_not_exist( + self, tmp_path + ): + dataset_id = "cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m" + credentials_file = Path(tmp_path, "i_do_not_exist") + + environment_without_crendentials = ( + get_environment_without_crendentials() + ) + + command = [ + "copernicusmarine", + "subset", + "--dataset-id", + f"{dataset_id}", + "--variable", + "thetao", + "--minimum-longitude", + "-9.9", + "--maximum-longitude", + "-9.6", + "--minimum-latitude", + "33.96", + "--maximum-latitude", + "34.2", + "--minimum-depth", + "0.5", + "--maximum-depth", + "1.6", + "--credentials-file", + f"{credentials_file}", + ] + + self.output = execute_in_terminal( + command, env=environment_without_crendentials + ) + assert self.output.returncode == 1 + assert ( + b"Downloading Copernicus Marine data requires a Copernicus Marine username " + b"and password, sign up for free at:" + b" https://data.marine.copernicus.eu/register" + ) in self.output.stderr + assert b"Copernicus Marine username:" in self.output.stdout + + def test_login_command_prompt_with_other_commands(self, tmp_path): + assert os.getenv("COPERNICUSMARINE_SERVICE_USERNAME") is not None + assert os.getenv("COPERNICUSMARINE_SERVICE_PASSWORD") is not None + + environment_without_crendentials = ( + get_environment_without_crendentials() + ) + + command = [ + "copernicusmarine", + "subset", + "--dataset-id", + "cmems_mod_glo_phy-thetao_anfc_0.083deg_P1D-m", + "--variable", + "thetao", + "--start-datetime", + "2023-04-26 00:00:00", + "--end-datetime", + "2023-04-28 23:59:59", + "--minimum-longitude", + "-9.8", + "--maximum-longitude", + "-4.8", + "--minimum-latitude", + "33.9", + "--maximum-latitude", + "38.0", + "--minimum-depth", + "9.573", + "--maximum-depth", + "11.4", + "--username", + f"{os.getenv('COPERNICUSMARINE_SERVICE_USERNAME')}", + "--force-download", + "-o", + f"{tmp_path}", + ] + password = os.getenv("COPERNICUSMARINE_SERVICE_PASSWORD") + assert password is not None + self.output = execute_in_terminal( + command, + env=environment_without_crendentials, + input=bytes(password, "utf-8"), + ) + assert self.output.returncode == 0, self.output.stderr + shutil.rmtree(Path(tmp_path)) + + def test_login_command_with_username_and_password(self, tmp_path): + non_existing_directory = Path(tmp_path, "i_dont_exist") + command = [ + "copernicusmarine", + "login", + "--overwrite-configuration-file", + "--configuration-file-directory", + f"{non_existing_directory}", + "--username", + f"{os.getenv('COPERNICUSMARINE_SERVICE_USERNAME')}", + "--password", + f"{os.getenv('COPERNICUSMARINE_SERVICE_PASSWORD')}", + ] + + self.output = execute_in_terminal(command) + assert self.output.returncode == 0 + assert non_existing_directory.is_dir() + + def test_login_check_credentials_are_valid(self, tmp_path): + self.check_credentials_username_password_specified_are_invalid() + self.check_credentials_username_specified_password_are_valid() + self.check_credentials_username_password_env_var_are_wrong() + self.check_credentials_username_password_env_var_are_valid() + 
self.check_credentials_file_is_invalid() + self.check_credentials_file_is_valid(tmp_path) + + def check_credentials_username_specified_password_are_valid(self): + command = [ + "copernicusmarine", + "login", + "--check-credentials-valid", + "--username", + f"{os.getenv('COPERNICUSMARINE_SERVICE_USERNAME')}", + "--password", + f"{os.getenv('COPERNICUSMARINE_SERVICE_PASSWORD')}", + ] + + self.output = execute_in_terminal(command) + assert self.output.returncode == 0 + assert ( + b"Valid credentials from input username and password" + in self.output.stderr + ) + + def check_credentials_username_password_specified_are_invalid(self): + command = [ + "copernicusmarine", + "login", + "--check-credentials-valid", + "--username", + "toto", + "--password", + "tutu", + ] + + self.output = execute_in_terminal(command) + assert self.output.returncode == 1 + assert ( + b"Invalid credentials from input username and password" + in self.output.stderr + ) + + def check_credentials_username_password_env_var_are_wrong(self): + command = [ + "copernicusmarine", + "login", + "--check-credentials-valid", + ] + + environment_without_crendentials = ( + get_environment_without_crendentials() + ) + + environment_without_crendentials[ + "COPERNICUSMARINE_SERVICE_USERNAME" + ] = "toto" + environment_without_crendentials[ + "COPERNICUSMARINE_SERVICE_PASSWORD" + ] = "tutu" + self.output = execute_in_terminal( + command, env=environment_without_crendentials + ) + assert self.output.returncode == 1 + assert ( + b"Invalid credentials from environment variables" + in self.output.stderr + ) + + def check_credentials_username_password_env_var_are_valid(self): + command = [ + "copernicusmarine", + "login", + "--check-credentials-valid", + ] + self.output = execute_in_terminal(command) + assert self.output.returncode == 0 + assert ( + b"Valid credentials from environment variables" + in self.output.stderr + ) + + def check_credentials_file_is_invalid(self): + command = [ + "copernicusmarine", + "login", + "--check-credentials-valid", + "--configuration-file-directory", + "/toto", + ] + + environment_without_crendentials = ( + get_environment_without_crendentials() + ) + + self.output = execute_in_terminal( + command, env=environment_without_crendentials + ) + assert self.output.returncode == 1 + assert ( + b"Invalid credentials from configuration file" + in self.output.stderr + ) + + def check_credentials_file_is_valid(self, tmp_path): + non_existing_directory = Path(tmp_path, "lolololo") + + command = [ + "copernicusmarine", + "login", + "--username", + f"{os.getenv('COPERNICUSMARINE_SERVICE_USERNAME')}", + "--password", + f"{os.getenv('COPERNICUSMARINE_SERVICE_PASSWORD')}", + "--configuration-file-directory", + f"{non_existing_directory}", + ] + + self.output = execute_in_terminal(command) + + environment_without_crendentials = ( + get_environment_without_crendentials() + ) + + command = [ + "copernicusmarine", + "login", + "--check-credentials-valid", + "--configuration-file-directory", + f"{non_existing_directory}", + ] + + self.output = execute_in_terminal( + command, env=environment_without_crendentials + ) + assert self.output.returncode == 0 + assert ( + b"Valid credentials from configuration file" in self.output.stderr + ) + + def test_login_falls_back_to_old_system(self): + environment_without_crendentials = ( + get_environment_without_crendentials() + ) + command = [ + "copernicusmarine", + "login", + "--check-credentials-valid", + "--username", + "toto", + "--password", + "tutu", + "--log-level", + "DEBUG", + ] + + 
self.output = execute_in_terminal( + command=command, env=environment_without_crendentials + ) + assert self.output.returncode == 1 + assert ( + b"Could not connect with new authentication system" + in self.output.stderr + ) + assert ( + b" Trying with old authentication system..." in self.output.stderr + ) + + def test_login_python_interface(self, tmp_path): + folder = Path(tmp_path, "lololo12") + assert not login( + username=os.getenv("COPERNICUSMARINE_SERVICE_USERNAME"), + password="FAKEPASSWORD", + configuration_file_directory=folder, + overwrite_configuration_file=True, + ) + + assert folder.is_dir() is False + assert login( + username=os.getenv("COPERNICUSMARINE_SERVICE_USERNAME"), + password=os.getenv("COPERNICUSMARINE_SERVICE_PASSWORD"), + configuration_file_directory=folder, + overwrite_configuration_file=True, + ) + + assert (folder / ".copernicusmarine-credentials").is_file() + assert login( + check_credentials_valid=True, + ) + + assert not login( + username="toto", + password="tutu", + ) diff --git a/tests/test_longitudes_with_modulus.py b/tests/test_longitudes_with_modulus.py index ea7689a8..64cf8123 100644 --- a/tests/test_longitudes_with_modulus.py +++ b/tests/test_longitudes_with_modulus.py @@ -19,9 +19,9 @@ def _build_custom_command( "--variable", "thetao", "--minimum-depth", - "0", + "0.5", "--maximum-depth", - "0", + "0.5", "--minimum-longitude", f"{min_longitude}", "--maximum-longitude", diff --git a/tests/test_overwrite_output_data.py b/tests/test_overwrite_output_data.py index 6a566342..bdd05bdc 100644 --- a/tests/test_overwrite_output_data.py +++ b/tests/test_overwrite_output_data.py @@ -133,8 +133,6 @@ def request_data_download( "cmems_mod_ibi_phy_my_0.083deg-3D_P1Y-m", "--regex", ".*20120101.*", - "--service", - f"{service}", "-o", f"{folder}", "--force-download", diff --git a/tests/test_python_interface.py b/tests/test_python_interface.py index 2d69883b..7115c8fd 100644 --- a/tests/test_python_interface.py +++ b/tests/test_python_interface.py @@ -1,12 +1,12 @@ import inspect import os -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from pathlib import Path +from unittest import mock import xarray from copernicusmarine import ( - core_functions, describe, get, login, @@ -14,6 +14,9 @@ read_dataframe, subset, ) +from copernicusmarine.download_functions.utils import ( + timestamp_or_datestring_to_datetime, +) class TestPythonInterface: @@ -37,7 +40,34 @@ def test_get_function(self, tmp_path): force_download=True, ) assert get_result is not None - assert all(map(lambda x: x.exists(), get_result)) + assert all( + map( + lambda x: x.exists(), + [result.output for result in get_result.files], + ) + ) + + @mock.patch("os.utime", side_effect=PermissionError) + def test_permission_denied_for_modification_date( + self, mock_utime, tmp_path, caplog + ): + get( + dataset_id="METOFFICE-GLO-SST-L4-REP-OBS-SST", + force_download=True, + filter="*2022053112000*", + output_directory=f"{tmp_path}", + no_directories=True, + ) + assert "Permission to modify the last modified date" in caplog.text + assert "is denied" in caplog.text + output_file = Path( + tmp_path, + "20220531120000-UKMO-L4_GHRSST-SSTfnd-OSTIA-GLOB_REP-v02.0-fv02.0.nc", + ) + five_minutes_ago = datetime.now() - timedelta(minutes=5) + assert datetime.fromtimestamp(os.path.getmtime(output_file)) > ( + five_minutes_ago + ) def test_subset_function(self, tmp_path): subset_result = subset( @@ -56,7 +86,7 @@ def test_subset_function(self, tmp_path): ) assert subset_result is not 
None - assert subset_result.exists() + assert subset_result.output.exists() def test_open_dataset(self): dataset = open_dataset( @@ -86,42 +116,8 @@ def test_read_dataframe(self): ) assert dataframe is not None - def test_login_ok(self, tmp_path): - non_existing_directory = Path(tmp_path, "i_dont_exist") - is_valid = login( - username=os.getenv("COPERNICUSMARINE_SERVICE_USERNAME"), - password=os.getenv("COPERNICUSMARINE_SERVICE_PASSWORD"), - configuration_file_directory=non_existing_directory, - overwrite_configuration_file=True, - ) - - assert is_valid is True - assert ( - non_existing_directory / ".copernicusmarine-credentials" - ).is_file() - - is_valid_with_skip = login( - configuration_file_directory=non_existing_directory, - skip_if_user_logged_in=True, - ) - assert is_valid_with_skip is True - - def test_login_not_ok_with_wrong_credentials(self, tmp_path): - non_existing_directory = Path(tmp_path, "i_dont_exist") - is_valid = login( - username=os.getenv("COPERNICUSMARINE_SERVICE_USERNAME"), - password="FAKEPASSWORD", - configuration_file_directory=non_existing_directory, - overwrite_configuration_file=True, - ) - - assert is_valid is False - assert non_existing_directory.is_dir() is False - def test_signature_inspection_is_working(self): - assert inspect.signature(describe).parameters[ - "overwrite_metadata_cache" - ] + assert inspect.signature(describe).parameters["contains"] common_key_parameter = "username" assert inspect.signature(login).parameters[common_key_parameter] @@ -146,7 +142,7 @@ def test_ISO8601_datetime_format_as_string( maximum_latitude=0.1, minimum_longitude=0.2, maximum_longitude=0.3, - vertical_dimension_as_originally_produced=False, + vertical_dimension_output="elevation", ) assert dataset is not None assert ( @@ -166,13 +162,13 @@ def test_open_dataset_with_strict_method(self, caplog): dataset_id = "cmems_mod_glo_phy-cur_anfc_0.083deg_PT6H-i" start_datetime = "2023-09-15T00:00:00.000Z" end_datetime = "2023-09-15T00:00:00.000Z" - subset_method = "strict" + coordinates_selection_method = "strict-inside" dataset = open_dataset( dataset_id=dataset_id, start_datetime=start_datetime, end_datetime=end_datetime, - subset_method=subset_method, + coordinates_selection_method=coordinates_selection_method, ) assert dataset.coords is not None @@ -189,7 +185,7 @@ def test_read_dataframe_with_strict_method(self, caplog): maximum_latitude=0.1, minimum_longitude=0.2, maximum_longitude=0.3, - subset_method="strict", + coordinates_selection_method="strict-inside", ) assert dataframe is not None @@ -200,7 +196,21 @@ def test_open_dataset_with_retention_date(self): password=os.getenv("COPERNICUSMARINE_SERVICE_PASSWORD"), dataset_id="cmems_obs-oc_atl_bgc-plankton_nrt_l4-gapfree-multi-1km_P1D", ) - assert dataset.time.valid_min >= 45388 + assert timestamp_or_datestring_to_datetime( + dataset.time.values.min() + ) >= datetime(2024, 8, 31, 0, 0, 0, tzinfo=timezone.utc) + + def test_open_dataset_with_retention_date_and_only_values_in_metadata( + self, + ): + dataset = open_dataset( + username=os.getenv("COPERNICUSMARINE_SERVICE_USERNAME"), + password=os.getenv("COPERNICUSMARINE_SERVICE_PASSWORD"), + dataset_id="cmems_obs-oc_atl_bgc-pp_nrt_l4-multi-1km_P1M", + ) + assert timestamp_or_datestring_to_datetime( + dataset.time.values.min() + ) >= datetime(2024, 6, 1, 0, 0, 0, tzinfo=timezone.utc) def test_subset_modify_attr_for_depth(self): dataset = open_dataset( @@ -235,7 +245,6 @@ def test_subset_keeps_fillvalue_empty(self, tmp_path): assert "_FillValue" not in subsetdata.time.attrs assert 
"_FillValue" not in subsetdata.latitude.attrs assert "_FillValue" not in subsetdata.depth.attrs - assert "valid_max" in subsetdata.longitude.attrs assert subsetdata.time.attrs["calendar"] == "gregorian" assert subsetdata.time.attrs["units"] == "hours since 1950-01-01" @@ -249,12 +258,12 @@ def test_subset_keeps_fillvalue_empty_w_compression(self, tmp_path): maximum_latitude=40.44, start_datetime="2024-02-23T00:00:00", end_datetime="2024-02-23T23:59:59", - minimum_depth=0, - maximum_depth=1, + minimum_depth=5, + maximum_depth=10, force_download=True, output_directory=tmp_path, output_filename="netcdf_fillval_compressed.nc", - netcdf_compression_enabled=True, + netcdf_compression_level=1, overwrite_output_data=True, ) @@ -265,18 +274,5 @@ def test_subset_keeps_fillvalue_empty_w_compression(self, tmp_path): assert "_FillValue" not in subsetdata.time.attrs assert "_FillValue" not in subsetdata.latitude.attrs assert "_FillValue" not in subsetdata.depth.attrs - assert "valid_max" in subsetdata.longitude.attrs assert subsetdata.time.attrs["calendar"] == "gregorian" assert subsetdata.time.attrs["units"] == "hours since 1950-01-01" - - def test_error_Coord_out_of_dataset_bounds(self): - try: - _ = subset( - dataset_id="cmems_mod_glo_phy_anfc_0.083deg_P1D-m", - start_datetime=datetime.today() + timedelta(10), - force_download=True, - end_datetime=datetime.today() - + timedelta(days=10, hours=23, minutes=59), - ) - except core_functions.exceptions.CoordinatesOutOfDatasetBounds as e: - assert "Some or all of your subset selection" in e.__str__() diff --git a/tests/test_subfunctions.py b/tests/test_subfunctions.py index 76d4509a..6a2c6fc9 100644 --- a/tests/test_subfunctions.py +++ b/tests/test_subfunctions.py @@ -1,6 +1,13 @@ +import datetime import random -from copernicusmarine.download_functions.subset_xarray import longitude_modulus +import pendulum + +import copernicusmarine as cm +from copernicusmarine.download_functions.subset_xarray import ( + _dataset_custom_sel, + longitude_modulus, +) class TestSubfunctions: @@ -25,3 +32,91 @@ def test_longitude_modulus_range(self): for random_value in random_values: modulus_value = longitude_modulus(random_value) assert modulus_value >= -180 and modulus_value < 180 + + def test_custom_dataset_selection(self, tmp_path): + dataset = cm.open_dataset( + "cmems_mod_glo_phy-thetao_anfc_0.083deg_P1D-m", + minimum_longitude=0, + maximum_longitude=50, + minimum_latitude=0, + maximum_latitude=50, + minimum_depth=0, + maximum_depth=100, + start_datetime="2023-01-01", + end_datetime="2023-01-03", + ) + min_value = 1 + max_value = 49 + coord_selection = slice(min_value, max_value) + dataset_1 = _dataset_custom_sel( + dataset, "longitude", coord_selection, "strict-inside" + ) + assert dataset_1.longitude.values.min() >= min_value + assert dataset_1.longitude.max().values <= max_value + dataset_1 = _dataset_custom_sel( + dataset_1, "latitude", coord_selection, "strict-inside" + ) + assert dataset_1.latitude.values.min() >= min_value + assert dataset_1.latitude.values.max() <= max_value + dataset_1 = _dataset_custom_sel( + dataset_1, "depth", coord_selection, "strict-inside" + ) + assert dataset_1.depth.values.min() >= min_value + assert dataset_1.depth.values.max() <= max_value + coord_selection = slice( + pendulum.datetime(2023, 1, 1).naive(), + pendulum.datetime(2023, 1, 3).naive(), + ) + dataset_1 = _dataset_custom_sel( + dataset_1, "time", coord_selection, "strict-inside" + ) + assert datetime.datetime.strptime( + str(dataset_1.time.values.min()), 
"%Y-%m-%dT%H:%M:%S.000%f" + ) >= datetime.datetime.strptime("2023-01-01", "%Y-%m-%d") + assert datetime.datetime.strptime( + str(dataset_1.time.values.max()), "%Y-%m-%dT%H:%M:%S.000%f" + ) <= datetime.datetime.strptime("2023-01-03", "%Y-%m-%d") + + min_value = 20 + max_value = 39.9 + coord_selection = slice(min_value, max_value) + dataset_1 = _dataset_custom_sel( + dataset_1, "longitude", coord_selection, "outside" + ) + print(dataset_1.longitude.values.min()) + print(dataset_1.longitude.values.max()) + assert dataset_1.longitude.values.min() <= min_value + assert dataset_1.longitude.max().values >= max_value + dataset_1 = _dataset_custom_sel( + dataset_1, "latitude", coord_selection, "outside" + ) + assert dataset_1.latitude.values.min() <= min_value + assert dataset_1.latitude.values.max() >= max_value + dataset_1 = _dataset_custom_sel( + dataset_1, "depth", coord_selection, "outside" + ) + assert dataset_1.depth.values.min() <= min_value + assert dataset_1.depth.values.max() >= max_value + coord_selection = slice( + pendulum.datetime(2023, 1, 2).naive(), + pendulum.datetime(2023, 1, 2).naive(), + ) + dataset_1 = _dataset_custom_sel( + dataset_1, "time", coord_selection, "outside" + ) + assert datetime.datetime.strptime( + str(dataset_1.time.values.min()), "%Y-%m-%dT%H:%M:%S.000%f" + ) >= datetime.datetime.strptime("2023-01-02", "%Y-%m-%d") + assert datetime.datetime.strptime( + str(dataset_1.time.values.max()), "%Y-%m-%dT%H:%M:%S.000%f" + ) <= datetime.datetime.strptime("2023-01-02", "%Y-%m-%d") + + # Check also that when asking for values outside the dataset, + # the returned makes sense + + coord_selection = slice(10, 45) + dataset_1 = _dataset_custom_sel( + dataset_1, "longitude", coord_selection, "outside" + ) + assert dataset_1.longitude.values.min() >= 20 # the old values + assert dataset_1.longitude.max().values <= 39.92 diff --git a/tests/test_utility_functions.py b/tests/test_utility_functions.py new file mode 100644 index 00000000..87e6b6cb --- /dev/null +++ b/tests/test_utility_functions.py @@ -0,0 +1,54 @@ +from datetime import datetime, timezone + +from freezegun import freeze_time + +from copernicusmarine.core_functions.utils import ( + datetime_parser, + timestamp_parser, +) + + +class TestUtilityFunctions: + @freeze_time("2012-01-14 03:21:34", tz_offset=-2) + def test_datetime_parser(self): + # all parsed dates are in UTC + assert datetime_parser("now") == datetime( + 2012, 1, 14, 1, 21, 34, tzinfo=timezone.utc + ) + assert datetime_parser("2012-01-14T03:21:34.000000+02:00") == datetime( + 2012, 1, 14, 1, 21, 34, tzinfo=timezone.utc + ) + + # All format are supported + assert datetime_parser("2012") == datetime( + 2012, 1, 1, 0, 0, 0, tzinfo=timezone.utc + ) + assert datetime_parser("2012-01-14") == datetime( + 2012, 1, 14, 0, 0, 0, tzinfo=timezone.utc + ) + assert datetime_parser("2012-01-14T03:21:34") == datetime( + 2012, 1, 14, 3, 21, 34, tzinfo=timezone.utc + ) + assert datetime_parser("2012-01-14 03:21:34") == datetime( + 2012, 1, 14, 3, 21, 34, tzinfo=timezone.utc + ) + assert datetime_parser("2012-01-14T03:21:34.000000") == datetime( + 2012, 1, 14, 3, 21, 34, tzinfo=timezone.utc + ) + assert datetime_parser("2012-01-14T03:21:34.000000Z") == datetime( + 2012, 1, 14, 3, 21, 34, tzinfo=timezone.utc + ) + + def test_timestamp_parser(self): + assert timestamp_parser(-630633600000) == datetime( + 1950, 1, 7, 0, 0, 0, tzinfo=timezone.utc + ) + assert timestamp_parser(0) == datetime( + 1970, 1, 1, 0, 0, 0, tzinfo=timezone.utc + ) + assert timestamp_parser(1672527600000) 
+        assert timestamp_parser(1672527600000) == datetime(
+            2022, 12, 31, 23, 0, 0, tzinfo=timezone.utc
+        )
+        assert timestamp_parser(1672527600, unit="s") == datetime(
+            2022, 12, 31, 23, 0, 0, tzinfo=timezone.utc
+        )
diff --git a/tests/test_versions_parts_sorting.py b/tests/test_versions_parts_sorting.py
index bb09cf6a..b45356ba 100644
--- a/tests/test_versions_parts_sorting.py
+++ b/tests/test_versions_parts_sorting.py
@@ -1,4 +1,4 @@
-from copernicusmarine.catalogue_parser.catalogue_parser import (
+from copernicusmarine.catalogue_parser.models import (
     PART_DEFAULT,
     VERSION_DEFAULT,
     CopernicusMarineDatasetVersion,
diff --git a/tests/test_warnings_subset_bounds.py b/tests/test_warnings_subset_bounds.py
index 430f8f56..492eb0df 100644
--- a/tests/test_warnings_subset_bounds.py
+++ b/tests/test_warnings_subset_bounds.py
@@ -1,3 +1,6 @@
+from datetime import datetime, timedelta
+
+from copernicusmarine import core_functions, subset
 from tests.test_utils import execute_in_terminal
 
 
@@ -8,7 +11,7 @@ def _build_custom_command(
         variable,
         min_longitude,
         max_longitude,
-        subset_method="nearest",
+        coordinates_selection_method="nearest",
     ):
         return [
             "copernicusmarine",
@@ -21,8 +24,8 @@
             f"{min_longitude}",
             "--maximum-longitude",
             f"{max_longitude}",
-            "--subset-method",
-            f"{subset_method}",
+            "--coordinates-selection-method",
+            f"{coordinates_selection_method}",
         ]
@@ -36,8 +39,8 @@ def test_subset_send_warning_with_method_nearest(self):
         assert b"WARNING" in self.output.stderr
         assert (
-            b"Some or all of your subset selection [-180.0, 180.0]"
-            b" for the longitude dimension exceed the dataset"
+            b"Some of your subset selection [-180.0, 180.0]"
+            b" for the longitude dimension exceed the dataset"
             b" coordinates [-179.9791717529297, 179.9791717529297]"
         ) in self.output.stderr
@@ -73,13 +76,13 @@ def test_subset_warnings_when_surpassing(self):
         self.output2 = execute_in_terminal(command2, input=b"n")
 
         assert (
-            b"Some or all of your subset selection [-180.0, 180.0] for the longitude "
-            b"dimension exceed the dataset coordinates "
+            b"Some of your subset selection [-180.0, 180.0] for the longitude "
+            b"dimension exceed the dataset coordinates "
             b"[-179.9791717529297, 179.9791717529297]"
         ) in self.output1.stderr
         assert (
-            b"Some or all of your subset selection [-179.99, 179.99] for the longitude "
-            b"dimension exceed the dataset coordinates "
+            b"Some of your subset selection [-179.99, 179.99] for the longitude "
+            b"dimension exceed the dataset coordinates "
             b"[-179.9791717529297, 179.9791717529297]"
         ) in self.output2.stderr
@@ -89,10 +92,10 @@ def test_subset_strict_error(self):
         )
 
         command1 = self._build_custom_command(
-            dataset_id, "CHL", -180, 180, "strict"
+            dataset_id, "CHL", -180, 180, "strict-inside"
         )
         command2 = self._build_custom_command(
-            dataset_id, "CHL", -179.9, 179.9, "strict"
+            dataset_id, "CHL", -179.9, 179.9, "strict-inside"
         )
         self.output1 = execute_in_terminal(command1, input=b"n")
         self.output2 = execute_in_terminal(command2, input=b"n")
@@ -100,16 +103,16 @@
         assert (
             b"""one was selected: "arco-geo-series"\nERROR"""
         ) in self.output1.stderr
         assert (
-            b"Some or all of your subset selection [-180.0, 180.0] for the longitude "
-            b"dimension exceed the dataset coordinates "
+            b"Some of your subset selection [-180.0, 180.0] for the longitude "
+            b"dimension exceed the dataset coordinates "
             b"[-179.9791717529297, 179.9791717529297]"
         ) in self.output1.stderr
         assert (
             b"""one was selected: "arco-geo-series"\nERROR"""
         ) not in self.output2.stderr
         assert (
-            b"Some or all of your subset selection [-179.9, 179.9] for the longitude "
-            b"dimension exceed the dataset coordinates "
+            b"Some of your subset selection [-179.9, 179.9] for the longitude "
+            b"dimension exceed the dataset coordinates "
             b"[-179.9791717529297, 179.9791717529297]"
         ) not in self.output2.stderr
@@ -117,7 +120,7 @@ def test_subset_handle_180_point_correctly(self):
         dataset_id = "cmems_mod_glo_phy-thetao_anfc_0.083deg_P1D-m"
 
         command = self._build_custom_command(
-            dataset_id, "thetao", -150, 180, "strict"
+            dataset_id, "thetao", -150, 180, "strict-inside"
         )
         self.output = execute_in_terminal(command, input=b"n")
         assert (
@@ -126,3 +129,161 @@
         ) in self.output.stderr
         assert (
             b"Some or all of your subset selection"
         ) not in self.output.stderr
+
+    def test_warn_depth_out_of_dataset_bounds(self, tmp_path):
+        output_filename = "output.nc"
+        dataset_id = "cmems_mod_glo_phy-thetao_anfc_0.083deg_P1D-m"
+        min_longitude = 29.0
+        max_longitude = 30.0
+        min_latitude = 30
+        max_latitude = 32
+        min_depth = 0.4
+        max_depth = 50.0
+        start_datetime = "2023-11-03"
+        end_datetime = "2023-11-03"
+        command = self._build_custom_command(
+            dataset_id, "thetao", min_longitude, max_longitude, "nearest"
+        )
+        command.extend(
+            [
+                "--minimum-latitude",
+                f"{min_latitude}",
+                "--maximum-latitude",
+                f"{max_latitude}",
+                "--start-datetime",
+                f"{start_datetime}",
+                "--end-datetime",
+                f"{end_datetime}",
+                "--minimum-depth",
+                f"{min_depth}",
+                "--maximum-depth",
+                f"{max_depth}",
+                "-o",
+                f"{tmp_path}",
+                "-f",
+                f"{output_filename}",
+            ]
+        )
+        output = execute_in_terminal(command, input=b"n")
+
+        assert (
+            b"Some of your subset selection [0.4, 50.0] for the depth "
+            b"dimension exceed the dataset coordinates "
+            b"[0.49402499198913574, 5727.9169921875]"
+        ) in output.stderr
+
+    def test_warn_elevation_out_of_dataset_bounds(self, tmp_path):
+        output_filename = "output.nc"
+        dataset_id = "cmems_mod_glo_phy-thetao_anfc_0.083deg_P1D-m"
+        min_longitude = 29.0
+        max_longitude = 30.0
+        min_latitude = 30
+        max_latitude = 32
+        min_depth = 0.4
+        max_depth = 50.0
+        start_datetime = "2023-11-03"
+        end_datetime = "2023-11-03"
+        command = self._build_custom_command(
+            dataset_id, "thetao", min_longitude, max_longitude, "nearest"
+        )
+        command.extend(
+            [
+                "--minimum-latitude",
+                f"{min_latitude}",
+                "--maximum-latitude",
+                f"{max_latitude}",
+                "--start-datetime",
+                f"{start_datetime}",
+                "--end-datetime",
+                f"{end_datetime}",
+                "--minimum-depth",
+                f"{min_depth}",
+                "--maximum-depth",
+                f"{max_depth}",
+                "--vertical-dimension-output",
+                "elevation",
+                "-o",
+                f"{tmp_path}",
+                "-f",
+                f"{output_filename}",
+            ]
+        )
+        output = execute_in_terminal(command, input=b"n")
+
+        assert (
+            b"Some of your subset selection [0.4, 50.0] for the depth "
+            b"dimension exceed the dataset coordinates "
+            b"[0.49402499198913574, 5727.9169921875]"
+        ) in output.stderr
+        assert (
+            b"* elevation (elevation) float32 72B -47.37 -40.34"
+        ) in output.stderr
+
+    def test_error_Coord_out_of_dataset_bounds(self):
+        try:
+            _ = subset(
+                dataset_id="cmems_mod_glo_phy_anfc_0.083deg_P1D-m",
+                start_datetime=datetime.today() + timedelta(10),
+                force_download=True,
+                end_datetime=datetime.today()
+                + timedelta(days=10, hours=23, minutes=59),
+            )
+        except core_functions.exceptions.CoordinatesOutOfDatasetBounds as e:
+            assert "Some of your subset selection" in e.__str__()
+
+    def when_I_request_a_dataset_with_coordinates_selection_method_option(
+        self, coordinates_selection_method
+    ):
+        command = [
+            "copernicusmarine",
+            "subset",
+            "-i",
+            "med-hcmr-wav-rean-h",
+            "-x",
+            "-19",
+            "-X",
+            "-17",
+            "-y",
+            "38.007",
+            "-Y",
+            "38.028",
+            "-t",
+            "1993-01-01T00:00:00",
+            "-T",
+            "1993-01-01T06:00:00",
+            "-v",
+            "VHM0",
+            "--force-download",
+            "--coordinates-selection-method",
+            f"{coordinates_selection_method}",
+        ]
+
+        self.output = execute_in_terminal(command)
+
+    def then_I_can_read_an_error_in_stdout(self):
+        assert self.output.returncode == 1
+        assert b"ERROR" in self.output.stderr
+        assert (
+            b"Some of your subset selection [-19.0, -17.0] for "
+            b"the longitude dimension exceed the dataset coordinates"
+        ) in self.output.stderr
+
+    def then_I_can_read_a_warning_in_stdout(self):
+        assert self.output.returncode == 0
+        assert b"WARNING" in self.output.stderr
+        assert (
+            b"Some of your subset selection [-19.0, -17.0] for "
+            b"the longitude dimension exceed the dataset coordinates"
+        ) in self.output.stderr
+
+    def test_subset_strict_method(self):
+        self.when_I_request_a_dataset_with_coordinates_selection_method_option(
+            "strict-inside"
+        )
+        self.then_I_can_read_an_error_in_stdout()
+
+    def test_subset_nearest_method(self):
+        self.when_I_request_a_dataset_with_coordinates_selection_method_option(
+            "nearest"
+        )
+        self.then_I_can_read_a_warning_in_stdout()
diff --git a/tests_dependencie_versions/test_basic_commands.py b/tests_dependencie_versions/test_basic_commands.py
index c3803fc8..e8c57095 100644
--- a/tests_dependencie_versions/test_basic_commands.py
+++ b/tests_dependencie_versions/test_basic_commands.py
@@ -9,7 +9,6 @@ def test_describe(self):
         command = [
             "copernicusmarine",
             "describe",
-            "--overwrite-metadata-cache",
         ]
         self.output = execute_in_terminal(command)
diff --git a/toolbox_icon.png b/toolbox_icon.png
new file mode 100644
index 00000000..3748d5af
Binary files /dev/null and b/toolbox_icon.png differ