diff --git a/.flake8 b/.flake8 index 1b7589d..63a92da 100644 --- a/.flake8 +++ b/.flake8 @@ -1,6 +1,6 @@ -[flake8] -max-line-length = 88 -extend-ignore = E203, E713, W604, E231 -select = C,E,F,W,B,B950 -ignore = E203, E501, W503 -per-file-ignores = __init__.py:F401 +[flake8] +max-line-length = 88 +extend-ignore = E203, E713, W604, E231 +select = C,E,F,W,B,B950 +ignore = E203, E501, W503 +per-file-ignores = __init__.py:F401 diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index aa49638..6a9339c 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,52 +1,52 @@ -## Describe your changes - -< Summary of the changes. > - -< Please also include relevant motivation and context. > - -< List any dependencies that are required for this change. > - -## Issue Link - -< Link to the relevant issue or task. > (e.g. `closes #00` or `solves #00`) - -## Type of change - -- [ ] 🐛 Bug fix (non-breaking change that fixes an issue) -- [ ] ✨ New feature (non-breaking change that adds functionality) -- [ ] 💥 Breaking change (fix or feature that would cause existing functionality to not work as expected) -- [ ] 📖 Documentation (Additions or improvements to documentation) - -## Checklist before requesting a review - -- [ ] My branch is up-to-date with the target branch - if not, update your fork with the changes from the target branch (use `pull` with the `--rebase` option if possible). - -- [ ] I have performed a self-review of my code -- [ ] For any new/modified functions/classes I have added docstrings that clearly describe their purpose, expected inputs and returned values -- [ ] I have placed in-line comments to clarify the intent of any hard-to-understand passages of my code -- [ ] I have updated the documentation to cover introduced code changes -- [ ] I have added tests that prove my fix is effective or that my feature works -- [ ] I have given the PR a name that clearly describes the change, written in imperative form ([context](https://www.gitkraken.com/learn/git/best-practices/git-commit-message#using-imperative-verb-form)). -- [ ] I have requested a reviewer and an assignee (assignee is responsible for merging) - -## Checklist for reviewers - -Each PR comes with its own improvements and flaws. The reviewer should check the following: -- [ ] the code is readable -- [ ] the code is well tested -- [ ] the code is documented (including return types and parameters) -- [ ] the code is easy to maintain - -## Author checklist after completed review - -- [ ] I have added a line to the CHANGELOG describing this change, in a section - reflecting type of change (add section where missing): - - *added*: when you have added new functionality - - *changed*: when default behaviour of the code has been changed - - *fixes*: when your contribution fixes a bug - -## Checklist for assignee - -- [ ] PR is up to date with the base branch - -- [ ] the tests pass -- [ ] author has added an entry to the changelog (and designated the change as *added*, *changed* or *fixed*) -- Once the PR is ready to be merged, squash commits and merge the PR. +## Describe your changes + +< Summary of the changes. > + +< Please also include relevant motivation and context. > + +< List any dependencies that are required for this change. > + +## Issue Link + +< Link to the relevant issue or task. > (e.g. 
`closes #00` or `solves #00`) + +## Type of change + +- [ ] 🐛 Bug fix (non-breaking change that fixes an issue) +- [ ] ✨ New feature (non-breaking change that adds functionality) +- [ ] 💥 Breaking change (fix or feature that would cause existing functionality to not work as expected) +- [ ] 📖 Documentation (Additions or improvements to documentation) + +## Checklist before requesting a review + +- [ ] My branch is up-to-date with the target branch - if not, update your fork with the changes from the target branch (use `pull` with the `--rebase` option if possible). +- [ ] I have performed a self-review of my code +- [ ] For any new/modified functions/classes I have added docstrings that clearly describe their purpose, expected inputs and returned values +- [ ] I have placed in-line comments to clarify the intent of any hard-to-understand passages of my code +- [ ] I have updated the documentation to cover introduced code changes +- [ ] I have added tests that prove my fix is effective or that my feature works +- [ ] I have given the PR a name that clearly describes the change, written in imperative form ([context](https://www.gitkraken.com/learn/git/best-practices/git-commit-message#using-imperative-verb-form)). +- [ ] I have requested a reviewer and an assignee (assignee is responsible for merging) + +## Checklist for reviewers + +Each PR comes with its own improvements and flaws. The reviewer should check the following: +- [ ] the code is readable +- [ ] the code is well tested +- [ ] the code is documented (including return types and parameters) +- [ ] the code is easy to maintain + +## Author checklist after completed review + +- [ ] I have added a line to the CHANGELOG describing this change, in a section + reflecting type of change (add section where missing): + - *added*: when you have added new functionality + - *changed*: when default behaviour of the code has been changed + - *fixes*: when your contribution fixes a bug + +## Checklist for assignee + +- [ ] PR is up to date with the base branch + +- [ ] the tests pass +- [ ] author has added an entry to the changelog (and designated the change as *added*, *changed* or *fixed*) +- Once the PR is ready to be merged, squash commits and merge the PR. 
diff --git a/.github/workflows/ci-pre-commit.yml b/.github/workflows/ci-pre-commit.yml index 2372819..c6b3fd4 100644 --- a/.github/workflows/ci-pre-commit.yml +++ b/.github/workflows/ci-pre-commit.yml @@ -1,21 +1,21 @@ -name: linting - -on: - push: - branches: "*" - pull_request: - branches: "*" - -jobs: - linting: - name: "pre-commit hooks" - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 - with: - # don't use python3.12 because flake8 finds extra issues with that - # version - python-version: "3.11" - - uses: pre-commit/action@v3.0.1 +name: linting + +on: + push: + branches: "*" + pull_request: + branches: "*" + +jobs: + linting: + name: "pre-commit hooks" + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + # don't use python3.12 because flake8 finds extra issues with that + # version + python-version: "3.11" + - uses: pre-commit/action@v3.0.1 diff --git a/.github/workflows/ci-pypi-deploy.yml b/.github/workflows/ci-pypi-deploy.yml index 00f3101..62b1458 100644 --- a/.github/workflows/ci-pypi-deploy.yml +++ b/.github/workflows/ci-pypi-deploy.yml @@ -1,24 +1,24 @@ -name: package-release - -on: - workflow_dispatch: - pull_request: - push: - branches: - - main - release: - types: - - published - -jobs: - build: - name: build and upload release to pypi - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 - - uses: casperdcl/deploy-pypi@v2 - with: - password: ${{ secrets.PYPI_TOKEN }} - pip: wheel -w dist/ --no-deps . - upload: ${{ github.event_name == 'release' && github.event.action == 'published' }} +name: package-release + +on: + workflow_dispatch: + pull_request: + push: + branches: + - main + release: + types: + - published + +jobs: + build: + name: build and upload release to pypi + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + - uses: casperdcl/deploy-pypi@v2 + with: + password: ${{ secrets.PYPI_TOKEN }} + pip: wheel -w dist/ --no-deps . + upload: ${{ github.event_name == 'release' && github.event.action == 'published' }} diff --git a/.github/workflows/python-package-pip.yml b/.github/workflows/python-package-pip.yml index 6611adb..18a3c3e 100644 --- a/.github/workflows/python-package-pip.yml +++ b/.github/workflows/python-package-pip.yml @@ -1,49 +1,49 @@ -name: pytest - -on: - push: - branches: - - main - pull_request: - branches: - - main - -jobs: - test: - name: Test pip install python ${{ matrix.python-version }} on ${{ matrix.os }} with zarr ${{ matrix.zarr-version }} - - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest] - python-version: ["3.9", "3.10"] - zarr-version: [">=2,<3", ">2,<=3"] - steps: - - uses: actions/checkout@v2 - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - - name: Install package with pip - run: | - python -m pip install . 
"zarr${{ matrix.zarr-version }}" - python -m pip install pytest - - - name: Run tests (non-distributed) - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - run: | - python -m pytest tests/ - - name: Install distributed dependencies - run: | - python -m pip install .[dask-distributed] - - name: Run tests (distributed) - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - run: | - python -m pytest tests/ +name: pytest + +on: + push: + branches: + - main + pull_request: + branches: + - main + +jobs: + test: + name: Test pip install python ${{ matrix.python-version }} on ${{ matrix.os }} with zarr ${{ matrix.zarr-version }} + + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + python-version: ["3.9", "3.10"] + zarr-version: [">=2,<3", ">2,<=3"] + steps: + - uses: actions/checkout@v2 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Install package with pip + run: | + python -m pip install . "zarr${{ matrix.zarr-version }}" + python -m pip install pytest + + - name: Run tests (non-distributed) + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + run: | + python -m pytest tests/ + - name: Install distributed dependencies + run: | + python -m pip install .[dask-distributed] + - name: Run tests (distributed) + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + run: | + python -m pytest tests/ diff --git a/.gitignore b/.gitignore index e8e5d09..4a0710f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,165 +1,165 @@ -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -share/python-wheels/ -*.egg-info/ -.installed.cfg -*.egg -MANIFEST - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.nox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -*.py,cover -.hypothesis/ -.pytest_cache/ -cover/ - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py -db.sqlite3 -db.sqlite3-journal - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -.pybuilder/ -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# IPython -profile_default/ -ipython_config.py - -# pyenv -# For a library or package, you might want to ignore these files since the code is -# intended to run in multiple environments; otherwise, check them in: -# .python-version - -# pipenv -# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. -# However, in case of collaboration, if having platform-specific dependencies or dependencies -# having no cross-platform support, pipenv may install dependencies that don't work, or not -# install all needed dependencies. 
-#Pipfile.lock - -# poetry -# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. -# This is especially recommended for binary packages to ensure reproducibility, and is more -# commonly ignored for libraries. -# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control -#poetry.lock - -# pdm -# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. -#pdm.lock -# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it -# in version control. -# https://pdm-project.org/#use-with-ide -.pdm.toml -.pdm-python -.pdm-build/ - -# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm -__pypackages__/ - -# Celery stuff -celerybeat-schedule -celerybeat.pid - -# SageMath parsed files -*.sage.py - -# Environments -.env -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ -.dmypy.json -dmypy.json - -# Pyre type checker -.pyre/ - -# pytype static type analyzer -.pytype/ - -# Cython debug symbols -cython_debug/ - -# PyCharm -# JetBrains specific template is maintained in a separate JetBrains.gitignore that can -# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore -# and can be added to the global gitignore or merged into this file. For a more nuclear -# option (not recommended) you can uncomment the following to ignore the entire idea folder. -#.idea/ - - -*.zarr/ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. 
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm-project.org/#use-with-ide +.pdm.toml +.pdm-python +.pdm-build/ + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + + +*.zarr/ diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 088506c..c95b80e 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,25 +1,25 @@ -stages: - - lint - - test - -# run linting checks with pre-commit -lint: - stage: lint - image: python:3.10 - script: - - python -m pip install pre-commit - - pre-commit run --all-files - -pytest: - image: python:3.10 - before_script: - # 1. Install pdm - - curl -sSL https://pdm.fming.dev/install-pdm.py | python3 - - - export PATH="$HOME/.local/bin:$PATH" - - export CI_TMP_ENV=gitlab-ci-test-${CI_COMMIT_SHA} - script: - # Install project dependencies using pdm - - pdm install - - pdm install --dev - # Run pytest - - pdm run pytest +stages: + - lint + - test + +# run linting checks with pre-commit +lint: + stage: lint + image: python:3.10 + script: + - python -m pip install pre-commit + - pre-commit run --all-files + +pytest: + image: python:3.10 + before_script: + # 1. 
Install pdm + - curl -sSL https://pdm.fming.dev/install-pdm.py | python3 - + - export PATH="$HOME/.local/bin:$PATH" + - export CI_TMP_ENV=gitlab-ci-test-${CI_COMMIT_SHA} + script: + # Install project dependencies using pdm + - pdm install + - pdm install --dev + # Run pytest + - pdm run pytest diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 465e6ab..bea80c0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,28 +1,28 @@ -# https://pre-commit.com/ -repos: - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.0.1 - hooks: - - id: trailing-whitespace - - id: end-of-file-fixer - # isort should run before black as black sometimes tweaks the isort output - - repo: https://github.com/PyCQA/isort - rev: 5.12.0 - hooks: - - id: isort - # https://github.com/python/black#version-control-integration - - repo: https://github.com/psf/black - rev: 22.3.0 - hooks: - - id: black - - repo: https://github.com/PyCQA/flake8 - rev: 5.0.4 - hooks: - - id: flake8 - - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.14.1 - hooks: - - id: mypy - additional_dependencies: [types-PyYAML, types-Pillow, types-tqdm] - description: Check for type errors - files: ^mllam_data_prep/ +# https://pre-commit.com/ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.0.1 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + # isort should run before black as black sometimes tweaks the isort output + - repo: https://github.com/PyCQA/isort + rev: 5.12.0 + hooks: + - id: isort + # https://github.com/python/black#version-control-integration + - repo: https://github.com/psf/black + rev: 22.3.0 + hooks: + - id: black + - repo: https://github.com/PyCQA/flake8 + rev: 5.0.4 + hooks: + - id: flake8 + - repo: https://github.com/pre-commit/mirrors-mypy + rev: v1.14.1 + hooks: + - id: mypy + additional_dependencies: [types-PyYAML, types-Pillow, types-tqdm] + description: Check for type errors + files: ^mllam_data_prep/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 105c9d4..4c6b1b2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,164 +1,169 @@ -# Changelog - -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), -and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). - -## [v0.6.1](https://github.com/mllam/mllam-data-prep/releases/tag/v0.6.1) - -[All changes](https://github.com/mllam/mllam-data-prep/compare/v0.6.0...v0.6.1) - -This release contains bugfixes that update the tests to use a newer version of pre-commit and the correct python version, and remove uses of incompatible typing notation. - -### Fixes -- use old union typing notation compatible with all required python versions [\#77](https://github.com/mllam/mllam-data-prep/pull/77) @SimonKamuk - -### Maintenance -- update pre-commit action to v3.0.1 [\#77](https://github.com/mllam/mllam-data-prep/pull/77) @SimonKamuk -- fix tests to use expected python version from test matrix [\#77](https://github.com/mllam/mllam-data-prep/pull/77) @SimonKamuk - -## [v0.6.0](https://github.com/mllam/mllam-data-prep/releases/tag/v0.6.0) - -[All changes](https://github.com/mllam/mllam-data-prep/compare/v0.5.0...v0.6.0) - -This release adds the ability to slice input data by any coordinate, derive variables from input datasets, and store config in created datasets. It also adds support for zarr 3.0.0 and above, and a mypy typing action to pre-commit hooks. 
In addition, a number of bugs were fixed related to adding unwanted dimensions to the dataset, chunk size estimates, and derived functions. The release also includes a number of maintenance updates including updating the DANRA test dataset to v0.2.0 (which is smaller, leading to faster test execution) and updating the `dataclass-wizard` dependency to at least v0.29.2. - -### Added - -- add functionality to slice input data by any coordinate [\#55](https://github.com/mllam/mllam-data-prep/pull/55), @matschreiner -- add ability to derive variables from input datasets [\#34](https://github.com/mllam/mllam-data-prep/pull/34), @ealerskans, @mafdmi -- add github PR template to guide development process on github [\#44](https://github.com/mllam/mllam-data-prep/pull/44), @leifdenby -- add support for zarr 3.0.0 and above [\#51](https://github.com/mllam/mllam-data-prep/pull/51), @kashif -- warn if the user tries to load a non-YAML file [\#50](https://github.com/mllam/mllam-data-prep/pull/50), @j6k4m8 -- add mypy typing action to pre-commit hooks [\#67](https://github.com/mllam/mllam-data-prep/pull/67), @observingClouds -- add support for storing config in created datasets and option to only overwrite the zarr dataset on config change [\#64](https://github.com/mllam/mllam-data-prep/pull/64), @leifdenby - -### Fixes - -- fix bug which adds unwanted dimensions to the dataset [\#60](https://github.com/mllam/mllam-data-prep/pull/60), @ealerskans, @observingClouds -- correct chunk size estimate [\#59](https://github.com/mllam/mllam-data-prep/pull/59), @ealerskans -- fix bug arising when variables provided to derived functions are renamed [\#56](https://github.com/mllam/mllam-data-prep/pull/56), @leifdenby -- ensure config fields defaulting to `None` are typed as `Optional` and fields defaulting to `{}` are given a default-factory so that serialization with default values works correctly [\#63](https://github.com/mllam/mllam-data-prep/pull/63), @leifdenby -- fix reading of exported config files [\#67](https://github.com/mllam/mllam-data-prep/pull/67), @observingClouds - -### Maintenance - -- update DANRA test dataset to v0.2.0 which uses a smaller cropped domain [\#62](https://github.com/mllam/mllam-data-prep/pull/62), @leifdenby -- update `dataclass-wizard` dependency to at least v0.29.2 allowing for use of `Union` types together with check for unmatched keys in config yaml [\#73](https://github.com/mllam/mllam-data-prep/pull/73), @leifdenby - - -## [v0.5.0](https://github.com/mllam/mllam-data-prep/releases/tag/v0.5.0) - -[All changes](https://github.com/mllam/mllam-data-prep/compare/v0.4.0...v0.5.0) - -This release adds support for an optional `extra` section in the config file (for user-defined extra information that is ignored by `mllam-data-prep`) and fixes a few minor issues. Note that to use the `extra` section in the config file the schema version in the config file must be increased to `v0.5.0`. - -### Added - -- Add optional section called `extra` to config file to allow for user-defined extra information that is ignored by `mllam-data-prep` but can be used by downstream applications. 
[\#18](https://github.com/mllam/mllam-data-prep/pull/18), @leifdenby - -### Changed - -- remove f-string from `name_format` in config examples [\#35](https://github.com/mllam/mllam-data-prep/pull/35) -- replace global config for `dataclass_wizard` on `mllam_data_prep.config.Config` with config specific to that dataclass (to avoid conflicts with other uses of `dataclass_wizard`) [\#36](https://github.com/mllam/mllam-data-prep/pull/36) -- Schema version bumped to `v0.5.0` to match release version that supports optional `extra` section in config [\#18](https://github.com/mllam/mllam-data-prep/pull/18) - - -## [v0.4.0](https://github.com/mllam/mllam-data-prep/releases/tag/v0.4.0) - -[All changes](https://github.com/mllam/mllam-data-prep/compare/v0.3.0...v0.4.0) - -This release adds support for defining the output path in the command line -interface and addresses bugs around optional dependencies for -`dask.distributed`. - -### Added - -- add access to CLI via `mllam_data_prep` and add tests for CLI with/without `dask.distributed` [\#25](https://github.com/mllam/mllam-data-prep/pull/25). -- add optional output path argument to parser. [\#26](https://github.com/mllam/mllam-data-prep/pull/26) - -### Changed - -- fix bug by making dependency `distributed` optional [\#27](https://github.com/mllam/mllam-data-prep/pull/27) -- change config example to call validation split `val` instead of `validation` [\#28](https://github.com/mllam/mllam-data-prep/pull/28) -- fix typo in install dependency `distributed` [\#20](https://github.com/mllam/mllam-data-prep/pull/20) -- add missing `psutil` requirement. [\#21](https://github.com/mllam/mllam-data-prep/pull/21). - - -## [v0.3.0](https://github.com/mllam/mllam-data-prep/releases/tag/v0.3.0) - -[All changes](https://github.com/mllam/mllam-data-prep/compare/v0.2.0...v0.3.0) - -### Added - -- add support for parallel processing using `dask.distributed` with command - line flags `--dask-distributed-local-core-fraction` and - `--dask-distributed-local-memory-fraction` to control the number of cores and - memory to use on the local machine. - [\#16](https://github.com/mllam/mllam-data-prep/pull/16) - - -## [v0.2.0](https://github.com/mllam/mllam-data-prep/releases/tag/v0.2.0) - -[All changes](https://github.com/mllam/mllam-data-prep/compare/v0.1.0...v0.2.0) - -### Added - -- add support for creating dataset splits (e.g. train, validation, test) - through `output.splitting` section in the config file, and support for - optionally computing statistics for a given split (with - `output.splitting.splits.{split_name}.compute_statistics`). - [\#10](https://github.com/mllam/mllam-data-prep/pull/10). - -- include `units` and `long_name` attributes for all stacked variables as - `{output_variable}_units` and `{output_variable}_long_name` - [\#11](https://github.com/mllam/mllam-data-prep/pull/11). 
- -- include version of `mllam-data-prep` in output - [\#12](https://github.com/mllam/mllam-data-prep/pull/12) - -### Changed - -- split dataset creation and storage to zarr into separate functions - `mllam_data_prep.create_dataset(...)` and - `mllam_data_prep.create_dataset_zarr(...)` respectively - [\#7](https://github.com/mllam/mllam-data-prep/pull/7) - -- changes to spec from v0.1.0: - - the `architecture` section has been renamed `output` to make it clearer - that this section defines the properties of the output of `mllam-data-prep` - - `sampling_dim` removed from `output` (previously `architecture`) section of - spec, this is not needed to create the training data - - the variables (and their dimensions) of the output definition has been - renamed from `architecture.input_variables` to `output.variables` - - coordinate value ranges for the dimensions of the output (i.e. what - the architecture expects as input) has been renamed from - `architecture.input_ranges` to `output.coord_ranges` to make the use more - clear - - selection on variable coordinates values is now set with - `inputs.{dataset_name}.variables.{variable_name}.values` rather than - `inputs.{dataset_name}.variables.{variable_name}.sel` - - when dimension-mapping method `stack_variables_by_var_name` is used the - formatting string for the new variable is now called `name_format` rather - than `name` - - when dimension-mapping is done by simply renaming a dimension this - configuration now needs to be set by providing the named method (`rename`) - explicitly through the `method` key, i.e. rather than `{to_dim}: - {from_dim}` it is now `{to_dim}: {method: rename, dim: {from_dim}}` to - match the signature of the other dimension-mapping methods. - - the `inputs.{dataset_name}.name` attribute has been removed, as with the - key `dataset_name` it is superfluous - -- relax minimum python version requirement to `>3.8` to simplify downstream - usage [\#13](https://github.com/mllam/mllam-data-prep/pull/13) - -## [v0.1.0](https://github.com/mllam/mllam-data-prep/releases/tag/v0.1.0) - -First tagged release of `mllam-data-prep` which includes functionality to -declaratively (in a yaml-config file) describe how the variables and -coordinates of a set of zarr-based source datasets are mapped to a new set of -variables with new coordinates to form a single training dataset and write this -resulting single dataset to a new zarr dataset. This explicit mapping gives the -flexibility to target different model architectures (which may -require different inputs with different shapes between architectures). +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [unreleased](https://github.com/mllam/mllam-data-prep/compare/v0.6.1...HEAD) + +### Fixes +- fix bug where coordinate selection of an unshared dimension isn't applied to subsequent output variables when an output variable without this dimension is processed before the others [\#87](https://github.com/mllam/mllam-data-prep/pull/87) @zweihuehner + +## [v0.6.1](https://github.com/mllam/mllam-data-prep/releases/tag/v0.6.1) + +[All changes](https://github.com/mllam/mllam-data-prep/compare/v0.6.0...v0.6.1) + +This release contains bugfixes that update the tests to use a newer version of pre-commit and the correct python version, and remove uses of incompatible typing notation. 
+ +### Fixes +- use old union typing notation compatible with all required python versions [\#77](https://github.com/mllam/mllam-data-prep/pull/77) @SimonKamuk + +### Maintenance +- update pre-commit action to v3.0.1 [\#77](https://github.com/mllam/mllam-data-prep/pull/77) @SimonKamuk +- fix tests to use expected python version from test matrix [\#77](https://github.com/mllam/mllam-data-prep/pull/77) @SimonKamuk + +## [v0.6.0](https://github.com/mllam/mllam-data-prep/releases/tag/v0.6.0) + +[All changes](https://github.com/mllam/mllam-data-prep/compare/v0.5.0...v0.6.0) + +This release adds the ability to slice input data by any coordinate, derive variables from input datasets, and store config in created datasets. It also adds support for zarr 3.0.0 and above, and a mypy typing action to pre-commit hooks. In addition, a number of bugs were fixed related to adding unwanted dimensions to the dataset, chunk size estimates, and derived functions. The release also includes a number of maintenance updates including updating the DANRA test dataset to v0.2.0 (which is smaller, leading to faster test execution) and updating the `dataclass-wizard` dependency to at least v0.29.2. + +### Added + +- add functionality to slice input data by any coordinate [\#55](https://github.com/mllam/mllam-data-prep/pull/55), @matschreiner +- add ability to derive variables from input datasets [\#34](https://github.com/mllam/mllam-data-prep/pull/34), @ealerskans, @mafdmi +- add github PR template to guide development process on github [\#44](https://github.com/mllam/mllam-data-prep/pull/44), @leifdenby +- add support for zarr 3.0.0 and above [\#51](https://github.com/mllam/mllam-data-prep/pull/51), @kashif +- warn if the user tries to load a non-YAML file [\#50](https://github.com/mllam/mllam-data-prep/pull/50), @j6k4m8 +- add mypy typing action to pre-commit hooks [\#67](https://github.com/mllam/mllam-data-prep/pull/67), @observingClouds +- add support for storing config in created datasets and option to only overwrite the zarr dataset on config change [\#64](https://github.com/mllam/mllam-data-prep/pull/64), @leifdenby + +### Fixes + +- fix bug which adds unwanted dimensions to the dataset [\#60](https://github.com/mllam/mllam-data-prep/pull/60), @ealerskans, @observingClouds +- correct chunk size estimate [\#59](https://github.com/mllam/mllam-data-prep/pull/59), @ealerskans +- fix bug arising when variables provided to derived functions are renamed [\#56](https://github.com/mllam/mllam-data-prep/pull/56), @leifdenby +- ensure config fields defaulting to `None` are typed as `Optional` and fields defaulting to `{}` are given a default-factory so that serialization with default values works correctly [\#63](https://github.com/mllam/mllam-data-prep/pull/63), @leifdenby +- fix reading of exported config files [\#67](https://github.com/mllam/mllam-data-prep/pull/67), @observingClouds + +### Maintenance + +- update DANRA test dataset to v0.2.0 which uses a smaller cropped domain [\#62](https://github.com/mllam/mllam-data-prep/pull/62), @leifdenby +- update `dataclass-wizard` dependency to at least v0.29.2 allowing for use of `Union` types together with check for unmatched keys in config yaml [\#73](https://github.com/mllam/mllam-data-prep/pull/73), @leifdenby + + +## [v0.5.0](https://github.com/mllam/mllam-data-prep/releases/tag/v0.5.0) + +[All changes](https://github.com/mllam/mllam-data-prep/compare/v0.4.0...v0.5.0) + +This release adds support for an optional `extra` section in the config file (for user-defined extra 
information that is ignored by `mllam-data-prep`) and fixes a few minor issues. Note that to use the `extra` section in the config file the schema version in the config file must be increased to `v0.5.0`. + +### Added + +- Add optional section called `extra` to config file to allow for user-defined extra information that is ignored by `mllam-data-prep` but can be used by downstream applications. [\#18](https://github.com/mllam/mllam-data-prep/pull/18), @leifdenby + +### Changed + +- remove f-string from `name_format` in config examples [\#35](https://github.com/mllam/mllam-data-prep/pull/35) +- replace global config for `dataclass_wizard` on `mllam_data_prep.config.Config` with config specific to that dataclass (to avoid conflicts with other uses of `dataclass_wizard`) [\#36](https://github.com/mllam/mllam-data-prep/pull/36) +- Schema version bumped to `v0.5.0` to match release version that supports optional `extra` section in config [\#18](https://github.com/mllam/mllam-data-prep/pull/18) + + +## [v0.4.0](https://github.com/mllam/mllam-data-prep/releases/tag/v0.4.0) + +[All changes](https://github.com/mllam/mllam-data-prep/compare/v0.3.0...v0.4.0) + +This release adds support for defining the output path in the command line +interface and addresses bugs around optional dependencies for +`dask.distributed`. + +### Added + +- add access to CLI via `mllam_data_prep` and add tests for CLI with/without `dask.distributed` [\#25](https://github.com/mllam/mllam-data-prep/pull/25). +- add optional output path argument to parser. [\#26](https://github.com/mllam/mllam-data-prep/pull/26) + +### Changed + +- fix bug by making dependency `distributed` optional [\#27](https://github.com/mllam/mllam-data-prep/pull/27) +- change config example to call validation split `val` instead of `validation` [\#28](https://github.com/mllam/mllam-data-prep/pull/28) +- fix typo in install dependency `distributed` [\#20](https://github.com/mllam/mllam-data-prep/pull/20) +- add missing `psutil` requirement. [\#21](https://github.com/mllam/mllam-data-prep/pull/21). + + +## [v0.3.0](https://github.com/mllam/mllam-data-prep/releases/tag/v0.3.0) + +[All changes](https://github.com/mllam/mllam-data-prep/compare/v0.2.0...v0.3.0) + +### Added + +- add support for parallel processing using `dask.distributed` with command + line flags `--dask-distributed-local-core-fraction` and + `--dask-distributed-local-memory-fraction` to control the number of cores and + memory to use on the local machine. + [\#16](https://github.com/mllam/mllam-data-prep/pull/16) + + +## [v0.2.0](https://github.com/mllam/mllam-data-prep/releases/tag/v0.2.0) + +[All changes](https://github.com/mllam/mllam-data-prep/compare/v0.1.0...v0.2.0) + +### Added + +- add support for creating dataset splits (e.g. train, validation, test) + through `output.splitting` section in the config file, and support for + optionally computing statistics for a given split (with + `output.splitting.splits.{split_name}.compute_statistics`). + [\#10](https://github.com/mllam/mllam-data-prep/pull/10). + +- include `units` and `long_name` attributes for all stacked variables as + `{output_variable}_units` and `{output_variable}_long_name` + [\#11](https://github.com/mllam/mllam-data-prep/pull/11). 
+ +- include version of `mllam-data-prep` in output + [\#12](https://github.com/mllam/mllam-data-prep/pull/12) + +### Changed + +- split dataset creation and storage to zarr into separate functions + `mllam_data_prep.create_dataset(...)` and + `mllam_data_prep.create_dataset_zarr(...)` respectively + [\#7](https://github.com/mllam/mllam-data-prep/pull/7) + +- changes to spec from v0.1.0: + - the `architecture` section has been renamed `output` to make it clearer + that this section defines the properties of the output of `mllam-data-prep` + - `sampling_dim` removed from `output` (previously `architecture`) section of + spec, this is not needed to create the training data + - the variables (and their dimensions) of the output definition has been + renamed from `architecture.input_variables` to `output.variables` + - coordinate value ranges for the dimensions of the output (i.e. what + the architecture expects as input) has been renamed from + `architecture.input_ranges` to `output.coord_ranges` to make the use more + clear + - selection on variable coordinates values is now set with + `inputs.{dataset_name}.variables.{variable_name}.values` rather than + `inputs.{dataset_name}.variables.{variable_name}.sel` + - when dimension-mapping method `stack_variables_by_var_name` is used the + formatting string for the new variable is now called `name_format` rather + than `name` + - when dimension-mapping is done by simply renaming a dimension this + configuration now needs to be set by providing the named method (`rename`) + explicitly through the `method` key, i.e. rather than `{to_dim}: + {from_dim}` it is now `{to_dim}: {method: rename, dim: {from_dim}}` to + match the signature of the other dimension-mapping methods. + - the `inputs.{dataset_name}.name` attribute has been removed, as with the + key `dataset_name` it is superfluous + +- relax minimum python version requirement to `>3.8` to simplify downstream + usage [\#13](https://github.com/mllam/mllam-data-prep/pull/13) + +## [v0.1.0](https://github.com/mllam/mllam-data-prep/releases/tag/v0.1.0) + +First tagged release of `mllam-data-prep` which includes functionality to +declaratively (in a yaml-config file) describe how the variables and +coordinates of a set of zarr-based source datasets are mapped to a new set of +variables with new coordinates to form a single training dataset and write this +resulting single dataset to a new zarr dataset. This explicit mapping gives the +flexibility to target different model architectures (which may +require different inputs with different shapes between architectures). diff --git a/README.md b/README.md index 7350562..c34253c 100644 --- a/README.md +++ b/README.md @@ -1,421 +1,421 @@ -# mllam-data-prep - -This package aims to be a *declarative* way to prepare training data for data-driven (i.e. machine learning) weather forecasting models. -A training dataset is constructed by declaring in a yaml configuration file (for example [example.danra.yaml](example.danra.yaml)) the data sources, the variables to extract, the transformations to apply to the data, and the target variable(s) of the model architecture to map the data to. - -![](docs/processing_diagram.png) - -The configuration is principally a means to represent how the dimensions of a given variable in a source dataset should be mapped to the dimensions and input variables of the model architecture to be trained. 
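As a concrete sketch of what this mapping looks like, the fragment below is lifted from the `dim_mapping` block of the full example config reproduced later in this README: the source dataset's `x`/`y` dimensions are stacked into a single `grid_index` dimension, and the selected variables are stacked into a feature dimension:

```yaml
dim_mapping:
  time:
    method: rename
    dim: time
  grid_index:
    method: stack
    dims: [x, y]
  state_feature:
    method: stack_variables_by_var_name
    dims: [altitude]
    name_format: "{var_name}{altitude}m"
```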
- -The configuration is given in yaml-format and the file specification is defined using python3 [dataclasses](https://docs.python.org/3/library/dataclasses.html) (serialised to yaml using [dataclasses-wizard](https://dataclass-wizard.readthedocs.io/en/latest/)) and defined in [mllam_data_prep/config.py](mllam_data_prep/config.py). - - -## Installation - -To simply use `mllam-data-prep` you can install the most recent tagged version from pypi with pip: - -```bash -python -m pip install mllam-data-prep -``` - -If you want support for [creating datasets in parallel using `dask.distributed`](#creating-large-datasets-with-daskdistributed) you can install the package with the `dask-distributed` extra: - -```bash -python -m pip install mllam-data-prep[dask-distributed] -``` - -## Developing `mllam-data-prep` - -To work on developing `mllam-data-prep` it is easiest to install and manage the dependencies with [pdm](https://pdm.fming.dev/). To get started, clone your fork of [the main repo](https://github.com/mllam/mllam-data-prep) locally: - -```bash -git clone https://github.com//mllam-data-prep -cd mllam-data-prep -``` - -Use pdm to create and use a virtualenv: - -```bash -pdm venv create -pdm use --venv in-project -pdm install -``` - -All the linting is handled by `pre-commit` which can be set up to run automatically on each `git commit` by installing the git commit hook: - -```bash -pdm run pre-commit install -``` - -Then branch, commit, push and make a pull-request :) - - -## Usage - -The package is designed to be used as a command-line tool. The main command is `mllam-data-prep` which takes a configuration file as input and outputs a training dataset in the form of a `.zarr` dataset named from the config file (e.g. `example.danra.yaml` produces `example.danra.zarr`). -The format for the [config is described below](#configuration-file). -The package can also be used as a python module to create datasets in a more programmatic way by calling `mllam_data_prep.create_dataset()` directly (see below). - -### Command-line usage - -```bash -mllam_data_prep example.danra.yaml -``` - -Example output: - -![](docs/example_output.png) - - -#### Creating large datasets (with `dask.distributed`) - -If you will be creating datasets larger than a few hundred MB you may want to use -`dask.distributed.LocalCluster` to parallelise the creation of the dataset. This can be done -by setting the `--dask-distributed-local-core-fraction` flag to a value -between `0.0` and `1.0`. This will create a local `dask.distributed` cluster with the -number of workers set to the number of cores on the machine multiplied by the -fraction given. For example, to use 50% of the cores on the machine you would -run: - -```bash -mllam_data_prep example.danra.yaml --dask-distributed-local-core-fraction 0.5 -``` - -Unfortunately, the number of cores to use can only be worked out by trial and -error, but a good starting point is to use 50% of the cores on the machine and -then if you notice warnings suggesting that workers are running out of memory -you should reduce the fraction of cores used (so that each worker has more -memory available). -You can also adjust the fraction of the total system memory allocated with -`--dask-distributed-local-memory-fraction` (default is `0.9`). - -When you run the above command the console will print a URL to the dask -dashboard, which you can open in a browser to monitor the progress of the -dataset creation (and see the memory usage of the workers). 
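The two flags can also be combined; as a sketch, the invocation below allocates half of the cores and half of the system memory to the local cluster (the `0.5` values are only illustrative starting points, not recommendations):

```bash
# use half the machine's cores and half its memory for the local dask cluster
mllam_data_prep example.danra.yaml \
  --dask-distributed-local-core-fraction 0.5 \
  --dask-distributed-local-memory-fraction 0.5
```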
- -![example of using mllam-data-prep with dask.distributed for parallel processing](docs/using_dask_distributed.png) - -### Usage as a python module - -The package can also be used as a python module to create datasets directly, for example to create training datasets during training. The main function to use is `mllam_data_prep.create_dataset(config)` which takes a `mllam_data_prep.Config` as input and returns an `xarray.Dataset` object. For example: - -```python -import mllam_data_prep as mdp - -config_path = "example.danra.yaml" -config = mdp.Config.from_yaml_file(config_path) -ds = mdp.create_dataset(config=config) -``` - -## Configuration file - -A full example configuration file is given in [example.danra.yaml](example.danra.yaml), and reproduced here for completeness: - -```yaml -schema_version: v0.6.0 -dataset_version: v0.1.0 - -output: - variables: - static: [grid_index, static_feature] - state: [time, grid_index, state_feature] - forcing: [time, grid_index, forcing_feature] - coord_ranges: - time: - start: 1990-09-03T00:00 - end: 1990-09-09T00:00 - step: PT3H - chunking: - time: 1 - splitting: - dim: time - splits: - train: - start: 1990-09-03T00:00 - end: 1990-09-06T00:00 - compute_statistics: - ops: [mean, std, diff_mean, diff_std] - dims: [grid_index, time] - val: - start: 1990-09-06T00:00 - end: 1990-09-07T00:00 - test: - start: 1990-09-07T00:00 - end: 1990-09-09T00:00 - -inputs: - danra_height_levels: - path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/height_levels.zarr - dims: [time, x, y, altitude] - variables: - u: - altitude: - values: [100,] - units: m - v: - altitude: - values: [100, ] - units: m - dim_mapping: - time: - method: rename - dim: time - state_feature: - method: stack_variables_by_var_name - dims: [altitude] - name_format: "{var_name}{altitude}m" - grid_index: - method: stack - dims: [x, y] - target_output_variable: state - - danra_surface: - path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/single_levels.zarr - dims: [time, x, y] - variables: - # use surface incoming shortwave radiation as forcing - - swavr0m - derived_variables: - # derive variables to be used as forcings - toa_radiation: - kwargs: - time: ds_input.time - lat: ds_input.lat - lon: ds_input.lon - function: mllam_data_prep.ops.derive_variable.physical_field.calculate_toa_radiation - hour_of_day_sin: - kwargs: - time: ds_input.time - component: sin - function: mllam_data_prep.ops.derive_variable.time_components.calculate_hour_of_day - hour_of_day_cos: - kwargs: - time: ds_input.time - component: cos - function: mllam_data_prep.ops.derive_variable.time_components.calculate_hour_of_day - dim_mapping: - time: - method: rename - dim: time - grid_index: - method: stack - dims: [x, y] - forcing_feature: - method: stack_variables_by_var_name - name_format: "{var_name}" - coord_ranges: - x: - start: -170000 - end: -120000 - y: - start: -600000 - end: -550000 - target_output_variable: forcing - - danra_static: - path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/single_levels.zarr - dims: [x, y] - variables: - - lsm - - orography - dim_mapping: - grid_index: - method: stack - dims: [x, y] - static_feature: - method: stack_variables_by_var_name - name_format: "{var_name}" - target_output_variable: static - -extra: - projection: - class_name: LambertConformal - kwargs: - central_longitude: 25.0 - central_latitude: 56.7 - standard_parallels: [56.7, 56.7] - globe: - semimajor_axis: 6367470.0 - 
semiminor_axis: 6367470.0 -``` - -Apart from identifiers to keep track of the configuration file format version and the dataset version (for you to keep track of changes that you make to the dataset), the configuration file is divided into two main sections: - -- `output`: defines the variables and dimensions of the output dataset produced by `mllam-data-prep`. These are the variables and dimensions that the input datasets will be mapped to. These output variables and dimensions should match the input variables and dimensions expected by the model architecture you are training. -- `inputs`: a list of source datasets to extract data from. These are the datasets that will be mapped to the output variables and dimensions defined in the `output` section. - -If you want to add any extra information to the configuration file you can add it to the `extra` section. This section is not used or validated by `mllam-data-prep` but can be used to store any extra information you want to keep track of (for example when using `mllam-data-prep` with [neural-lam](https://github.com/mllam/neural-lam) the `extra` section is used to store the projection information). - -### The `output` section - -```yaml -output: - variables: - static: [grid_index, static_feature] - state: [time, grid_index, state_feature] - forcing: [time, grid_index, forcing_feature] - coord_ranges: - time: - start: 1990-09-03T00:00 - end: 1990-09-09T00:00 - step: PT3H - chunking: - time: 1 - splitting: - dim: time - splits: - train: - start: 1990-09-03T00:00 - end: 1990-09-06T00:00 - compute_statistics: - ops: [mean, std, diff_mean, diff_std] - dims: [grid_index, time] - val: - start: 1990-09-06T00:00 - end: 1990-09-07T00:00 - test: - start: 1990-09-07T00:00 - end: 1990-09-09T00:00 -``` - -The `output` section defines four things: - -1. `variables`: what input variables the model architecture you are targeting expects, and what the dimensions are for each of these variables. -2. `coord_ranges`: the range of values for each of the dimensions that the model architecture expects as input. These are optional, but allow you to ensure that the training dataset is created with the correct range of values for each dimension. -3. `chunking`: the chunk sizes to use when writing the training dataset to zarr. This is optional, but can be used to optimise the performance of the zarr dataset. By default the chunk sizes are set to the size of the dimension, but this can be overridden by setting the chunk size in the configuration file. A common choice is to set the dimension along which you are batching to align with the size of each training item (e.g. if you are training a model with time-step roll-out of 10 timesteps, you might choose a chunksize of 10 along the time dimension). -4. Splitting and calculation of statistics of the output variables, using the `splitting` section. The `output.splitting.splits` attribute defines the individual splits to create (for example `train`, `val` and `test`) and `output.splitting.dim` defines the dimension to split along. The `compute_statistics` attribute can optionally be set for a given split to calculate the statistical properties requested (for example `mean`, `std`); any method available as `xarray.Dataset.{op}` can be used. In addition, methods prefixed by `diff_` (so the operation would be listed as `diff_{op}`) compute a statistic based on the difference of consecutive time-steps, e.g. `diff_mean` to compute the `mean` of the difference between consecutive timesteps (these are used for normalising increments). 
The `dims` attribute defines the dimensions to calculate the statistics over (for example `grid_index` and `time`). - -### The `inputs` section - -```yaml -inputs: - danra_height_levels: - path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/height_levels.zarr - dims: [time, x, y, altitude] - variables: - u: - altitude: - values: [100,] - units: m - v: - altitude: - values: [100, ] - units: m - dim_mapping: - time: - method: rename - dim: time - state_feature: - method: stack_variables_by_var_name - dims: [altitude] - name_format: "{var_name}{altitude}m" - grid_index: - method: stack - dims: [x, y] - target_output_variable: state - - danra_surface: - path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/single_levels.zarr - dims: [time, x, y] - variables: - # use surface incoming shortwave radiation as forcing - - swavr0m - derived_variables: - # derive variables to be used as forcings - toa_radiation: - kwargs: - time: ds_input.time - lat: ds_input.lat - lon: ds_input.lon - function: mllam_data_prep.ops.derive_variable.physical_field.calculate_toa_radiation - hour_of_day_sin: - kwargs: - time: ds_input.time - component: sin - function: mllam_data_prep.ops.derive_variable.time_components.calculate_hour_of_day - hour_of_day_cos: - kwargs: - time: ds_input.time - component: cos - function: mllam_data_prep.ops.derive_variable.time_components.calculate_hour_of_day - dim_mapping: - time: - method: rename - dim: time - grid_index: - method: stack - dims: [x, y] - forcing_feature: - method: stack_variables_by_var_name - name_format: "{var_name}" - coord_ranges: - x: - start: -170000 - end: -120000 - y: - start: -600000 - end: -550000 - target_output_variable: forcing - - ... -``` - -The `inputs` section defines the source datasets to extract data from. Each source dataset is defined by a key (e.g. `danra_height_levels`) which names the source dataset, and the attributes of the source dataset: - -- `path`: the path to the source dataset. This can be a local path or a URL to e.g. a zarr dataset or netCDF file, anything that can be read by `xarray.open_dataset(...)`. -- `dims`: the dimensions that the source dataset is expected to have. This is used to check that the source dataset has the expected dimensions and also makes it clearer in the config file what the dimensions of the source dataset are. -- `variables`: selects which variables to extract from the source dataset. This may either be a list of variable names, or a dictionary where each key is the variable name and the value defines a dictionary of coordinates to do selection on. When doing selection you may also optionally define the units of the variable to check that the units of the variable match the units of the variable in the model architecture. -- `target_output_variable`: the variable in the model architecture that the source dataset should be mapped to. -- `dim_mapping`: defines how the dimensions of the source dataset should be mapped to the dimensions of the model architecture. This is done by defining a method to apply to each dimension. The methods are: - - `rename`: simply rename the dimension to the new name - - `stack`: stack the listed dimensions to create the dimension in the output - - `stack_variables_by_var_name`: stack the dimension into the new dimension, and also stack the variable name into the new variable name. This is useful when you have multiple variables with the same dimensions that you want to stack into a single variable. 
-- `derived_variables`: defines the variables to be derived from the variables available in the source dataset. This should be a dictionary where each key is the name of the variable to be derived and the value defines a dictionary with the following additional information. See also the 'Derived Variables' section for more details. - - `function`: the function used to derive a variable. This should be a string with the full namespace of the function, e.g. `mllam_data_prep.ops.derive_variable.physical_field.calculate_toa_radiation`. - - `kwargs`: arguments to `function`. This is a dictionary where each key is the named argument to `function` and each value is the input to the function. Here we distinguish between values to be extracted/selected from the input dataset and values supplied by the users themselves. Arguments with values to be extracted from the input dataset need to be prefixed with "ds_input." to distinguish them from other arguments. See the 'Derived Variables' section for more details. - - `coord_ranges`: - - `variable`: the variable to extract the coordinate from - - `start`: start value of the variable - - `end`: end value of the variable - -#### Derived Variables -Variables that are not part of the source dataset but can be derived from variables in the source dataset can also be included. They should be defined in their own section, called `derived_variables`, as illustrated in the example config above and in the example config file [example.danra.yaml](example.danra.yaml). - -To derive a variable, the function to be used (`function`) and the arguments to this function (`kwargs`) need to be specified, as explained above. Here we need to distinguish between arguments that should be data from the input dataset and arguments that should be supplied by the users themselves. The example below illustrates how to derive the cosine component of the cyclically encoded hour of day variable: - -```yaml - derived_variables: - hour_of_day_cos: - kwargs: - time: ds_input.time - component: cos - function: mllam_data_prep.ops.derive_variable.time_components.calculate_hour_of_day - attrs: - units: 1 - long_name: cos component of cyclically encoded hour of day -``` - -The function `mllam_data_prep.ops.derive_variable.time_components.calculate_hour_of_day` takes two arguments: `time` and `component`. The `time` argument should extract the `time` variable from the input dataset and has therefore been prefixed with "ds_input." to distinguish it from other arguments that should not be extracted from the source dataset. The `component` argument, on the other hand, is a string (either "sin" or "cos") and decides whether the returned derived variable is the sine or cosine component of the cyclically encoded hour of day. - -In addition, an optional section called `attrs` can be added. In this section, the user can add attributes to the derived variable, as illustrated in the example above. Note that the attributes `units` and `long_name` are **required**. This means that if the function used to derive a variable does not set these attributes, they must be set in the config file. If using a function defined in `mllam_data_prep.ops.derive_variable` the `attrs` section is optional as the required attributes should already be defined. In this case, adding the `units` and `long_name` attributes to the `attrs` section of the derived variable in the config file will **overwrite** the already-defined attributes in the function. It is also possible to set other attributes. 
This can be done by adding them under the `attrs` section in the same way as shown for `unit` and `long_name` in the example above. - -Currently, the following derived variables are included as part of `mllam-data-prep`: -- `toa_radiation`: - - Top-of-atmosphere incoming radiation - - function: `mllam_data_prep.ops.derive_variable.physical_field.calculate_toa_radiation` - - arguments: `lat`, `lon`, `time` -- `hour_of_day_[sin/cos]`: - - Sine or cosine part of cyclically encoded hour of day - - function: `mllam_data_prep.ops.derive_variable.time_components.calculate_hour_of_day` - - arguments: `time`, `component` -- `day_of_year_[sin/cos]`: - - Sine or cosine part of cyclically encoded day of year - - function: `mllam_data_prep.ops.derive_variable.time_components.calculate_day_of_year` - - arguments: `time`, `component` - - -### Config schema versioning - -The schema version of the configuration file is defined by the `schema_version` attribute at the top of the configuration file. This is used to keep track of changes to the configuration file format. The schema version is used to check that the configuration file is compatible with the version of `mllam-data-prep` that you are using. If the schema version of the configuration file is not compatible with the version of `mllam-data-prep` that you are using you will get an error message telling you that the schema version is not compatible. - -The schema version is updated whenever the configuration format changes, with the new schema version matching the minimum version of `mllam-data-prep` that is required to use the new configuration format. As `mllam-data-prep` is still in rapid development (and hasn't reached version `v1.0.0` yet) we unfortunately make no gaurantee about backward compatibility. However, the [CHANGELOG.md](CHANGELOG.md) will always contain migration instructions when the config format changes. +# mllam-data-prep + +This package aims to be a *declarative* way to prepare training data for data-driven (i.e. machine learning) weather forecasting models. +A training dataset is constructed by declaring in a yaml configuration file (for example [example.danra.yaml](example.danra.yaml)) the data sources, the variables to extract, the transformations to apply to the data, and the target variable(s) of the model architecture to map the data to. + +![](docs/processing_diagram.png) + +The configuration is principally a means to represent how the dimensions of a given variable in a source dataset should be mapped to the dimensions and input variables of the model architecture to be trained. + +The configuration is given in yaml format and the file specification is defined using python3 [dataclasses](https://docs.python.org/3/library/dataclasses.html) (serialised to yaml using [dataclass-wizard](https://dataclass-wizard.readthedocs.io/en/latest/)) in [mllam_data_prep/config.py](mllam_data_prep/config.py). + + +## Installation + +To simply use `mllam-data-prep` you can install the most recent tagged version from PyPI with pip: + +```bash +python -m pip install mllam-data-prep +``` + +If you want support for [creating datasets in parallel using `dask.distributed`](#creating-large-datasets-with-daskdistributed) you can install the package with the `dask-distributed` extra: + +```bash +python -m pip install mllam-data-prep[dask-distributed] +``` + +## Developing `mllam-data-prep` + +To work on developing `mllam-data-prep` it is easiest to install and manage the dependencies with [pdm](https://pdm.fming.dev/).
To get started clone your fork of [the main repo](https://github.com/mllam/mllam-data-prep) locally: + +```bash +git clone https://github.com/<your-github-username>/mllam-data-prep +cd mllam-data-prep +``` + +Use pdm to create and use a virtualenv: + +```bash +pdm venv create +pdm use --venv in-project +pdm install +``` + +All the linting is handled by `pre-commit`, which can be set up to run automatically on each `git commit` by installing the git commit hook: + +```bash +pdm run pre-commit install +``` + +Then branch, commit, push and make a pull-request :) + + +## Usage + +The package is designed to be used as a command-line tool. The main command is `mllam-data-prep`, which takes a configuration file as input and outputs a training dataset in the form of a `.zarr` dataset named from the config file (e.g. `example.danra.yaml` produces `example.danra.zarr`). +The format for the [config is described below](#configuration-file). +The package can also be used as a python module to create datasets in a more programmatic way by calling `mllam_data_prep.create_dataset()` directly (see below). + +### Command-line usage + +```bash +mllam_data_prep example.danra.yaml +``` + +Example output: + +![](docs/example_output.png) + + +#### Creating large datasets (with `dask.distributed`) + +If you will be creating datasets larger than a few hundred MB you may want to use +`dask.distributed.LocalCluster` to parallelise the creation of the dataset. This can be done +by setting the `--dask-distributed-local-core-fraction` flag to a value +between `0.0` and `1.0`. This will create a local `dask.distributed` cluster with the +number of workers set to the number of cores on the machine multiplied by the +fraction given. For example, to use 50% of the cores on the machine you would +run: + +```bash +mllam_data_prep example.danra.yaml --dask-distributed-local-core-fraction 0.5 +``` + +Unfortunately, the right fraction of cores to use can only be worked out by trial and +error, but a good starting point is to use 50% of the cores on the machine; if you then notice warnings suggesting that workers are running out of memory +you should reduce the fraction of cores used (so that each worker has more +memory available). +You can also adjust the fraction of the total system memory allocated with +`--dask-distributed-local-memory-fraction` (default is `0.9`). + +When you run the above command the console will print a URL to the dask +dashboard, which you can open in a browser to monitor the progress of the +dataset creation (and see the memory usage of the workers). + +![example of using mllam-data-prep with dask.distributed for parallel processing](docs/using_dask_distributed.png) + +### Usage as a python module + +The package can also be used as a python module to create datasets directly, for example to create training datasets during training. The main function to use is `mllam_data_prep.create_dataset(config)`, which takes a `mllam_data_prep.Config` as input and returns an `xarray.Dataset` object.
For example: + +```python +import mllam_data_prep as mdp + +config_path = "example.danra.yaml" +config = mdp.Config.from_yaml_file(config_path) +ds = mdp.create_dataset(config=config) +``` + +## Configuration file + +A full example configuration file is given in [example.danra.yaml](example.danra.yaml), and reproduced here for completeness: + +```yaml +schema_version: v0.6.0 +dataset_version: v0.1.0 + +output: + variables: + static: [grid_index, static_feature] + state: [time, grid_index, state_feature] + forcing: [time, grid_index, forcing_feature] + coord_ranges: + time: + start: 1990-09-03T00:00 + end: 1990-09-09T00:00 + step: PT3H + chunking: + time: 1 + splitting: + dim: time + splits: + train: + start: 1990-09-03T00:00 + end: 1990-09-06T00:00 + compute_statistics: + ops: [mean, std, diff_mean, diff_std] + dims: [grid_index, time] + val: + start: 1990-09-06T00:00 + end: 1990-09-07T00:00 + test: + start: 1990-09-07T00:00 + end: 1990-09-09T00:00 + +inputs: + danra_height_levels: + path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/height_levels.zarr + dims: [time, x, y, altitude] + variables: + u: + altitude: + values: [100,] + units: m + v: + altitude: + values: [100, ] + units: m + dim_mapping: + time: + method: rename + dim: time + state_feature: + method: stack_variables_by_var_name + dims: [altitude] + name_format: "{var_name}{altitude}m" + grid_index: + method: stack + dims: [x, y] + target_output_variable: state + + danra_surface: + path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/single_levels.zarr + dims: [time, x, y] + variables: + # use surface incoming shortwave radiation as forcing + - swavr0m + derived_variables: + # derive variables to be used as forcings + toa_radiation: + kwargs: + time: ds_input.time + lat: ds_input.lat + lon: ds_input.lon + function: mllam_data_prep.ops.derive_variable.physical_field.calculate_toa_radiation + hour_of_day_sin: + kwargs: + time: ds_input.time + component: sin + function: mllam_data_prep.ops.derive_variable.time_components.calculate_hour_of_day + hour_of_day_cos: + kwargs: + time: ds_input.time + component: cos + function: mllam_data_prep.ops.derive_variable.time_components.calculate_hour_of_day + dim_mapping: + time: + method: rename + dim: time + grid_index: + method: stack + dims: [x, y] + forcing_feature: + method: stack_variables_by_var_name + name_format: "{var_name}" + coord_ranges: + x: + start: -170000 + end: -120000 + y: + start: -600000 + end: -550000 + target_output_variable: forcing + + danra_static: + path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/single_levels.zarr + dims: [x, y] + variables: + - lsm + - orography + dim_mapping: + grid_index: + method: stack + dims: [x, y] + static_feature: + method: stack_variables_by_var_name + name_format: "{var_name}" + target_output_variable: static + +extra: + projection: + class_name: LambertConformal + kwargs: + central_longitude: 25.0 + central_latitude: 56.7 + standard_parallels: [56.7, 56.7] + globe: + semimajor_axis: 6367470.0 + semiminor_axis: 6367470.0 +``` + +Apart from identifiers to keep track of the configuration file format version and the dataset version (for you to keep track of changes that you make to the dataset), the configuration file is divided into two main sections: + +- `output`: defines the variables and dimensions of the output dataset produced by `mllam-data-prep`. These are the variables and dimensions that the input datasets will be mapped to. 
These output variables and dimensions should match the input variables and dimensions expected by the model architecture you are training. +- `inputs`: a list of source datasets to extract data from. These are the datasets that will be mapped to the output variables and dimensions defined in the `output` section. + +If you want to add any extra information to the configuration file you can add it to the `extra` section. This section is not used or validated by `mllam-data-prep` but can be used to store any extra information you want to keep track of (for example when using `mllam-data-prep` with [neural-lam](https://github.com/mllam/neural-lam) the `extra` section is used to store the projection information). + +### The `output` section + +```yaml +output: + variables: + static: [grid_index, static_feature] + state: [time, grid_index, state_feature] + forcing: [time, grid_index, forcing_feature] + coord_ranges: + time: + start: 1990-09-03T00:00 + end: 1990-09-09T00:00 + step: PT3H + chunking: + time: 1 + splitting: + dim: time + splits: + train: + start: 1990-09-03T00:00 + end: 1990-09-06T00:00 + compute_statistics: + ops: [mean, std, diff_mean, diff_std] + dims: [grid_index, time] + val: + start: 1990-09-06T00:00 + end: 1990-09-07T00:00 + test: + start: 1990-09-07T00:00 + end: 1990-09-09T00:00 +``` + +The `output` section defines four things: + +1. `variables`: what input variables the model architecture you are targeting expects, and what the dimensions are for each of these variables. +2. `coord_ranges`: the range of values for each of the dimensions that the model architecture expects as input. These are optional, but allow you to ensure that the training dataset is created with the correct range of values for each dimension. +3. `chunking`: the chunk sizes to use when writing the training dataset to zarr. This is optional, but can be used to optimise the performance of the zarr dataset. By default the chunk sizes are set to the size of the dimension, but this can be overridden by setting the chunk size in the configuration file. A common choice is to set the chunk size along the dimension you are batching over to align with the length of each training item (e.g. if you are training a model with a time-step roll-out of 10 timesteps, you might choose a chunk size of 10 along the time dimension). +4. Splitting and calculation of statistics of the output variables, using the `splitting` section. The `output.splitting.splits` attribute defines the individual splits to create (for example `train`, `val` and `test`) and `output.splitting.dim` defines the dimension to split along. The `compute_statistics` attribute can optionally be set for a given split to calculate the statistical properties requested (for example `mean` and `std`); any method available as `xarray.Dataset.{op}` can be used. In addition, methods prefixed by `diff_` (so the operation would be listed as `diff_{op}`) compute the statistic on the difference between consecutive time-steps, e.g. `diff_mean` computes the `mean` of the difference between consecutive timesteps (these are used for normalising increments). The `dims` attribute defines the dimensions to calculate the statistics over (for example `grid_index` and `time`). A sketch of what these operations amount to is given below.
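+
+To make the split statistics concrete, here is a minimal sketch of what the configuration above amounts to in plain `xarray` operations. This is an illustration of the semantics only, not the package's internal implementation, and the dataset path is hypothetical:
+
+```python
+import xarray as xr
+
+ds = xr.open_zarr("example.danra.zarr")  # hypothetical output of mllam-data-prep
+
+# the "train" split selects a range along the splitting dimension ("time")
+ds_train = ds.sel(time=slice("1990-09-03T00:00", "1990-09-06T00:00"))
+
+# ops [mean, std] reduce over the dimensions given in `compute_statistics.dims`
+train_mean = ds_train.mean(dim=["grid_index", "time"])
+train_std = ds_train.std(dim=["grid_index", "time"])
+
+# `diff_`-prefixed ops apply the same reduction to the difference between
+# consecutive time-steps, e.g. `diff_mean` and `diff_std`
+ds_diff = ds_train.diff(dim="time")
+train_diff_mean = ds_diff.mean(dim=["grid_index", "time"])
+train_diff_std = ds_diff.std(dim=["grid_index", "time"])
+```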
+ +### The `inputs` section + +```yaml +inputs: + danra_height_levels: + path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/height_levels.zarr + dims: [time, x, y, altitude] + variables: + u: + altitude: + values: [100,] + units: m + v: + altitude: + values: [100, ] + units: m + dim_mapping: + time: + method: rename + dim: time + state_feature: + method: stack_variables_by_var_name + dims: [altitude] + name_format: "{var_name}{altitude}m" + grid_index: + method: stack + dims: [x, y] + target_output_variable: state + + danra_surface: + path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/single_levels.zarr + dims: [time, x, y] + variables: + # use surface incoming shortwave radiation as forcing + - swavr0m + derived_variables: + # derive variables to be used as forcings + toa_radiation: + kwargs: + time: ds_input.time + lat: ds_input.lat + lon: ds_input.lon + function: mllam_data_prep.ops.derive_variable.physical_field.calculate_toa_radiation + hour_of_day_sin: + kwargs: + time: ds_input.time + component: sin + function: mllam_data_prep.ops.derive_variable.time_components.calculate_hour_of_day + hour_of_day_cos: + kwargs: + time: ds_input.time + component: cos + function: mllam_data_prep.ops.derive_variable.time_components.calculate_hour_of_day + dim_mapping: + time: + method: rename + dim: time + grid_index: + method: stack + dims: [x, y] + forcing_feature: + method: stack_variables_by_var_name + name_format: "{var_name}" + coord_ranges: + x: + start: -170000 + end: -120000 + y: + start: -600000 + end: -550000 + target_output_variable: forcing + + ... +``` + +The `inputs` section defines the source datasets to extract data from. Each source dataset is defined by a key (e.g. `danra_height_levels`) which names the source dataset, and the attributes of the source dataset: + +- `path`: the path to the source dataset. This can be a local path or a URL to e.g. a zarr dataset or netCDF file, anything that can be read by `xarray.open_dataset(...)`. +- `dims`: the dimensions that the source dataset is expected to have. This is used to check that the source dataset has the expected dimensions and also makes it clearer in the config file what the dimensions of the source dataset are. +- `variables`: selects which variables to extract from the source dataset. This may either be a list of variable names, or a dictionary where each key is the variable name and the value defines a dictionary of coordinates to do selection on. When doing selection you may also optionally define the units of the variable to check that the units of the variable match the units of the variable in the model architecture. +- `target_output_variable`: the variable in the model architecture that the source dataset should be mapped to. +- `dim_mapping`: defines how the dimensions of the source dataset should be mapped to the dimensions of the model architecture. This is done by defining a method to apply to each dimension. The methods are: + - `rename`: simply rename the dimension to the new name + - `stack`: stack the listed dimension to create the dimension in the output + - `stack_variables_by_var_name`: stack the dimension into the new dimension, and also stack the variable name into the new variable name. This is useful when you have multiple variables with the same dimensions that you want to stack into a single variable. +- `derived_variables`: defines the variables to be derived from the variables available in the source dataset. 
This should be a dictionary where each key is the name of the variable to be derived and the value defines a dictionary with the following additional information. See also the 'Derived Variables' section below for more details. + - `function`: the function used to derive the variable. This should be a string with the full namespace of the function, e.g. `mllam_data_prep.ops.derive_variable.physical_field.calculate_toa_radiation`. + - `kwargs`: arguments to `function`. This is a dictionary where each key is a named argument of `function` and each value is the input for that argument. Here we distinguish between values to be extracted/selected from the input dataset and values supplied by the users themselves. Arguments with values to be extracted from the input dataset need to be prefixed with "ds_input." to distinguish them from other arguments. See the 'Derived Variables' section below for more details. +- `coord_ranges`: optionally restricts the range of coordinate values read from the source dataset. Each key is the name of a coordinate (e.g. `x` and `y` in the example above) and its value defines: + - `start`: the start value of the coordinate range + - `end`: the end value of the coordinate range + +#### Derived Variables +Variables that are not part of the source dataset but can be derived from variables in the source dataset can also be included. They should be defined in their own section, called `derived_variables`, as illustrated in the example config above and in the example config file [example.danra.yaml](example.danra.yaml). + +To derive a variable, the function to be used (`function`) and the arguments to this function (`kwargs`) need to be specified, as explained above. Here we need to distinguish between arguments that should be data from the input dataset and arguments that should be supplied by the users themselves. The example below illustrates how to derive the cosine component of the cyclically encoded hour of day variable: + +```yaml + derived_variables: + hour_of_day_cos: + kwargs: + time: ds_input.time + component: cos + function: mllam_data_prep.ops.derive_variable.time_components.calculate_hour_of_day + attrs: + units: 1 + long_name: cos component of cyclically encoded hour of day +``` + +The function `mllam_data_prep.ops.derive_variable.time_components.calculate_hour_of_day` takes two arguments: `time` and `component`. The `time` argument should extract the `time` variable from the input dataset and has therefore been prefixed with "ds_input." to distinguish it from other arguments that should not be extracted from the source dataset. The `component` argument, on the other hand, is a string (either "sin" or "cos") and decides whether the returned derived variable is the sine or cosine component of the cyclically encoded hour of day. + +In addition, an optional section called `attrs` can be added. In this section, the user can add attributes to the derived variable, as illustrated in the example above. Note that the attributes `units` and `long_name` are **required**. This means that if the function used to derive a variable does not set these attributes they are **required** to be set in the config file. If using a function defined in `mllam_data_prep.ops.derive_variable` the `attrs` section is optional as the required attributes should already be defined. In this case, adding the `units` and `long_name` attributes to the `attrs` section of the derived variable in the config file will **overwrite** the already-defined attributes from the function. It is also possible to set other attributes. This can be done by adding them under the `attrs` section in the same way as shown for `units` and `long_name` in the example above.
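+
+To illustrate the contract a derive-function has to fulfil, below is a simplified sketch of what such a function might look like (a hypothetical example, not the package's actual implementation). Arguments prefixed with "ds_input." in the config arrive as data extracted from the input dataset, the remaining arguments arrive as the plain values given in the config, and the function returns an `xr.DataArray`, ideally with the required `units` and `long_name` attributes already set:
+
+```python
+import numpy as np
+import xarray as xr
+
+
+def hour_of_day_component(time: xr.DataArray, component: str) -> xr.DataArray:
+    # `time` comes from the input dataset (the `ds_input.time` kwarg in the
+    # config); `component` is the plain string "sin" or "cos" from the config
+    hour = time.dt.hour + time.dt.minute / 60
+    angle = 2 * np.pi * hour / 24
+    da = np.sin(angle) if component == "sin" else np.cos(angle)
+    # set the required attributes; a function that does not set them must have
+    # them supplied via the `attrs` section of the config instead
+    return da.assign_attrs(
+        units="1",
+        long_name=f"{component} component of cyclically encoded hour of day",
+    )
+```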
+ +Currently, the following derived variables are included as part of `mllam-data-prep`: +- `toa_radiation`: + - Top-of-atmosphere incoming radiation + - function: `mllam_data_prep.ops.derive_variable.physical_field.calculate_toa_radiation` + - arguments: `lat`, `lon`, `time` +- `hour_of_day_[sin/cos]`: + - Sine or cosine part of cyclically encoded hour of day + - function: `mllam_data_prep.ops.derive_variable.time_components.calculate_hour_of_day` + - arguments: `time`, `component` +- `day_of_year_[sin/cos]`: + - Sine or cosine part of cyclically encoded day of year + - function: `mllam_data_prep.ops.derive_variable.time_components.calculate_day_of_year` + - arguments: `time`, `component` + + +### Config schema versioning + +The schema version of the configuration file is defined by the `schema_version` attribute at the top of the configuration file and is used to keep track of changes to the configuration file format. It is checked to ensure that the configuration file is compatible with the version of `mllam-data-prep` that you are using; if it is not, you will get an error message telling you that the schema version is incompatible. + +The schema version is updated whenever the configuration format changes, with the new schema version matching the minimum version of `mllam-data-prep` that is required to use the new configuration format. As `mllam-data-prep` is still in rapid development (and hasn't reached version `v1.0.0` yet) we unfortunately make no guarantee about backward compatibility. However, the [CHANGELOG.md](CHANGELOG.md) will always contain migration instructions when the config format changes. diff --git a/example.danra.yaml b/example.danra.yaml index e81619b..67d5917 100644 --- a/example.danra.yaml +++ b/example.danra.yaml @@ -1,120 +1,120 @@ -# This config example builds on the DANRA test dataset v0.2.0, details of which -# can be found in https://github.com/mllam/mllam-testdata/releases/tag/2025-02-05 -schema_version: v0.6.0 -dataset_version: v0.1.0 - -output: - variables: - static: [grid_index, static_feature] - state: [time, grid_index, state_feature] - forcing: [time, grid_index, forcing_feature] - coord_ranges: - time: - start: 2022-04-01T00:00 - end: 2022-04-10T00:00 - step: PT3H - chunking: - time: 1 - splitting: - dim: time - splits: - train: - start: 2022-04-01T00:00 - end: 2022-04-04T00:00 - compute_statistics: - ops: [mean, std, diff_mean, diff_std] - dims: [grid_index, time] - val: - start: 2022-04-04T00:00 - end: 2022-04-07T00:00 - test: - start: 2022-04-07T00:00 - end: 2022-04-10T00:00 - -inputs: - danra_height_levels: - path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/height_levels.zarr - dims: [time, x, y, altitude] - variables: - u: - altitude: - values: [100,] - units: m - v: - altitude: - values: [100, ] - units: m - dim_mapping: - time: - method: rename - dim: time - state_feature: - method: stack_variables_by_var_name - dims: [altitude] - name_format: "{var_name}{altitude}m" - grid_index: - method: stack - dims: [x, y] - target_output_variable: state - - danra_surface: - path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/single_levels.zarr - dims: [time, x, y] - variables: - # use surface incoming shortwave radiation as forcing - - swavr0m - derived_variables: - # derive variables to be used as forcings - toa_radiation: - kwargs: - time: ds_input.time - lat:
ds_input.lat - lon: ds_input.lon - function: mllam_data_prep.ops.derive_variable.physical_field.calculate_toa_radiation - hour_of_day_sin: - kwargs: - time: ds_input.time - component: sin - function: mllam_data_prep.ops.derive_variable.time_components.calculate_hour_of_day - hour_of_day_cos: - kwargs: - time: ds_input.time - component: cos - function: mllam_data_prep.ops.derive_variable.time_components.calculate_hour_of_day - dim_mapping: - time: - method: rename - dim: time - grid_index: - method: stack - dims: [x, y] - forcing_feature: - method: stack_variables_by_var_name - name_format: "{var_name}" - target_output_variable: forcing - - danra_static: - path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/single_levels.zarr - dims: [x, y] - variables: - - lsm - - orography - dim_mapping: - grid_index: - method: stack - dims: [x, y] - static_feature: - method: stack_variables_by_var_name - name_format: "{var_name}" - target_output_variable: static - -extra: - projection: - class_name: LambertConformal - kwargs: - central_longitude: 25.0 - central_latitude: 56.7 - standard_parallels: [56.7, 56.7] - globe: - semimajor_axis: 6367470.0 - semiminor_axis: 6367470.0 +# This config example builds on the DANRA test dataset v0.2.0, details of which +# can be found in https://github.com/mllam/mllam-testdata/releases/tag/2025-02-05 +schema_version: v0.6.0 +dataset_version: v0.1.0 + +output: + variables: + static: [grid_index, static_feature] + state: [time, grid_index, state_feature] + forcing: [time, grid_index, forcing_feature] + coord_ranges: + time: + start: 2022-04-01T00:00 + end: 2022-04-10T00:00 + step: PT3H + chunking: + time: 1 + splitting: + dim: time + splits: + train: + start: 2022-04-01T00:00 + end: 2022-04-04T00:00 + compute_statistics: + ops: [mean, std, diff_mean, diff_std] + dims: [grid_index, time] + val: + start: 2022-04-04T00:00 + end: 2022-04-07T00:00 + test: + start: 2022-04-07T00:00 + end: 2022-04-10T00:00 + +inputs: + danra_height_levels: + path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/height_levels.zarr + dims: [time, x, y, altitude] + variables: + u: + altitude: + values: [100,] + units: m + v: + altitude: + values: [100, ] + units: m + dim_mapping: + time: + method: rename + dim: time + state_feature: + method: stack_variables_by_var_name + dims: [altitude] + name_format: "{var_name}{altitude}m" + grid_index: + method: stack + dims: [x, y] + target_output_variable: state + + danra_surface: + path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/single_levels.zarr + dims: [time, x, y] + variables: + # use surface incoming shortwave radiation as forcing + - swavr0m + derived_variables: + # derive variables to be used as forcings + toa_radiation: + kwargs: + time: ds_input.time + lat: ds_input.lat + lon: ds_input.lon + function: mllam_data_prep.ops.derive_variable.physical_field.calculate_toa_radiation + hour_of_day_sin: + kwargs: + time: ds_input.time + component: sin + function: mllam_data_prep.ops.derive_variable.time_components.calculate_hour_of_day + hour_of_day_cos: + kwargs: + time: ds_input.time + component: cos + function: mllam_data_prep.ops.derive_variable.time_components.calculate_hour_of_day + dim_mapping: + time: + method: rename + dim: time + grid_index: + method: stack + dims: [x, y] + forcing_feature: + method: stack_variables_by_var_name + name_format: "{var_name}" + target_output_variable: forcing + + danra_static: + path: 
https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/single_levels.zarr + dims: [x, y] + variables: + - lsm + - orography + dim_mapping: + grid_index: + method: stack + dims: [x, y] + static_feature: + method: stack_variables_by_var_name + name_format: "{var_name}" + target_output_variable: static + +extra: + projection: + class_name: LambertConformal + kwargs: + central_longitude: 25.0 + central_latitude: 56.7 + standard_parallels: [56.7, 56.7] + globe: + semimajor_axis: 6367470.0 + semiminor_axis: 6367470.0 diff --git a/mllam_data_prep/__init__.py b/mllam_data_prep/__init__.py index 64bfa91..c53db76 100644 --- a/mllam_data_prep/__init__.py +++ b/mllam_data_prep/__init__.py @@ -1,10 +1,10 @@ -import importlib.metadata - -try: - __version__ = importlib.metadata.version(__name__) -except importlib.metadata.PackageNotFoundError: - __version__ = "unknown" - -# expose the public API -from .config import Config, InvalidConfigException # noqa -from .create_dataset import create_dataset, create_dataset_zarr # noqa +import importlib.metadata + +try: + __version__ = importlib.metadata.version(__name__) +except importlib.metadata.PackageNotFoundError: + __version__ = "unknown" + +# expose the public API +from .config import Config, InvalidConfigException # noqa +from .create_dataset import create_dataset, create_dataset_zarr # noqa diff --git a/mllam_data_prep/__main__.py b/mllam_data_prep/__main__.py index d94e5e9..508aca1 100644 --- a/mllam_data_prep/__main__.py +++ b/mllam_data_prep/__main__.py @@ -1,4 +1,4 @@ -from .cli import call - -if __name__ == "__main__": - args = call(args=None) +from .cli import call + +if __name__ == "__main__": + args = call(args=None) diff --git a/mllam_data_prep/cli.py b/mllam_data_prep/cli.py index 7c356c1..880265a 100644 --- a/mllam_data_prep/cli.py +++ b/mllam_data_prep/cli.py @@ -1,87 +1,87 @@ -import argparse -import os -from pathlib import Path - -from loguru import logger - -from .create_dataset import create_dataset_zarr - -# Attempt to import psutil and dask.distributed modules -DASK_DISTRIBUTED_AVAILABLE = True -try: - import psutil - from dask.diagnostics import ProgressBar - from dask.distributed import LocalCluster -except (ImportError, ModuleNotFoundError): - DASK_DISTRIBUTED_AVAILABLE = False - - -def call(args=None): - parser = argparse.ArgumentParser( - formatter_class=argparse.ArgumentDefaultsHelpFormatter - ) - parser.add_argument("config", help="Path to the config file", type=Path) - parser.add_argument( - "-o", "--output", help="Path to the output zarr file", type=Path, default=None - ) - parser.add_argument( - "--show-progress", help="Show progress bar", action="store_true" - ) - parser.add_argument( - "--dask-distributed-local-core-fraction", - help="Fraction of cores to use on the local machine to do multiprocessing with dask.distributed", - type=float, - default=0.0, - ) - parser.add_argument( - "--dask-distributed-local-memory-fraction", - help="Fraction of memory to use on the local machine (when doing multiprocessing with dask.distributed)", - type=float, - default=0.9, - ) - parser.add_argument( - "--overwrite", - help="Overwrite existing zarr dataset if it exists", - choices=["always", "never", "on_config_change"], - default="always", - ) - args = parser.parse_args(args) - - if args.show_progress: - ProgressBar().register() - - if args.dask_distributed_local_core_fraction > 0.0: - # Only run this block if dask.distributed is available - if not DASK_DISTRIBUTED_AVAILABLE: - raise ModuleNotFoundError( - "Currently 
dask.distributed isn't installed and therefore can't " - "be used in mllam-data-prep. Please install the optional dependency " - 'with `python -m pip install "mllam-data-prep[dask-distributed]"`' - ) - # get the number of system cores - n_system_cores = os.cpu_count() - # compute the number of cores to use - n_local_cores = int(args.dask_distributed_local_core_fraction * n_system_cores) - # get the total system memory - total_memory = psutil.virtual_memory().total - # compute the memory per worker - memory_per_worker = ( - total_memory / n_local_cores * args.dask_distributed_local_memory_fraction - ) - - logger.info( - f"Setting up dask.distributed.LocalCluster with {n_local_cores} cores and {memory_per_worker / 1024 / 1024:0.0f} MB of memory per worker" - ) - - cluster = LocalCluster( - n_workers=n_local_cores, - threads_per_worker=1, - memory_limit=memory_per_worker, - ) - - # print the dashboard link - logger.info(f"Dashboard link: {cluster.dashboard_link}") - - create_dataset_zarr( - fp_config=args.config, fp_zarr=args.output, overwrite=args.overwrite - ) +import argparse +import os +from pathlib import Path + +from loguru import logger + +from .create_dataset import create_dataset_zarr + +# Attempt to import psutil and dask.distributed modules +DASK_DISTRIBUTED_AVAILABLE = True +try: + import psutil + from dask.diagnostics import ProgressBar + from dask.distributed import LocalCluster +except (ImportError, ModuleNotFoundError): + DASK_DISTRIBUTED_AVAILABLE = False + + +def call(args=None): + parser = argparse.ArgumentParser( + formatter_class=argparse.ArgumentDefaultsHelpFormatter + ) + parser.add_argument("config", help="Path to the config file", type=Path) + parser.add_argument( + "-o", "--output", help="Path to the output zarr file", type=Path, default=None + ) + parser.add_argument( + "--show-progress", help="Show progress bar", action="store_true" + ) + parser.add_argument( + "--dask-distributed-local-core-fraction", + help="Fraction of cores to use on the local machine to do multiprocessing with dask.distributed", + type=float, + default=0.0, + ) + parser.add_argument( + "--dask-distributed-local-memory-fraction", + help="Fraction of memory to use on the local machine (when doing multiprocessing with dask.distributed)", + type=float, + default=0.9, + ) + parser.add_argument( + "--overwrite", + help="Overwrite existing zarr dataset if it exists", + choices=["always", "never", "on_config_change"], + default="always", + ) + args = parser.parse_args(args) + + if args.show_progress: + ProgressBar().register() + + if args.dask_distributed_local_core_fraction > 0.0: + # Only run this block if dask.distributed is available + if not DASK_DISTRIBUTED_AVAILABLE: + raise ModuleNotFoundError( + "Currently dask.distributed isn't installed and therefore can't " + "be used in mllam-data-prep. 
Please install the optional dependency " + 'with `python -m pip install "mllam-data-prep[dask-distributed]"`' + ) + # get the number of system cores + n_system_cores = os.cpu_count() + # compute the number of cores to use + n_local_cores = int(args.dask_distributed_local_core_fraction * n_system_cores) + # get the total system memory + total_memory = psutil.virtual_memory().total + # compute the memory per worker + memory_per_worker = ( + total_memory / n_local_cores * args.dask_distributed_local_memory_fraction + ) + + logger.info( + f"Setting up dask.distributed.LocalCluster with {n_local_cores} cores and {memory_per_worker / 1024 / 1024:0.0f} MB of memory per worker" + ) + + cluster = LocalCluster( + n_workers=n_local_cores, + threads_per_worker=1, + memory_limit=memory_per_worker, + ) + + # print the dashboard link + logger.info(f"Dashboard link: {cluster.dashboard_link}") + + create_dataset_zarr( + fp_config=args.config, fp_zarr=args.output, overwrite=args.overwrite + ) diff --git a/mllam_data_prep/config.py b/mllam_data_prep/config.py index ebdba90..a42470f 100644 --- a/mllam_data_prep/config.py +++ b/mllam_data_prep/config.py @@ -1,459 +1,459 @@ -from dataclasses import dataclass, field -from typing import Any, Dict, List, Optional, Union - -import dataclass_wizard -import xarray as xr -from dataclass_wizard import JSONWizard -from deepdiff import DeepDiff -from packaging.version import Version - - -class InvalidConfigException(Exception): - pass - - -def validate_config(config_inputs): - """ - Validate that, in the config: - - either `variables` or `derived_variables` are present in the config - - if both `variables` and `derived_variables` are present, that they don't - add the same variables to the dataset - - Parameters - ---------- - config_inputs: Dict[str, InputDataset] - - Returns - ------- - """ - - for input_dataset_name, input_dataset in config_inputs.items(): - if not input_dataset.variables and not input_dataset.derived_variables: - raise InvalidConfigException( - f"Input dataset '{input_dataset_name}' is missing the keys `variables` and/or" - " `derived_variables`. Make sure that you update the config so that the input" - f" dataset '{input_dataset_name}' contains at least either a `variables` or" - " `derived_variables` section." - ) - elif input_dataset.variables and input_dataset.derived_variables: - # Check so that there are no overlapping variables - if isinstance(input_dataset.variables, list): - variable_vars = input_dataset.variables - elif isinstance(input_dataset.variables, dict): - variable_vars = input_dataset.variables.keys() - else: - raise TypeError( - f"Expected an instance of list or dict, but got {type(input_dataset.variables)}." - ) - derived_variable_vars = input_dataset.derived_variables.keys() - common_vars = list(set(variable_vars) & set(derived_variable_vars)) - if len(common_vars) > 0: - raise InvalidConfigException( - "Both `variables` and `derived_variables` include the following variables name(s):" - f" '{', '.join(common_vars)}'. This is not allowed. Make sure that there" - " are no overlapping variable names between `variables` and `derived_variables`," - f" either by renaming or removing '{', '.join(common_vars)}' from one of them." - ) - - -@dataclass -class Range: - """ - Defines a range for a variable to be used for selection, i.e. - `xarray.Dataset.sel({var_name}: slice({start}, {end}, {step}))`, the variable - name is the key in the dictionary and the slice object is created from the - `start`, `end`, and `step` attributes. 
- - Attributes - ---------- - start: str - The start of the range, e.g. "1990-09-03T00:00", 0, or 0.0. - end: str - The end of the range, e.g. "1990-09-04T00:00", 1, or 1.0. - step: str - The step size for the range, e.g. "PT3H", 1, or 1.0. If not given - then the entire range will be selected. - """ - - start: Union[str, int, float] - end: Union[str, int, float] - step: Optional[Union[str, int, float]] = None - - -@dataclass -class ValueSelection: - """ - Defines a selection on the coordinate values of a variable, the - `values` attribute can either be a list of values to select or a - `Range` object to select a range of values. This is used to create - a slice object for the selection. Optionally, the `units` attribute can be - used to specify the units of the values which will used to ensure that - the `units` attribute of the variable has the same value. - - Attributes: - values: The values to select. - units: The units of the values. - """ - - values: Union[List[Union[float, int]], Range] - units: Optional[str] = None - - -@dataclass -class DerivedVariable: - """ - Defines a derived variables, where the function (for calculating the variable) and - the kwargs (arguments to function) are specified. kwargs can contain both arguments - which should extract/select data from the input dataset, in which case they should - have the "ds_input." prefix to distinguish them from other argument that should not - be extracted from the dataset (e.g. a string to indicate if the sine or cosine - component should be extracted). - - Optionally, attributes to the derived variable can be specified in `attrs`, e.g. - {"attrs": "units": "W*m**-2, "long_name": "top-of-the-atmosphere radiation"}. - In case a function does not return an `xr.DataArray` with the required attributes - (`units` and `long_name`) set, these have to be specified in `attrs`. - - Attributes: - kwargs: Variables required for calculating the derived variable. - function: Function used to calculate the derived variable. - attrs: Attributes (e.g. `units` and `long_name`) to set for the derived variable. - """ - - kwargs: Dict[str, str] - function: str - attrs: Optional[Dict[str, str]] = field(default_factory=dict) - - -@dataclass -class DimMapping: - """ - Defines the process for mapping dimensions and variables from an input - dataset to a single new dimension (as in dimension in the - output dataset of the dataset generation). - - There are three methods implemented for mapping: - - "rename": - Renames a dimension in the dataset to a new name. - - E.g. adding a dim-mapping as `{"time": {"method": "rename", "dim": "analysis_time"}}` - will rename the "analysis_time" dimension in the input dataset to "time" dimension in the output. - - - "stack_variables_by_var_name": - Stacks all variables along a new dimension that is mapped to the output dimensions name given. - - E.g. adding a dim-mapping as - `{"state_feature": {"method": "stack_variables_by_var_name", "name_format": "{var_name}{altitude}m", dims: [altitude]}}` - will stack all variables in the input dataset along the "state_feature" dimension in the output - and the coordinate values will be given as f"{var_name}{altitude}m" where `var_name` is the name - of the variable and `altitude` is the value of the "altitude" coordinate. 
- If any dimensions are specified in the `dims` attribute, then the these dimensions will - also be stacked into this new dimension, and the `name_format` attribute can be used to - use the coordinate values from the stacked dimensions in the new coordinate values. - - - "stack": - Stacks the provided coordinates and maps the result to the output dimension. - - E.g. `{"grid_index": {"method": "stack", "dims": ["x", "y"]}}` will stack the "x" and "y" - dimensions in the input dataset into a new "grid_index" dimension in the output. - - Attributes: - method: The method used for mapping. - dims: The dimensions to be mapped. - name_format: The format for naming the mapped dimensions. - - Attributes - ---------- - method: str - The method used for mapping. The options are: - - "rename": Renames a dimension in the dataset to a new name. - - "stack_variables_by_var_name": Stacks all variables along a new dimension that is mapped to the output dimensions name given. - - "stack": Stacks the provided coordinates and maps the result to the output dimension. - dims: List[str] - The dimensions to be mapped when using the "stack" or "stack_variables_by_var_name" methods. - dim: str - The dimension to be renamed when using the "rename" method. - name_format: str - The format for naming the mapped dimensions when using the "stack_variables_by_var_name" method. - """ - - method: str - dims: Optional[List[str]] = None - dim: Optional[str] = None - name_format: Optional[str] = field(default=None) - coord_ranges: Optional[Dict[str, Range]] = field(default_factory=dict) - - -@dataclass -class InputDataset: - """ - Definition of a single input dataset which will be mapped to one the - variables that have been defined as output variables in the produced dataset - (i.e. the input variables for model architecture being targeted by the dataset). - The definition for a input dataset includes setting - 1) the path to the dataset, - 2) the expected dimensions of the dataset, - 3) the variables to select from the dataset (and optionally subsection - along the coordinates for each variable) or the variables to derive - from the dataset, and finally - 4) the method by which the dimensions and variables of the dataset are - mapped to one of the output variables (this includes stacking of all - the selected variables into a new single variable along a new coordinate, - and may include renaming and stacking dimensions existing dimensions). - - Attributes - ---------- - path: str - Path to the dataset, e.g. the path to a zarr dataset or netCDF file. - This can be anything that can be passed to `xarray.open_dataset` - dims: List[str] - List of the expected dimensions of the dataset. E.g. `["time", "x", "y"]`. - These will be checked to ensure consistency of the dataset being read. - dim_mapping: Dict[str, DimMapping] - Mapping of the variables and dimensions in the input dataset to the dimensions of the - output variable (`target_output_variable`). The key is the name of the output dimension to map to - and the ´DimMapping´ describes how to map the dimensions and variables of the input dataset - to this input dimension for the output variable. - target_output_variable: str - The name of the output variable (i.e. the name of a variable that that is expected by - the architecture to exist in the training dataset). If multiple datasets map to the same variable, - then the data from all datasets will be concatenated along the dimension that isn't shared - (e.g. 
two datasets that coincide in space and time will only differ in the feature dimension, - so the two will be combined by concatenating along the feature dimension). - If a single shared coordinate cannot be found then an exception will be raised. - variables: Union[List[str], Dict[str, Dict[str, ValueSelection]]] - List of the variables to select from the dataset. E.g. `["temperature", "precipitation"]` - or a dictionary where the keys are the variable names and the values are dictionaries - defining the selection for each variable. E.g. `{"temperature": levels: {"values": [1000, 950, 900]}}` - would select the "temperature" variable and only the levels 1000, 950, and 900. - derived_variables: Dict[str, DerivedVariable] - Dictionary of variables to derive from the dataset, where the keys are the names variables will be given and - the values are `DerivedVariable` definitions that specify how to derive a variable. - """ - - path: str - dims: List[str] - dim_mapping: Dict[str, DimMapping] - target_output_variable: str - variables: Optional[Union[List[str], Dict[str, Dict[str, ValueSelection]]]] = None - derived_variables: Optional[Dict[str, DerivedVariable]] = None - attributes: Optional[Dict[str, Any]] = field(default_factory=dict) - coord_ranges: Optional[Dict[str, Range]] = None - - -@dataclass -class Statistics: - """ - Define the statistics to compute for the output dataset, this includes defining - the the statistics to compute and the dimensions to compute the statistics over. - The statistics will be computed for each variable in the output dataset seperately. - - Attributes - ---------- - ops: List[str] - The statistics to compute, e.g. ["mean", "std", "min", "max"]. - dims: List[str] - The dimensions to compute the statistics over, e.g. ["time", "grid_index"]. - """ - - ops: List[str] - dims: List[str] - - -@dataclass -class Split: - """ - Define the `start` and `end` coordinate value (e.g. time) for a split of the dataset and optionally - the statistics to compute for the split. - - Attributes - ---------- - start: str - The start of the split, e.g. "1990-09-03T00:00". - end: str - The end of the split, e.g. "1990-09-04T00:00". - compute_statistics: StatisticsInput - The statistics to compute for the split. - """ - - start: str - end: str - compute_statistics: Optional[Statistics] = None - - -@dataclass -class Splitting: - """ - dim: str - The dimension to split the dataset along, e.g. "time", this must be provided if splits are defined. - - splits: Dict[str, Split] - Defines the splits of the dataset, the keys are the names of the splits and the values - are the `Split` objects defining the start and end of the split. Optionally, the - `compute_statistics` attribute can be used to define the statistics to compute for the split. - """ - - dim: str - splits: Dict[str, Split] - - -@dataclass -class Output: - """ - Definition of the output dataset that will be created by the dataset generation, you should - adapt this to the architecture of the model that you are going to using the dataset with. This - includes defining what input variables the architecture expects (and the dimensions of each), - the expected value range for each coordinate, and the chunking information for each dimension. - - Attributes - ---------- - variables: Dict[str, List[str]] - Defines the variables of the produced output, i.e. the input variables for the model - architecture. The keys are the variable names to create and the values are lists of - the dimensions. E.g. 
`{"static": ["grid_index", "feature"], "state": ["time", - "grid_index", "state_feature"]}` would define that the architecture expects a variable - named "static" with dimensions "grid_index" and "feature" and a variable named "state" with - dimensions "time", "grid_index", and "state_feature". - - coord_ranges: Dict[str, Range] - Defines the expected value range for each coordinate. The keys are the - name of the coordinate and the values are the range, e.g. - `{"time": {"start": "1990-09-03T00:00", "end": "1990-09-04T00:00", "step": "PT3H"}}` - would define that the "time" coordinate should have values between - "1990-09-03T00:00" and "1990-09-04T00:00" with a step size of 3 hours. - These range definitions are both used to ensure that the input dataset - has the expected range and to select the correct values from the input - dataset. If not given then the entire range will be selected. - - chunking: Dict[str, int] - Defines the chunking information for each dimension. The keys are the - names of the dimensions and the values are the chunk size for that dimension. - If chunking is not specified for a dimension, then the entire dimension - will be a single chunk. - - splitting: Splitting - Defines the splits of the dataset (e.g. train, test, validation), the dimension to split - the dataset along, and optionally the statistics to compute for each split. - """ - - variables: Dict[str, List[str]] - coord_ranges: Dict[str, Range] = field(default_factory=dict) - chunking: Dict[str, int] = field(default_factory=dict) - splitting: Optional[Splitting] = None - - -@dataclass -class Config(dataclass_wizard.JSONWizard, dataclass_wizard.YAMLWizard): - """Configuration for the model. - - Attributes: - schema_version: Version of the config file schema. - dataset_version: Version of the dataset itself. - architecture: Information about the model architecture this dataset is intended for. - inputs: Input datasets for the model. - - Attributes - ---------- - output: Output - Information about the structure of the output from mllam-data-prep, you should set this - to matchthe model architecture this dataset is intended for. This - covers defining what input variables the architecture expects (and the dimensions of each), - the expected value range for each coordinate, and the chunking information for each dimension. - inputs: Dict[str, InputDataset] - Input datasets for the model. The keys are the names of the datasets and the values are - the input dataset configurations. - extra: Dict[str, Any] - Extra information to include in the config file. This will be ignored by the - `mllam_data_prep` library, but can be used to include additional information - that is useful for the user. - schema_version: str - Version string for the config file schema. - dataset_version: str - Version string for the dataset itself. - """ - - output: Output - inputs: Dict[str, InputDataset] - schema_version: str - dataset_version: str - extra: Dict[str, Any] = field(default_factory=dict) - - def __post_init__(self): - validate_config(self.inputs) - - class _(JSONWizard.Meta): - raise_on_unknown_json_key = True - - -class UnsupportedMllamDataPrepVersion(Exception): - pass - - -def find_config_differences( - config: Config, ds_existing: xr.Dataset -) -> Union[None, dict]: - """ - Compare the provided config against the one the provided dataset is created - from (which is stored in the `creation_config` attribute), and return the - differences. 
- - Parameters - ---------- - config : Config - The configuration object to compare against - ds_existing : xr.Dataset - The existing dataset to compare against - - Returns - ------- - Union[None, dict] - If the configurations are the same, returns None. If they are different, returns - a dictionary of the differences. - - Raises - ------ - UnsupportedMllamDataPrepVersion - If the existing dataset was created with an older version of mllam-data-prep - that does not have the `creation_config` attribute - - """ - required_mdp_version = Version("v0.6.0") - - config_mdp_version = Version(ds_existing.attrs["mdp_version"]) - if config_mdp_version < required_mdp_version: - raise UnsupportedMllamDataPrepVersion( - "The existing dataset was created with an older version of mllam-data-prep " - f"({config_mdp_version}), and does not have the creation_config attribute " - f"(added in v{required_mdp_version}). Please delete the existing dataset " - "or set overwrite='always' to overwrite it." - ) - else: - existing_config_yaml = ds_existing.attrs.get("creation_config", None) - if existing_config_yaml is None: - raise ValueError( - "The provided dataset does not have a creation_config attribute" - ) - existing_config = Config.from_yaml(existing_config_yaml) - if existing_config != config: - differences = DeepDiff( - existing_config.to_dict(), config.to_dict(), ignore_order=True - ).to_dict() - return differences - - return None - - -if __name__ == "__main__": - import argparse - - argparser = argparse.ArgumentParser() - argparser.add_argument( - "-f", help="Path to the yaml file to load.", default="example.danra.yaml" - ) - args = argparser.parse_args() - - assert args.f.endswith(".yaml"), "Config file must have a .yaml extension." - config = Config.from_yaml_file(args.f) - import rich - - rich.print(config) +from dataclasses import dataclass, field +from typing import Any, Dict, List, Optional, Union + +import dataclass_wizard +import xarray as xr +from dataclass_wizard import JSONWizard +from deepdiff import DeepDiff +from packaging.version import Version + + +class InvalidConfigException(Exception): + pass + + +def validate_config(config_inputs): + """ + Validate that, in the config: + - either `variables` or `derived_variables` are present in the config + - if both `variables` and `derived_variables` are present, that they don't + add the same variables to the dataset + + Parameters + ---------- + config_inputs: Dict[str, InputDataset] + + Returns + ------- + """ + + for input_dataset_name, input_dataset in config_inputs.items(): + if not input_dataset.variables and not input_dataset.derived_variables: + raise InvalidConfigException( + f"Input dataset '{input_dataset_name}' is missing the keys `variables` and/or" + " `derived_variables`. Make sure that you update the config so that the input" + f" dataset '{input_dataset_name}' contains at least either a `variables` or" + " `derived_variables` section." + ) + elif input_dataset.variables and input_dataset.derived_variables: + # Check so that there are no overlapping variables + if isinstance(input_dataset.variables, list): + variable_vars = input_dataset.variables + elif isinstance(input_dataset.variables, dict): + variable_vars = input_dataset.variables.keys() + else: + raise TypeError( + f"Expected an instance of list or dict, but got {type(input_dataset.variables)}." 
+ ) + derived_variable_vars = input_dataset.derived_variables.keys() + common_vars = list(set(variable_vars) & set(derived_variable_vars)) + if len(common_vars) > 0: + raise InvalidConfigException( + "Both `variables` and `derived_variables` include the following variable name(s):" + f" '{', '.join(common_vars)}'. This is not allowed. Make sure that there" + " are no overlapping variable names between `variables` and `derived_variables`," + f" either by renaming or removing '{', '.join(common_vars)}' from one of them." + ) + + +@dataclass +class Range: + """ + Defines a range for a variable to be used for selection, i.e. + `xarray.Dataset.sel({var_name}: slice({start}, {end}, {step}))`, the variable + name is the key in the dictionary and the slice object is created from the + `start`, `end`, and `step` attributes. + + Attributes + ---------- + start: str + The start of the range, e.g. "1990-09-03T00:00", 0, or 0.0. + end: str + The end of the range, e.g. "1990-09-04T00:00", 1, or 1.0. + step: str + The step size for the range, e.g. "PT3H", 1, or 1.0. If not given + then the entire range will be selected. + """ + + start: Union[str, int, float] + end: Union[str, int, float] + step: Optional[Union[str, int, float]] = None + + +@dataclass +class ValueSelection: + """ + Defines a selection on the coordinate values of a variable, the + `values` attribute can either be a list of values to select or a + `Range` object to select a range of values. This is used to create + a slice object for the selection. Optionally, the `units` attribute can be + used to specify the units of the values, which will be used to ensure that + the `units` attribute of the variable has the same value. + + Attributes: + values: The values to select. + units: The units of the values. + """ + + values: Union[List[Union[float, int]], Range] + units: Optional[str] = None + + +@dataclass +class DerivedVariable: + """ + Defines a derived variable, where the function (for calculating the variable) and + the kwargs (arguments to the function) are specified. kwargs can contain both arguments + which should extract/select data from the input dataset, in which case they should + have the "ds_input." prefix to distinguish them from other arguments that should not + be extracted from the dataset (e.g. a string to indicate if the sine or cosine + component should be extracted). + + Optionally, attributes of the derived variable can be specified in `attrs`, e.g. + {"attrs": {"units": "W*m**-2", "long_name": "top-of-the-atmosphere radiation"}}. + In case a function does not return an `xr.DataArray` with the required attributes + (`units` and `long_name`) set, these have to be specified in `attrs`. + + Attributes: + kwargs: Variables required for calculating the derived variable. + function: Function used to calculate the derived variable. + attrs: Attributes (e.g. `units` and `long_name`) to set for the derived variable. + """ + + kwargs: Dict[str, str] + function: str + attrs: Optional[Dict[str, str]] = field(default_factory=dict) + + +@dataclass +class DimMapping: + """ + Defines the process for mapping dimensions and variables from an input + dataset to a single new dimension (i.e. a dimension in the + output dataset of the dataset generation). + + There are three methods implemented for mapping: + - "rename": + Renames a dimension in the dataset to a new name. + + E.g.
+
+
+@dataclass
+class DimMapping:
+    """
+    Defines the process for mapping dimensions and variables from an input
+    dataset to a single new dimension (i.e. a dimension in the
+    output dataset of the dataset generation).
+
+    There are three methods implemented for mapping:
+    - "rename":
+        Renames a dimension in the dataset to a new name.
+
+        E.g. adding a dim-mapping as `{"time": {"method": "rename", "dim": "analysis_time"}}`
+        will rename the "analysis_time" dimension in the input dataset to the "time" dimension in the output.
+
+    - "stack_variables_by_var_name":
+        Stacks all variables along a new dimension that is mapped to the given output dimension name.
+
+        E.g. adding a dim-mapping as
+        `{"state_feature": {"method": "stack_variables_by_var_name", "name_format": "{var_name}{altitude}m", "dims": ["altitude"]}}`
+        will stack all variables in the input dataset along the "state_feature" dimension in the output,
+        and the coordinate values will be given as f"{var_name}{altitude}m" where `var_name` is the name
+        of the variable and `altitude` is the value of the "altitude" coordinate.
+        If any dimensions are specified in the `dims` attribute, then these dimensions will
+        also be stacked into this new dimension, and the `name_format` attribute can be used to
+        include the coordinate values from the stacked dimensions in the new coordinate values.
+
+    - "stack":
+        Stacks the provided coordinates and maps the result to the output dimension.
+
+        E.g. `{"grid_index": {"method": "stack", "dims": ["x", "y"]}}` will stack the "x" and "y"
+        dimensions in the input dataset into a new "grid_index" dimension in the output.
+
+    Attributes
+    ----------
+    method: str
+        The method used for mapping. The options are:
+        - "rename": Renames a dimension in the dataset to a new name.
+        - "stack_variables_by_var_name": Stacks all variables along a new dimension that is mapped to the given output dimension name.
+        - "stack": Stacks the provided coordinates and maps the result to the output dimension.
+    dims: List[str]
+        The dimensions to be mapped when using the "stack" or "stack_variables_by_var_name" methods.
+    dim: str
+        The dimension to be renamed when using the "rename" method.
+    name_format: str
+        The format for naming the mapped dimensions when using the "stack_variables_by_var_name" method.
+    """
+
+    method: str
+    dims: Optional[List[str]] = None
+    dim: Optional[str] = None
+    name_format: Optional[str] = field(default=None)
+    coord_ranges: Optional[Dict[str, Range]] = field(default_factory=dict)
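+
+
+# For illustration only, a `dim_mapping` section combining all three methods
+# might look like the following (the names and values are just examples):
+#
+#   dim_mapping:
+#     time:
+#       method: rename
+#       dim: analysis_time
+#     grid_index:
+#       method: stack
+#       dims: [x, y]
+#     state_feature:
+#       method: stack_variables_by_var_name
+#       dims: [altitude]
+#       name_format: "{var_name}{altitude}m"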
+
+
+@dataclass
+class InputDataset:
+    """
+    Definition of a single input dataset which will be mapped to one of the
+    variables that have been defined as output variables in the produced dataset
+    (i.e. the input variables for the model architecture being targeted by the dataset).
+    The definition of an input dataset includes setting
+        1) the path to the dataset,
+        2) the expected dimensions of the dataset,
+        3) the variables to select from the dataset (and optionally a subselection
+           along the coordinates for each variable) or the variables to derive
+           from the dataset, and finally
+        4) the method by which the dimensions and variables of the dataset are
+           mapped to one of the output variables (this includes stacking of all
+           the selected variables into a new single variable along a new coordinate,
+           and may include renaming and stacking existing dimensions).
+
+    Attributes
+    ----------
+    path: str
+        Path to the dataset, e.g. the path to a zarr dataset or netCDF file.
+        This can be anything that can be passed to `xarray.open_dataset`.
+    dims: List[str]
+        List of the expected dimensions of the dataset. E.g. `["time", "x", "y"]`.
+        These will be checked to ensure consistency of the dataset being read.
+    dim_mapping: Dict[str, DimMapping]
+        Mapping of the variables and dimensions in the input dataset to the dimensions of the
+        output variable (`target_output_variable`). The key is the name of the output dimension to map to
+        and the `DimMapping` describes how to map the dimensions and variables of the input dataset
+        to this input dimension for the output variable.
+    target_output_variable: str
+        The name of the output variable (i.e. the name of a variable that is expected by
+        the architecture to exist in the training dataset). If multiple datasets map to the same variable,
+        then the data from all datasets will be concatenated along the dimension that isn't shared
+        (e.g. two datasets that coincide in space and time will only differ in the feature dimension,
+        so the two will be combined by concatenating along the feature dimension).
+        If a single shared coordinate cannot be found then an exception will be raised.
+    variables: Union[List[str], Dict[str, Dict[str, ValueSelection]]]
+        List of the variables to select from the dataset. E.g. `["temperature", "precipitation"]`
+        or a dictionary where the keys are the variable names and the values are dictionaries
+        defining the selection for each variable. E.g. `{"temperature": {"levels": {"values": [1000, 950, 900]}}}`
+        would select the "temperature" variable and only the levels 1000, 950, and 900.
+    derived_variables: Dict[str, DerivedVariable]
+        Dictionary of variables to derive from the dataset, where the keys are the names the variables will be given and
+        the values are `DerivedVariable` definitions that specify how to derive a variable.
+    """
+
+    path: str
+    dims: List[str]
+    dim_mapping: Dict[str, DimMapping]
+    target_output_variable: str
+    variables: Optional[Union[List[str], Dict[str, Dict[str, ValueSelection]]]] = None
+    derived_variables: Optional[Dict[str, DerivedVariable]] = None
+    attributes: Optional[Dict[str, Any]] = field(default_factory=dict)
+    coord_ranges: Optional[Dict[str, Range]] = None
+
+
+@dataclass
+class Statistics:
+    """
+    Define the statistics to compute for the output dataset. This includes defining
+    the statistics to compute and the dimensions to compute the statistics over.
+    The statistics will be computed for each variable in the output dataset separately.
+
+    Attributes
+    ----------
+    ops: List[str]
+        The statistics to compute, e.g. ["mean", "std", "min", "max"].
+    dims: List[str]
+        The dimensions to compute the statistics over, e.g. ["time", "grid_index"].
+    """
+
+    ops: List[str]
+    dims: List[str]
+
+
+@dataclass
+class Split:
+    """
+    Define the `start` and `end` coordinate value (e.g. time) for a split of the dataset and optionally
+    the statistics to compute for the split.
+
+    Attributes
+    ----------
+    start: str
+        The start of the split, e.g. "1990-09-03T00:00".
+    end: str
+        The end of the split, e.g. "1990-09-04T00:00".
+    compute_statistics: Statistics, optional
+        The statistics to compute for the split.
+    """
+
+    start: str
+    end: str
+    compute_statistics: Optional[Statistics] = None
+
+
+@dataclass
+class Splitting:
+    """
+    Defines how to split the output dataset (e.g. into train, test and
+    validation splits).
+
+    Attributes
+    ----------
+    dim: str
+        The dimension to split the dataset along, e.g. "time". This must be provided if splits are defined.
+    splits: Dict[str, Split]
+        Defines the splits of the dataset. The keys are the names of the splits and the values
+        are the `Split` objects defining the start and end of the split. Optionally, the
+        `compute_statistics` attribute can be used to define the statistics to compute for the split.
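+
+    Example (illustrative values only, using the `Split` and `Statistics`
+    attributes documented above):
+
+        dim: time
+        splits:
+            train:
+                start: "1990-09-03T00:00"
+                end: "1990-09-06T00:00"
+                compute_statistics:
+                    ops: [mean, std]
+                    dims: [time, grid_index]
+            test:
+                start: "1990-09-06T00:00"
+                end: "1990-09-07T00:00"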
+    """
+
+    dim: str
+    splits: Dict[str, Split]
+
+
+@dataclass
+class Output:
+    """
+    Definition of the output dataset that will be created by the dataset generation. You should
+    adapt this to the architecture of the model that you are going to use the dataset with. This
+    includes defining what input variables the architecture expects (and the dimensions of each),
+    the expected value range for each coordinate, and the chunking information for each dimension.
+
+    Attributes
+    ----------
+    variables: Dict[str, List[str]]
+        Defines the variables of the produced output, i.e. the input variables for the model
+        architecture. The keys are the variable names to create and the values are lists of
+        the dimensions. E.g. `{"static": ["grid_index", "feature"], "state": ["time",
+        "grid_index", "state_feature"]}` would define that the architecture expects a variable
+        named "static" with dimensions "grid_index" and "feature" and a variable named "state" with
+        dimensions "time", "grid_index", and "state_feature".
+
+    coord_ranges: Dict[str, Range]
+        Defines the expected value range for each coordinate. The keys are the
+        name of the coordinate and the values are the range, e.g.
+        `{"time": {"start": "1990-09-03T00:00", "end": "1990-09-04T00:00", "step": "PT3H"}}`
+        would define that the "time" coordinate should have values between
+        "1990-09-03T00:00" and "1990-09-04T00:00" with a step size of 3 hours.
+        These range definitions are both used to ensure that the input dataset
+        has the expected range and to select the correct values from the input
+        dataset. If not given then the entire range will be selected.
+
+    chunking: Dict[str, int]
+        Defines the chunking information for each dimension. The keys are the
+        names of the dimensions and the values are the chunk size for that dimension.
+        If chunking is not specified for a dimension, then the entire dimension
+        will be a single chunk.
+
+    splitting: Splitting
+        Defines the splits of the dataset (e.g. train, test, validation), the dimension to split
+        the dataset along, and optionally the statistics to compute for each split.
+    """
+
+    variables: Dict[str, List[str]]
+    coord_ranges: Dict[str, Range] = field(default_factory=dict)
+    chunking: Dict[str, int] = field(default_factory=dict)
+    splitting: Optional[Splitting] = None
+
+
+@dataclass
+class Config(dataclass_wizard.JSONWizard, dataclass_wizard.YAMLWizard):
+    """Configuration for the model.
+
+    Attributes
+    ----------
+    output: Output
+        Information about the structure of the output from mllam-data-prep. You should set this
+        to match the model architecture this dataset is intended for. This
+        covers defining what input variables the architecture expects (and the dimensions of each),
+        the expected value range for each coordinate, and the chunking information for each dimension.
+    inputs: Dict[str, InputDataset]
+        Input datasets for the model. The keys are the names of the datasets and the values are
+        the input dataset configurations.
+    extra: Dict[str, Any]
+        Extra information to include in the config file. This will be ignored by the
+        `mllam_data_prep` library, but can be used to include additional information
+        that is useful for the user.
+    schema_version: str
+        Version string for the config file schema.
+ dataset_version: str + Version string for the dataset itself. + """ + + output: Output + inputs: Dict[str, InputDataset] + schema_version: str + dataset_version: str + extra: Dict[str, Any] = field(default_factory=dict) + + def __post_init__(self): + validate_config(self.inputs) + + class _(JSONWizard.Meta): + raise_on_unknown_json_key = True + + +class UnsupportedMllamDataPrepVersion(Exception): + pass + + +def find_config_differences( + config: Config, ds_existing: xr.Dataset +) -> Union[None, dict]: + """ + Compare the provided config against the one the provided dataset is created + from (which is stored in the `creation_config` attribute), and return the + differences. + + Parameters + ---------- + config : Config + The configuration object to compare against + ds_existing : xr.Dataset + The existing dataset to compare against + + Returns + ------- + Union[None, dict] + If the configurations are the same, returns None. If they are different, returns + a dictionary of the differences. + + Raises + ------ + UnsupportedMllamDataPrepVersion + If the existing dataset was created with an older version of mllam-data-prep + that does not have the `creation_config` attribute + + """ + required_mdp_version = Version("v0.6.0") + + config_mdp_version = Version(ds_existing.attrs["mdp_version"]) + if config_mdp_version < required_mdp_version: + raise UnsupportedMllamDataPrepVersion( + "The existing dataset was created with an older version of mllam-data-prep " + f"({config_mdp_version}), and does not have the creation_config attribute " + f"(added in v{required_mdp_version}). Please delete the existing dataset " + "or set overwrite='always' to overwrite it." + ) + else: + existing_config_yaml = ds_existing.attrs.get("creation_config", None) + if existing_config_yaml is None: + raise ValueError( + "The provided dataset does not have a creation_config attribute" + ) + existing_config = Config.from_yaml(existing_config_yaml) + if existing_config != config: + differences = DeepDiff( + existing_config.to_dict(), config.to_dict(), ignore_order=True + ).to_dict() + return differences + + return None + + +if __name__ == "__main__": + import argparse + + argparser = argparse.ArgumentParser() + argparser.add_argument( + "-f", help="Path to the yaml file to load.", default="example.danra.yaml" + ) + args = argparser.parse_args() + + assert args.f.endswith(".yaml"), "Config file must have a .yaml extension." + config = Config.from_yaml_file(args.f) + import rich + + rich.print(config) diff --git a/mllam_data_prep/create_dataset.py b/mllam_data_prep/create_dataset.py index 3daf321..35c9ee8 100644 --- a/mllam_data_prep/create_dataset.py +++ b/mllam_data_prep/create_dataset.py @@ -1,411 +1,416 @@ -import datetime -import shutil -from collections import defaultdict -from pathlib import Path -from typing import Optional, Union - -import numpy as np -import xarray as xr -import yaml -import zarr -from loguru import logger -from packaging.version import Version - -from mllam_data_prep.ops import selection - -from . 
import __version__ -from .config import ( - Config, - InvalidConfigException, - UnsupportedMllamDataPrepVersion, - find_config_differences, -) -from .ops.chunking import chunk_dataset -from .ops.derive_variable import derive_variable -from .ops.loading import load_input_dataset -from .ops.mapping import map_dims_and_variables -from .ops.selection import select_by_kwargs -from .ops.statistics import calc_stats -from .ops.subsetting import extract_variable - -if Version(zarr.__version__) >= Version("3"): - from zarr.codecs import BloscCodec, BloscShuffle -else: - from numcodecs import Blosc - -# The config versions defined in SUPPORTED_CONFIG_VERSIONS are the ones currently supported. -# The `extra` field in the config that was added between v0.2.0 and v0.5.0 is optional, and -# the `derived_variables` field in the config added in v0.6.0 is also optional, so we can -# support v0.2.0, v0.5.0, and v0.6.0 -SUPPORTED_CONFIG_VERSIONS = ["v0.2.0", "v0.5.0", "v0.6.0"] - - -def _check_dataset_attributes(ds, expected_attributes, dataset_name): - # check that the dataset has the expected attributes with the expected values - missing_attributes = set(expected_attributes.keys()) - set(ds.attrs.keys()) - if len(missing_attributes) > 0: - raise ValueError( - f"Dataset {dataset_name} is missing the following attributes: {missing_attributes}" - ) - - # check for attributes having the wrong value - incorrect_attributes = { - key: val for key, val in expected_attributes.items() if ds.attrs[key] != val - } - if len(incorrect_attributes) > 0: - s_list = "\n".join( - [ - f"{key}: {val} != {ds.attrs[key]}" - for key, val in incorrect_attributes.items() - ] - ) - raise ValueError( - f"Dataset {dataset_name} has the following incorrect attributes: {s_list}" - ) - - -def _merge_dataarrays_by_target(dataarrays_by_target): - attrs_to_keep = ["source_dataset"] - dataarrays = [] - for target, das in dataarrays_by_target.items(): - logger.info(f"Merging dataarrays for target variable `{target}`") - concat_dim = None - for da in das: - d = da.attrs.get("variables_mapping_dim", None) - if d is None: - raise ValueError( - f"Dataarray for target {target} does not have the 'variables_mapping_dim' attribute" - ) - if concat_dim is not None and d != concat_dim: - raise ValueError( - f"Dataarrays for target {target} have different 'variables_mapping_dim' attributes: {d} != {concat_dim}" - ) - concat_dim = d - - for da in das: - for attr in attrs_to_keep: - # create a aux coord for each attribute we want to keep - # (for example the name of the source dataset) - # so that we have this in the resulting dataset - da.coords[f"{concat_dim}_{attr}"] = xr.DataArray( - [da.attrs.pop(attr)] * int(da[concat_dim].count()), - dims=[concat_dim], - ) - - da_target = xr.concat(das, dim=concat_dim) - da_target.name = target - dataarrays.append(da_target) - - # by doing a merge with join="exact" we make sure that the dataarrays - # are aligned along the same dimensions, and that the coordinates are - # the same for all dataarrays. Otherwise xarray will fill in with NaNs - # for any missing coordinate values - try: - ds = xr.merge(dataarrays, join="exact") - except ValueError as ex: - if ex.args[0].startswith("cannot align objects with join='exact'"): - raise InvalidConfigException( - f"Couldn't merge together the dataarrays for all targets ({', '.join(dataarrays_by_target.keys())})" - f" This is likely because the dataarrays have different dimensions or coordinates." 
- " Maybe you need to give the 'feature' dimension a unique name for each target variable?" - ) from ex - else: - raise ex - return ds - - -def create_dataset(config: Config): - """ - Create a dataset from the input datasets specified in the config file. - - Parameters - ---------- - config : Config - The configuration object defining the input datasets and how to map them to the output dataset. - - Returns - ------- - xr.Dataset - The dataset created from the input datasets with a variable for each output - as defined in the config file. - """ - if not config.schema_version in SUPPORTED_CONFIG_VERSIONS: - raise ValueError( - f"Unsupported schema version {config.schema_version}. Only schema versions " - f" {', '.join(SUPPORTED_CONFIG_VERSIONS)} are supported by mllam-data-prep " - f"v{__version__}." - ) - if config.schema_version == "v0.2.0" and config.extra: - raise ValueError( - "Config schema version v0.2.0 does not support the `extra` field. Please " - "update the schema version used in your config to v0.5.0." - ) - - output_config = config.output - output_coord_ranges = output_config.coord_ranges - chunking_config = config.output.chunking - - dataarrays_by_target = defaultdict(list) - - for dataset_name, input_config in config.inputs.items(): - path = input_config.path - selected_variables = input_config.variables - derived_variables = input_config.derived_variables - target_output_var = input_config.target_output_variable - expected_input_attributes = input_config.attributes - expected_input_var_dims = input_config.dims - - output_dims = output_config.variables[target_output_var] - - logger.info(f"Loading dataset {dataset_name} from {path}") - try: - ds_input = load_input_dataset(fp=path) - except Exception as ex: - raise Exception(f"Error loading dataset {dataset_name} from {path}") from ex - - if input_config.coord_ranges is not None: - ds_input = selection.select_by_kwargs(ds_input, **input_config.coord_ranges) - - # Initialize the output dataset - ds = xr.Dataset() - ds.attrs.update(ds_input.attrs) - - if selected_variables: - logger.info(f"Extracting selected variables from dataset {dataset_name}") - if isinstance(selected_variables, dict): - for var_name, coords_to_sample in selected_variables.items(): - ds[var_name] = extract_variable( - ds=ds_input, - var_name=var_name, - coords_to_sample=coords_to_sample, - ) - elif isinstance(selected_variables, list): - for var_name in selected_variables: - ds[var_name] = extract_variable(ds=ds_input, var_name=var_name) - else: - raise ValueError( - "The `variables` argument should be a list or a dictionary" - ) - - if derived_variables: - logger.info(f"Deriving variables from {dataset_name}") - for var_name, derived_variable in derived_variables.items(): - ds[var_name] = derive_variable( - ds=ds_input, - derived_variable=derived_variable, - chunking=chunking_config, - target_dims=expected_input_var_dims, - ) - - _check_dataset_attributes( - ds=ds, - expected_attributes=expected_input_attributes, - dataset_name=dataset_name, - ) - - dim_mapping = input_config.dim_mapping - - # check that there is an entry for each arch dimension - # in the dim_mapping so that we know how to construct the - # final dataset - missing_dims = set(output_dims) - set(dim_mapping.keys()) - if missing_dims: - raise ValueError( - f"Missing dimension mapping for {missing_dims}" - f" for input dataset {dataset_name}, please provide" - " a mapping for all output dimensions by" - " using the 'dim_mapping' key in the input dataset" - ) - - logger.info( - f"Mapping 
dimensions and variables for dataset {dataset_name} to {target_output_var}" - ) - try: - da_target = map_dims_and_variables( - ds=ds, - dim_mapping=dim_mapping, - expected_input_var_dims=expected_input_var_dims, - ) - except Exception as ex: - raise Exception( - f"There was an issue stacking dimensions and variables to" - f" produce variable {target_output_var} from dataset {dataset_name}" - ) from ex - - da_target.attrs["source_dataset"] = dataset_name - - # only need to do selection for the coordinates that the input dataset actually has - if output_coord_ranges is not None: - output_coord_ranges = { - k: w for k, w in output_coord_ranges.items() if k in output_dims - } - da_target = select_by_kwargs(da_target, **output_coord_ranges) - - dataarrays_by_target[target_output_var].append(da_target) - - ds = _merge_dataarrays_by_target(dataarrays_by_target=dataarrays_by_target) - - # need to drop the encoding so that we can write to zarr with new chunksizes - ds = ds.drop_encoding() - - # default to making a single chunk for each dimension if chunksize is not specified - # in the config - logger.info(f"Chunking dataset with {chunking_config}") - chunks = {dim: chunking_config.get(dim, int(ds[dim].count())) for dim in ds.dims} - ds = chunk_dataset(ds, chunks) - - splitting = config.output.splitting - - if splitting is not None: - splits = splitting.splits - logger.info( - f"Setting splitting information to define `{list(splits.keys())}` splits " - f"along dimension `{splitting.dim}`" - ) - - for split_name, split_config in splits.items(): - if split_config.compute_statistics is not None: - ds_split = ds.sel( - {splitting.dim: slice(split_config.start, split_config.end)} - ) - logger.info(f"Computing statistics for split {split_name}") - split_stats = calc_stats( - ds=ds_split, - statistics_config=split_config.compute_statistics, - splitting_dim=splitting.dim, - ) - for op, op_dataarrays in split_stats.items(): - for var_name, da in op_dataarrays.items(): - ds[f"{var_name}__{split_name}__{op}"] = da - - # add a new variable which contains the start, stop for each split, the coords would then be the split names - # and the data would be the start, stop values - split_vals = np.array([[split.start, split.end] for split in splits.values()]) - da_splits = xr.DataArray( - split_vals, - dims=["split_name", "split_part"], - coords={"split_name": list(splits.keys()), "split_part": ["start", "end"]}, - ) - ds["splits"] = da_splits - - ds.attrs = {} - ds.attrs["schema_version"] = config.schema_version - ds.attrs["dataset_version"] = config.dataset_version - ds.attrs["created_on"] = datetime.datetime.now().replace(microsecond=0).isoformat() - ds.attrs[ - "created_with" - ] = "mllam-data-prep (https://github.com/mllam/mllam-data-prep)" - ds.attrs["mdp_version"] = f"v{__version__}" - ds.attrs["creation_config"] = config.to_yaml() - - return ds - - -def create_dataset_zarr( - fp_config: Path, - fp_zarr: Optional[Union[str, Path]] = None, - overwrite: str = "always", -): - """ - Create a dataset from the input datasets specified in the config file and - write it to a zarr dataset. The path to the zarr dataset is the same as the - config file (unless `fp_zarr` is provided), but with the extension changed - to '.zarr'. - - Parameters - ---------- - fp_config : Path - The path to the configuration file. - fp_zarr : Path, optional - The path to the zarr file to write the dataset to. If not provided, the zarr file will be written - to the same directory as the config file with the extension changed to '.zarr'. 
- overwrite : str, optional - How to handle an existing dataset at the provided path. Options are: - - "always": Always delete the existing dataset (default) - - "never": Never delete the existing dataset - - "on_config_change": Only delete the existing dataset if the configuration has changed - """ - config = Config.from_yaml_file(file=fp_config) - - if fp_zarr is None: - fp_zarr = fp_config.parent / fp_config.name.replace(".yaml", ".zarr") - else: - fp_zarr = Path(fp_zarr) - - if fp_zarr.exists(): - if overwrite == "never": - ds_existing = xr.open_zarr(fp_zarr) - try: - config_differences = find_config_differences( - config=config, ds_existing=ds_existing - ) - except UnsupportedMllamDataPrepVersion: - config_differences = None - - ex_str = ( - f"There already exists a dataset at {fp_zarr}, and the overwrite option is set to 'never'. " - "Either delete the existing dataset or set overwrite='always' to overwrite it. " - ) - # try and parse the differences in the config in case the existing - # dataset was created with a supported version - if config_differences: - ex_str += ( - "The existing dataset was created with a different configuration than the current one. " - "Differences between existing and new configuration: \n" - f"{yaml.dump(config_differences, default_flow_style=False)}" - ) - raise FileExistsError(ex_str) - elif overwrite == "on_config_change": - try: - ds_existing = xr.open_zarr(fp_zarr) - config_differences = find_config_differences( - config=config, ds_existing=ds_existing - ) - except UnsupportedMllamDataPrepVersion as ex: - raise FileExistsError( - f"There already exists a dataset at {fp_zarr}, however it was created with an older version of mllam-data-prep " - "and so doesn't contain a record of the configuration used to create it. Either delete the existing dataset or " - "set overwrite='always' to overwrite it." - ) from ex - - if config_differences: - logger.info( - "The existing dataset was created with a different configuration than the current one." - ) - diff_yaml = yaml.dump(config_differences, default_flow_style=False) - logger.info( - f"Differences between existing and new configuration:\n{diff_yaml}" - ) - logger.info(f"Removing existing dataset at {fp_zarr}") - shutil.rmtree(fp_zarr) - else: - logger.info( - f"Skipping creation of writing of dataset to {fp_zarr} as the configuration is unchanged" - ) - return - elif overwrite == "always": - logger.info(f"Removing existing dataset at {fp_zarr}") - shutil.rmtree(fp_zarr) - else: - raise NotImplementedError( - f"Unsupported overwrite option {overwrite}. 
Options are 'always', 'never', or 'on_config_change'" - ) - - ds = create_dataset(config=config) - - logger.info("Writing dataset to zarr") - - # use zstd compression since it has a good balance of speed and compression ratio - # https://engineering.fb.com/2016/08/31/core-infra/smaller-and-faster-data-compression-with-zstandard/ - if Version(zarr.__version__) >= Version("3"): - compressor = BloscCodec(cname="zstd", clevel=3, shuffle=BloscShuffle.bitshuffle) - encoding = {v: {"compressors": compressor} for v in ds.data_vars} - else: - compressor = Blosc(cname="zstd", clevel=1, shuffle=Blosc.BITSHUFFLE) - encoding = {v: {"compressor": compressor} for v in ds.data_vars} - - # default mode to "w-" so that an error is raised if the dataset already exists - ds.to_zarr(fp_zarr, consolidated=True, mode="w-", encoding=encoding) - logger.info(f"Wrote training-ready dataset to {fp_zarr}") - - logger.info(ds) +import datetime +import shutil +from collections import defaultdict +from pathlib import Path +from typing import Optional, Union + +import numpy as np +import xarray as xr +import yaml +import zarr +from loguru import logger +from packaging.version import Version + +from mllam_data_prep.ops import selection + +from . import __version__ +from .config import ( + Config, + InvalidConfigException, + UnsupportedMllamDataPrepVersion, + find_config_differences, +) +from .ops.chunking import chunk_dataset +from .ops.derive_variable import derive_variable +from .ops.loading import load_input_dataset +from .ops.mapping import map_dims_and_variables +from .ops.selection import select_by_kwargs +from .ops.statistics import calc_stats +from .ops.subsetting import extract_variable + +if Version(zarr.__version__) >= Version("3"): + from zarr.codecs import BloscCodec, BloscShuffle +else: + from numcodecs import Blosc + +# The config versions defined in SUPPORTED_CONFIG_VERSIONS are the ones currently supported. 
+# The `extra` field in the config that was added between v0.2.0 and v0.5.0 is optional, and +# the `derived_variables` field in the config added in v0.6.0 is also optional, so we can +# support v0.2.0, v0.5.0, and v0.6.0 +SUPPORTED_CONFIG_VERSIONS = ["v0.2.0", "v0.5.0", "v0.6.0"] + + +def _check_dataset_attributes(ds, expected_attributes, dataset_name): + # check that the dataset has the expected attributes with the expected values + missing_attributes = set(expected_attributes.keys()) - set(ds.attrs.keys()) + if len(missing_attributes) > 0: + raise ValueError( + f"Dataset {dataset_name} is missing the following attributes: {missing_attributes}" + ) + + # check for attributes having the wrong value + incorrect_attributes = { + key: val for key, val in expected_attributes.items() if ds.attrs[key] != val + } + if len(incorrect_attributes) > 0: + s_list = "\n".join( + [ + f"{key}: {val} != {ds.attrs[key]}" + for key, val in incorrect_attributes.items() + ] + ) + raise ValueError( + f"Dataset {dataset_name} has the following incorrect attributes: {s_list}" + ) + + +def _merge_dataarrays_by_target(dataarrays_by_target): + attrs_to_keep = ["source_dataset"] + dataarrays = [] + for target, das in dataarrays_by_target.items(): + logger.info(f"Merging dataarrays for target variable `{target}`") + concat_dim = None + for da in das: + d = da.attrs.get("variables_mapping_dim", None) + if d is None: + raise ValueError( + f"Dataarray for target {target} does not have the 'variables_mapping_dim' attribute" + ) + if concat_dim is not None and d != concat_dim: + raise ValueError( + f"Dataarrays for target {target} have different 'variables_mapping_dim' attributes: {d} != {concat_dim}" + ) + concat_dim = d + + for da in das: + for attr in attrs_to_keep: + # create a aux coord for each attribute we want to keep + # (for example the name of the source dataset) + # so that we have this in the resulting dataset + da.coords[f"{concat_dim}_{attr}"] = xr.DataArray( + [da.attrs.pop(attr)] * int(da[concat_dim].count()), + dims=[concat_dim], + ) + + da_target = xr.concat(das, dim=concat_dim) + da_target.name = target + dataarrays.append(da_target) + + # by doing a merge with join="exact" we make sure that the dataarrays + # are aligned along the same dimensions, and that the coordinates are + # the same for all dataarrays. Otherwise xarray will fill in with NaNs + # for any missing coordinate values + try: + ds = xr.merge(dataarrays, join="exact") + except ValueError as ex: + if ex.args[0].startswith("cannot align objects with join='exact'"): + raise InvalidConfigException( + f"Couldn't merge together the dataarrays for all targets ({', '.join(dataarrays_by_target.keys())})" + f" This is likely because the dataarrays have different dimensions or coordinates." + " Maybe you need to give the 'feature' dimension a unique name for each target variable?" + ) from ex + else: + raise ex + return ds + + +def create_dataset(config: Config): + """ + Create a dataset from the input datasets specified in the config file. + + Parameters + ---------- + config : Config + The configuration object defining the input datasets and how to map them to the output dataset. + + Returns + ------- + xr.Dataset + The dataset created from the input datasets with a variable for each output + as defined in the config file. + """ + if not config.schema_version in SUPPORTED_CONFIG_VERSIONS: + raise ValueError( + f"Unsupported schema version {config.schema_version}. 
Only schema versions " + f" {', '.join(SUPPORTED_CONFIG_VERSIONS)} are supported by mllam-data-prep " + f"v{__version__}." + ) + if config.schema_version == "v0.2.0" and config.extra: + raise ValueError( + "Config schema version v0.2.0 does not support the `extra` field. Please " + "update the schema version used in your config to v0.5.0." + ) + + output_config = config.output + output_coord_ranges = output_config.coord_ranges + chunking_config = config.output.chunking + + dataarrays_by_target = defaultdict(list) + + for dataset_name, input_config in config.inputs.items(): + path = input_config.path + selected_variables = input_config.variables + derived_variables = input_config.derived_variables + target_output_var = input_config.target_output_variable + expected_input_attributes = input_config.attributes + expected_input_var_dims = input_config.dims + + output_dims = output_config.variables[target_output_var] + + logger.info(f"Loading dataset {dataset_name} from {path}") + try: + ds_input = load_input_dataset(fp=path) + except Exception as ex: + raise Exception(f"Error loading dataset {dataset_name} from {path}") from ex + + if input_config.coord_ranges is not None: + ds_input = selection.select_by_kwargs(ds_input, **input_config.coord_ranges) + + # Initialize the output dataset + ds = xr.Dataset() + ds.attrs.update(ds_input.attrs) + + if selected_variables: + logger.info(f"Extracting selected variables from dataset {dataset_name}") + if isinstance(selected_variables, dict): + for var_name, coords_to_sample in selected_variables.items(): + ds[var_name] = extract_variable( + ds=ds_input, + var_name=var_name, + coords_to_sample=coords_to_sample, + ) + elif isinstance(selected_variables, list): + for var_name in selected_variables: + ds[var_name] = extract_variable(ds=ds_input, var_name=var_name) + else: + raise ValueError( + "The `variables` argument should be a list or a dictionary" + ) + + if derived_variables: + logger.info(f"Deriving variables from {dataset_name}") + for var_name, derived_variable in derived_variables.items(): + ds[var_name] = derive_variable( + ds=ds_input, + derived_variable=derived_variable, + chunking=chunking_config, + target_dims=expected_input_var_dims, + ) + + _check_dataset_attributes( + ds=ds, + expected_attributes=expected_input_attributes, + dataset_name=dataset_name, + ) + + dim_mapping = input_config.dim_mapping + + # check that there is an entry for each arch dimension + # in the dim_mapping so that we know how to construct the + # final dataset + missing_dims = set(output_dims) - set(dim_mapping.keys()) + if missing_dims: + raise ValueError( + f"Missing dimension mapping for {missing_dims}" + f" for input dataset {dataset_name}, please provide" + " a mapping for all output dimensions by" + " using the 'dim_mapping' key in the input dataset" + ) + + logger.info( + f"Mapping dimensions and variables for dataset {dataset_name} to {target_output_var}" + ) + try: + da_target = map_dims_and_variables( + ds=ds, + dim_mapping=dim_mapping, + expected_input_var_dims=expected_input_var_dims, + ) + except Exception as ex: + raise Exception( + f"There was an issue stacking dimensions and variables to" + f" produce variable {target_output_var} from dataset {dataset_name}" + ) from ex + + da_target.attrs["source_dataset"] = dataset_name + + # only need to do selection for the coordinates that the input dataset actually has + if output_coord_ranges is not None: + # Use a temporary dict to apply selection on coordinate ranges to avoid + # modifying the original ranges 
given in the config. This is needed because + # static features, for example, do not have a time dimension. Hence, the time + # based selection returns an empty dictionary, which should not overwrite the + # selection for the other variables. + output_coord_ranges_tmp = { + k: w for k, w in output_coord_ranges.items() if k in output_dims + } + da_target = select_by_kwargs(da_target, **output_coord_ranges_tmp) + + dataarrays_by_target[target_output_var].append(da_target) + + ds = _merge_dataarrays_by_target(dataarrays_by_target=dataarrays_by_target) + + # need to drop the encoding so that we can write to zarr with new chunksizes + ds = ds.drop_encoding() + + # default to making a single chunk for each dimension if chunksize is not specified + # in the config + logger.info(f"Chunking dataset with {chunking_config}") + chunks = {dim: chunking_config.get(dim, int(ds[dim].count())) for dim in ds.dims} + ds = chunk_dataset(ds, chunks) + + splitting = config.output.splitting + + if splitting is not None: + splits = splitting.splits + logger.info( + f"Setting splitting information to define `{list(splits.keys())}` splits " + f"along dimension `{splitting.dim}`" + ) + + for split_name, split_config in splits.items(): + if split_config.compute_statistics is not None: + ds_split = ds.sel( + {splitting.dim: slice(split_config.start, split_config.end)} + ) + logger.info(f"Computing statistics for split {split_name}") + split_stats = calc_stats( + ds=ds_split, + statistics_config=split_config.compute_statistics, + splitting_dim=splitting.dim, + ) + for op, op_dataarrays in split_stats.items(): + for var_name, da in op_dataarrays.items(): + ds[f"{var_name}__{split_name}__{op}"] = da + + # add a new variable which contains the start, stop for each split, the coords would then be the split names + # and the data would be the start, stop values + split_vals = np.array([[split.start, split.end] for split in splits.values()]) + da_splits = xr.DataArray( + split_vals, + dims=["split_name", "split_part"], + coords={"split_name": list(splits.keys()), "split_part": ["start", "end"]}, + ) + ds["splits"] = da_splits + + ds.attrs = {} + ds.attrs["schema_version"] = config.schema_version + ds.attrs["dataset_version"] = config.dataset_version + ds.attrs["created_on"] = datetime.datetime.now().replace(microsecond=0).isoformat() + ds.attrs[ + "created_with" + ] = "mllam-data-prep (https://github.com/mllam/mllam-data-prep)" + ds.attrs["mdp_version"] = f"v{__version__}" + ds.attrs["creation_config"] = config.to_yaml() + + return ds + + +def create_dataset_zarr( + fp_config: Path, + fp_zarr: Optional[Union[str, Path]] = None, + overwrite: str = "always", +): + """ + Create a dataset from the input datasets specified in the config file and + write it to a zarr dataset. The path to the zarr dataset is the same as the + config file (unless `fp_zarr` is provided), but with the extension changed + to '.zarr'. + + Parameters + ---------- + fp_config : Path + The path to the configuration file. + fp_zarr : Path, optional + The path to the zarr file to write the dataset to. If not provided, the zarr file will be written + to the same directory as the config file with the extension changed to '.zarr'. + overwrite : str, optional + How to handle an existing dataset at the provided path. 
Options are:
+        - "always": Always delete the existing dataset (default)
+        - "never": Never delete the existing dataset
+        - "on_config_change": Only delete the existing dataset if the configuration has changed
+    """
+    config = Config.from_yaml_file(file=fp_config)
+
+    if fp_zarr is None:
+        fp_zarr = fp_config.parent / fp_config.name.replace(".yaml", ".zarr")
+    else:
+        fp_zarr = Path(fp_zarr)
+
+    if fp_zarr.exists():
+        if overwrite == "never":
+            ds_existing = xr.open_zarr(fp_zarr)
+            try:
+                config_differences = find_config_differences(
+                    config=config, ds_existing=ds_existing
+                )
+            except UnsupportedMllamDataPrepVersion:
+                config_differences = None
+
+            ex_str = (
+                f"There already exists a dataset at {fp_zarr}, and the overwrite option is set to 'never'. "
+                "Either delete the existing dataset or set overwrite='always' to overwrite it. "
+            )
+            # try to parse the differences in the config in case the existing
+            # dataset was created with a supported version
+            if config_differences:
+                ex_str += (
+                    "The existing dataset was created with a different configuration than the current one. "
+                    "Differences between existing and new configuration: \n"
+                    f"{yaml.dump(config_differences, default_flow_style=False)}"
+                )
+            raise FileExistsError(ex_str)
+        elif overwrite == "on_config_change":
+            try:
+                ds_existing = xr.open_zarr(fp_zarr)
+                config_differences = find_config_differences(
+                    config=config, ds_existing=ds_existing
+                )
+            except UnsupportedMllamDataPrepVersion as ex:
+                raise FileExistsError(
+                    f"There already exists a dataset at {fp_zarr}, but it was created with an older version of mllam-data-prep "
+                    "and so doesn't contain a record of the configuration used to create it. Either delete the existing dataset or "
+                    "set overwrite='always' to overwrite it."
+                ) from ex
+
+            if config_differences:
+                logger.info(
+                    "The existing dataset was created with a different configuration than the current one."
+                )
+                diff_yaml = yaml.dump(config_differences, default_flow_style=False)
+                logger.info(
+                    f"Differences between existing and new configuration:\n{diff_yaml}"
+                )
+                logger.info(f"Removing existing dataset at {fp_zarr}")
+                shutil.rmtree(fp_zarr)
+            else:
+                logger.info(
+                    f"Skipping writing of dataset to {fp_zarr} as the configuration is unchanged"
+                )
+                return
+        elif overwrite == "always":
+            logger.info(f"Removing existing dataset at {fp_zarr}")
+            shutil.rmtree(fp_zarr)
+        else:
+            raise NotImplementedError(
+                f"Unsupported overwrite option {overwrite}. 
Options are 'always', 'never', or 'on_config_change'" + ) + + ds = create_dataset(config=config) + + logger.info("Writing dataset to zarr") + + # use zstd compression since it has a good balance of speed and compression ratio + # https://engineering.fb.com/2016/08/31/core-infra/smaller-and-faster-data-compression-with-zstandard/ + if Version(zarr.__version__) >= Version("3"): + compressor = BloscCodec(cname="zstd", clevel=3, shuffle=BloscShuffle.bitshuffle) + encoding = {v: {"compressors": compressor} for v in ds.data_vars} + else: + compressor = Blosc(cname="zstd", clevel=1, shuffle=Blosc.BITSHUFFLE) + encoding = {v: {"compressor": compressor} for v in ds.data_vars} + + # default mode to "w-" so that an error is raised if the dataset already exists + ds.to_zarr(fp_zarr, consolidated=True, mode="w-", encoding=encoding) + logger.info(f"Wrote training-ready dataset to {fp_zarr}") + + logger.info(ds) diff --git a/mllam_data_prep/ops/__init__.py b/mllam_data_prep/ops/__init__.py index 877cdfb..0b0143f 100644 --- a/mllam_data_prep/ops/__init__.py +++ b/mllam_data_prep/ops/__init__.py @@ -1 +1 @@ -from . import derive_variable +from . import derive_variable diff --git a/mllam_data_prep/ops/chunking.py b/mllam_data_prep/ops/chunking.py index 9df27e9..d84f245 100644 --- a/mllam_data_prep/ops/chunking.py +++ b/mllam_data_prep/ops/chunking.py @@ -1,73 +1,73 @@ -import numpy as np -from loguru import logger - -# Max chunk size warning -CHUNK_MAX_SIZE_WARNING = 1 * 1024**3 # 1GB - - -def check_chunk_size(ds, chunks): - """ - Check the chunk size and warn if it exceeds CHUNK_MAX_SIZE_WARNING. - - Parameters - ---------- - ds: xr.Dataset - Dataset to be chunked - chunks: Dict[str, int] - Dictionary with keys as dimensions to be chunked and - chunk sizes as the values - - Returns - ------- - ds: xr.Dataset - Dataset with chunking applied - """ - - for var_name, var_data in ds.data_vars.items(): - total_chunk_size = 1 - - # Loop over all dims in the dataset to be chunked - for dim, chunk_size in chunks.items(): - chunk_dim_size = var_data.sizes.get(dim, None) - if chunk_dim_size is None: - continue # Dimension 'dim' not found in the data-array - total_chunk_size *= chunk_size - - dtype = var_data.dtype - bytes_per_element = np.dtype(dtype).itemsize - - memory_usage = total_chunk_size * bytes_per_element - - if memory_usage > CHUNK_MAX_SIZE_WARNING: - logger.warning( - f"The chunk size for '{var_name}' exceeds '{CHUNK_MAX_SIZE_WARNING / 1024**3}' GB." - ) - - -def chunk_dataset(ds, chunks): - """ - Check the chunk size and chunk the dataset. - - Parameters - ---------- - ds: xr.Dataset - Dataset to be chunked - chunks: Dict[str, int] - Dictionary with keys as dimensions to be chunked and - chunk sizes as the values - - Returns - ------- - ds: xr.Dataset - Dataset with chunking applied - """ - # Check the chunk size - check_chunk_size(ds, chunks) - - # Try chunking - try: - ds = ds.chunk(chunks) - except Exception as ex: - raise Exception(f"Error chunking dataset: {ex}") - - return ds +import numpy as np +from loguru import logger + +# Max chunk size warning +CHUNK_MAX_SIZE_WARNING = 1 * 1024**3 # 1GB + + +def check_chunk_size(ds, chunks): + """ + Check the chunk size and warn if it exceeds CHUNK_MAX_SIZE_WARNING. 
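+    For example (illustrative numbers): a float64 variable chunked as
+    {"time": 256, "x": 512, "y": 512} has chunks of
+    256 * 512 * 512 * 8 bytes = 512 MiB, which is below the 1 GB threshold.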
+ + Parameters + ---------- + ds: xr.Dataset + Dataset to be chunked + chunks: Dict[str, int] + Dictionary with keys as dimensions to be chunked and + chunk sizes as the values + + Returns + ------- + ds: xr.Dataset + Dataset with chunking applied + """ + + for var_name, var_data in ds.data_vars.items(): + total_chunk_size = 1 + + # Loop over all dims in the dataset to be chunked + for dim, chunk_size in chunks.items(): + chunk_dim_size = var_data.sizes.get(dim, None) + if chunk_dim_size is None: + continue # Dimension 'dim' not found in the data-array + total_chunk_size *= chunk_size + + dtype = var_data.dtype + bytes_per_element = np.dtype(dtype).itemsize + + memory_usage = total_chunk_size * bytes_per_element + + if memory_usage > CHUNK_MAX_SIZE_WARNING: + logger.warning( + f"The chunk size for '{var_name}' exceeds '{CHUNK_MAX_SIZE_WARNING / 1024**3}' GB." + ) + + +def chunk_dataset(ds, chunks): + """ + Check the chunk size and chunk the dataset. + + Parameters + ---------- + ds: xr.Dataset + Dataset to be chunked + chunks: Dict[str, int] + Dictionary with keys as dimensions to be chunked and + chunk sizes as the values + + Returns + ------- + ds: xr.Dataset + Dataset with chunking applied + """ + # Check the chunk size + check_chunk_size(ds, chunks) + + # Try chunking + try: + ds = ds.chunk(chunks) + except Exception as ex: + raise Exception(f"Error chunking dataset: {ex}") + + return ds diff --git a/mllam_data_prep/ops/derive_variable/__init__.py b/mllam_data_prep/ops/derive_variable/__init__.py index cc455e7..44c53af 100644 --- a/mllam_data_prep/ops/derive_variable/__init__.py +++ b/mllam_data_prep/ops/derive_variable/__init__.py @@ -1,3 +1,3 @@ -from .main import derive_variable -from .physical_field import calculate_toa_radiation -from .time_components import calculate_day_of_year, calculate_hour_of_day +from .main import derive_variable +from .physical_field import calculate_toa_radiation +from .time_components import calculate_day_of_year, calculate_hour_of_day diff --git a/mllam_data_prep/ops/derive_variable/main.py b/mllam_data_prep/ops/derive_variable/main.py index 07fc8f9..42b8e23 100644 --- a/mllam_data_prep/ops/derive_variable/main.py +++ b/mllam_data_prep/ops/derive_variable/main.py @@ -1,241 +1,241 @@ -""" -Handle deriving new variables (xr.DataArrays) from an individual input dataset -that has been loaded. This makes it possible to for example add fields that can -be derived from analytical expressions and are functions of coordinate values -(e.g. top-of-atmosphere incoming radiation is a function of time and lat/lon location), -but also of other physical fields (wind-speed is a function of both meridional -and zonal wind components). -""" - -import importlib -import sys - -import xarray as xr -from loguru import logger - -from ..chunking import chunk_dataset - -REQUIRED_FIELD_ATTRIBUTES = ["units", "long_name"] - - -def derive_variable(ds, derived_variable, chunking, target_dims): - """ - Derive a variable using the `function` and `kwargs` of `derived_variable`. 
- - Parameters - --------- - ds : xr.Dataset - Input dataset - derived_variable : Dict[str, DerivedVariable] - Dictionary with the variables to derive with keys as the variable - names and values with entries for kwargs and function to use in - the calculation - chunking: Dict[str, int] - Dictionary with keys as the dimensions to chunk along and values - with the chunk size - target_dims: List[str] - List of dims from ds to broadcast derived variable to, - if not used in calculation - - Returns - ------- - xr.Dataset - Dataset with derived variables included - """ - - function_namespace = derived_variable.function - expected_field_attributes = derived_variable.attrs - - # split the function kwargs defined in the config into two groups: - # 1. variables that should be extracted from the input dataset (and renamed) - # 2. other kwargs that should be passed in as is - required_input_dataset_vars = {} - other_kwargs = {} - for key, val in derived_variable.kwargs.items(): - if val.startswith("ds_input."): - var_name = val.rpartition(".")[2] - required_input_dataset_vars[key] = var_name - else: - other_kwargs[key] = val - - # select from the input dataset the subset of variables which have been - # selected to be used as input arguments for the derived variable - ds_subset = ds[list(required_input_dataset_vars.values())] - - # Chunking is needed for coordinates used to derive a variable since they are - # not lazily loaded, as otherwise one might run into memory issues if using a - # large dataset as input. - # Any coordinates needed for the derivation, for which chunking should be performed, - # should be converted to variables since it is not possible for *indexed* coordinates - # to be chunked dask arrays - chunks = { - dim: chunking.get(dim, int(ds_subset[dim].count())) for dim in ds_subset.dims - } - required_coordinates = [ - coord - for coord in required_input_dataset_vars.values() - if coord in ds_subset.coords - ] - ds_subset = ds_subset.drop_indexes(required_coordinates, errors="ignore") - for req_coord in required_coordinates: - if req_coord in chunks: - ds_subset = ds_subset.reset_coords(req_coord) - - # Chunk the dataset - ds_subset = chunk_dataset(ds_subset, chunks) - - # Add function arguments to kwargs - kwargs = {} - for arg, val in required_input_dataset_vars.items(): - kwargs[arg] = ds_subset[val] - kwargs.update(other_kwargs) - - # Get the function - func = _get_derived_variable_function(function_namespace) - - # Calculate the derived variable - derived_field = func(**kwargs) - - if isinstance(derived_field, xr.DataArray): - # Check that the derived field has the necessary attributes - # (REQUIRED_FIELD_ATTRIBUTES) set, and set them if not - derived_field_attrs = _check_and_get_required_attributes( - derived_field, expected_field_attributes - ) - derived_field.attrs.update(derived_field_attrs) - - # Return any dropped/reset coordinates - for req_coord in required_coordinates: - if req_coord in chunks: - derived_field.coords[req_coord] = ds_subset[req_coord] - - # Align the derived field to the output dataset dimensions (if necessary) - derived_field = _align_derived_variable(derived_field, ds, target_dims) - else: - raise TypeError( - f"Expected an instance of xr.DataArray, but got {type(derived_field)}." - ) - - return derived_field - - -def _get_derived_variable_function(function_namespace): - """ - Function for getting the function for deriving - the specified variable. 
- - Parameters - ---------- - function_namespace: str - The full function namespace - - Returns - ------- - function: object - Function for deriving the specified variable - """ - # Get module and function names - module_name, _, function_name = function_namespace.rpartition(".") - - # Import the module (if necessary) - if module_name in sys.modules: - module = sys.modules[module_name] - else: - module = importlib.import_module(module_name) - - # Get the function from the module - function = getattr(module, function_name) - - return function - - -def _check_and_get_required_attributes(field, expected_attributes): - """ - Check if the required attributes of the derived variable are set. - If not set, get them from the config. - If set and defined in the config, get the attributes from the config - and use them for overwriting the attributes defined in the function. - - Parameters - ---------- - field: xr.DataArray - The derived field - expected_attributes: Dict[str, str] - Dictionary with expected attributes for the derived variables. - Defined in the config file. - - Returns - ------- - field: xr.DataArray - The derived field - """ - - attrs = {} - for attribute in REQUIRED_FIELD_ATTRIBUTES: - if attribute not in field.attrs or field.attrs[attribute] is None: - if attribute in expected_attributes.keys(): - attrs[attribute] = expected_attributes[attribute] - else: - # The expected attributes are empty and the attributes have not been - # set during the calculation of the derived variable - raise KeyError( - f'The attribute "{attribute}" has not been set for the derived' - f' variable "{field.name}". This is most likely because you are' - " using a function external to `mlllam-data-prep` to derive the field," - f" in which the required attributes ({', '.join(REQUIRED_FIELD_ATTRIBUTES)})" - " are not set. If they are not set in the function call when deriving the field," - ' they can be set in the config file by adding an "attrs" section under the' - f' "{field.name}" derived variable section. For example, if the required attributes' - f" ({', '.join(REQUIRED_FIELD_ATTRIBUTES)}) are not set for a derived variable named" - f' "toa_radiation" they can be set by adding the following to the config file:' - ' {"attrs": {"units": "W*m**-2", "long_name": "top-of-atmosphere incoming radiation"}}.' - ) - elif attribute in expected_attributes.keys(): - logger.warning( - f"The attribute '{attribute}' of the derived field" - f" {field.name} is being overwritten from" - f" '{field.attrs[attribute]}' to" - f" '{expected_attributes[attribute]}' according" - " to the specification in the config file." - ) - attrs[attribute] = expected_attributes[attribute] - else: - # Attributes are set in the function and nothing has been defined in the config file - attrs[attribute] = field.attrs[attribute] - - return attrs - - -def _align_derived_variable(field, ds, target_dims): - """ - Align a derived variable to the target dimensions (ignoring non-dimension coordinates). - - Parameters - ---------- - field: xr.DataArray - Derived field to align - ds: xr.Dataset - Target dataset - target_dims: List[str] - Dimensions to align to (e.g. 
'time', 'y', 'x') - - Returns - ------- - field: xr.DataArray - The derived field aligned to the target dimensions - """ - # Ensure that dimensions are ordered correctly - field = field.transpose( - *[dim for dim in target_dims if dim in field.dims], missing_dims="ignore" - ) - - # Add missing dimensions explicitly - for dim in target_dims: - if dim not in field.dims: - field = field.expand_dims({dim: ds.sizes[dim]}) - - # Broadcast to match only the target dimensions - broadcast_shape = {dim: ds[dim] for dim in target_dims if dim in ds.dims} - field = field.broadcast_like(xr.Dataset(coords=broadcast_shape)) - - return field +""" +Handle deriving new variables (xr.DataArrays) from an individual input dataset +that has been loaded. This makes it possible to for example add fields that can +be derived from analytical expressions and are functions of coordinate values +(e.g. top-of-atmosphere incoming radiation is a function of time and lat/lon location), +but also of other physical fields (wind-speed is a function of both meridional +and zonal wind components). +""" + +import importlib +import sys + +import xarray as xr +from loguru import logger + +from ..chunking import chunk_dataset + +REQUIRED_FIELD_ATTRIBUTES = ["units", "long_name"] + + +def derive_variable(ds, derived_variable, chunking, target_dims): + """ + Derive a variable using the `function` and `kwargs` of `derived_variable`. + + Parameters + --------- + ds : xr.Dataset + Input dataset + derived_variable : Dict[str, DerivedVariable] + Dictionary with the variables to derive with keys as the variable + names and values with entries for kwargs and function to use in + the calculation + chunking: Dict[str, int] + Dictionary with keys as the dimensions to chunk along and values + with the chunk size + target_dims: List[str] + List of dims from ds to broadcast derived variable to, + if not used in calculation + + Returns + ------- + xr.Dataset + Dataset with derived variables included + """ + + function_namespace = derived_variable.function + expected_field_attributes = derived_variable.attrs + + # split the function kwargs defined in the config into two groups: + # 1. variables that should be extracted from the input dataset (and renamed) + # 2. other kwargs that should be passed in as is + required_input_dataset_vars = {} + other_kwargs = {} + for key, val in derived_variable.kwargs.items(): + if val.startswith("ds_input."): + var_name = val.rpartition(".")[2] + required_input_dataset_vars[key] = var_name + else: + other_kwargs[key] = val + + # select from the input dataset the subset of variables which have been + # selected to be used as input arguments for the derived variable + ds_subset = ds[list(required_input_dataset_vars.values())] + + # Chunking is needed for coordinates used to derive a variable since they are + # not lazily loaded, as otherwise one might run into memory issues if using a + # large dataset as input. 
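+    # (As an illustrative example: with chunking={"time": 256} and a
+    # ds_subset with dims {"time": 1000, "x": 100, "y": 100}, the `chunks`
+    # mapping constructed below becomes {"time": 256, "x": 100, "y": 100}.)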
+ # Any coordinates needed for the derivation, for which chunking should be performed, + # should be converted to variables since it is not possible for *indexed* coordinates + # to be chunked dask arrays + chunks = { + dim: chunking.get(dim, int(ds_subset[dim].count())) for dim in ds_subset.dims + } + required_coordinates = [ + coord + for coord in required_input_dataset_vars.values() + if coord in ds_subset.coords + ] + ds_subset = ds_subset.drop_indexes(required_coordinates, errors="ignore") + for req_coord in required_coordinates: + if req_coord in chunks: + ds_subset = ds_subset.reset_coords(req_coord) + + # Chunk the dataset + ds_subset = chunk_dataset(ds_subset, chunks) + + # Add function arguments to kwargs + kwargs = {} + for arg, val in required_input_dataset_vars.items(): + kwargs[arg] = ds_subset[val] + kwargs.update(other_kwargs) + + # Get the function + func = _get_derived_variable_function(function_namespace) + + # Calculate the derived variable + derived_field = func(**kwargs) + + if isinstance(derived_field, xr.DataArray): + # Check that the derived field has the necessary attributes + # (REQUIRED_FIELD_ATTRIBUTES) set, and set them if not + derived_field_attrs = _check_and_get_required_attributes( + derived_field, expected_field_attributes + ) + derived_field.attrs.update(derived_field_attrs) + + # Return any dropped/reset coordinates + for req_coord in required_coordinates: + if req_coord in chunks: + derived_field.coords[req_coord] = ds_subset[req_coord] + + # Align the derived field to the output dataset dimensions (if necessary) + derived_field = _align_derived_variable(derived_field, ds, target_dims) + else: + raise TypeError( + f"Expected an instance of xr.DataArray, but got {type(derived_field)}." + ) + + return derived_field + + +def _get_derived_variable_function(function_namespace): + """ + Function for getting the function for deriving + the specified variable. + + Parameters + ---------- + function_namespace: str + The full function namespace + + Returns + ------- + function: object + Function for deriving the specified variable + """ + # Get module and function names + module_name, _, function_name = function_namespace.rpartition(".") + + # Import the module (if necessary) + if module_name in sys.modules: + module = sys.modules[module_name] + else: + module = importlib.import_module(module_name) + + # Get the function from the module + function = getattr(module, function_name) + + return function + + +def _check_and_get_required_attributes(field, expected_attributes): + """ + Check if the required attributes of the derived variable are set. + If not set, get them from the config. + If set and defined in the config, get the attributes from the config + and use them for overwriting the attributes defined in the function. + + Parameters + ---------- + field: xr.DataArray + The derived field + expected_attributes: Dict[str, str] + Dictionary with expected attributes for the derived variables. + Defined in the config file. + + Returns + ------- + field: xr.DataArray + The derived field + """ + + attrs = {} + for attribute in REQUIRED_FIELD_ATTRIBUTES: + if attribute not in field.attrs or field.attrs[attribute] is None: + if attribute in expected_attributes.keys(): + attrs[attribute] = expected_attributes[attribute] + else: + # The expected attributes are empty and the attributes have not been + # set during the calculation of the derived variable + raise KeyError( + f'The attribute "{attribute}" has not been set for the derived' + f' variable "{field.name}". 
This is most likely because you are'
+                    " using a function external to `mllam-data-prep` to derive the field,"
+                    f" in which the required attributes ({', '.join(REQUIRED_FIELD_ATTRIBUTES)})"
+                    " are not set. If they are not set in the function call when deriving the field,"
+                    ' they can be set in the config file by adding an "attrs" section under the'
+                    f' "{field.name}" derived variable section. For example, if the required attributes'
+                    f" ({', '.join(REQUIRED_FIELD_ATTRIBUTES)}) are not set for a derived variable named"
+                    f' "toa_radiation", they can be set by adding the following to the config file:'
+                    ' {"attrs": {"units": "W*m**-2", "long_name": "top-of-atmosphere incoming radiation"}}.'
+                )
+        elif attribute in expected_attributes.keys():
+            logger.warning(
+                f"The attribute '{attribute}' of the derived field"
+                f" {field.name} is being overwritten from"
+                f" '{field.attrs[attribute]}' to"
+                f" '{expected_attributes[attribute]}' according"
+                " to the specification in the config file."
+            )
+            attrs[attribute] = expected_attributes[attribute]
+        else:
+            # Attributes are set in the function and nothing has been defined in the config file
+            attrs[attribute] = field.attrs[attribute]
+
+    return attrs
+
+
+def _align_derived_variable(field, ds, target_dims):
+    """
+    Align a derived variable to the target dimensions (ignoring non-dimension coordinates).
+
+    Parameters
+    ----------
+    field: xr.DataArray
+        Derived field to align
+    ds: xr.Dataset
+        Target dataset
+    target_dims: List[str]
+        Dimensions to align to (e.g. 'time', 'y', 'x')
+
+    Returns
+    -------
+    field: xr.DataArray
+        The derived field aligned to the target dimensions
+    """
+    # Ensure that dimensions are ordered correctly
+    field = field.transpose(
+        *[dim for dim in target_dims if dim in field.dims], missing_dims="ignore"
+    )
+
+    # Add missing dimensions explicitly
+    for dim in target_dims:
+        if dim not in field.dims:
+            field = field.expand_dims({dim: ds.sizes[dim]})
+
+    # Broadcast to match only the target dimensions
+    broadcast_shape = {dim: ds[dim] for dim in target_dims if dim in ds.dims}
+    field = field.broadcast_like(xr.Dataset(coords=broadcast_shape))
+
+    return field
diff --git a/mllam_data_prep/ops/derive_variable/physical_field.py b/mllam_data_prep/ops/derive_variable/physical_field.py
index d7b9617..5622e87 100644
--- a/mllam_data_prep/ops/derive_variable/physical_field.py
+++ b/mllam_data_prep/ops/derive_variable/physical_field.py
@@ -1,74 +1,74 @@
-"""
-Contains functions used to derive physical fields. This can be both
-fields that can be derived from analytical expressions and are functions
-of coordinate values (e.g. top-of-atmosphere incoming radiation is a function
-of time and lat/lon location), but also of other physical fields, such as
-wind speed, which is a function of both meridional and zonal wind components.
-"""
-import datetime
-
-import numpy as np
-import xarray as xr
-from loguru import logger
-
-
-def calculate_toa_radiation(lat, lon, time):
-    """
-    Function for calculating top-of-atmosphere incoming radiation
-
-    Parameters
-    ----------
-    lat : Union[xr.DataArray, float]
-        Latitude values. Should be in the range [-90, 90]
-    lon : Union[xr.DataArray, float]
-        Longitude values.
Should be in the range [-180, 180] or [0, 360] - time : Union[xr.DataArray, datetime.datetime] - Time - - Returns - ------- - toa_radiation : Union[xr.DataArray, float] - Top-of-atmosphere incoming radiation - """ - logger.info("Calculating top-of-atmosphere incoming radiation") - - # Solar constant - solar_constant = 1366 # W*m**-2 - - # Different handling if xr.DataArray or datetime object - if isinstance(time, xr.DataArray): - day = time.dt.dayofyear - hour_utc = time.dt.hour - elif isinstance(time, datetime.datetime): - day = time.timetuple().tm_yday - hour_utc = time.hour - else: - raise TypeError( - "Expected an instance of xr.DataArray or datetime object," - f" but got {type(time)}." - ) - - # Eq. 1.6.1a in Solar Engineering of Thermal Processes 4th ed. - # dec: declination - angular position of the sun at solar noon w.r.t. - # the plane of the equator - dec = np.pi / 180 * 23.45 * np.sin(2 * np.pi * (284 + day) / 365) - - utc_solar_time = hour_utc + lon / 15 - hour_angle = 15 * (utc_solar_time - 12) - - # Eq. 1.6.2 with beta=0 in Solar Engineering of Thermal Processes 4th ed. - # cos_sza: Cosine of solar zenith angle - cos_sza = np.sin(lat * np.pi / 180) * np.sin(dec) + np.cos( - lat * np.pi / 180 - ) * np.cos(dec) * np.cos(hour_angle * np.pi / 180) - - # Where TOA radiation is negative, set to 0 - toa_radiation = xr.where(solar_constant * cos_sza < 0, 0, solar_constant * cos_sza) - - if isinstance(toa_radiation, xr.DataArray): - # Add attributes - toa_radiation.name = "toa_radiation" - toa_radiation.attrs["long_name"] = "top-of-atmosphere incoming radiation" - toa_radiation.attrs["units"] = "W*m**-2" - - return toa_radiation +""" +Contains functions used to derive physical fields. This can be both +fields that can be derived from analytical expressions and are functions +of coordinate values (e.g. top-of-atmosphere incoming radiation is a function +of time and lat/lon location), but also of other physical fields, such as +wind speed, which is a function of both meridional and zonal wind components. +""" +import datetime + +import numpy as np +import xarray as xr +from loguru import logger + + +def calculate_toa_radiation(lat, lon, time): + """ + Function for calculating top-of-atmosphere incoming radiation + + Parameters + ---------- + lat : Union[xr.DataArray, float] + Latitude values. Should be in the range [-90, 90] + lon : Union[xr.DataArray, float] + Longitude values. Should be in the range [-180, 180] or [0, 360] + time : Union[xr.DataArray, datetime.datetime] + Time + + Returns + ------- + toa_radiation : Union[xr.DataArray, float] + Top-of-atmosphere incoming radiation + """ + logger.info("Calculating top-of-atmosphere incoming radiation") + + # Solar constant + solar_constant = 1366 # W*m**-2 + + # Different handling if xr.DataArray or datetime object + if isinstance(time, xr.DataArray): + day = time.dt.dayofyear + hour_utc = time.dt.hour + elif isinstance(time, datetime.datetime): + day = time.timetuple().tm_yday + hour_utc = time.hour + else: + raise TypeError( + "Expected an instance of xr.DataArray or datetime object," + f" but got {type(time)}." + ) + + # Eq. 1.6.1a in Solar Engineering of Thermal Processes 4th ed. + # dec: declination - angular position of the sun at solar noon w.r.t. + # the plane of the equator + dec = np.pi / 180 * 23.45 * np.sin(2 * np.pi * (284 + day) / 365) + + utc_solar_time = hour_utc + lon / 15 + hour_angle = 15 * (utc_solar_time - 12) + + # Eq. 1.6.2 with beta=0 in Solar Engineering of Thermal Processes 4th ed. 
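+    # The expression below is the standard spherical-trigonometry identity
+    # cos(theta_z) = sin(lat)*sin(dec) + cos(lat)*cos(dec)*cos(hour_angle),
+    # with latitude and hour angle converted from degrees to radians (the
+    # declination computed above is already in radians).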
+ # cos_sza: Cosine of solar zenith angle + cos_sza = np.sin(lat * np.pi / 180) * np.sin(dec) + np.cos( + lat * np.pi / 180 + ) * np.cos(dec) * np.cos(hour_angle * np.pi / 180) + + # Where TOA radiation is negative, set to 0 + toa_radiation = xr.where(solar_constant * cos_sza < 0, 0, solar_constant * cos_sza) + + if isinstance(toa_radiation, xr.DataArray): + # Add attributes + toa_radiation.name = "toa_radiation" + toa_radiation.attrs["long_name"] = "top-of-atmosphere incoming radiation" + toa_radiation.attrs["units"] = "W*m**-2" + + return toa_radiation diff --git a/mllam_data_prep/ops/derive_variable/time_components.py b/mllam_data_prep/ops/derive_variable/time_components.py index 5329e12..5b08a0e 100644 --- a/mllam_data_prep/ops/derive_variable/time_components.py +++ b/mllam_data_prep/ops/derive_variable/time_components.py @@ -1,113 +1,113 @@ -""" -Contains functions used to derive time component fields, such as e.g. day of year -and hour of day. -""" -import datetime - -import numpy as np -import xarray as xr -from loguru import logger - - -def calculate_hour_of_day(time, component): - """ - Function for calculating hour of day features with a cyclic encoding - - Parameters - ---------- - time: Union[xr.DataArray, datetime.datetime] - Time - component: str - String indicating if the sine or cosine component of the encoding - should be returned - - Returns - ------- - hour_of_day_encoded: Union[xr.DataArray, float] - sine or cosine of the hour of day - """ - logger.info("Calculating hour of day") - - # Get the hour of the day - if isinstance(time, xr.DataArray): - hour_of_day = time.dt.hour - elif isinstance(time, datetime.datetime): - hour_of_day = time.hour - else: - raise TypeError( - "Expected an instance of xr.DataArray or datetime object," - f" but got {type(time)}." - ) - - # Cyclic encoding of hour of day - if component == "sin": - hour_of_day_encoded = np.sin((hour_of_day / 24) * 2 * np.pi) - elif component == "cos": - hour_of_day_encoded = np.cos((hour_of_day / 24) * 2 * np.pi) - else: - raise ValueError( - f"Invalid value of `component`: '{component}'. Expected one of: 'cos' or 'sin'." - " Please update the config accordingly." - ) - - if isinstance(hour_of_day_encoded, xr.DataArray): - # Add attributes - hour_of_day_encoded.name = "hour_of_day_" + component - hour_of_day_encoded.attrs[ - "long_name" - ] = f"{component.capitalize()} component of cyclically encoded hour of day" - hour_of_day_encoded.attrs["units"] = "1" - - return hour_of_day_encoded - - -def calculate_day_of_year(time, component): - """ - Function for calculating day of year features with a cyclic encoding - - Parameters - ---------- - time : Union[xr.DataArray, datetime.datetime] - Time - component: str - String indicating if the sine or cosine component of the encoding - should be returned - - Returns - ------- - day_of_year_encoded: Union[xr.DataArray, float] - sine or cosine of the day of year - """ - logger.info("Calculating day of year") - - # Get the day of year - if isinstance(time, xr.DataArray): - day_of_year = time.dt.dayofyear - elif isinstance(time, datetime.datetime): - day_of_year = time.timetuple().tm_yday - else: - raise TypeError( - "Expected an instance of xr.DataArray or datetime object," - f" but got {type(time)}." - ) - - # Cyclic encoding of day of year - use 366 to include leap years! 
- if component == "sin": - day_of_year_encoded = np.sin((day_of_year / 366) * 2 * np.pi) - elif component == "cos": - day_of_year_encoded = np.cos((day_of_year / 366) * 2 * np.pi) - else: - raise ValueError( - f"Invalid value of `component`: '{component}'. Expected one of: 'cos' or 'sin'." - " Please update the config accordingly." - ) - - if isinstance(day_of_year_encoded, xr.DataArray): - # Add attributes - day_of_year_encoded.name = "day_of_year_" + component - day_of_year_encoded.attrs[ - "long_name" - ] = f"{component.capitalize()} component of cyclically encoded day of year" - day_of_year_encoded.attrs["units"] = "1" - - return day_of_year_encoded +""" +Contains functions used to derive time component fields, such as e.g. day of year +and hour of day. +""" +import datetime + +import numpy as np +import xarray as xr +from loguru import logger + + +def calculate_hour_of_day(time, component): + """ + Function for calculating hour of day features with a cyclic encoding + + Parameters + ---------- + time: Union[xr.DataArray, datetime.datetime] + Time + component: str + String indicating if the sine or cosine component of the encoding + should be returned + + Returns + ------- + hour_of_day_encoded: Union[xr.DataArray, float] + sine or cosine of the hour of day + """ + logger.info("Calculating hour of day") + + # Get the hour of the day + if isinstance(time, xr.DataArray): + hour_of_day = time.dt.hour + elif isinstance(time, datetime.datetime): + hour_of_day = time.hour + else: + raise TypeError( + "Expected an instance of xr.DataArray or datetime object," + f" but got {type(time)}." + ) + + # Cyclic encoding of hour of day + if component == "sin": + hour_of_day_encoded = np.sin((hour_of_day / 24) * 2 * np.pi) + elif component == "cos": + hour_of_day_encoded = np.cos((hour_of_day / 24) * 2 * np.pi) + else: + raise ValueError( + f"Invalid value of `component`: '{component}'. Expected one of: 'cos' or 'sin'." + " Please update the config accordingly." + ) + + if isinstance(hour_of_day_encoded, xr.DataArray): + # Add attributes + hour_of_day_encoded.name = "hour_of_day_" + component + hour_of_day_encoded.attrs[ + "long_name" + ] = f"{component.capitalize()} component of cyclically encoded hour of day" + hour_of_day_encoded.attrs["units"] = "1" + + return hour_of_day_encoded + + +def calculate_day_of_year(time, component): + """ + Function for calculating day of year features with a cyclic encoding + + Parameters + ---------- + time : Union[xr.DataArray, datetime.datetime] + Time + component: str + String indicating if the sine or cosine component of the encoding + should be returned + + Returns + ------- + day_of_year_encoded: Union[xr.DataArray, float] + sine or cosine of the day of year + """ + logger.info("Calculating day of year") + + # Get the day of year + if isinstance(time, xr.DataArray): + day_of_year = time.dt.dayofyear + elif isinstance(time, datetime.datetime): + day_of_year = time.timetuple().tm_yday + else: + raise TypeError( + "Expected an instance of xr.DataArray or datetime object," + f" but got {type(time)}." + ) + + # Cyclic encoding of day of year - use 366 to include leap years! + if component == "sin": + day_of_year_encoded = np.sin((day_of_year / 366) * 2 * np.pi) + elif component == "cos": + day_of_year_encoded = np.cos((day_of_year / 366) * 2 * np.pi) + else: + raise ValueError( + f"Invalid value of `component`: '{component}'. Expected one of: 'cos' or 'sin'." + " Please update the config accordingly." 
+ ) + + if isinstance(day_of_year_encoded, xr.DataArray): + # Add attributes + day_of_year_encoded.name = "day_of_year_" + component + day_of_year_encoded.attrs[ + "long_name" + ] = f"{component.capitalize()} component of cyclically encoded day of year" + day_of_year_encoded.attrs["units"] = "1" + + return day_of_year_encoded diff --git a/mllam_data_prep/ops/loading.py b/mllam_data_prep/ops/loading.py index f6bfc34..848f5b0 100644 --- a/mllam_data_prep/ops/loading.py +++ b/mllam_data_prep/ops/loading.py @@ -1,25 +1,25 @@ -import xarray as xr - - -def load_input_dataset(fp): - """ - Load the dataset - - Parameters - ---------- - fp : str - Filepath to the source dataset, for example the path to a zarr dataset - or a netCDF file (anything that is supported by `xarray.open_dataset` will work) - - Returns - ------- - ds: xr.Dataset - Source dataset - """ - - try: - ds = xr.open_zarr(fp) - except ValueError: - ds = xr.open_dataset(fp) - - return ds +import xarray as xr + + +def load_input_dataset(fp): + """ + Load the dataset + + Parameters + ---------- + fp : str + Filepath to the source dataset, for example the path to a zarr dataset + or a netCDF file (anything that is supported by `xarray.open_dataset` will work) + + Returns + ------- + ds: xr.Dataset + Source dataset + """ + + try: + ds = xr.open_zarr(fp) + except ValueError: + ds = xr.open_dataset(fp) + + return ds diff --git a/mllam_data_prep/ops/mapping.py b/mllam_data_prep/ops/mapping.py index 9482ff8..8d4fb5c 100644 --- a/mllam_data_prep/ops/mapping.py +++ b/mllam_data_prep/ops/mapping.py @@ -1,141 +1,141 @@ -from .stacking import stack_variables_as_coord_values, stack_variables_by_coord_values - - -def _check_for_malformed_list_arg(s): - if isinstance(s, str) and "," in s: - raise Exception( - "Rather than writing `{s}` to define a list you would `[{s}]` in the config file." - ) - - -def map_dims_and_variables(ds, dim_mapping, expected_input_var_dims): - """ - Map the input dimensions to the architecture dimensions - using the `dim_mapping` dictionary. Each key in the `dim_mapping` - describes the name of the architecture dimension to map to and the values - describe what to map from (through a `dict` named `input_dim_map`). - Finally, the function checks that each variable has the dimensions of - `expected_input_var_dims`. - - Each `input_dim_map` `dict` defines how to map the input dimensions with the following - entries: - - - 'dims': The list of dimensions in the input dataset to map to the - architecture dimension - - 'method': The method to use for mapping the variables to the - architecture dimension, with the following options: - - 'stack_variables_by_var_name': - map variables to coordinate values by stacking the variables along - the architecture dimension, the 'name' key should be the string - format to construct the new coordinate values for the architecture - dimension. Exactly one of this type of mapping should be used. - - 'stack': - used to map variables to the architecture dimension by stacking - the along the `dims` provided. 
- - 'rename' (or if the input_dim_map is a string naming the dimension to rename): - rename the provided dimension to the architecture dimension (only one - dimension must be given by `dims`) - - 'name_format': The string format to construct the new coordinate values - for the architecture dimension (only used for method 'stack_variables_by_var_name') - - Parameters - ---------- - ds : xr.Dataset - The dataset to map the dimensions and variables - dim_mapping : dict - The mapping of the input dimensions to the architecture - dimensions. - arch_dim : str - The name of the architecture dimension to map to - expected_input_var_dims : list - The list of dimensions that each variable in the input dataset - should have - - Returns - ------- - da: xr.DataArray - The dataset mapped to a single data-array with coordinates given by the keys - of the `dim_mapping` dictionary - """ - - # check that there is only one mapping defined going from the input variables - # store it so we do that last - dim_mapping = dim_mapping.copy() - variable_dim_mappings = {} - for arch_dim in list(dim_mapping.keys()): - if dim_mapping[arch_dim].method == "stack_variables_by_var_name": - variable_dim_mappings[arch_dim] = dim_mapping.pop(arch_dim) - if len(variable_dim_mappings) > 1: - raise ValueError( - "Only one mapping which requires stacking variables" - " into a single dataarray is allowed, found ones targeting" - f" the following arch dimensions: {list(variable_dim_mappings.keys())}" - ) - elif len(variable_dim_mappings) == 0: - raise Exception( - "At least one mapping should be defined for stacking variables, i.e. uses" - f" the method `stack_variables_by_var_name`. Current mapping is: {dim_mapping}" - ) - - # check that none of the variables have dims that are not in the expected_input_var_dims - for var_name in ds.data_vars: - if not set(ds[var_name].dims).issubset(expected_input_var_dims): - extra_dims = set(ds[var_name].dims) - set(expected_input_var_dims) - raise ValueError( - f"The variable {var_name} has dimensions {ds[var_name].dims} however the" - f" dimensions `{extra_dims}` are not in " - f" the `dims` defined for this input dataset: {expected_input_var_dims}" - ) - - # handle those mappings that involve just renaming or stacking dimensions - for arch_dim, input_dim_map in dim_mapping.items(): - method = input_dim_map.method - - if method == "rename": - source_dim = input_dim_map.dim - ds = ds.rename({source_dim: arch_dim}) - elif method == "stack": - source_dims = input_dim_map.dims - # when stacking we assume that the input_dims is a list of dimensions - # in the input dataset that we want to stack to create the architecture - # dimension, this is for example used for flatting the spatial dimensions - # into a single dimension representing the grid index - ds = ds.stack({arch_dim: source_dims}).reset_index(arch_dim) - else: - raise NotImplementedError(method) - - # Finally, we handle the stacking of variables to coordinate values. We - # might want to deal with variables that exist on multiple coordinate - # values that we want to stack over too. The dimensions to map from are - # expected to be given explicitly in the 'dims' key and the new string - # format to construct the new coordinate values is given in the 'name' key. 
-    try:
-        arch_dim, variable_dim_map = variable_dim_mappings.popitem()
-        dims = variable_dim_map.dims or []
-        _check_for_malformed_list_arg(dims)
-        name_format = variable_dim_map.name_format
-        if len(dims) == 0:
-            da = stack_variables_as_coord_values(
-                ds=ds, name_format=name_format, combined_dim_name=arch_dim
-            )
-        elif len(dims) == 1:
-            da = stack_variables_by_coord_values(
-                ds=ds,
-                coord=dims[0],
-                name_format=name_format,
-                combined_dim_name=arch_dim,
-            )
-        else:
-            # TODO: this will have to involved xrarrays MultiIndex, but lets leave
-            # this until we need it
-            raise NotImplementedError(len(dims))
-        # set a flag we can use later to identify which coordinate the variables
-        # were mapped into
-        da.attrs["variables_mapping_dim"] = arch_dim
-    except ValueError as ex:
-        raise Exception(
-            f"There was an issue handling the following mapping:\n{variable_dim_map}"
-            f"\n from variables {list(ds.data_vars)} and dims {list(ds.dims)}"
-        ) from ex
-
-    return da
+from .stacking import stack_variables_as_coord_values, stack_variables_by_coord_values
+
+
+def _check_for_malformed_list_arg(s):
+    if isinstance(s, str) and "," in s:
+        raise Exception(
+            f"Rather than writing `{s}` to define a list you would write `[{s}]` in the config file."
+        )
+
+
+def map_dims_and_variables(ds, dim_mapping, expected_input_var_dims):
+    """
+    Map the input dimensions to the architecture dimensions
+    using the `dim_mapping` dictionary. Each key in the `dim_mapping`
+    describes the name of the architecture dimension to map to and the values
+    describe what to map from (through a `dict` named `input_dim_map`).
+    Finally, the function checks that each variable has the dimensions of
+    `expected_input_var_dims`.
+
+    Each `input_dim_map` `dict` defines how to map the input dimensions with the following
+    entries:
+
+    - 'dims': The list of dimensions in the input dataset to map to the
+      architecture dimension
+    - 'method': The method to use for mapping the variables to the
+      architecture dimension, with the following options:
+        - 'stack_variables_by_var_name':
+            map variables to coordinate values by stacking the variables along
+            the architecture dimension; the 'name_format' key should be the string
+            format to construct the new coordinate values for the architecture
+            dimension. Exactly one mapping of this type should be used.
+        - 'stack':
+            used to map variables to the architecture dimension by stacking
+            them along the `dims` provided.
+        - 'rename' (or if the input_dim_map is a string naming the dimension to rename):
+            rename the provided dimension to the architecture dimension (exactly one
+            dimension must be given by `dim`)
+    - 'name_format': The string format to construct the new coordinate values
+      for the architecture dimension (only used for method 'stack_variables_by_var_name')
+
+    Parameters
+    ----------
+    ds : xr.Dataset
+        The dataset to map the dimensions and variables
+    dim_mapping : dict
+        The mapping of the input dimensions to the architecture
+        dimensions.
+    expected_input_var_dims : list
+        The list of dimensions that each variable in the input dataset
+        should have
+
+    Returns
+    -------
+    da: xr.DataArray
+        The dataset mapped to a single data-array with coordinates given by the keys
+        of the `dim_mapping` dictionary
+    """
+
+    # check that there is only one mapping defined going from the input variables
+    # and store it so we do that last
+    dim_mapping = dim_mapping.copy()
+    variable_dim_mappings = {}
+    for arch_dim in list(dim_mapping.keys()):
+        if dim_mapping[arch_dim].method == "stack_variables_by_var_name":
+            variable_dim_mappings[arch_dim] = dim_mapping.pop(arch_dim)
+    if len(variable_dim_mappings) > 1:
+        raise ValueError(
+            "Only one mapping which requires stacking variables"
+            " into a single dataarray is allowed, found ones targeting"
+            f" the following arch dimensions: {list(variable_dim_mappings.keys())}"
+        )
+    elif len(variable_dim_mappings) == 0:
+        raise Exception(
+            "At least one mapping should be defined for stacking variables, i.e. one using"
+            f" the method `stack_variables_by_var_name`. Current mapping is: {dim_mapping}"
+        )
+
+    # check that none of the variables have dims that are not in the expected_input_var_dims
+    for var_name in ds.data_vars:
+        if not set(ds[var_name].dims).issubset(expected_input_var_dims):
+            extra_dims = set(ds[var_name].dims) - set(expected_input_var_dims)
+            raise ValueError(
+                f"The variable {var_name} has dimensions {ds[var_name].dims} however the"
+                f" dimensions `{extra_dims}` are not in"
+                f" the `dims` defined for this input dataset: {expected_input_var_dims}"
+            )
+
+    # handle those mappings that involve just renaming or stacking dimensions
+    for arch_dim, input_dim_map in dim_mapping.items():
+        method = input_dim_map.method
+
+        if method == "rename":
+            source_dim = input_dim_map.dim
+            ds = ds.rename({source_dim: arch_dim})
+        elif method == "stack":
+            source_dims = input_dim_map.dims
+            # when stacking we assume that the input_dims is a list of dimensions
+            # in the input dataset that we want to stack to create the architecture
+            # dimension, this is for example used for flattening the spatial dimensions
+            # into a single dimension representing the grid index
+            ds = ds.stack({arch_dim: source_dims}).reset_index(arch_dim)
+        else:
+            raise NotImplementedError(method)
+
+    # Finally, we handle the stacking of variables to coordinate values. We
+    # might want to deal with variables that exist on multiple coordinate
+    # values that we want to stack over too. The dimensions to map from are
+    # expected to be given explicitly in the 'dims' key and the new string
+    # format to construct the new coordinate values is given in the 'name_format' key.
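+    # As a hypothetical illustration (names assumed here, not taken from the
+    # source): with name_format="{var_name}_l{level}" and dims=["level"],
+    # variables u and v on levels [50, 100] are stacked into the coordinate
+    # values ["u_l50", "u_l100", "v_l50", "v_l100"] along the architecture
+    # dimension.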
+    try:
+        arch_dim, variable_dim_map = variable_dim_mappings.popitem()
+        dims = variable_dim_map.dims or []
+        _check_for_malformed_list_arg(dims)
+        name_format = variable_dim_map.name_format
+        if len(dims) == 0:
+            da = stack_variables_as_coord_values(
+                ds=ds, name_format=name_format, combined_dim_name=arch_dim
+            )
+        elif len(dims) == 1:
+            da = stack_variables_by_coord_values(
+                ds=ds,
+                coord=dims[0],
+                name_format=name_format,
+                combined_dim_name=arch_dim,
+            )
+        else:
+            # TODO: this will have to involve xarray's MultiIndex, but let's leave
+            # this until we need it
+            raise NotImplementedError(len(dims))
+        # set a flag we can use later to identify which coordinate the variables
+        # were mapped into
+        da.attrs["variables_mapping_dim"] = arch_dim
+    except ValueError as ex:
+        raise Exception(
+            f"There was an issue handling the following mapping:\n{variable_dim_map}"
+            f"\n from variables {list(ds.data_vars)} and dims {list(ds.dims)}"
+        ) from ex
+
+    return da
diff --git a/mllam_data_prep/ops/selection.py b/mllam_data_prep/ops/selection.py
index 37b91c1..0f49711 100644
--- a/mllam_data_prep/ops/selection.py
+++ b/mllam_data_prep/ops/selection.py
@@ -1,119 +1,119 @@
-import datetime
-
-import pandas as pd
-
-from ..config import Range
-
-
-def _normalize_slice_startstop(s):
-    if isinstance(s, pd.Timestamp):
-        return s
-    elif isinstance(s, str):
-        try:
-            return pd.Timestamp(s)
-        except ValueError:
-            return s
-    else:
-        return s
-
-
-def _normalize_slice_step(s):
-    if isinstance(s, pd.Timedelta):
-        return s
-    elif isinstance(s, str):
-        try:
-            return pd.to_timedelta(s)
-        except ValueError:
-            return s
-    else:
-        return s
-
-
-def select_by_kwargs(ds, **coord_ranges):
-    """
-    Do `xr.Dataset.sel` on `ds` using the `coord_ranges` to select the coordinates, for each
-    entry in the dictionary, the key is the coordinate name and the value is the selection
-    to make, either given as 1) a list of values or a 2) dictionary with keys "start" and "end".
-    This functionally works like `xr.Dataset.sel` but can create slice objects for each
-    selection from the dictionary provided and also supports the use of ISO 8601 duration strings. In addition
-    the `step` size is used to check that the step size in the data is the same as the requested step size.
-
-    In future time interpolation and subsampling could be done here
-
-    Parameters
-    ----------
-    ds : xr.Dataset
-        Dataset to select from
-    coord_ranges : dict
-        Dictionary with the coordinate names as keys and the selection to make as values,
-        either a list of values or a dictionary with keys "start" and "end"
-        (and optionally "step" for the slice object)
-
-    Returns
-    -------
-    xr.Dataset
-        Dataset with the selection made
-    """
-
-    for coord, selection in coord_ranges.items():
-        if coord not in ds.coords:
-            raise ValueError(f"Coordinate {coord} not found in dataset")
-        if isinstance(selection, Range):
-            if selection.start is None and selection.end is None:
-                raise ValueError(
-                    f"Selection for coordinate {coord} must have either 'start' and 'end' given"
-                )
-            sel_start = _normalize_slice_startstop(selection.start)
-            sel_end = _normalize_slice_startstop(selection.end)
-            sel_step = _normalize_slice_step(selection.step)
-
-            assert sel_start != sel_end, "Start and end cannot be the same"
-
-            # we don't select with the step size for now, but simply check (below) that
-            # the step size in the data is the same as the requested step size
-            ds = ds.sel({coord: slice(sel_start, sel_end)})
-
-            if coord == "time":
-                check_point_in_dataset(coord, sel_start, ds)
-                check_point_in_dataset(coord, sel_end, ds)
-                if sel_step is not None:
-                    check_step(sel_step, coord, ds)
-
-            assert (
-                len(ds[coord]) > 0
-            ), f"You have selected an empty range {sel_start}:{sel_end} for coordinate {coord}"
-
-        elif isinstance(selection, list):
-            ds = ds.sel({coord: selection})
-        else:
-            raise NotImplementedError(
-                f"Selection for coordinate {coord} must be a list or a dict"
-            )
-    return ds
-
-
-def check_point_in_dataset(coord, point, ds):
-    """
-    check that the requested point is in the data.
-    """
-    if point is not None and point not in ds[coord].values:
-        raise ValueError(
-            f"Provided value for coordinate {coord} ({point}) is not in the data."
-        )
-
-
-def check_step(sel_step, coord, ds):
-    """
-    check that the step requested is exactly what the data has
-    """
-    all_steps = ds[coord].diff(dim=coord).values
-    first_step = all_steps[0].astype("timedelta64[s]").astype(datetime.timedelta)
-
-    if not all(all_steps[0] == all_steps):
-        raise ValueError(
-            f"Step size for coordinate {coord} is not constant: {all_steps}"
-        )
-    if sel_step != first_step:
-        raise ValueError(
-            f"Step size for coordinate {coord} is not the same as requested: {first_step} != {sel_step}"
-        )
+import datetime
+
+import pandas as pd
+
+from ..config import Range
+
+
+def _normalize_slice_startstop(s):
+    if isinstance(s, pd.Timestamp):
+        return s
+    elif isinstance(s, str):
+        try:
+            return pd.Timestamp(s)
+        except ValueError:
+            return s
+    else:
+        return s
+
+
+def _normalize_slice_step(s):
+    if isinstance(s, pd.Timedelta):
+        return s
+    elif isinstance(s, str):
+        try:
+            return pd.to_timedelta(s)
+        except ValueError:
+            return s
+    else:
+        return s
+
+
+def select_by_kwargs(ds, **coord_ranges):
+    """
+    Do `xr.Dataset.sel` on `ds` using `coord_ranges` to select along the given
+    coordinates. For each entry in the dictionary, the key is the coordinate name
+    and the value is the selection to make, given either as 1) a list of values or
+    2) a dictionary with keys "start" and "end". This functionally works like
+    `xr.Dataset.sel` but can create slice objects for each selection from the
+    dictionary provided and also supports the use of ISO 8601 duration strings.
+    In addition, the `step` size is used to check that the step size in the data
+    is the same as the requested step size.
+
+    In the future, time interpolation and subsampling could be done here.
+
+    Parameters
+    ----------
+    ds : xr.Dataset
+        Dataset to select from
+    coord_ranges : dict
+        Dictionary with the coordinate names as keys and the selection to make as values,
+        either a list of values or a dictionary with keys "start" and "end"
+        (and optionally "step" for the slice object)
+
+    Returns
+    -------
+    xr.Dataset
+        Dataset with the selection made
+    """
+
+    for coord, selection in coord_ranges.items():
+        if coord not in ds.coords:
+            raise ValueError(f"Coordinate {coord} not found in dataset")
+        if isinstance(selection, Range):
+            if selection.start is None and selection.end is None:
+                raise ValueError(
+                    f"Selection for coordinate {coord} must have at least one of 'start' and 'end' given"
+                )
+            sel_start = _normalize_slice_startstop(selection.start)
+            sel_end = _normalize_slice_startstop(selection.end)
+            sel_step = _normalize_slice_step(selection.step)
+
+            assert sel_start != sel_end, "Start and end cannot be the same"
+
+            # we don't select with the step size for now, but simply check (below) that
+            # the step size in the data is the same as the requested step size
+            ds = ds.sel({coord: slice(sel_start, sel_end)})
+
+            if coord == "time":
+                check_point_in_dataset(coord, sel_start, ds)
+                check_point_in_dataset(coord, sel_end, ds)
+                if sel_step is not None:
+                    check_step(sel_step, coord, ds)
+
+            assert (
+                len(ds[coord]) > 0
+            ), f"You have selected an empty range {sel_start}:{sel_end} for coordinate {coord}"
+
+        elif isinstance(selection, list):
+            ds = ds.sel({coord: selection})
+        else:
+            raise NotImplementedError(
+                f"Selection for coordinate {coord} must be a list or a dict"
+            )
+    return ds
+
+
+def check_point_in_dataset(coord, point, ds):
+    """
+    Check that the requested point is in the data.
+    """
+    if point is not None and point not in ds[coord].values:
+        raise ValueError(
+            f"Provided value for coordinate {coord} ({point}) is not in the data."
+        )
+
+
+def check_step(sel_step, coord, ds):
+    """
+    Check that the requested step is exactly what the data has.
+    """
+    all_steps = ds[coord].diff(dim=coord).values
+    first_step = all_steps[0].astype("timedelta64[s]").astype(datetime.timedelta)
+
+    if not all(all_steps[0] == all_steps):
+        raise ValueError(
+            f"Step size for coordinate {coord} is not constant: {all_steps}"
+        )
+    if sel_step != first_step:
+        raise ValueError(
+            f"Step size for coordinate {coord} is not the same as requested: {first_step} != {sel_step}"
+        )
diff --git a/mllam_data_prep/ops/stacking.py b/mllam_data_prep/ops/stacking.py
index a56e0fd..fe37da1 100644
--- a/mllam_data_prep/ops/stacking.py
+++ b/mllam_data_prep/ops/stacking.py
@@ -1,132 +1,132 @@
-import xarray as xr
-
-
-def stack_variables_as_coord_values(ds, name_format, combined_dim_name):
-    """
-    combine all variables in an xr.Dataset into a single xr.DataArray
-    by stacking the variables along a new coordinate with the name given
-    by `name_format` (which should include the variable name, `var_name`)
-
-
-    Parameters
-    ----------
-    ds : xr.Dataset
-        source dataset with variables to stack
-    name_format : str
-        format string to construct the new coordinate values for the
-        stacked variables, e.g.
"{var_name}_level" - combined_dim_name : str - name of the new dimension to create for the stacked variables, for - example "forcing_feature" - - Returns - ------- - da_combined : xr.DataArray - The combined dataset with all variables stacked along the new - coordinate - """ - if "{var_name}" not in name_format: - raise ValueError( - "The name_format should include the variable name as" - " {var_name} to construct the new coordinate values" - ) - dataarrays = [] - for var_name in list(ds.data_vars): - da = ds[var_name].expand_dims(combined_dim_name) - da.coords[combined_dim_name] = [name_format.format(var_name=var_name)] - - # add extra coordinates (spanning along `combined_dim_name`) for - # keeping track of `units` and `long_name` attributes - for attr in ["units", "long_name"]: - da_attr = xr.DataArray( - [ds[var_name].attrs.get(attr, "")], - dims=[combined_dim_name], - coords={combined_dim_name: da.coords[combined_dim_name]}, - ) - da.coords[f"{combined_dim_name}_{attr}"] = da_attr - dataarrays.append(da) - da_combined = xr.concat(dataarrays, dim=combined_dim_name) - - return da_combined - - -def stack_variables_by_coord_values(ds, coord, name_format, combined_dim_name): - """ - combine all variables in an xr.Dataset on all coordinate values of `coord` - into a single xr.DataArray - - for example for a set of variables in a dataset, e.g. [u, v, t], on a set - of "levels" in a coordinate [50, 100] the output will combine all variables - into a single xr.DataArray with coordinate values given by the name_format - e.g. [u_l50, u_l100, v_l50, v_l100, t_l50, t_l100] if the format was - "{var_name}_l{level}" - - This is implemented by: - 1. iterating over all variables in the dataset - 2. for each variable, we create a new set of coordinate values which - include the variable name and the coordinate values, and rename the - coordinate to the `combined_dim_name` - 3. stack all the variables along the `combined_dim_name` dimension to - produce a single xr.DataArray - - In addition to the stacked variables, we also add extra coordinates for - keeping track of `units` and `long_name` attributes for each variable in - `{combined_dim_name}_units` and `{combined_dim_name}_long_name` - respectively. 
- - Parameters - ---------- - ds : xr.Dataset - dataset with variables as data_vars and `level_dim` as a coordinate - coord : str - name of the coordinate that should mapped over - name_format : str - format string to construct the new coordinate values for the - stacked levels - combined_dim_name : str - name of the new dimension to create for the stacked variables - - Returns - ------- - da_combined : xr.DataArray - The combined dataset with the stacked variables along the `coord` - """ - if "{var_name}" not in name_format: - raise ValueError( - "The name_format should include the variable name as" - " {var_name} to construct the new coordinate values" - ) - if f"{{{coord}}}" not in name_format: - raise ValueError( - "The name_format should include the coordinate name as" - f" {{{coord}}} to construct the new coordinate values" - ) - if coord not in ds.coords: - raise ValueError( - f"The coordinate {coord} is not in the dataset, found coords: {list(ds.coords)}" - ) - - datasets = [] - for var_name in list(ds.data_vars): - da = ds[var_name] - coord_values = da.coords[coord].values - new_coord_values = [ - name_format.format(var_name=var_name, **{coord: val}) - for val in coord_values - ] - da = da.assign_coords({coord: new_coord_values}).rename( - {coord: combined_dim_name} - ) - - # add extra coordinates for keeping track of `units` and `long_name` attributes - for attr in ["units", "long_name"]: - da_attr = xr.DataArray( - [ds[var_name].attrs.get(attr, "")] * len(coord_values), - dims=[combined_dim_name], - ) - da.coords[f"{combined_dim_name}_{attr}"] = da_attr - datasets.append(da) - - da_combined = xr.concat(datasets, dim=combined_dim_name) - - return da_combined +import xarray as xr + + +def stack_variables_as_coord_values(ds, name_format, combined_dim_name): + """ + combine all variables in an xr.Dataset into a single xr.DataArray + by stacking the variables along a new coordinate with the name given + by `name_format` (which should include the variable name, `var_name`) + + + Parameters + ---------- + ds : xr.Dataset + source dataset with variables to stack + name_format : str + format string to construct the new coordinate values for the + stacked variables, e.g. 
"{var_name}_level" + combined_dim_name : str + name of the new dimension to create for the stacked variables, for + example "forcing_feature" + + Returns + ------- + da_combined : xr.DataArray + The combined dataset with all variables stacked along the new + coordinate + """ + if "{var_name}" not in name_format: + raise ValueError( + "The name_format should include the variable name as" + " {var_name} to construct the new coordinate values" + ) + dataarrays = [] + for var_name in list(ds.data_vars): + da = ds[var_name].expand_dims(combined_dim_name) + da.coords[combined_dim_name] = [name_format.format(var_name=var_name)] + + # add extra coordinates (spanning along `combined_dim_name`) for + # keeping track of `units` and `long_name` attributes + for attr in ["units", "long_name"]: + da_attr = xr.DataArray( + [ds[var_name].attrs.get(attr, "")], + dims=[combined_dim_name], + coords={combined_dim_name: da.coords[combined_dim_name]}, + ) + da.coords[f"{combined_dim_name}_{attr}"] = da_attr + dataarrays.append(da) + da_combined = xr.concat(dataarrays, dim=combined_dim_name) + + return da_combined + + +def stack_variables_by_coord_values(ds, coord, name_format, combined_dim_name): + """ + combine all variables in an xr.Dataset on all coordinate values of `coord` + into a single xr.DataArray + + for example for a set of variables in a dataset, e.g. [u, v, t], on a set + of "levels" in a coordinate [50, 100] the output will combine all variables + into a single xr.DataArray with coordinate values given by the name_format + e.g. [u_l50, u_l100, v_l50, v_l100, t_l50, t_l100] if the format was + "{var_name}_l{level}" + + This is implemented by: + 1. iterating over all variables in the dataset + 2. for each variable, we create a new set of coordinate values which + include the variable name and the coordinate values, and rename the + coordinate to the `combined_dim_name` + 3. stack all the variables along the `combined_dim_name` dimension to + produce a single xr.DataArray + + In addition to the stacked variables, we also add extra coordinates for + keeping track of `units` and `long_name` attributes for each variable in + `{combined_dim_name}_units` and `{combined_dim_name}_long_name` + respectively. 
+
+    Parameters
+    ----------
+    ds : xr.Dataset
+        dataset with variables as data_vars and `coord` as a coordinate
+    coord : str
+        name of the coordinate that should be mapped over
+    name_format : str
+        format string to construct the new coordinate values for the
+        stacked levels
+    combined_dim_name : str
+        name of the new dimension to create for the stacked variables
+
+    Returns
+    -------
+    da_combined : xr.DataArray
+        The combined dataset with the stacked variables along the `coord`
+    """
+    if "{var_name}" not in name_format:
+        raise ValueError(
+            "The name_format should include the variable name as"
+            " {var_name} to construct the new coordinate values"
+        )
+    if f"{{{coord}}}" not in name_format:
+        raise ValueError(
+            "The name_format should include the coordinate name as"
+            f" {{{coord}}} to construct the new coordinate values"
+        )
+    if coord not in ds.coords:
+        raise ValueError(
+            f"The coordinate {coord} is not in the dataset, found coords: {list(ds.coords)}"
+        )
+
+    datasets = []
+    for var_name in list(ds.data_vars):
+        da = ds[var_name]
+        coord_values = da.coords[coord].values
+        new_coord_values = [
+            name_format.format(var_name=var_name, **{coord: val})
+            for val in coord_values
+        ]
+        da = da.assign_coords({coord: new_coord_values}).rename(
+            {coord: combined_dim_name}
+        )
+
+        # add extra coordinates for keeping track of `units` and `long_name` attributes
+        for attr in ["units", "long_name"]:
+            da_attr = xr.DataArray(
+                [ds[var_name].attrs.get(attr, "")] * len(coord_values),
+                dims=[combined_dim_name],
+            )
+            da.coords[f"{combined_dim_name}_{attr}"] = da_attr
+        datasets.append(da)
+
+    da_combined = xr.concat(datasets, dim=combined_dim_name)
+
+    return da_combined
diff --git a/mllam_data_prep/ops/statistics.py b/mllam_data_prep/ops/statistics.py
index 10031c2..d0b0cb9 100644
--- a/mllam_data_prep/ops/statistics.py
+++ b/mllam_data_prep/ops/statistics.py
@@ -1,52 +1,52 @@
-from typing import Dict
-
-import xarray as xr
-
-from ..config import Statistics
-
-
-def calc_stats(
-    ds: xr.Dataset, statistics_config: Statistics, splitting_dim: str
-) -> Dict[str, xr.Dataset]:
-    """
-    Calculate statistics for a given DataArray by applying the operations
-    specified in the Statistics object and reducing over the dimensions
-    specified in the Statistics object.
-
-    Parameters
-    ----------
-    ds : xr.Dataset
-        Dataset to calculate statistics for
-    statistics_config : Statistics
-        Configuration object specifying the operations and dimensions to reduce over
-    splitting_dim : str
-        Dimension along which splits are made, this is used to calculate differences
-        for operations prefixed with "diff_", for example "diff_mean" or "diff_std".
-        Only the variables which actually span along the splitting_dim will be included
-        in the output.
-
-    Returns
-    -------
-    stats : Dict[str, xr.Dataset]
-        Dictionary with the operation names as keys and the calculated statistics as values
-    """
-    stats = {}
-    for op_split in statistics_config.ops:
-        try:
-            pre_op, op = op_split.split("_")
-        except ValueError:
-            op = op_split
-            pre_op = None
-
-        if pre_op is not None:
-            if pre_op == "diff":
-                # subset to select only the variable which have the splitting_dim
-                vars_to_keep = [v for v in ds.data_vars if splitting_dim in ds[v].dims]
-                ds = ds[vars_to_keep].diff(dim=splitting_dim)
-            else:
-                raise NotImplementedError(pre_op)
-
-        fn = getattr(ds, op)
-        stats[op_split] = fn(dim=statistics_config.dims)
-
-    return stats
+from typing import Dict
+
+import xarray as xr
+
+from ..config import Statistics
+
+
+def calc_stats(
+    ds: xr.Dataset, statistics_config: Statistics, splitting_dim: str
+) -> Dict[str, xr.Dataset]:
+    """
+    Calculate statistics for a given dataset by applying the operations
+    specified in the Statistics object and reducing over the dimensions
+    specified in the Statistics object.
+
+    Parameters
+    ----------
+    ds : xr.Dataset
+        Dataset to calculate statistics for
+    statistics_config : Statistics
+        Configuration object specifying the operations and dimensions to reduce over
+    splitting_dim : str
+        Dimension along which splits are made; this is used to calculate differences
+        for operations prefixed with "diff_", for example "diff_mean" or "diff_std".
+        Only the variables which actually span along the splitting_dim will be included
+        in the output.
+
+    Returns
+    -------
+    stats : Dict[str, xr.Dataset]
+        Dictionary with the operation names as keys and the calculated statistics as values
+    """
+    stats = {}
+    for op_split in statistics_config.ops:
+        try:
+            pre_op, op = op_split.split("_")
+        except ValueError:
+            op = op_split
+            pre_op = None
+
+        if pre_op is not None:
+            if pre_op == "diff":
+                # subset to select only the variables which have the splitting_dim;
+                # use a local reference so that later ops still see the full dataset
+                vars_to_keep = [v for v in ds.data_vars if splitting_dim in ds[v].dims]
+                ds_op = ds[vars_to_keep].diff(dim=splitting_dim)
+            else:
+                raise NotImplementedError(pre_op)
+        else:
+            ds_op = ds
+
+        fn = getattr(ds_op, op)
+        stats[op_split] = fn(dim=statistics_config.dims)
+
+    return stats
diff --git a/mllam_data_prep/ops/subsetting.py b/mllam_data_prep/ops/subsetting.py
index 80f2ce1..ee2f6b0 100644
--- a/mllam_data_prep/ops/subsetting.py
+++ b/mllam_data_prep/ops/subsetting.py
@@ -1,50 +1,50 @@
-def extract_variable(ds, var_name, coords_to_sample=dict()):
-    """
-    Extract specified variable from the provided input dataset. If
-    coordinates for subsetting are defined, then subset the variable along
-    them and check coordinate units.
-
-    Parameters
-    ----------
-    ds : xr.Dataset
-        Input dataset
-    var_name : Union[Dict, List]
-        Either a list or dictionary with variables to extract.
-        If a dictionary the keys are the variable name and the values are
-        entries for each coordinate and coordinate values to extract
-    coords_to_sample: Dict
-        Optional argument for subsetting/sampling along the specified
-        coordinates
-
-    Returns
-    ----------
-    da: xr.DataArray
-        Extracted variable (subsetted along the specified coordinates)
-    """
-
-    try:
-        da = ds[var_name]
-    except KeyError as ex:
-        raise KeyError(
-            f"Could not find the variable `{var_name}` in the dataset. "
-            f"The available variables are {list(ds.data_vars)}"
-        ) from ex
-
-    for coord, sampling in coords_to_sample.items():
-        coord_values = sampling.values
-        try:
-            da = da.sel(**{coord: coord_values})
-        except KeyError as ex:
-            raise KeyError(
-                f"Could not find the all coordinate values `{coord_values}` in "
-                f"coordinate `{coord}` in the dataset"
-            ) from ex
-        expected_units = sampling.units
-        coord_units = da[coord].attrs.get("units", None)
-        if coord_units is not None and coord_units != expected_units:
-            raise ValueError(
-                f"Expected units {expected_units} for coordinate {coord}"
-                f" in variable {var_name} but got {coord_units}"
-            )
-
-    return da
+def extract_variable(ds, var_name, coords_to_sample=dict()):
+    """
+    Extract the specified variable from the provided input dataset. If
+    coordinates for subsetting are defined, then subset the variable along
+    them and check the coordinate units.
+
+    Parameters
+    ----------
+    ds : xr.Dataset
+        Input dataset
+    var_name : str
+        Name of the variable to extract
+    coords_to_sample: Dict
+        Optional argument for subsetting/sampling along the specified
+        coordinates
+
+    Returns
+    -------
+    da: xr.DataArray
+        Extracted variable (subsetted along the specified coordinates)
+    """
+
+    try:
+        da = ds[var_name]
+    except KeyError as ex:
+        raise KeyError(
+            f"Could not find the variable `{var_name}` in the dataset. "
+            f"The available variables are {list(ds.data_vars)}"
+        ) from ex
+
+    for coord, sampling in coords_to_sample.items():
+        coord_values = sampling.values
+        try:
+            da = da.sel(**{coord: coord_values})
+        except KeyError as ex:
+            raise KeyError(
+                f"Could not find all the coordinate values `{coord_values}` in "
+                f"coordinate `{coord}` in the dataset"
+            ) from ex
+        expected_units = sampling.units
+        coord_units = da[coord].attrs.get("units", None)
+        if coord_units is not None and coord_units != expected_units:
+            raise ValueError(
+                f"Expected units {expected_units} for coordinate {coord}"
+                f" in variable {var_name} but got {coord_units}"
+            )
+
+    return da
diff --git a/pdm.lock b/pdm.lock
index 1b01baa..73a95bf 100644
--- a/pdm.lock
+++ b/pdm.lock
@@ -1,1712 +1,1712 @@
-# This file is @generated by PDM.
-# It is not intended for manual editing.
-
-[metadata]
-groups = ["default", "dev"]
-strategy = ["inherit_metadata"]
-lock_version = "4.5.0"
-content_hash = "sha256:a6e6fc954a417649253296c4f4639ed6c27c73a11d2a491d7f3d95c084692963"
-
-[[metadata.targets]]
-requires_python = ">=3.9"
-
-[[package]]
-name = "aiohappyeyeballs"
-version = "2.6.1"
-requires_python = ">=3.9"
-summary = "Happy Eyeballs for asyncio"
-groups = ["default"]
-files = [
-    {file = "aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8"},
-    {file = "aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558"},
-]
-
-[[package]]
-name = "aiohttp"
-version = "3.11.14"
-requires_python = ">=3.9"
-summary = "Async http client/server framework (asyncio)"
-groups = ["default"]
-dependencies = [
-    "aiohappyeyeballs>=2.3.0",
-    "aiosignal>=1.1.2",
-    "async-timeout<6.0,>=4.0; python_version < \"3.11\"",
-    "attrs>=17.3.0",
-    "frozenlist>=1.1.1",
-    "multidict<7.0,>=4.5",
-    "propcache>=0.2.0",
-    "yarl<2.0,>=1.17.0",
-]
-files = [
-    {file = "aiohttp-3.11.14-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e2bc827c01f75803de77b134afdbf74fa74b62970eafdf190f3244931d7a5c0d"},
-    {file = "aiohttp-3.11.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e365034c5cf6cf74f57420b57682ea79e19eb29033399dd3f40de4d0171998fa"},
-    {file = "aiohttp-3.11.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c32593ead1a8c6aabd58f9d7ee706e48beac796bb0cb71d6b60f2c1056f0a65f"},
-    {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4e7c7ec4146a94a307ca4f112802a8e26d969018fabed526efc340d21d3e7d0"},
-    {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8b2df9feac55043759aa89f722a967d977d80f8b5865a4153fc41c93b957efc"},
-    {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c7571f99525c76a6280f5fe8e194eeb8cb4da55586c3c61c59c33a33f10cfce7"},
-    {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b59d096b5537ec7c85954cb97d821aae35cfccce3357a2cafe85660cc6295628"},
-    {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b42dbd097abb44b3f1156b4bf978ec5853840802d6eee2784857be11ee82c6a0"},
-    {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b05774864c87210c531b48dfeb2f7659407c2dda8643104fb4ae5e2c311d12d9"},
-    {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4e2e8ef37d4bc110917d038807ee3af82700a93ab2ba5687afae5271b8bc50ff"},
-    {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e9faafa74dbb906b2b6f3eb9942352e9e9db8d583ffed4be618a89bd71a4e914"},
-    {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:7e7abe865504f41b10777ac162c727af14e9f4db9262e3ed8254179053f63e6d"},
-    {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:4848ae31ad44330b30f16c71e4f586cd5402a846b11264c412de99fa768f00f3"},
-    {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2d0b46abee5b5737cb479cc9139b29f010a37b1875ee56d142aefc10686a390b"},
-    {file = "aiohttp-3.11.14-cp310-cp310-win32.whl", hash = "sha256:a0d2c04a623ab83963576548ce098baf711a18e2c32c542b62322a0b4584b990"},
-    {file = "aiohttp-3.11.14-cp310-cp310-win_amd64.whl", hash = "sha256:5409a59d5057f2386bb8b8f8bbcfb6e15505cedd8b2445db510563b5d7ea1186"},
-    {file = "aiohttp-3.11.14-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f296d637a50bb15fb6a229fbb0eb053080e703b53dbfe55b1e4bb1c5ed25d325"},
-    {file = "aiohttp-3.11.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ec6cd1954ca2bbf0970f531a628da1b1338f594bf5da7e361e19ba163ecc4f3b"},
-    {file = "aiohttp-3.11.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:572def4aad0a4775af66d5a2b5923c7de0820ecaeeb7987dcbccda2a735a993f"},
-    {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c68e41c4d576cd6aa6c6d2eddfb32b2acfb07ebfbb4f9da991da26633a3db1a"},
-    {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b8bbfc8111826aa8363442c0fc1f5751456b008737ff053570f06a151650b3"},
-    {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b0a200e85da5c966277a402736a96457b882360aa15416bf104ca81e6f5807b"},
-    {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d173c0ac508a2175f7c9a115a50db5fd3e35190d96fdd1a17f9cb10a6ab09aa1"},
-    {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:413fe39fd929329f697f41ad67936f379cba06fcd4c462b62e5b0f8061ee4a77"},
-    {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65c75b14ee74e8eeff2886321e76188cbe938d18c85cff349d948430179ad02c"},
-    {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:321238a42ed463848f06e291c4bbfb3d15ba5a79221a82c502da3e23d7525d06"},
-    {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:59a05cdc636431f7ce843c7c2f04772437dd816a5289f16440b19441be6511f1"},
-    {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:daf20d9c3b12ae0fdf15ed92235e190f8284945563c4b8ad95b2d7a31f331cd3"},
-    {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:05582cb2d156ac7506e68b5eac83179faedad74522ed88f88e5861b78740dc0e"},
-    {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:12c5869e7ddf6b4b1f2109702b3cd7515667b437da90a5a4a50ba1354fe41881"},
-    {file = "aiohttp-3.11.14-cp311-cp311-win32.whl", hash = "sha256:92868f6512714efd4a6d6cb2bfc4903b997b36b97baea85f744229f18d12755e"},
-    {file = "aiohttp-3.11.14-cp311-cp311-win_amd64.whl", hash = "sha256:bccd2cb7aa5a3bfada72681bdb91637094d81639e116eac368f8b3874620a654"},
-    {file = "aiohttp-3.11.14-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:70ab0f61c1a73d3e0342cedd9a7321425c27a7067bebeeacd509f96695b875fc"},
-    {file = "aiohttp-3.11.14-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:602d4db80daf4497de93cb1ce00b8fc79969c0a7cf5b67bec96fa939268d806a"},
-    {file = "aiohttp-3.11.14-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a8a0d127c10b8d89e69bbd3430da0f73946d839e65fec00ae48ca7916a31948"},
-    {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9f835cdfedcb3f5947304e85b8ca3ace31eef6346d8027a97f4de5fb687534"},
-    {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8aa5c68e1e68fff7cd3142288101deb4316b51f03d50c92de6ea5ce646e6c71f"},
-    {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b512f1de1c688f88dbe1b8bb1283f7fbeb7a2b2b26e743bb2193cbadfa6f307"},
-    {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc9253069158d57e27d47a8453d8a2c5a370dc461374111b5184cf2f147a3cc3"},
-    {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b2501f1b981e70932b4a552fc9b3c942991c7ae429ea117e8fba57718cdeed0"},
-    {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:28a3d083819741592685762d51d789e6155411277050d08066537c5edc4066e6"},
-    {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:0df3788187559c262922846087e36228b75987f3ae31dd0a1e5ee1034090d42f"},
-    {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e73fa341d8b308bb799cf0ab6f55fc0461d27a9fa3e4582755a3d81a6af8c09"},
-    {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:51ba80d473eb780a329d73ac8afa44aa71dfb521693ccea1dea8b9b5c4df45ce"},
-    {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8d1dd75aa4d855c7debaf1ef830ff2dfcc33f893c7db0af2423ee761ebffd22b"},
-    {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41cf0cefd9e7b5c646c2ef529c8335e7eafd326f444cc1cdb0c47b6bc836f9be"},
-    {file = "aiohttp-3.11.14-cp312-cp312-win32.whl", hash = "sha256:948abc8952aff63de7b2c83bfe3f211c727da3a33c3a5866a0e2cf1ee1aa950f"},
-    {file = "aiohttp-3.11.14-cp312-cp312-win_amd64.whl", hash = "sha256:3b420d076a46f41ea48e5fcccb996f517af0d406267e31e6716f480a3d50d65c"},
-    {file = "aiohttp-3.11.14-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d14e274828561db91e4178f0057a915f3af1757b94c2ca283cb34cbb6e00b50"},
-    {file = "aiohttp-3.11.14-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f30fc72daf85486cdcdfc3f5e0aea9255493ef499e31582b34abadbfaafb0965"},
-    {file = "aiohttp-3.11.14-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4edcbe34e6dba0136e4cabf7568f5a434d89cc9de5d5155371acda275353d228"},
-    {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a7169ded15505f55a87f8f0812c94c9412623c744227b9e51083a72a48b68a5"},
-    {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad1f2fb9fe9b585ea4b436d6e998e71b50d2b087b694ab277b30e060c434e5db"},
-    {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:20412c7cc3720e47a47e63c0005f78c0c2370020f9f4770d7fc0075f397a9fb0"},
-    {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dd9766da617855f7e85f27d2bf9a565ace04ba7c387323cd3e651ac4329db91"},
-    {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:599b66582f7276ebefbaa38adf37585e636b6a7a73382eb412f7bc0fc55fb73d"},
-    {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b41693b7388324b80f9acfabd479bd1c84f0bc7e8f17bab4ecd9675e9ff9c734"},
-    {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:86135c32d06927339c8c5e64f96e4eee8825d928374b9b71a3c42379d7437058"},
-    {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:04eb541ce1e03edc1e3be1917a0f45ac703e913c21a940111df73a2c2db11d73"},
-    {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dc311634f6f28661a76cbc1c28ecf3b3a70a8edd67b69288ab7ca91058eb5a33"},
-    {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:69bb252bfdca385ccabfd55f4cd740d421dd8c8ad438ded9637d81c228d0da49"},
-    {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2b86efe23684b58a88e530c4ab5b20145f102916bbb2d82942cafec7bd36a647"},
-    {file = "aiohttp-3.11.14-cp313-cp313-win32.whl", hash = "sha256:b9c60d1de973ca94af02053d9b5111c4fbf97158e139b14f1be68337be267be6"},
-    {file = "aiohttp-3.11.14-cp313-cp313-win_amd64.whl", hash = "sha256:0a29be28e60e5610d2437b5b2fed61d6f3dcde898b57fb048aa5079271e7f6f3"},
-    {file = "aiohttp-3.11.14-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:14fc03508359334edc76d35b2821832f092c8f092e4b356e74e38419dfe7b6de"},
-    {file = "aiohttp-3.11.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:92007c89a8cb7be35befa2732b0b32bf3a394c1b22ef2dff0ef12537d98a7bda"},
-    {file = "aiohttp-3.11.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6d3986112e34eaa36e280dc8286b9dd4cc1a5bcf328a7f147453e188f6fe148f"},
-    {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:749f1eb10e51dbbcdba9df2ef457ec060554842eea4d23874a3e26495f9e87b1"},
-    {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:781c8bd423dcc4641298c8c5a2a125c8b1c31e11f828e8d35c1d3a722af4c15a"},
-    {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:997b57e38aa7dc6caab843c5e042ab557bc83a2f91b7bd302e3c3aebbb9042a1"},
-    {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a8b0321e40a833e381d127be993b7349d1564b756910b28b5f6588a159afef3"},
-    {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8778620396e554b758b59773ab29c03b55047841d8894c5e335f12bfc45ebd28"},
-    {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e906da0f2bcbf9b26cc2b144929e88cb3bf943dd1942b4e5af066056875c7618"},
-    {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:87f0e003fb4dd5810c7fbf47a1239eaa34cd929ef160e0a54c570883125c4831"},
-    {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:7f2dadece8b85596ac3ab1ec04b00694bdd62abc31e5618f524648d18d9dd7fa"},
-    {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:fe846f0a98aa9913c2852b630cd39b4098f296e0907dd05f6c7b30d911afa4c3"},
-    {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ced66c5c6ad5bcaf9be54560398654779ec1c3695f1a9cf0ae5e3606694a000a"},
-    {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a40087b82f83bd671cbeb5f582c233d196e9653220404a798798bfc0ee189fff"},
-    {file = "aiohttp-3.11.14-cp39-cp39-win32.whl", hash = "sha256:95d7787f2bcbf7cb46823036a8d64ccfbc2ffc7d52016b4044d901abceeba3db"},
-    {file = "aiohttp-3.11.14-cp39-cp39-win_amd64.whl", hash = "sha256:22a8107896877212130c58f74e64b77f7007cb03cea8698be317272643602d45"},
-    {file = "aiohttp-3.11.14.tar.gz", hash = "sha256:d6edc538c7480fa0a3b2bdd705f8010062d74700198da55d16498e1b49549b9c"},
-]
-
-[[package]]
-name = "aiosignal"
-version = "1.3.2"
-requires_python = ">=3.9"
-summary = "aiosignal: a list of registered asynchronous callbacks"
-groups = ["default"]
-dependencies = [
-    "frozenlist>=1.1.0",
-]
-files = [
-    {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"},
-    {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"},
-]
-
-[[package]]
-name = "asciitree"
-version = "0.3.3"
-summary = "Draws ASCII trees."
-groups = ["default"]
-files = [
-    {file = "asciitree-0.3.3.tar.gz", hash = "sha256:4aa4b9b649f85e3fcb343363d97564aa1fb62e249677f2e18a96765145cc0f6e"},
-]
-
-[[package]]
-name = "asttokens"
-version = "3.0.0"
-requires_python = ">=3.8"
-summary = "Annotate AST trees with source code positions"
-groups = ["dev"]
-marker = "python_version > \"3.6\""
-files = [
-    {file = "asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"},
-    {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"},
-]
-
-[[package]]
-name = "async-timeout"
-version = "5.0.1"
-requires_python = ">=3.8"
-summary = "Timeout context manager for asyncio programs"
-groups = ["default"]
-marker = "python_version < \"3.11\""
-files = [
-    {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"},
-    {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"},
-]
-
-[[package]]
-name = "attrs"
-version = "25.3.0"
-requires_python = ">=3.8"
-summary = "Classes Without Boilerplate"
-groups = ["default"]
-files = [
-    {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"},
-    {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"},
-]
-
-[[package]]
-name = "certifi"
-version = "2025.1.31"
-requires_python = ">=3.6"
-summary = "Python package for providing Mozilla's CA Bundle."
-groups = ["default"]
-files = [
-    {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"},
-    {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"},
-]
-
-[[package]]
-name = "cfgv"
-version = "3.4.0"
-requires_python = ">=3.8"
-summary = "Validate configuration and produce human readable error messages."
-groups = ["dev"]
-files = [
-    {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
-    {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
-]
-
-[[package]]
-name = "charset-normalizer"
-version = "3.4.1"
-requires_python = ">=3.7"
-summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
-groups = ["default"]
-files = [
-    {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"},
-    {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"},
-    {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"},
-    {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"},
-    {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"},
-    {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"},
-    {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"},
-    {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"},
-    {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"},
-    {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"},
-    {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"},
-    {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"},
-    {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"},
-    {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"},
-    {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"},
-    {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"},
-    {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"},
-    {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"},
-    {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"},
-    {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"},
-    {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"},
-    {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"},
-    {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"},
-    {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"},
-    {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"},
-    {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"},
-    {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"},
-    {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"},
-    {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"},
-    {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"},
-    {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"},
-    {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"},
-    {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"},
-    {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"},
-    {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"},
-    {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"},
-    {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"},
-    {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"},
-    {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"},
-    {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"},
-    {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"},
-    {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"},
-    {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"},
-    {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"},
-    {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"},
-    {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"},
-    {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"},
-    {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"},
-    {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"},
-    {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"},
-    {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"},
-    {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"},
-    {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"},
-    {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"},
-    {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"},
-    {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"},
-    {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"},
-    {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"},
-    {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"},
-    {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"},
-    {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"},
-    {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"},
-    {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"},
-    {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"},
-    {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"},
-    {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"},
-    {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"},
-]
-
-[[package]]
-name = "click"
-version = "8.1.8"
-requires_python = ">=3.7"
-summary = "Composable command line interface toolkit"
-groups = ["default"]
-dependencies = [
-    "colorama; platform_system == \"Windows\"",
-    "importlib-metadata; python_version < \"3.8\"",
-]
-files = [
-    {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"},
-    {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"},
-]
-
-[[package]]
-name = "cloudpickle"
-version = "3.1.1"
-requires_python = ">=3.8"
-summary = "Pickler class to extend the standard pickle.Pickler functionality"
-groups = ["default"]
-files = [
-    {file = "cloudpickle-3.1.1-py3-none-any.whl", hash = "sha256:c8c5a44295039331ee9dad40ba100a9c7297b6f988e50e87ccdf3765a668350e"},
-    {file = "cloudpickle-3.1.1.tar.gz", hash = "sha256:b216fa8ae4019d5482a8ac3c95d8f6346115d8835911fd4aefd1a445e4242c64"},
-]
-
-[[package]]
-name = "colorama"
-version = "0.4.6"
-requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
-summary = "Cross-platform colored terminal text."
-groups = ["default", "dev"]
-marker = "sys_platform == \"win32\" or platform_system == \"Windows\""
-files = [
-    {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
-    {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
-]
-
-[[package]]
-name = "dask"
-version = "2024.8.0"
-requires_python = ">=3.9"
-summary = "Parallel PyData with Task Scheduling"
-groups = ["default"]
-dependencies = [
-    "click>=8.1",
-    "cloudpickle>=1.5.0",
-    "fsspec>=2021.09.0",
-    "importlib-metadata>=4.13.0; python_version < \"3.12\"",
-    "packaging>=20.0",
-    "partd>=1.4.0",
-    "pyyaml>=5.3.1",
-    "toolz>=0.10.0",
-]
-files = [
-    {file = "dask-2024.8.0-py3-none-any.whl", hash = "sha256:250ea3df30d4a25958290eec4f252850091c6cfaed82d098179c3b25bba18309"},
-    {file = "dask-2024.8.0.tar.gz", hash = "sha256:f1fec39373d2f101bc045529ad4e9b30e34e6eb33b7aa0fa7073aec7b1bf9eee"},
-]
-
-[[package]]
-name = "dataclass-wizard"
-version = "0.35.0"
-summary = "Lightning-fast JSON wizardry for Python dataclasses — effortless serialization right out of the box!"
-groups = ["default"]
-dependencies = [
-    "typing-extensions>=4.9.0; python_version <= \"3.12\"",
-]
-files = [
-    {file = "dataclass-wizard-0.35.0.tar.gz", hash = "sha256:8e4b254991bf93416a48e2911bb985e3787cff11f00270c3d1165d2523cb3fb6"},
-    {file = "dataclass_wizard-0.35.0-py2.py3-none-any.whl", hash = "sha256:3bb19292477f0bebb12e9cc9178f1a6b93d133af4ae065abf14b713142b32edf"},
-]
-
-[[package]]
-name = "decorator"
-version = "5.2.1"
-requires_python = ">=3.8"
-summary = "Decorators for Humans"
-groups = ["dev"]
-marker = "python_version > \"3.6\""
-files = [
-    {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"},
-    {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"},
-]
-
-[[package]]
-name = "deepdiff"
-version = "8.4.2"
-requires_python = ">=3.8"
-summary = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other."
-groups = ["default"]
-dependencies = [
-    "orderly-set<6,>=5.3.0",
-]
-files = [
-    {file = "deepdiff-8.4.2-py3-none-any.whl", hash = "sha256:7e39e5b26f3747c54f9d0e8b9b29daab670c3100166b77cc0185d5793121b099"},
-    {file = "deepdiff-8.4.2.tar.gz", hash = "sha256:5c741c0867ebc7fcb83950ad5ed958369c17f424e14dee32a11c56073f4ee92a"},
-]
-
-[[package]]
-name = "distlib"
-version = "0.3.9"
-summary = "Distribution utilities"
-groups = ["dev"]
-files = [
-    {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"},
-    {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"},
-]
-
-[[package]]
-name = "exceptiongroup"
-version = "1.2.2"
-requires_python = ">=3.7"
-summary = "Backport of PEP 654 (exception groups)"
-groups = ["dev"]
-marker = "python_version < \"3.11\""
-files = [
-    {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
-    {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
-]
-
-[[package]]
-name = "executing"
-version = "2.2.0"
-requires_python = ">=3.8"
-summary = "Get the currently executing AST node of a frame, and other information"
-groups = ["dev"]
-marker = "python_version > \"3.6\""
-files = [
-    {file = "executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa"},
-    {file = "executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755"},
-]
-
-[[package]]
-name = "fasteners"
-version = "0.19"
-requires_python = ">=3.6"
-summary = "A python package that provides useful locks"
-groups = ["default"]
-marker = "sys_platform != \"emscripten\""
-files = [
-    {file = "fasteners-0.19-py3-none-any.whl", hash = "sha256:758819cb5d94cdedf4e836988b74de396ceacb8e2794d21f82d131fd9ee77237"},
-    {file = "fasteners-0.19.tar.gz", hash = "sha256:b4f37c3ac52d8a445af3a66bce57b33b5e90b97c696b7b984f530cf8f0ded09c"},
-]
-
-[[package]]
-name = "filelock"
-version = "3.18.0"
-requires_python = ">=3.9"
-summary = "A platform independent file lock."
-groups = ["dev"]
-files = [
-    {file = "filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de"},
-    {file = "filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2"},
-]
-
-[[package]]
-name = "frozenlist"
-version = "1.5.0"
-requires_python = ">=3.8"
-summary = "A list-like structure which implements collections.abc.MutableSequence"
-groups = ["default"]
-files = [
-    {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"},
-    {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"},
-    {file = "frozenlist-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec"},
-    {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5"},
-    {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76"},
-    {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17"},
-    {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba"},
-    {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d"},
-    {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2"},
-    {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f"},
-    {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c"},
-    {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab"},
-    {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5"},
-    {file = "frozenlist-1.5.0-cp310-cp310-win32.whl", hash = "sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb"},
-    {file = "frozenlist-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4"},
-    {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30"},
-    {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5"},
-    {file = "frozenlist-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778"},
-    {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a"},
-    {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869"},
-    {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d"},
-    {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45"},
-    {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d"},
-    {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3"},
-    {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a"},
-    {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9"},
-    {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2"},
-    {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf"},
-    {file = "frozenlist-1.5.0-cp311-cp311-win32.whl", hash = "sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942"},
-    {file = "frozenlist-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d"},
-    {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21"},
-    {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d"},
-    {file = "frozenlist-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e"},
-    {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a"},
-    {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a"},
-    {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee"},
-    {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6"},
-    {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e"},
-    {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9"},
-    {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039"},
-    {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784"},
-    {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631"},
-    {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f"},
-    {file = "frozenlist-1.5.0-cp312-cp312-win32.whl", hash = "sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8"},
-    {file = "frozenlist-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f"},
-    {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953"},
-    {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0"},
-    {file = "frozenlist-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2"},
-    {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f"},
-    {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608"},
-    {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b"},
-    {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840"},
-    {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439"},
-    {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de"},
-    {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641"},
-    {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e"},
-    {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9"},
-    {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03"},
-    {file = "frozenlist-1.5.0-cp313-cp313-win32.whl", hash = "sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c"},
-    {file = "frozenlist-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28"},
-    {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972"},
-    {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336"},
-    {file = "frozenlist-1.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f"},
-    {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f"},
-    {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6"},
-    {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411"},
-    {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08"},
-    {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2"},
-    {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d"},
-    {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b"},
-    {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b"},
-    {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0"},
-    {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c"},
-    {file = "frozenlist-1.5.0-cp39-cp39-win32.whl", hash = "sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3"},
-    {file = "frozenlist-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0"},
-    {file = "frozenlist-1.5.0-py3-none-any.whl", hash = "sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3"},
-    {file = "frozenlist-1.5.0.tar.gz", hash = "sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817"},
-]
-
-[[package]]
-name = "fsspec"
-version = "2025.3.0"
-requires_python = ">=3.8"
-summary = "File-system specification"
-groups = ["default"]
-files = [
-    {file = "fsspec-2025.3.0-py3-none-any.whl", hash = "sha256:efb87af3efa9103f94ca91a7f8cb7a4df91af9f74fc106c9c7ea0efd7277c1b3"},
-    {file = "fsspec-2025.3.0.tar.gz", hash = "sha256:a935fd1ea872591f2b5148907d103488fc523295e6c64b835cfad8c3eca44972"},
-]
-
-[[package]]
-name = "identify"
-version = "2.6.9"
-requires_python = ">=3.9"
-summary = "File identification library for Python"
-groups = ["dev"]
-files = [
-    {file = "identify-2.6.9-py2.py3-none-any.whl", hash = "sha256:c98b4322da415a8e5a70ff6e51fbc2d2932c015532d77e9f8537b4ba7813b150"},
-    {file = "identify-2.6.9.tar.gz", hash = "sha256:d40dfe3142a1421d8518e3d3985ef5ac42890683e32306ad614a29490abeb6bf"},
-]
-
-[[package]]
-name = "idna"
-version = "3.10"
-requires_python = ">=3.6"
-summary = "Internationalized Domain Names in Applications (IDNA)"
-groups = ["default"]
-files = [
-    {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
-    {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
-]
-
-[[package]]
-name = "importlib-metadata"
-version = "8.6.1"
-requires_python = ">=3.9"
-summary = "Read metadata from Python packages"
-groups = ["default"]
-marker = "python_version < \"3.12\""
-dependencies = [
-    "typing-extensions>=3.6.4; python_version < \"3.8\"",
-    "zipp>=3.20",
-]
-files = [
-    {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"},
-    {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"},
-]
-
-[[package]]
-name = "iniconfig"
-version = "2.1.0"
-requires_python = ">=3.8"
-summary = "brain-dead simple config-ini parsing"
-groups = ["dev"]
-files = [
-    {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"},
-    {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"},
-]
-
-[[package]]
-name = "ipdb"
-version = "0.13.13"
-requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-summary = "IPython-enabled pdb"
-groups = ["dev"]
-dependencies = [
-    "decorator; python_version == \"3.5\"",
-    "decorator; python_version == \"3.6\"",
-    "decorator; python_version > \"3.6\" and python_version < \"3.11\"",
-    "decorator; python_version >= \"3.11\"",
-    "decorator<5.0.0; python_version == \"2.7\"",
-    "decorator<5.0.0; python_version == \"3.4\"",
-    "ipython<6.0.0,>=5.1.0; python_version == \"2.7\"",
-    "ipython<7.0.0,>=6.0.0; python_version == \"3.4\"",
-    "ipython<7.10.0,>=7.0.0; python_version == \"3.5\"",
-    "ipython<7.17.0,>=7.16.3; python_version == \"3.6\"",
-    "ipython>=7.31.1; python_version > \"3.6\" and python_version < \"3.11\"",
-    "ipython>=7.31.1; python_version >= \"3.11\"",
-    "pathlib; python_version == \"2.7\"",
-    "toml>=0.10.2; python_version == \"2.7\"",
-    "toml>=0.10.2; python_version == \"3.4\"",
-    "toml>=0.10.2; python_version == \"3.5\"",
-    "tomli; python_version == \"3.6\"",
-    "tomli; python_version > \"3.6\" and python_version < \"3.11\"",
-]
-files = [
-    {file = "ipdb-0.13.13-py3-none-any.whl", hash = "sha256:45529994741c4ab6d2388bfa5d7b725c2cf7fe9deffabdb8a6113aa5ed449ed4"},
-    {file = "ipdb-0.13.13.tar.gz", hash = "sha256:e3ac6018ef05126d442af680aad863006ec19d02290561ac88b8b1c0b0cfc726"},
-]
-
-[[package]]
-name = "ipython"
-version = "8.18.1"
-requires_python = ">=3.9"
-summary = "IPython: Productive Interactive Computing"
-groups = ["dev"]
-marker = "python_version > \"3.6\""
-dependencies = [
-    "colorama; sys_platform == \"win32\"",
-    "decorator",
-    "exceptiongroup; python_version < \"3.11\"",
-    "jedi>=0.16",
-    "matplotlib-inline",
-    "pexpect>4.3; sys_platform != \"win32\"",
-    "prompt-toolkit<3.1.0,>=3.0.41",
-    "pygments>=2.4.0",
-    "stack-data",
-    "traitlets>=5",
-    "typing-extensions; python_version < \"3.10\"",
-]
-files = [
-    {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"},
-    {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"},
-]
-
-[[package]]
-name = "isodate"
-version = "0.7.2"
-requires_python = ">=3.7"
-summary = "An ISO 8601 date/time/duration parser and formatter"
-groups = ["default"]
-files = [
-    {file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"},
-    {file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"},
-]
-
-[[package]]
-name = "jedi"
-version = "0.19.2"
-requires_python = ">=3.6"
-summary = "An autocompletion tool for Python that can be used for text editors."
-groups = ["dev"]
-marker = "python_version > \"3.6\""
-dependencies = [
-    "parso<0.9.0,>=0.8.4",
-]
-files = [
-    {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"},
-    {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"},
-]
-
-[[package]]
-name = "locket"
-version = "1.0.0"
-requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-summary = "File-based locks for Python on Linux and Windows"
-groups = ["default"]
-files = [
-    {file = "locket-1.0.0-py2.py3-none-any.whl", hash = "sha256:b6c819a722f7b6bd955b80781788e4a66a55628b858d347536b7e81325a3a5e3"},
-    {file = "locket-1.0.0.tar.gz", hash = "sha256:5c0d4c052a8bbbf750e056a8e65ccd309086f4f0f18a2eac306a8dfa4112a632"},
-]
-
-[[package]]
-name = "loguru"
-version = "0.7.3"
-requires_python = "<4.0,>=3.5"
-summary = "Python logging made (stupidly) simple"
-groups = ["default"]
-dependencies = [
-    "aiocontextvars>=0.2.0; python_version < \"3.7\"",
-    "colorama>=0.3.4; sys_platform == \"win32\"",
-    "win32-setctime>=1.0.0; sys_platform == \"win32\"",
-]
-files = [
-    {file = "loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c"},
-    {file = "loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6"},
-]
-
-[[package]]
-name = "markdown-it-py"
-version = "3.0.0"
-requires_python = ">=3.8"
-summary = "Python port of markdown-it. Markdown parsing, done right!"
-groups = ["default"]
-dependencies = [
-    "mdurl~=0.1",
-]
-files = [
-    {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
-    {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
-]
-
-[[package]]
-name = "matplotlib-inline"
-version = "0.1.7"
-requires_python = ">=3.8"
-summary = "Inline Matplotlib backend for Jupyter"
-groups = ["dev"]
-marker = "python_version > \"3.6\""
-dependencies = [
-    "traitlets",
-]
-files = [
-    {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"},
-    {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"},
-]
-
-[[package]]
-name = "mdurl"
-version = "0.1.2"
-requires_python = ">=3.7"
-summary = "Markdown URL utilities"
-groups = ["default"]
-files = [
-    {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
-    {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
-]
-
-[[package]]
-name = "multidict"
-version = "6.2.0"
-requires_python = ">=3.9"
-summary = "multidict implementation"
-groups = ["default"]
-dependencies = [
-    "typing-extensions>=4.1.0; python_version < \"3.11\"",
-]
-files = [
-    {file = "multidict-6.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b9f6392d98c0bd70676ae41474e2eecf4c7150cb419237a41f8f96043fcb81d1"},
-    {file = "multidict-6.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3501621d5e86f1a88521ea65d5cad0a0834c77b26f193747615b7c911e5422d2"},
-    {file = "multidict-6.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:32ed748ff9ac682eae7859790d3044b50e3076c7d80e17a44239683769ff485e"},
-    {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc826b9a8176e686b67aa60fd6c6a7047b0461cae5591ea1dc73d28f72332a8a"},
-    {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:214207dcc7a6221d9942f23797fe89144128a71c03632bf713d918db99bd36de"},
-    {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05fefbc3cddc4e36da209a5e49f1094bbece9a581faa7f3589201fd95df40e5d"},
-    {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e851e6363d0dbe515d8de81fd544a2c956fdec6f8a049739562286727d4a00c3"},
-    {file = "multidict-6.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32c9b4878f48be3e75808ea7e499d6223b1eea6d54c487a66bc10a1871e3dc6a"},
-    {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7243c5a6523c5cfeca76e063efa5f6a656d1d74c8b1fc64b2cd1e84e507f7e2a"},
-    {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0e5a644e50ef9fb87878d4d57907f03a12410d2aa3b93b3acdf90a741df52c49"},
-    {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0dc25a3293c50744796e87048de5e68996104d86d940bb24bc3ec31df281b191"},
-    {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:a49994481b99cd7dedde07f2e7e93b1d86c01c0fca1c32aded18f10695ae17eb"},
-    {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:641cf2e3447c9ecff2f7aa6e9eee9eaa286ea65d57b014543a4911ff2799d08a"},
-    {file = "multidict-6.2.0-cp310-cp310-win32.whl", hash = "sha256:0c383d28857f66f5aebe3e91d6cf498da73af75fbd51cedbe1adfb85e90c0460"},
-    {file = "multidict-6.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:a33273a541f1e1a8219b2a4ed2de355848ecc0254264915b9290c8d2de1c74e1"},
-    {file = "multidict-6.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:84e87a7d75fa36839a3a432286d719975362d230c70ebfa0948549cc38bd5b46"},
-    {file = "multidict-6.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8de4d42dffd5ced9117af2ce66ba8722402541a3aa98ffdf78dde92badb68932"},
-    {file = "multidict-6.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7d91a230c7f8af86c904a5a992b8c064b66330544693fd6759c3d6162382ecf"},
-    {file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f6cad071960ba1914fa231677d21b1b4a3acdcce463cee41ea30bc82e6040cf"},
-    {file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f74f2fc51555f4b037ef278efc29a870d327053aba5cb7d86ae572426c7cccc"},
-    {file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:14ed9ed1bfedd72a877807c71113deac292bf485159a29025dfdc524c326f3e1"},
-    {file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ac3fcf9a2d369bd075b2c2965544036a27ccd277fc3c04f708338cc57533081"},
-    {file = "multidict-6.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fc6af8e39f7496047c7876314f4317736eac82bf85b54c7c76cf1a6f8e35d98"},
-    {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5f8cb1329f42fadfb40d6211e5ff568d71ab49be36e759345f91c69d1033d633"},
-    {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5389445f0173c197f4a3613713b5fb3f3879df1ded2a1a2e4bc4b5b9c5441b7e"},
-    {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:94a7bb972178a8bfc4055db80c51efd24baefaced5e51c59b0d598a004e8305d"},
-    {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da51d8928ad8b4244926fe862ba1795f0b6e68ed8c42cd2f822d435db9c2a8f4"},
-    {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:063be88bd684782a0715641de853e1e58a2f25b76388538bd62d974777ce9bc2"},
-    {file = "multidict-6.2.0-cp311-cp311-win32.whl", hash = "sha256:52b05e21ff05729fbea9bc20b3a791c3c11da61649ff64cce8257c82a020466d"},
-    {file = "multidict-6.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1e2a2193d3aa5cbf5758f6d5680a52aa848e0cf611da324f71e5e48a9695cc86"},
-    {file = "multidict-6.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:437c33561edb6eb504b5a30203daf81d4a9b727e167e78b0854d9a4e18e8950b"},
-    {file = "multidict-6.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9f49585f4abadd2283034fc605961f40c638635bc60f5162276fec075f2e37a4"},
-    {file = "multidict-6.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5dd7106d064d05896ce28c97da3f46caa442fe5a43bc26dfb258e90853b39b44"},
-    {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e25b11a0417475f093d0f0809a149aff3943c2c56da50fdf2c3c88d57fe3dfbd"},
-    {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac380cacdd3b183338ba63a144a34e9044520a6fb30c58aa14077157a033c13e"},
-    {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61d5541f27533f803a941d3a3f8a3d10ed48c12cf918f557efcbf3cd04ef265c"},
-    {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:facaf11f21f3a4c51b62931feb13310e6fe3475f85e20d9c9fdce0d2ea561b87"},
-    {file = "multidict-6.2.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:095a2eabe8c43041d3e6c2cb8287a257b5f1801c2d6ebd1dd877424f1e89cf29"},
-    {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a0cc398350ef31167e03f3ca7c19313d4e40a662adcb98a88755e4e861170bdd"},
-    {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7c611345bbe7cb44aabb877cb94b63e86f2d0db03e382667dbd037866d44b4f8"},
-    {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8cd1a0644ccaf27e9d2f6d9c9474faabee21f0578fe85225cc5af9a61e1653df"},
-    {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:89b3857652183b8206a891168af47bac10b970d275bba1f6ee46565a758c078d"},
-    {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:125dd82b40f8c06d08d87b3510beaccb88afac94e9ed4a6f6c71362dc7dbb04b"},
-    {file = "multidict-6.2.0-cp312-cp312-win32.whl", hash = "sha256:76b34c12b013d813e6cb325e6bd4f9c984db27758b16085926bbe7ceeaace626"},
-    {file = "multidict-6.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:0b183a959fb88ad1be201de2c4bdf52fa8e46e6c185d76201286a97b6f5ee65c"},
-    {file = "multidict-6.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5c5e7d2e300d5cb3b2693b6d60d3e8c8e7dd4ebe27cd17c9cb57020cac0acb80"},
-    {file = "multidict-6.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:256d431fe4583c5f1e0f2e9c4d9c22f3a04ae96009b8cfa096da3a8723db0a16"},
-    {file = "multidict-6.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a3c0ff89fe40a152e77b191b83282c9664357dce3004032d42e68c514ceff27e"},
-    {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef7d48207926edbf8b16b336f779c557dd8f5a33035a85db9c4b0febb0706817"},
-    {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3c099d3899b14e1ce52262eb82a5f5cb92157bb5106bf627b618c090a0eadc"},
-    {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e16e7297f29a544f49340012d6fc08cf14de0ab361c9eb7529f6a57a30cbfda1"},
-    {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:042028348dc5a1f2be6c666437042a98a5d24cee50380f4c0902215e5ec41844"},
-    {file = "multidict-6.2.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:08549895e6a799bd551cf276f6e59820aa084f0f90665c0f03dd3a50db5d3c48"},
-    {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4ccfd74957ef53fa7380aaa1c961f523d582cd5e85a620880ffabd407f8202c0"},
-    {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:83b78c680d4b15d33042d330c2fa31813ca3974197bddb3836a5c635a5fd013f"},
-    {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b4c153863dd6569f6511845922c53e39c8d61f6e81f228ad5443e690fca403de"},
-    {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:98aa8325c7f47183b45588af9c434533196e241be0a4e4ae2190b06d17675c02"},
-    {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9e658d1373c424457ddf6d55ec1db93c280b8579276bebd1f72f113072df8a5d"},
-    {file = "multidict-6.2.0-cp313-cp313-win32.whl", hash = "sha256:3157126b028c074951839233647bd0e30df77ef1fedd801b48bdcad242a60f4e"},
-    {file = "multidict-6.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:2e87f1926e91855ae61769ba3e3f7315120788c099677e0842e697b0bfb659f2"},
-    {file = "multidict-6.2.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:2529ddbdaa424b2c6c2eb668ea684dd6b75b839d0ad4b21aad60c168269478d7"},
-    {file = "multidict-6.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:13551d0e2d7201f0959725a6a769b6f7b9019a168ed96006479c9ac33fe4096b"},
-    {file = "multidict-6.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d1996ee1330e245cd3aeda0887b4409e3930524c27642b046e4fae88ffa66c5e"},
-    {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c537da54ce4ff7c15e78ab1292e5799d0d43a2108e006578a57f531866f64025"},
-    {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f249badb360b0b4d694307ad40f811f83df4da8cef7b68e429e4eea939e49dd"},
-    {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48d39b1824b8d6ea7de878ef6226efbe0773f9c64333e1125e0efcfdd18a24c7"},
-    {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b99aac6bb2c37db336fa03a39b40ed4ef2818bf2dfb9441458165ebe88b793af"},
-    {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07bfa8bc649783e703263f783f73e27fef8cd37baaad4389816cf6a133141331"},
-    {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b2c00ad31fbc2cbac85d7d0fcf90853b2ca2e69d825a2d3f3edb842ef1544a2c"},
-    {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d57a01a2a9fa00234aace434d8c131f0ac6e0ac6ef131eda5962d7e79edfb5b"},
-    {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:abf5b17bc0cf626a8a497d89ac691308dbd825d2ac372aa990b1ca114e470151"},
-    {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:f7716f7e7138252d88607228ce40be22660d6608d20fd365d596e7ca0738e019"},
-    {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d5a36953389f35f0a4e88dc796048829a2f467c9197265504593f0e420571547"},
-    {file = "multidict-6.2.0-cp313-cp313t-win32.whl", hash = "sha256:e653d36b1bf48fa78c7fcebb5fa679342e025121ace8c87ab05c1cefd33b34fc"},
-    {file = "multidict-6.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ca23db5fb195b5ef4fd1f77ce26cadefdf13dba71dab14dadd29b34d457d7c44"},
-    {file = "multidict-6.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b4f3d66dd0354b79761481fc15bdafaba0b9d9076f1f42cc9ce10d7fcbda205a"},
-    {file = "multidict-6.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e2a2d6749e1ff2c9c76a72c6530d5baa601205b14e441e6d98011000f47a7ac"},
-    {file = "multidict-6.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cca83a629f77402cfadd58352e394d79a61c8015f1694b83ab72237ec3941f88"},
-    {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:781b5dd1db18c9e9eacc419027b0acb5073bdec9de1675c0be25ceb10e2ad133"},
-    {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cf8d370b2fea27fb300825ec3984334f7dd54a581bde6456799ba3776915a656"},
-    {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:25bb96338512e2f46f615a2bb7c6012fe92a4a5ebd353e5020836a7e33120349"},
-    {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19e2819b0b468174de25c0ceed766606a07cedeab132383f1e83b9a4e96ccb4f"},
-    {file = "multidict-6.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6aed763b6a1b28c46c055692836879328f0b334a6d61572ee4113a5d0c859872"},
-    {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a1133414b771619aa3c3000701c11b2e4624a7f492f12f256aedde97c28331a2"},
-    {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:639556758c36093b35e2e368ca485dada6afc2bd6a1b1207d85ea6dfc3deab27"},
-    {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:163f4604e76639f728d127293d24c3e208b445b463168af3d031b92b0998bb90"},
-    {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2325105e16d434749e1be8022f942876a936f9bece4ec41ae244e3d7fae42aaf"},
-    {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e4371591e621579cb6da8401e4ea405b33ff25a755874a3567c4075ca63d56e2"},
-    {file = "multidict-6.2.0-cp39-cp39-win32.whl", hash = "sha256:d1175b0e0d6037fab207f05774a176d71210ebd40b1c51f480a04b65ec5c786d"},
-    {file = "multidict-6.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad81012b24b88aad4c70b2cbc2dad84018783221b7f923e926f4690ff8569da3"},
-    {file = "multidict-6.2.0-py3-none-any.whl", hash = "sha256:5d26547423e5e71dcc562c4acdc134b900640a39abd9066d7326a7cc2324c530"},
-    {file = "multidict-6.2.0.tar.gz", hash = "sha256:0085b0afb2446e57050140240a8595846ed64d1cbd26cef936bfab3192c673b8"},
-]
-
-[[package]]
-name = "nodeenv"
-version = "1.9.1"
-requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
-summary = "Node.js virtual environment builder"
-groups = ["dev"]
-files = [
-    {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"},
-    {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"},
-]
-
-[[package]]
-name = "numcodecs"
-version = "0.12.1"
-requires_python = ">=3.8"
-summary = "A Python package providing buffer compression and transformation codecs for use in data storage and communication applications."
-groups = ["default"]
-dependencies = [
-    "numpy>=1.7",
-]
-files = [
-    {file = "numcodecs-0.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d37f628fe92b3699e65831d5733feca74d2e33b50ef29118ffd41c13c677210e"},
-    {file = "numcodecs-0.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:941b7446b68cf79f089bcfe92edaa3b154533dcbcd82474f994b28f2eedb1c60"},
-    {file = "numcodecs-0.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e79bf9d1d37199ac00a60ff3adb64757523291d19d03116832e600cac391c51"},
-    {file = "numcodecs-0.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:82d7107f80f9307235cb7e74719292d101c7ea1e393fe628817f0d635b7384f5"},
-    {file = "numcodecs-0.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eeaf42768910f1c6eebf6c1bb00160728e62c9343df9e2e315dc9fe12e3f6071"},
-    {file = "numcodecs-0.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:135b2d47563f7b9dc5ee6ce3d1b81b0f1397f69309e909f1a35bb0f7c553d45e"},
-    {file = "numcodecs-0.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a191a8e347ecd016e5c357f2bf41fbcb026f6ffe78fff50c77ab12e96701d155"},
-    {file = "numcodecs-0.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:21d8267bd4313f4d16f5b6287731d4c8ebdab236038f29ad1b0e93c9b2ca64ee"},
-    {file = "numcodecs-0.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2f84df6b8693206365a5b37c005bfa9d1be486122bde683a7b6446af4b75d862"},
-    {file = "numcodecs-0.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:760627780a8b6afdb7f942f2a0ddaf4e31d3d7eea1d8498cf0fd3204a33c4618"},
-    {file = "numcodecs-0.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c258bd1d3dfa75a9b708540d23b2da43d63607f9df76dfa0309a7597d1de3b73"},
-    {file = "numcodecs-0.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:e04649ea504aff858dbe294631f098fbfd671baf58bfc04fc48d746554c05d67"},
-    {file = "numcodecs-0.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2fbb12a6a1abe95926f25c65e283762d63a9bf9e43c0de2c6a1a798347dfcb40"},
-    {file = "numcodecs-0.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f2207871868b2464dc11c513965fd99b958a9d7cde2629be7b2dc84fdaab013b"},
-    {file = "numcodecs-0.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abff3554a6892a89aacf7b642a044e4535499edf07aeae2f2e6e8fc08c9ba07f"},
-    {file = "numcodecs-0.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:ef964d4860d3e6b38df0633caf3e51dc850a6293fd8e93240473642681d95136"},
-    {file = "numcodecs-0.12.1.tar.gz", hash = "sha256:05d91a433733e7eef268d7e80ec226a0232da244289614a8f3826901aec1098e"},
-]
-
-[[package]]
-name = "numpy"
-version = "2.0.2"
-requires_python = ">=3.9"
-summary = "Fundamental package for array computing in Python"
-groups = ["default"]
-files = [
-    {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"},
-    {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"},
-    {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_arm64.whl", hash =
"sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66"}, - {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b"}, - {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd"}, - {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318"}, - {file = "numpy-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8"}, - {file = "numpy-2.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326"}, - {file = "numpy-2.0.2-cp310-cp310-win32.whl", hash = "sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97"}, - {file = "numpy-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a"}, - {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669"}, - {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951"}, - {file = "numpy-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9"}, - {file = "numpy-2.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15"}, - {file = "numpy-2.0.2-cp311-cp311-win32.whl", hash = "sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4"}, - {file = "numpy-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c"}, - {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692"}, - {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a"}, - {file = "numpy-2.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c"}, - {file = "numpy-2.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded"}, - {file = "numpy-2.0.2-cp312-cp312-win32.whl", hash = "sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5"}, - {file = "numpy-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a"}, - {file = "numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c"}, - {file = "numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd"}, - {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b"}, - {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729"}, - {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1"}, - {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd"}, - {file = "numpy-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d"}, - {file = "numpy-2.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d"}, - {file = "numpy-2.0.2-cp39-cp39-win32.whl", hash = "sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa"}, - {file = "numpy-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385"}, - {file = "numpy-2.0.2.tar.gz", hash = "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"}, -] - -[[package]] -name = "orderly-set" -version = "5.3.0" -requires_python = ">=3.8" -summary = "Orderly set" -groups = ["default"] -files = [ - {file = "orderly_set-5.3.0-py3-none-any.whl", hash = "sha256:c2c0bfe604f5d3d9b24e8262a06feb612594f37aa3845650548befd7772945d1"}, - {file = "orderly_set-5.3.0.tar.gz", hash = "sha256:80b3d8fdd3d39004d9aad389eaa0eab02c71f0a0511ba3a6d54a935a6c6a0acc"}, -] - -[[package]] -name = "packaging" -version = "24.2" -requires_python = ">=3.8" -summary = "Core utilities for Python packages" -groups = ["default", "dev"] -files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, -] - -[[package]] -name = "pandas" -version = "2.2.3" -requires_python = ">=3.9" -summary = 
"Powerful data structures for data analysis, time series, and statistics" -groups = ["default"] -dependencies = [ - "numpy>=1.22.4; python_version < \"3.11\"", - "numpy>=1.23.2; python_version == \"3.11\"", - "numpy>=1.26.0; python_version >= \"3.12\"", - "python-dateutil>=2.8.2", - "pytz>=2020.1", - "tzdata>=2022.7", -] -files = [ - {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, - {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, - {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, - {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, - {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, - {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, - {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, - {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, - {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, - {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, - {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, - {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, - {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, - {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, - {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, - {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, - {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, - {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, - {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, - {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, - {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, - {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", 
hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, - {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, - {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, - {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, - {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, - {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, - {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, - {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, - {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, - {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, - {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, - {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, - {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, - {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, - {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, - {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, - {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, - {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, - {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, - {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, - {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, -] - -[[package]] -name = "parso" -version = "0.8.4" -requires_python = ">=3.6" -summary = "A Python Parser" -groups = ["dev"] -marker = "python_version > \"3.6\"" -files = [ - {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, - {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, -] - -[[package]] -name = "partd" -version = "1.4.2" 
-requires_python = ">=3.9" -summary = "Appendable key-value storage" -groups = ["default"] -dependencies = [ - "locket", - "toolz", -] -files = [ - {file = "partd-1.4.2-py3-none-any.whl", hash = "sha256:978e4ac767ec4ba5b86c6eaa52e5a2a3bc748a2ca839e8cc798f1cc6ce6efb0f"}, - {file = "partd-1.4.2.tar.gz", hash = "sha256:d022c33afbdc8405c226621b015e8067888173d85f7f5ecebb3cafed9a20f02c"}, -] - -[[package]] -name = "pexpect" -version = "4.9.0" -summary = "Pexpect allows easy control of interactive console applications." -groups = ["dev"] -marker = "sys_platform != \"win32\" and python_version > \"3.6\"" -dependencies = [ - "ptyprocess>=0.5", -] -files = [ - {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, - {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, -] - -[[package]] -name = "platformdirs" -version = "4.3.7" -requires_python = ">=3.9" -summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." -groups = ["dev"] -files = [ - {file = "platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94"}, - {file = "platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351"}, -] - -[[package]] -name = "pluggy" -version = "1.5.0" -requires_python = ">=3.8" -summary = "plugin and hook calling mechanisms for python" -groups = ["dev"] -files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, -] - -[[package]] -name = "pre-commit" -version = "4.2.0" -requires_python = ">=3.9" -summary = "A framework for managing and maintaining multi-language pre-commit hooks." 
-groups = ["dev"] -dependencies = [ - "cfgv>=2.0.0", - "identify>=1.0.0", - "nodeenv>=0.11.1", - "pyyaml>=5.1", - "virtualenv>=20.10.0", -] -files = [ - {file = "pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd"}, - {file = "pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146"}, -] - -[[package]] -name = "prompt-toolkit" -version = "3.0.50" -requires_python = ">=3.8.0" -summary = "Library for building powerful interactive command lines in Python" -groups = ["dev"] -marker = "python_version > \"3.6\"" -dependencies = [ - "wcwidth", -] -files = [ - {file = "prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198"}, - {file = "prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab"}, -] - -[[package]] -name = "propcache" -version = "0.3.1" -requires_python = ">=3.9" -summary = "Accelerated property cache" -groups = ["default"] -files = [ - {file = "propcache-0.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f27785888d2fdd918bc36de8b8739f2d6c791399552333721b58193f68ea3e98"}, - {file = "propcache-0.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4e89cde74154c7b5957f87a355bb9c8ec929c167b59c83d90654ea36aeb6180"}, - {file = "propcache-0.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:730178f476ef03d3d4d255f0c9fa186cb1d13fd33ffe89d39f2cda4da90ceb71"}, - {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967a8eec513dbe08330f10137eacb427b2ca52118769e82ebcfcab0fba92a649"}, - {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b9145c35cc87313b5fd480144f8078716007656093d23059e8993d3a8fa730f"}, - {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e64e948ab41411958670f1093c0a57acfdc3bee5cf5b935671bbd5313bcf229"}, - {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:319fa8765bfd6a265e5fa661547556da381e53274bc05094fc9ea50da51bfd46"}, - {file = "propcache-0.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66d8ccbc902ad548312b96ed8d5d266d0d2c6d006fd0f66323e9d8f2dd49be7"}, - {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2d219b0dbabe75e15e581fc1ae796109b07c8ba7d25b9ae8d650da582bed01b0"}, - {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:cd6a55f65241c551eb53f8cf4d2f4af33512c39da5d9777694e9d9c60872f519"}, - {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9979643ffc69b799d50d3a7b72b5164a2e97e117009d7af6dfdd2ab906cb72cd"}, - {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4cf9e93a81979f1424f1a3d155213dc928f1069d697e4353edb8a5eba67c6259"}, - {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2fce1df66915909ff6c824bbb5eb403d2d15f98f1518e583074671a30fe0c21e"}, - {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4d0dfdd9a2ebc77b869a0b04423591ea8823f791293b527dc1bb896c1d6f1136"}, - {file = "propcache-0.3.1-cp310-cp310-win32.whl", hash = "sha256:1f6cc0ad7b4560e5637eb2c994e97b4fa41ba8226069c9277eb5ea7101845b42"}, - {file = "propcache-0.3.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:47ef24aa6511e388e9894ec16f0fbf3313a53ee68402bc428744a367ec55b833"}, - {file = "propcache-0.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7f30241577d2fef2602113b70ef7231bf4c69a97e04693bde08ddab913ba0ce5"}, - {file = "propcache-0.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:43593c6772aa12abc3af7784bff4a41ffa921608dd38b77cf1dfd7f5c4e71371"}, - {file = "propcache-0.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a75801768bbe65499495660b777e018cbe90c7980f07f8aa57d6be79ea6f71da"}, - {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6f1324db48f001c2ca26a25fa25af60711e09b9aaf4b28488602776f4f9a744"}, - {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cdb0f3e1eb6dfc9965d19734d8f9c481b294b5274337a8cb5cb01b462dcb7e0"}, - {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1eb34d90aac9bfbced9a58b266f8946cb5935869ff01b164573a7634d39fbcb5"}, - {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f35c7070eeec2cdaac6fd3fe245226ed2a6292d3ee8c938e5bb645b434c5f256"}, - {file = "propcache-0.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b23c11c2c9e6d4e7300c92e022046ad09b91fd00e36e83c44483df4afa990073"}, - {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3e19ea4ea0bf46179f8a3652ac1426e6dcbaf577ce4b4f65be581e237340420d"}, - {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:bd39c92e4c8f6cbf5f08257d6360123af72af9f4da75a690bef50da77362d25f"}, - {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b0313e8b923b3814d1c4a524c93dfecea5f39fa95601f6a9b1ac96cd66f89ea0"}, - {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e861ad82892408487be144906a368ddbe2dc6297074ade2d892341b35c59844a"}, - {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:61014615c1274df8da5991a1e5da85a3ccb00c2d4701ac6f3383afd3ca47ab0a"}, - {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:71ebe3fe42656a2328ab08933d420df5f3ab121772eef78f2dc63624157f0ed9"}, - {file = "propcache-0.3.1-cp311-cp311-win32.whl", hash = "sha256:58aa11f4ca8b60113d4b8e32d37e7e78bd8af4d1a5b5cb4979ed856a45e62005"}, - {file = "propcache-0.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:9532ea0b26a401264b1365146c440a6d78269ed41f83f23818d4b79497aeabe7"}, - {file = "propcache-0.3.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f78eb8422acc93d7b69964012ad7048764bb45a54ba7a39bb9e146c72ea29723"}, - {file = "propcache-0.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:89498dd49c2f9a026ee057965cdf8192e5ae070ce7d7a7bd4b66a8e257d0c976"}, - {file = "propcache-0.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09400e98545c998d57d10035ff623266927cb784d13dd2b31fd33b8a5316b85b"}, - {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa8efd8c5adc5a2c9d3b952815ff8f7710cefdcaf5f2c36d26aff51aeca2f12f"}, - {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2fe5c910f6007e716a06d269608d307b4f36e7babee5f36533722660e8c4a70"}, - {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a0ab8cf8cdd2194f8ff979a43ab43049b1df0b37aa64ab7eca04ac14429baeb7"}, - {file = 
"propcache-0.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:563f9d8c03ad645597b8d010ef4e9eab359faeb11a0a2ac9f7b4bc8c28ebef25"}, - {file = "propcache-0.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb6e0faf8cb6b4beea5d6ed7b5a578254c6d7df54c36ccd3d8b3eb00d6770277"}, - {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1c5c7ab7f2bb3f573d1cb921993006ba2d39e8621019dffb1c5bc94cdbae81e8"}, - {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:050b571b2e96ec942898f8eb46ea4bfbb19bd5502424747e83badc2d4a99a44e"}, - {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e1c4d24b804b3a87e9350f79e2371a705a188d292fd310e663483af6ee6718ee"}, - {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e4fe2a6d5ce975c117a6bb1e8ccda772d1e7029c1cca1acd209f91d30fa72815"}, - {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:feccd282de1f6322f56f6845bf1207a537227812f0a9bf5571df52bb418d79d5"}, - {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ec314cde7314d2dd0510c6787326bbffcbdc317ecee6b7401ce218b3099075a7"}, - {file = "propcache-0.3.1-cp312-cp312-win32.whl", hash = "sha256:7d2d5a0028d920738372630870e7d9644ce437142197f8c827194fca404bf03b"}, - {file = "propcache-0.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:88c423efef9d7a59dae0614eaed718449c09a5ac79a5f224a8b9664d603f04a3"}, - {file = "propcache-0.3.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f1528ec4374617a7a753f90f20e2f551121bb558fcb35926f99e3c42367164b8"}, - {file = "propcache-0.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dc1915ec523b3b494933b5424980831b636fe483d7d543f7afb7b3bf00f0c10f"}, - {file = "propcache-0.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a110205022d077da24e60b3df8bcee73971be9575dec5573dd17ae5d81751111"}, - {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d249609e547c04d190e820d0d4c8ca03ed4582bcf8e4e160a6969ddfb57b62e5"}, - {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ced33d827625d0a589e831126ccb4f5c29dfdf6766cac441d23995a65825dcb"}, - {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4114c4ada8f3181af20808bedb250da6bae56660e4b8dfd9cd95d4549c0962f7"}, - {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:975af16f406ce48f1333ec5e912fe11064605d5c5b3f6746969077cc3adeb120"}, - {file = "propcache-0.3.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a34aa3a1abc50740be6ac0ab9d594e274f59960d3ad253cd318af76b996dd654"}, - {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9cec3239c85ed15bfaded997773fdad9fb5662b0a7cbc854a43f291eb183179e"}, - {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:05543250deac8e61084234d5fc54f8ebd254e8f2b39a16b1dce48904f45b744b"}, - {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5cb5918253912e088edbf023788de539219718d3b10aef334476b62d2b53de53"}, - {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f3bbecd2f34d0e6d3c543fdb3b15d6b60dd69970c2b4c822379e5ec8f6f621d5"}, - {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:aca63103895c7d960a5b9b044a83f544b233c95e0dcff114389d64d762017af7"}, - {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a0a9898fdb99bf11786265468571e628ba60af80dc3f6eb89a3545540c6b0ef"}, - {file = "propcache-0.3.1-cp313-cp313-win32.whl", hash = "sha256:3a02a28095b5e63128bcae98eb59025924f121f048a62393db682f049bf4ac24"}, - {file = "propcache-0.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:813fbb8b6aea2fc9659815e585e548fe706d6f663fa73dff59a1677d4595a037"}, - {file = "propcache-0.3.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a444192f20f5ce8a5e52761a031b90f5ea6288b1eef42ad4c7e64fef33540b8f"}, - {file = "propcache-0.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0fbe94666e62ebe36cd652f5fc012abfbc2342de99b523f8267a678e4dfdee3c"}, - {file = "propcache-0.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f011f104db880f4e2166bcdcf7f58250f7a465bc6b068dc84c824a3d4a5c94dc"}, - {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e584b6d388aeb0001d6d5c2bd86b26304adde6d9bb9bfa9c4889805021b96de"}, - {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a17583515a04358b034e241f952f1715243482fc2c2945fd99a1b03a0bd77d6"}, - {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5aed8d8308215089c0734a2af4f2e95eeb360660184ad3912686c181e500b2e7"}, - {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d8e309ff9a0503ef70dc9a0ebd3e69cf7b3894c9ae2ae81fc10943c37762458"}, - {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b655032b202028a582d27aeedc2e813299f82cb232f969f87a4fde491a233f11"}, - {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9f64d91b751df77931336b5ff7bafbe8845c5770b06630e27acd5dbb71e1931c"}, - {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:19a06db789a4bd896ee91ebc50d059e23b3639c25d58eb35be3ca1cbe967c3bf"}, - {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:bef100c88d8692864651b5f98e871fb090bd65c8a41a1cb0ff2322db39c96c27"}, - {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:87380fb1f3089d2a0b8b00f006ed12bd41bd858fabfa7330c954c70f50ed8757"}, - {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e474fc718e73ba5ec5180358aa07f6aded0ff5f2abe700e3115c37d75c947e18"}, - {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:17d1c688a443355234f3c031349da69444be052613483f3e4158eef751abcd8a"}, - {file = "propcache-0.3.1-cp313-cp313t-win32.whl", hash = "sha256:359e81a949a7619802eb601d66d37072b79b79c2505e6d3fd8b945538411400d"}, - {file = "propcache-0.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e7fb9a84c9abbf2b2683fa3e7b0d7da4d8ecf139a1c635732a8bda29c5214b0e"}, - {file = "propcache-0.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ed5f6d2edbf349bd8d630e81f474d33d6ae5d07760c44d33cd808e2f5c8f4ae6"}, - {file = "propcache-0.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:668ddddc9f3075af019f784456267eb504cb77c2c4bd46cc8402d723b4d200bf"}, - {file = "propcache-0.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0c86e7ceea56376216eba345aa1fc6a8a6b27ac236181f840d1d7e6a1ea9ba5c"}, - {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:83be47aa4e35b87c106fc0c84c0fc069d3f9b9b06d3c494cd404ec6747544894"}, - {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:27c6ac6aa9fc7bc662f594ef380707494cb42c22786a558d95fcdedb9aa5d035"}, - {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a956dff37080b352c1c40b2966b09defb014347043e740d420ca1eb7c9b908"}, - {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82de5da8c8893056603ac2d6a89eb8b4df49abf1a7c19d536984c8dd63f481d5"}, - {file = "propcache-0.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c3c3a203c375b08fd06a20da3cf7aac293b834b6f4f4db71190e8422750cca5"}, - {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b303b194c2e6f171cfddf8b8ba30baefccf03d36a4d9cab7fd0bb68ba476a3d7"}, - {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:916cd229b0150129d645ec51614d38129ee74c03293a9f3f17537be0029a9641"}, - {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a461959ead5b38e2581998700b26346b78cd98540b5524796c175722f18b0294"}, - {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:069e7212890b0bcf9b2be0a03afb0c2d5161d91e1bf51569a64f629acc7defbf"}, - {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ef2e4e91fb3945769e14ce82ed53007195e616a63aa43b40fb7ebaaf907c8d4c"}, - {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8638f99dca15b9dff328fb6273e09f03d1c50d9b6512f3b65a4154588a7595fe"}, - {file = "propcache-0.3.1-cp39-cp39-win32.whl", hash = "sha256:6f173bbfe976105aaa890b712d1759de339d8a7cef2fc0a1714cc1a1e1c47f64"}, - {file = "propcache-0.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:603f1fe4144420374f1a69b907494c3acbc867a581c2d49d4175b0de7cc64566"}, - {file = "propcache-0.3.1-py3-none-any.whl", hash = "sha256:9a8ecf38de50a7f518c21568c80f985e776397b902f1ce0b01f799aba1608b40"}, - {file = "propcache-0.3.1.tar.gz", hash = "sha256:40d980c33765359098837527e18eddefc9a24cea5b45e078a7f3bb5b032c6ecf"}, -] - -[[package]] -name = "psutil" -version = "7.0.0" -requires_python = ">=3.6" -summary = "Cross-platform lib for process and system monitoring in Python. NOTE: the syntax of this script MUST be kept compatible with Python 2.7." 
-groups = ["default"] -files = [ - {file = "psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25"}, - {file = "psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da"}, - {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91"}, - {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34"}, - {file = "psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993"}, - {file = "psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99"}, - {file = "psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553"}, - {file = "psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456"}, -] - -[[package]] -name = "ptyprocess" -version = "0.7.0" -summary = "Run a subprocess in a pseudo terminal" -groups = ["dev"] -marker = "sys_platform != \"win32\" and python_version > \"3.6\"" -files = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] - -[[package]] -name = "pure-eval" -version = "0.2.3" -summary = "Safely evaluate AST nodes without side effects" -groups = ["dev"] -marker = "python_version > \"3.6\"" -files = [ - {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, - {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, -] - -[[package]] -name = "pygments" -version = "2.19.1" -requires_python = ">=3.8" -summary = "Pygments is a syntax highlighting package written in Python." 
-groups = ["default", "dev"] -files = [ - {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, - {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, -] - -[[package]] -name = "pytest" -version = "8.3.5" -requires_python = ">=3.8" -summary = "pytest: simple powerful testing with Python" -groups = ["dev"] -dependencies = [ - "colorama; sys_platform == \"win32\"", - "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"", - "iniconfig", - "packaging", - "pluggy<2,>=1.5", - "tomli>=1; python_version < \"3.11\"", -] -files = [ - {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, - {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, -] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -summary = "Extensions to the standard Python datetime module" -groups = ["default"] -dependencies = [ - "six>=1.5", -] -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[[package]] -name = "pytz" -version = "2025.2" -summary = "World timezone definitions, modern and historical" -groups = ["default"] -files = [ - {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, - {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.2" -requires_python = ">=3.8" -summary = "YAML parser and emitter for Python" -groups = ["default", "dev"] -files = [ - {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, - {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, - {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, - {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash 
= "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, - {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, - {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, - {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, - {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, - {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, - {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, - {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, - {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, - {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, -] - -[[package]] -name = "requests" -version = "2.32.3" -requires_python = ">=3.8" -summary = "Python HTTP for Humans." 
-groups = ["default"] -dependencies = [ - "certifi>=2017.4.17", - "charset-normalizer<4,>=2", - "idna<4,>=2.5", - "urllib3<3,>=1.21.1", -] -files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, -] - -[[package]] -name = "rich" -version = "13.9.4" -requires_python = ">=3.8.0" -summary = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -groups = ["default"] -dependencies = [ - "markdown-it-py>=2.2.0", - "pygments<3.0.0,>=2.13.0", - "typing-extensions<5.0,>=4.0.0; python_version < \"3.11\"", -] -files = [ - {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, - {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, -] - -[[package]] -name = "semver" -version = "3.0.4" -requires_python = ">=3.7" -summary = "Python helper for Semantic Versioning (https://semver.org)" -groups = ["default"] -files = [ - {file = "semver-3.0.4-py3-none-any.whl", hash = "sha256:9c824d87ba7f7ab4a1890799cec8596f15c1241cb473404ea1cb0c55e4b04746"}, - {file = "semver-3.0.4.tar.gz", hash = "sha256:afc7d8c584a5ed0a11033af086e8af226a9c0b206f313e0301f8dd7b6b589602"}, -] - -[[package]] -name = "six" -version = "1.17.0" -requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -summary = "Python 2 and 3 compatibility utilities" -groups = ["default"] -files = [ - {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, - {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, -] - -[[package]] -name = "stack-data" -version = "0.6.3" -summary = "Extract data from python stack frames and tracebacks for informative displays" -groups = ["dev"] -marker = "python_version > \"3.6\"" -dependencies = [ - "asttokens>=2.1.0", - "executing>=1.2.0", - "pure-eval", -] -files = [ - {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, - {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, -] - -[[package]] -name = "tomli" -version = "2.2.1" -requires_python = ">=3.8" -summary = "A lil' TOML parser" -groups = ["dev"] -marker = "python_version < \"3.11\"" -files = [ - {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, - {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, - 
{file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, - {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, - {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, - {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, - {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, - {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, - {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = 
"sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, - {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, - {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, -] - -[[package]] -name = "toolz" -version = "1.0.0" -requires_python = ">=3.8" -summary = "List processing tools and functional utilities" -groups = ["default"] -files = [ - {file = "toolz-1.0.0-py3-none-any.whl", hash = "sha256:292c8f1c4e7516bf9086f8850935c799a874039c8bcf959d47b600e4c44a6236"}, - {file = "toolz-1.0.0.tar.gz", hash = "sha256:2c86e3d9a04798ac556793bced838816296a2f085017664e4995cb40a1047a02"}, -] - -[[package]] -name = "traitlets" -version = "5.14.3" -requires_python = ">=3.8" -summary = "Traitlets Python configuration system" -groups = ["dev"] -marker = "python_version > \"3.6\"" -files = [ - {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, - {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, -] - -[[package]] -name = "typing-extensions" -version = "4.13.0" -requires_python = ">=3.8" -summary = "Backported and Experimental Type Hints for Python 3.8+" -groups = ["default", "dev"] -marker = "python_version <= \"3.12\"" -files = [ - {file = "typing_extensions-4.13.0-py3-none-any.whl", hash = "sha256:c8dd92cc0d6425a97c18fbb9d1954e5ff92c1ca881a309c45f06ebc0b79058e5"}, - {file = "typing_extensions-4.13.0.tar.gz", hash = "sha256:0a4ac55a5820789d87e297727d229866c9650f6521b64206413c4fbada24d95b"}, -] - -[[package]] -name = "tzdata" -version = "2025.2" -requires_python = ">=2" -summary = "Provider of IANA time zone data" -groups = ["default"] -files = [ - {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, - {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, -] - -[[package]] -name = "urllib3" -version = "2.3.0" -requires_python = ">=3.9" -summary = "HTTP library with thread-safe connection pooling, file post, and more." 
-groups = ["default"] -files = [ - {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, - {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, -] - -[[package]] -name = "virtualenv" -version = "20.29.3" -requires_python = ">=3.8" -summary = "Virtual Python Environment builder" -groups = ["dev"] -dependencies = [ - "distlib<1,>=0.3.7", - "filelock<4,>=3.12.2", - "importlib-metadata>=6.6; python_version < \"3.8\"", - "platformdirs<5,>=3.9.1", -] -files = [ - {file = "virtualenv-20.29.3-py3-none-any.whl", hash = "sha256:3e3d00f5807e83b234dfb6122bf37cfadf4be216c53a49ac059d02414f819170"}, - {file = "virtualenv-20.29.3.tar.gz", hash = "sha256:95e39403fcf3940ac45bc717597dba16110b74506131845d9b687d5e73d947ac"}, -] - -[[package]] -name = "wcwidth" -version = "0.2.13" -summary = "Measures the displayed width of unicode strings in a terminal" -groups = ["dev"] -marker = "python_version > \"3.6\"" -dependencies = [ - "backports-functools-lru-cache>=1.2.1; python_version < \"3.2\"", -] -files = [ - {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, - {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, -] - -[[package]] -name = "win32-setctime" -version = "1.2.0" -requires_python = ">=3.5" -summary = "A small Python utility to set file creation time on Windows" -groups = ["default"] -marker = "sys_platform == \"win32\"" -files = [ - {file = "win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390"}, - {file = "win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0"}, -] - -[[package]] -name = "xarray" -version = "2024.7.0" -requires_python = ">=3.9" -summary = "N-D labeled arrays and datasets in Python" -groups = ["default"] -dependencies = [ - "numpy>=1.23", - "packaging>=23.1", - "pandas>=2.0", -] -files = [ - {file = "xarray-2024.7.0-py3-none-any.whl", hash = "sha256:1b0fd51ec408474aa1f4a355d75c00cc1c02bd425d97b2c2e551fd21810e7f64"}, - {file = "xarray-2024.7.0.tar.gz", hash = "sha256:4cae512d121a8522d41e66d942fb06c526bc1fd32c2c181d5fe62fe65b671638"}, -] - -[[package]] -name = "yarl" -version = "1.18.3" -requires_python = ">=3.9" -summary = "Yet another URL library" -groups = ["default"] -dependencies = [ - "idna>=2.0", - "multidict>=4.0", - "propcache>=0.2.0", -] -files = [ - {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, - {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, - {file = "yarl-1.18.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed"}, - {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde"}, - {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b"}, - {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5"}, - {file = 
"yarl-1.18.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc"}, - {file = "yarl-1.18.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd"}, - {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990"}, - {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db"}, - {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62"}, - {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760"}, - {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b"}, - {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690"}, - {file = "yarl-1.18.3-cp310-cp310-win32.whl", hash = "sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6"}, - {file = "yarl-1.18.3-cp310-cp310-win_amd64.whl", hash = "sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8"}, - {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069"}, - {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193"}, - {file = "yarl-1.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889"}, - {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8"}, - {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca"}, - {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8"}, - {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae"}, - {file = "yarl-1.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3"}, - {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb"}, - {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e"}, - {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59"}, - {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d"}, - {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e"}, - {file = 
"yarl-1.18.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a"}, - {file = "yarl-1.18.3-cp311-cp311-win32.whl", hash = "sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1"}, - {file = "yarl-1.18.3-cp311-cp311-win_amd64.whl", hash = "sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5"}, - {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50"}, - {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576"}, - {file = "yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640"}, - {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2"}, - {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75"}, - {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512"}, - {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba"}, - {file = "yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb"}, - {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272"}, - {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6"}, - {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e"}, - {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb"}, - {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393"}, - {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285"}, - {file = "yarl-1.18.3-cp312-cp312-win32.whl", hash = "sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2"}, - {file = "yarl-1.18.3-cp312-cp312-win_amd64.whl", hash = "sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477"}, - {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb"}, - {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa"}, - {file = "yarl-1.18.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782"}, - {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0"}, - {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482"}, - {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186"}, - {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58"}, - {file = "yarl-1.18.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53"}, - {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2"}, - {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8"}, - {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1"}, - {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a"}, - {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10"}, - {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8"}, - {file = "yarl-1.18.3-cp313-cp313-win32.whl", hash = "sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d"}, - {file = "yarl-1.18.3-cp313-cp313-win_amd64.whl", hash = "sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c"}, - {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04"}, - {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719"}, - {file = "yarl-1.18.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e"}, - {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee"}, - {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789"}, - {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8"}, - {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c"}, - {file = "yarl-1.18.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5"}, - {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1"}, - {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24"}, - {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318"}, - {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = 
"sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985"}, - {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910"}, - {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1"}, - {file = "yarl-1.18.3-cp39-cp39-win32.whl", hash = "sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5"}, - {file = "yarl-1.18.3-cp39-cp39-win_amd64.whl", hash = "sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9"}, - {file = "yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b"}, - {file = "yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1"}, -] - -[[package]] -name = "zarr" -version = "2.18.2" -requires_python = ">=3.9" -summary = "An implementation of chunked, compressed, N-dimensional arrays for Python" -groups = ["default"] -dependencies = [ - "asciitree", - "fasteners; sys_platform != \"emscripten\"", - "numcodecs>=0.10.0", - "numpy>=1.23", -] -files = [ - {file = "zarr-2.18.2-py3-none-any.whl", hash = "sha256:a638754902f97efa99b406083fdc807a0e2ccf12a949117389d2a4ba9b05df38"}, - {file = "zarr-2.18.2.tar.gz", hash = "sha256:9bb393b8a0a38fb121dbb913b047d75db28de9890f6d644a217a73cf4ae74f47"}, -] - -[[package]] -name = "zipp" -version = "3.21.0" -requires_python = ">=3.9" -summary = "Backport of pathlib-compatible object wrapper for zip files" -groups = ["default"] -marker = "python_version < \"3.12\"" -files = [ - {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, - {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, -] +# This file is @generated by PDM. +# It is not intended for manual editing. 
+
+[metadata]
+groups = ["default", "dev"]
+strategy = ["inherit_metadata"]
+lock_version = "4.5.0"
+content_hash = "sha256:a6e6fc954a417649253296c4f4639ed6c27c73a11d2a491d7f3d95c084692963"
+
+[[metadata.targets]]
+requires_python = ">=3.9"
+
+[[package]]
+name = "aiohappyeyeballs"
+version = "2.6.1"
+requires_python = ">=3.9"
+summary = "Happy Eyeballs for asyncio"
+groups = ["default"]
+files = [
+    {file = "aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8"},
+    {file = "aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558"},
+]
+
+[[package]]
+name = "aiohttp"
+version = "3.11.14"
+requires_python = ">=3.9"
+summary = "Async http client/server framework (asyncio)"
+groups = ["default"]
+dependencies = [
+    "aiohappyeyeballs>=2.3.0",
+    "aiosignal>=1.1.2",
+    "async-timeout<6.0,>=4.0; python_version < \"3.11\"",
+    "attrs>=17.3.0",
+    "frozenlist>=1.1.1",
+    "multidict<7.0,>=4.5",
+    "propcache>=0.2.0",
+    "yarl<2.0,>=1.17.0",
+]
+files = [
+    {file = "aiohttp-3.11.14-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e2bc827c01f75803de77b134afdbf74fa74b62970eafdf190f3244931d7a5c0d"},
+    {file = "aiohttp-3.11.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e365034c5cf6cf74f57420b57682ea79e19eb29033399dd3f40de4d0171998fa"},
+    {file = "aiohttp-3.11.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c32593ead1a8c6aabd58f9d7ee706e48beac796bb0cb71d6b60f2c1056f0a65f"},
+    {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4e7c7ec4146a94a307ca4f112802a8e26d969018fabed526efc340d21d3e7d0"},
+    {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8b2df9feac55043759aa89f722a967d977d80f8b5865a4153fc41c93b957efc"},
+    {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c7571f99525c76a6280f5fe8e194eeb8cb4da55586c3c61c59c33a33f10cfce7"},
+    {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b59d096b5537ec7c85954cb97d821aae35cfccce3357a2cafe85660cc6295628"},
+    {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b42dbd097abb44b3f1156b4bf978ec5853840802d6eee2784857be11ee82c6a0"},
+    {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b05774864c87210c531b48dfeb2f7659407c2dda8643104fb4ae5e2c311d12d9"},
+    {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4e2e8ef37d4bc110917d038807ee3af82700a93ab2ba5687afae5271b8bc50ff"},
+    {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e9faafa74dbb906b2b6f3eb9942352e9e9db8d583ffed4be618a89bd71a4e914"},
+    {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:7e7abe865504f41b10777ac162c727af14e9f4db9262e3ed8254179053f63e6d"},
+    {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:4848ae31ad44330b30f16c71e4f586cd5402a846b11264c412de99fa768f00f3"},
+    {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2d0b46abee5b5737cb479cc9139b29f010a37b1875ee56d142aefc10686a390b"},
+    {file = "aiohttp-3.11.14-cp310-cp310-win32.whl", hash = "sha256:a0d2c04a623ab83963576548ce098baf711a18e2c32c542b62322a0b4584b990"},
+    {file = "aiohttp-3.11.14-cp310-cp310-win_amd64.whl", hash =
"sha256:5409a59d5057f2386bb8b8f8bbcfb6e15505cedd8b2445db510563b5d7ea1186"}, + {file = "aiohttp-3.11.14-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f296d637a50bb15fb6a229fbb0eb053080e703b53dbfe55b1e4bb1c5ed25d325"}, + {file = "aiohttp-3.11.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ec6cd1954ca2bbf0970f531a628da1b1338f594bf5da7e361e19ba163ecc4f3b"}, + {file = "aiohttp-3.11.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:572def4aad0a4775af66d5a2b5923c7de0820ecaeeb7987dcbccda2a735a993f"}, + {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c68e41c4d576cd6aa6c6d2eddfb32b2acfb07ebfbb4f9da991da26633a3db1a"}, + {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b8bbfc8111826aa8363442c0fc1f5751456b008737ff053570f06a151650b3"}, + {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b0a200e85da5c966277a402736a96457b882360aa15416bf104ca81e6f5807b"}, + {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d173c0ac508a2175f7c9a115a50db5fd3e35190d96fdd1a17f9cb10a6ab09aa1"}, + {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:413fe39fd929329f697f41ad67936f379cba06fcd4c462b62e5b0f8061ee4a77"}, + {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65c75b14ee74e8eeff2886321e76188cbe938d18c85cff349d948430179ad02c"}, + {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:321238a42ed463848f06e291c4bbfb3d15ba5a79221a82c502da3e23d7525d06"}, + {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:59a05cdc636431f7ce843c7c2f04772437dd816a5289f16440b19441be6511f1"}, + {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:daf20d9c3b12ae0fdf15ed92235e190f8284945563c4b8ad95b2d7a31f331cd3"}, + {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:05582cb2d156ac7506e68b5eac83179faedad74522ed88f88e5861b78740dc0e"}, + {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:12c5869e7ddf6b4b1f2109702b3cd7515667b437da90a5a4a50ba1354fe41881"}, + {file = "aiohttp-3.11.14-cp311-cp311-win32.whl", hash = "sha256:92868f6512714efd4a6d6cb2bfc4903b997b36b97baea85f744229f18d12755e"}, + {file = "aiohttp-3.11.14-cp311-cp311-win_amd64.whl", hash = "sha256:bccd2cb7aa5a3bfada72681bdb91637094d81639e116eac368f8b3874620a654"}, + {file = "aiohttp-3.11.14-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:70ab0f61c1a73d3e0342cedd9a7321425c27a7067bebeeacd509f96695b875fc"}, + {file = "aiohttp-3.11.14-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:602d4db80daf4497de93cb1ce00b8fc79969c0a7cf5b67bec96fa939268d806a"}, + {file = "aiohttp-3.11.14-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a8a0d127c10b8d89e69bbd3430da0f73946d839e65fec00ae48ca7916a31948"}, + {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9f835cdfedcb3f5947304e85b8ca3ace31eef6346d8027a97f4de5fb687534"}, + {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8aa5c68e1e68fff7cd3142288101deb4316b51f03d50c92de6ea5ce646e6c71f"}, + {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b512f1de1c688f88dbe1b8bb1283f7fbeb7a2b2b26e743bb2193cbadfa6f307"}, + {file = 
"aiohttp-3.11.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc9253069158d57e27d47a8453d8a2c5a370dc461374111b5184cf2f147a3cc3"}, + {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b2501f1b981e70932b4a552fc9b3c942991c7ae429ea117e8fba57718cdeed0"}, + {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:28a3d083819741592685762d51d789e6155411277050d08066537c5edc4066e6"}, + {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:0df3788187559c262922846087e36228b75987f3ae31dd0a1e5ee1034090d42f"}, + {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e73fa341d8b308bb799cf0ab6f55fc0461d27a9fa3e4582755a3d81a6af8c09"}, + {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:51ba80d473eb780a329d73ac8afa44aa71dfb521693ccea1dea8b9b5c4df45ce"}, + {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8d1dd75aa4d855c7debaf1ef830ff2dfcc33f893c7db0af2423ee761ebffd22b"}, + {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41cf0cefd9e7b5c646c2ef529c8335e7eafd326f444cc1cdb0c47b6bc836f9be"}, + {file = "aiohttp-3.11.14-cp312-cp312-win32.whl", hash = "sha256:948abc8952aff63de7b2c83bfe3f211c727da3a33c3a5866a0e2cf1ee1aa950f"}, + {file = "aiohttp-3.11.14-cp312-cp312-win_amd64.whl", hash = "sha256:3b420d076a46f41ea48e5fcccb996f517af0d406267e31e6716f480a3d50d65c"}, + {file = "aiohttp-3.11.14-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d14e274828561db91e4178f0057a915f3af1757b94c2ca283cb34cbb6e00b50"}, + {file = "aiohttp-3.11.14-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f30fc72daf85486cdcdfc3f5e0aea9255493ef499e31582b34abadbfaafb0965"}, + {file = "aiohttp-3.11.14-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4edcbe34e6dba0136e4cabf7568f5a434d89cc9de5d5155371acda275353d228"}, + {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a7169ded15505f55a87f8f0812c94c9412623c744227b9e51083a72a48b68a5"}, + {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad1f2fb9fe9b585ea4b436d6e998e71b50d2b087b694ab277b30e060c434e5db"}, + {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:20412c7cc3720e47a47e63c0005f78c0c2370020f9f4770d7fc0075f397a9fb0"}, + {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dd9766da617855f7e85f27d2bf9a565ace04ba7c387323cd3e651ac4329db91"}, + {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:599b66582f7276ebefbaa38adf37585e636b6a7a73382eb412f7bc0fc55fb73d"}, + {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b41693b7388324b80f9acfabd479bd1c84f0bc7e8f17bab4ecd9675e9ff9c734"}, + {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:86135c32d06927339c8c5e64f96e4eee8825d928374b9b71a3c42379d7437058"}, + {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:04eb541ce1e03edc1e3be1917a0f45ac703e913c21a940111df73a2c2db11d73"}, + {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dc311634f6f28661a76cbc1c28ecf3b3a70a8edd67b69288ab7ca91058eb5a33"}, + {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:69bb252bfdca385ccabfd55f4cd740d421dd8c8ad438ded9637d81c228d0da49"}, + {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2b86efe23684b58a88e530c4ab5b20145f102916bbb2d82942cafec7bd36a647"}, + {file = "aiohttp-3.11.14-cp313-cp313-win32.whl", hash = "sha256:b9c60d1de973ca94af02053d9b5111c4fbf97158e139b14f1be68337be267be6"}, + {file = "aiohttp-3.11.14-cp313-cp313-win_amd64.whl", hash = "sha256:0a29be28e60e5610d2437b5b2fed61d6f3dcde898b57fb048aa5079271e7f6f3"}, + {file = "aiohttp-3.11.14-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:14fc03508359334edc76d35b2821832f092c8f092e4b356e74e38419dfe7b6de"}, + {file = "aiohttp-3.11.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:92007c89a8cb7be35befa2732b0b32bf3a394c1b22ef2dff0ef12537d98a7bda"}, + {file = "aiohttp-3.11.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6d3986112e34eaa36e280dc8286b9dd4cc1a5bcf328a7f147453e188f6fe148f"}, + {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:749f1eb10e51dbbcdba9df2ef457ec060554842eea4d23874a3e26495f9e87b1"}, + {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:781c8bd423dcc4641298c8c5a2a125c8b1c31e11f828e8d35c1d3a722af4c15a"}, + {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:997b57e38aa7dc6caab843c5e042ab557bc83a2f91b7bd302e3c3aebbb9042a1"}, + {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a8b0321e40a833e381d127be993b7349d1564b756910b28b5f6588a159afef3"}, + {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8778620396e554b758b59773ab29c03b55047841d8894c5e335f12bfc45ebd28"}, + {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e906da0f2bcbf9b26cc2b144929e88cb3bf943dd1942b4e5af066056875c7618"}, + {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:87f0e003fb4dd5810c7fbf47a1239eaa34cd929ef160e0a54c570883125c4831"}, + {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:7f2dadece8b85596ac3ab1ec04b00694bdd62abc31e5618f524648d18d9dd7fa"}, + {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:fe846f0a98aa9913c2852b630cd39b4098f296e0907dd05f6c7b30d911afa4c3"}, + {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ced66c5c6ad5bcaf9be54560398654779ec1c3695f1a9cf0ae5e3606694a000a"}, + {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a40087b82f83bd671cbeb5f582c233d196e9653220404a798798bfc0ee189fff"}, + {file = "aiohttp-3.11.14-cp39-cp39-win32.whl", hash = "sha256:95d7787f2bcbf7cb46823036a8d64ccfbc2ffc7d52016b4044d901abceeba3db"}, + {file = "aiohttp-3.11.14-cp39-cp39-win_amd64.whl", hash = "sha256:22a8107896877212130c58f74e64b77f7007cb03cea8698be317272643602d45"}, + {file = "aiohttp-3.11.14.tar.gz", hash = "sha256:d6edc538c7480fa0a3b2bdd705f8010062d74700198da55d16498e1b49549b9c"}, +] + +[[package]] +name = "aiosignal" +version = "1.3.2" +requires_python = ">=3.9" +summary = "aiosignal: a list of registered asynchronous callbacks" +groups = ["default"] +dependencies = [ + "frozenlist>=1.1.0", +] +files = [ + {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, + {file = "aiosignal-1.3.2.tar.gz", hash = 
"sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, +] + +[[package]] +name = "asciitree" +version = "0.3.3" +summary = "Draws ASCII trees." +groups = ["default"] +files = [ + {file = "asciitree-0.3.3.tar.gz", hash = "sha256:4aa4b9b649f85e3fcb343363d97564aa1fb62e249677f2e18a96765145cc0f6e"}, +] + +[[package]] +name = "asttokens" +version = "3.0.0" +requires_python = ">=3.8" +summary = "Annotate AST trees with source code positions" +groups = ["dev"] +marker = "python_version > \"3.6\"" +files = [ + {file = "asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"}, + {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"}, +] + +[[package]] +name = "async-timeout" +version = "5.0.1" +requires_python = ">=3.8" +summary = "Timeout context manager for asyncio programs" +groups = ["default"] +marker = "python_version < \"3.11\"" +files = [ + {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, + {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, +] + +[[package]] +name = "attrs" +version = "25.3.0" +requires_python = ">=3.8" +summary = "Classes Without Boilerplate" +groups = ["default"] +files = [ + {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, + {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, +] + +[[package]] +name = "certifi" +version = "2025.1.31" +requires_python = ">=3.6" +summary = "Python package for providing Mozilla's CA Bundle." +groups = ["default"] +files = [ + {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, + {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +requires_python = ">=3.8" +summary = "Validate configuration and produce human readable error messages." +groups = ["dev"] +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.1" +requires_python = ">=3.7" +summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+groups = ["default"] +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = 
"charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = 
"charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, +] + +[[package]] +name = "click" +version = "8.1.8" +requires_python = ">=3.7" +summary = "Composable command line interface toolkit" +groups = ["default"] +dependencies = [ + "colorama; platform_system == \"Windows\"", + "importlib-metadata; python_version < \"3.8\"", +] +files = [ + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, +] + +[[package]] +name = "cloudpickle" +version = "3.1.1" +requires_python = ">=3.8" +summary = "Pickler class to extend the standard pickle.Pickler functionality" +groups = ["default"] +files = [ + {file = "cloudpickle-3.1.1-py3-none-any.whl", hash = "sha256:c8c5a44295039331ee9dad40ba100a9c7297b6f988e50e87ccdf3765a668350e"}, + {file = "cloudpickle-3.1.1.tar.gz", hash = "sha256:b216fa8ae4019d5482a8ac3c95d8f6346115d8835911fd4aefd1a445e4242c64"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +summary = "Cross-platform colored terminal text." +groups = ["default", "dev"] +marker = "sys_platform == \"win32\" or platform_system == \"Windows\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "dask" +version = "2024.8.0" +requires_python = ">=3.9" +summary = "Parallel PyData with Task Scheduling" +groups = ["default"] +dependencies = [ + "click>=8.1", + "cloudpickle>=1.5.0", + "fsspec>=2021.09.0", + "importlib-metadata>=4.13.0; python_version < \"3.12\"", + "packaging>=20.0", + "partd>=1.4.0", + "pyyaml>=5.3.1", + "toolz>=0.10.0", +] +files = [ + {file = "dask-2024.8.0-py3-none-any.whl", hash = "sha256:250ea3df30d4a25958290eec4f252850091c6cfaed82d098179c3b25bba18309"}, + {file = "dask-2024.8.0.tar.gz", hash = "sha256:f1fec39373d2f101bc045529ad4e9b30e34e6eb33b7aa0fa7073aec7b1bf9eee"}, +] + +[[package]] +name = "dataclass-wizard" +version = "0.35.0" +summary = "Lightning-fast JSON wizardry for Python dataclasses — effortless serialization right out of the box!" 
+groups = ["default"] +dependencies = [ + "typing-extensions>=4.9.0; python_version <= \"3.12\"", +] +files = [ + {file = "dataclass-wizard-0.35.0.tar.gz", hash = "sha256:8e4b254991bf93416a48e2911bb985e3787cff11f00270c3d1165d2523cb3fb6"}, + {file = "dataclass_wizard-0.35.0-py2.py3-none-any.whl", hash = "sha256:3bb19292477f0bebb12e9cc9178f1a6b93d133af4ae065abf14b713142b32edf"}, +] + +[[package]] +name = "decorator" +version = "5.2.1" +requires_python = ">=3.8" +summary = "Decorators for Humans" +groups = ["dev"] +marker = "python_version > \"3.6\"" +files = [ + {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}, + {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, +] + +[[package]] +name = "deepdiff" +version = "8.4.2" +requires_python = ">=3.8" +summary = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other." +groups = ["default"] +dependencies = [ + "orderly-set<6,>=5.3.0", +] +files = [ + {file = "deepdiff-8.4.2-py3-none-any.whl", hash = "sha256:7e39e5b26f3747c54f9d0e8b9b29daab670c3100166b77cc0185d5793121b099"}, + {file = "deepdiff-8.4.2.tar.gz", hash = "sha256:5c741c0867ebc7fcb83950ad5ed958369c17f424e14dee32a11c56073f4ee92a"}, +] + +[[package]] +name = "distlib" +version = "0.3.9" +summary = "Distribution utilities" +groups = ["dev"] +files = [ + {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, + {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +requires_python = ">=3.7" +summary = "Backport of PEP 654 (exception groups)" +groups = ["dev"] +marker = "python_version < \"3.11\"" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[[package]] +name = "executing" +version = "2.2.0" +requires_python = ">=3.8" +summary = "Get the currently executing AST node of a frame, and other information" +groups = ["dev"] +marker = "python_version > \"3.6\"" +files = [ + {file = "executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa"}, + {file = "executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755"}, +] + +[[package]] +name = "fasteners" +version = "0.19" +requires_python = ">=3.6" +summary = "A python package that provides useful locks" +groups = ["default"] +marker = "sys_platform != \"emscripten\"" +files = [ + {file = "fasteners-0.19-py3-none-any.whl", hash = "sha256:758819cb5d94cdedf4e836988b74de396ceacb8e2794d21f82d131fd9ee77237"}, + {file = "fasteners-0.19.tar.gz", hash = "sha256:b4f37c3ac52d8a445af3a66bce57b33b5e90b97c696b7b984f530cf8f0ded09c"}, +] + +[[package]] +name = "filelock" +version = "3.18.0" +requires_python = ">=3.9" +summary = "A platform independent file lock." 
+groups = ["dev"] +files = [ + {file = "filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de"}, + {file = "filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2"}, +] + +[[package]] +name = "frozenlist" +version = "1.5.0" +requires_python = ">=3.8" +summary = "A list-like structure which implements collections.abc.MutableSequence" +groups = ["default"] +files = [ + {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5"}, + {file = "frozenlist-1.5.0-cp310-cp310-win32.whl", hash = "sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb"}, + {file = "frozenlist-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a"}, + {file = 
"frozenlist-1.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf"}, + {file = "frozenlist-1.5.0-cp311-cp311-win32.whl", hash = "sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942"}, + {file = "frozenlist-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f"}, + {file = "frozenlist-1.5.0-cp312-cp312-win32.whl", hash = "sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8"}, + {file = "frozenlist-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f"}, + {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953"}, + {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0"}, + {file = "frozenlist-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03"}, + {file = "frozenlist-1.5.0-cp313-cp313-win32.whl", hash = "sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c"}, + {file = "frozenlist-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f"}, + {file = 
"frozenlist-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c"}, + {file = "frozenlist-1.5.0-cp39-cp39-win32.whl", hash = "sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3"}, + {file = "frozenlist-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0"}, + {file = "frozenlist-1.5.0-py3-none-any.whl", hash = "sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3"}, + {file = "frozenlist-1.5.0.tar.gz", hash = "sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817"}, +] + +[[package]] +name = "fsspec" +version = "2025.3.0" +requires_python = ">=3.8" +summary = "File-system specification" +groups = ["default"] +files = [ + {file = "fsspec-2025.3.0-py3-none-any.whl", hash = "sha256:efb87af3efa9103f94ca91a7f8cb7a4df91af9f74fc106c9c7ea0efd7277c1b3"}, + {file = "fsspec-2025.3.0.tar.gz", hash = "sha256:a935fd1ea872591f2b5148907d103488fc523295e6c64b835cfad8c3eca44972"}, +] + +[[package]] +name = "identify" +version = "2.6.9" +requires_python = ">=3.9" +summary = "File identification library for Python" +groups = ["dev"] +files = [ + {file = "identify-2.6.9-py2.py3-none-any.whl", hash = "sha256:c98b4322da415a8e5a70ff6e51fbc2d2932c015532d77e9f8537b4ba7813b150"}, + {file = "identify-2.6.9.tar.gz", hash = "sha256:d40dfe3142a1421d8518e3d3985ef5ac42890683e32306ad614a29490abeb6bf"}, +] + +[[package]] +name = "idna" +version = "3.10" +requires_python = ">=3.6" +summary = "Internationalized Domain Names in Applications (IDNA)" +groups = ["default"] +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[[package]] +name = "importlib-metadata" +version = "8.6.1" +requires_python = ">=3.9" +summary = "Read metadata from Python packages" +groups = 
["default"] +marker = "python_version < \"3.12\"" +dependencies = [ + "typing-extensions>=3.6.4; python_version < \"3.8\"", + "zipp>=3.20", +] +files = [ + {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, + {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +requires_python = ">=3.8" +summary = "brain-dead simple config-ini parsing" +groups = ["dev"] +files = [ + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, +] + +[[package]] +name = "ipdb" +version = "0.13.13" +requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +summary = "IPython-enabled pdb" +groups = ["dev"] +dependencies = [ + "decorator; python_version == \"3.5\"", + "decorator; python_version == \"3.6\"", + "decorator; python_version > \"3.6\" and python_version < \"3.11\"", + "decorator; python_version >= \"3.11\"", + "decorator<5.0.0; python_version == \"2.7\"", + "decorator<5.0.0; python_version == \"3.4\"", + "ipython<6.0.0,>=5.1.0; python_version == \"2.7\"", + "ipython<7.0.0,>=6.0.0; python_version == \"3.4\"", + "ipython<7.10.0,>=7.0.0; python_version == \"3.5\"", + "ipython<7.17.0,>=7.16.3; python_version == \"3.6\"", + "ipython>=7.31.1; python_version > \"3.6\" and python_version < \"3.11\"", + "ipython>=7.31.1; python_version >= \"3.11\"", + "pathlib; python_version == \"2.7\"", + "toml>=0.10.2; python_version == \"2.7\"", + "toml>=0.10.2; python_version == \"3.4\"", + "toml>=0.10.2; python_version == \"3.5\"", + "tomli; python_version == \"3.6\"", + "tomli; python_version > \"3.6\" and python_version < \"3.11\"", +] +files = [ + {file = "ipdb-0.13.13-py3-none-any.whl", hash = "sha256:45529994741c4ab6d2388bfa5d7b725c2cf7fe9deffabdb8a6113aa5ed449ed4"}, + {file = "ipdb-0.13.13.tar.gz", hash = "sha256:e3ac6018ef05126d442af680aad863006ec19d02290561ac88b8b1c0b0cfc726"}, +] + +[[package]] +name = "ipython" +version = "8.18.1" +requires_python = ">=3.9" +summary = "IPython: Productive Interactive Computing" +groups = ["dev"] +marker = "python_version > \"3.6\"" +dependencies = [ + "colorama; sys_platform == \"win32\"", + "decorator", + "exceptiongroup; python_version < \"3.11\"", + "jedi>=0.16", + "matplotlib-inline", + "pexpect>4.3; sys_platform != \"win32\"", + "prompt-toolkit<3.1.0,>=3.0.41", + "pygments>=2.4.0", + "stack-data", + "traitlets>=5", + "typing-extensions; python_version < \"3.10\"", +] +files = [ + {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, + {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, +] + +[[package]] +name = "isodate" +version = "0.7.2" +requires_python = ">=3.7" +summary = "An ISO 8601 date/time/duration parser and formatter" +groups = ["default"] +files = [ + {file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"}, + {file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"}, +] + +[[package]] +name = "jedi" +version = "0.19.2" +requires_python = ">=3.6" +summary = "An autocompletion tool for Python that can be 
used for text editors." +groups = ["dev"] +marker = "python_version > \"3.6\"" +dependencies = [ + "parso<0.9.0,>=0.8.4", +] +files = [ + {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, + {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, +] + +[[package]] +name = "locket" +version = "1.0.0" +requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +summary = "File-based locks for Python on Linux and Windows" +groups = ["default"] +files = [ + {file = "locket-1.0.0-py2.py3-none-any.whl", hash = "sha256:b6c819a722f7b6bd955b80781788e4a66a55628b858d347536b7e81325a3a5e3"}, + {file = "locket-1.0.0.tar.gz", hash = "sha256:5c0d4c052a8bbbf750e056a8e65ccd309086f4f0f18a2eac306a8dfa4112a632"}, +] + +[[package]] +name = "loguru" +version = "0.7.3" +requires_python = "<4.0,>=3.5" +summary = "Python logging made (stupidly) simple" +groups = ["default"] +dependencies = [ + "aiocontextvars>=0.2.0; python_version < \"3.7\"", + "colorama>=0.3.4; sys_platform == \"win32\"", + "win32-setctime>=1.0.0; sys_platform == \"win32\"", +] +files = [ + {file = "loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c"}, + {file = "loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6"}, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +requires_python = ">=3.8" +summary = "Python port of markdown-it. Markdown parsing, done right!" +groups = ["default"] +dependencies = [ + "mdurl~=0.1", +] +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[[package]] +name = "matplotlib-inline" +version = "0.1.7" +requires_python = ">=3.8" +summary = "Inline Matplotlib backend for Jupyter" +groups = ["dev"] +marker = "python_version > \"3.6\"" +dependencies = [ + "traitlets", +] +files = [ + {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, + {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +requires_python = ">=3.7" +summary = "Markdown URL utilities" +groups = ["default"] +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "multidict" +version = "6.2.0" +requires_python = ">=3.9" +summary = "multidict implementation" +groups = ["default"] +dependencies = [ + "typing-extensions>=4.1.0; python_version < \"3.11\"", +] +files = [ + {file = "multidict-6.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b9f6392d98c0bd70676ae41474e2eecf4c7150cb419237a41f8f96043fcb81d1"}, + {file = "multidict-6.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3501621d5e86f1a88521ea65d5cad0a0834c77b26f193747615b7c911e5422d2"}, + {file = "multidict-6.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:32ed748ff9ac682eae7859790d3044b50e3076c7d80e17a44239683769ff485e"}, + {file = 
"multidict-6.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc826b9a8176e686b67aa60fd6c6a7047b0461cae5591ea1dc73d28f72332a8a"}, + {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:214207dcc7a6221d9942f23797fe89144128a71c03632bf713d918db99bd36de"}, + {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05fefbc3cddc4e36da209a5e49f1094bbece9a581faa7f3589201fd95df40e5d"}, + {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e851e6363d0dbe515d8de81fd544a2c956fdec6f8a049739562286727d4a00c3"}, + {file = "multidict-6.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32c9b4878f48be3e75808ea7e499d6223b1eea6d54c487a66bc10a1871e3dc6a"}, + {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7243c5a6523c5cfeca76e063efa5f6a656d1d74c8b1fc64b2cd1e84e507f7e2a"}, + {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0e5a644e50ef9fb87878d4d57907f03a12410d2aa3b93b3acdf90a741df52c49"}, + {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0dc25a3293c50744796e87048de5e68996104d86d940bb24bc3ec31df281b191"}, + {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:a49994481b99cd7dedde07f2e7e93b1d86c01c0fca1c32aded18f10695ae17eb"}, + {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:641cf2e3447c9ecff2f7aa6e9eee9eaa286ea65d57b014543a4911ff2799d08a"}, + {file = "multidict-6.2.0-cp310-cp310-win32.whl", hash = "sha256:0c383d28857f66f5aebe3e91d6cf498da73af75fbd51cedbe1adfb85e90c0460"}, + {file = "multidict-6.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:a33273a541f1e1a8219b2a4ed2de355848ecc0254264915b9290c8d2de1c74e1"}, + {file = "multidict-6.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:84e87a7d75fa36839a3a432286d719975362d230c70ebfa0948549cc38bd5b46"}, + {file = "multidict-6.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8de4d42dffd5ced9117af2ce66ba8722402541a3aa98ffdf78dde92badb68932"}, + {file = "multidict-6.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7d91a230c7f8af86c904a5a992b8c064b66330544693fd6759c3d6162382ecf"}, + {file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f6cad071960ba1914fa231677d21b1b4a3acdcce463cee41ea30bc82e6040cf"}, + {file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f74f2fc51555f4b037ef278efc29a870d327053aba5cb7d86ae572426c7cccc"}, + {file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:14ed9ed1bfedd72a877807c71113deac292bf485159a29025dfdc524c326f3e1"}, + {file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ac3fcf9a2d369bd075b2c2965544036a27ccd277fc3c04f708338cc57533081"}, + {file = "multidict-6.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fc6af8e39f7496047c7876314f4317736eac82bf85b54c7c76cf1a6f8e35d98"}, + {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5f8cb1329f42fadfb40d6211e5ff568d71ab49be36e759345f91c69d1033d633"}, + {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5389445f0173c197f4a3613713b5fb3f3879df1ded2a1a2e4bc4b5b9c5441b7e"}, + {file = 
"multidict-6.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:94a7bb972178a8bfc4055db80c51efd24baefaced5e51c59b0d598a004e8305d"}, + {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da51d8928ad8b4244926fe862ba1795f0b6e68ed8c42cd2f822d435db9c2a8f4"}, + {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:063be88bd684782a0715641de853e1e58a2f25b76388538bd62d974777ce9bc2"}, + {file = "multidict-6.2.0-cp311-cp311-win32.whl", hash = "sha256:52b05e21ff05729fbea9bc20b3a791c3c11da61649ff64cce8257c82a020466d"}, + {file = "multidict-6.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1e2a2193d3aa5cbf5758f6d5680a52aa848e0cf611da324f71e5e48a9695cc86"}, + {file = "multidict-6.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:437c33561edb6eb504b5a30203daf81d4a9b727e167e78b0854d9a4e18e8950b"}, + {file = "multidict-6.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9f49585f4abadd2283034fc605961f40c638635bc60f5162276fec075f2e37a4"}, + {file = "multidict-6.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5dd7106d064d05896ce28c97da3f46caa442fe5a43bc26dfb258e90853b39b44"}, + {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e25b11a0417475f093d0f0809a149aff3943c2c56da50fdf2c3c88d57fe3dfbd"}, + {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac380cacdd3b183338ba63a144a34e9044520a6fb30c58aa14077157a033c13e"}, + {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61d5541f27533f803a941d3a3f8a3d10ed48c12cf918f557efcbf3cd04ef265c"}, + {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:facaf11f21f3a4c51b62931feb13310e6fe3475f85e20d9c9fdce0d2ea561b87"}, + {file = "multidict-6.2.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:095a2eabe8c43041d3e6c2cb8287a257b5f1801c2d6ebd1dd877424f1e89cf29"}, + {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a0cc398350ef31167e03f3ca7c19313d4e40a662adcb98a88755e4e861170bdd"}, + {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7c611345bbe7cb44aabb877cb94b63e86f2d0db03e382667dbd037866d44b4f8"}, + {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8cd1a0644ccaf27e9d2f6d9c9474faabee21f0578fe85225cc5af9a61e1653df"}, + {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:89b3857652183b8206a891168af47bac10b970d275bba1f6ee46565a758c078d"}, + {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:125dd82b40f8c06d08d87b3510beaccb88afac94e9ed4a6f6c71362dc7dbb04b"}, + {file = "multidict-6.2.0-cp312-cp312-win32.whl", hash = "sha256:76b34c12b013d813e6cb325e6bd4f9c984db27758b16085926bbe7ceeaace626"}, + {file = "multidict-6.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:0b183a959fb88ad1be201de2c4bdf52fa8e46e6c185d76201286a97b6f5ee65c"}, + {file = "multidict-6.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5c5e7d2e300d5cb3b2693b6d60d3e8c8e7dd4ebe27cd17c9cb57020cac0acb80"}, + {file = "multidict-6.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:256d431fe4583c5f1e0f2e9c4d9c22f3a04ae96009b8cfa096da3a8723db0a16"}, + {file = "multidict-6.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a3c0ff89fe40a152e77b191b83282c9664357dce3004032d42e68c514ceff27e"}, + {file = 
"multidict-6.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef7d48207926edbf8b16b336f779c557dd8f5a33035a85db9c4b0febb0706817"}, + {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3c099d3899b14e1ce52262eb82a5f5cb92157bb5106bf627b618c090a0eadc"}, + {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e16e7297f29a544f49340012d6fc08cf14de0ab361c9eb7529f6a57a30cbfda1"}, + {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:042028348dc5a1f2be6c666437042a98a5d24cee50380f4c0902215e5ec41844"}, + {file = "multidict-6.2.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:08549895e6a799bd551cf276f6e59820aa084f0f90665c0f03dd3a50db5d3c48"}, + {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4ccfd74957ef53fa7380aaa1c961f523d582cd5e85a620880ffabd407f8202c0"}, + {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:83b78c680d4b15d33042d330c2fa31813ca3974197bddb3836a5c635a5fd013f"}, + {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b4c153863dd6569f6511845922c53e39c8d61f6e81f228ad5443e690fca403de"}, + {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:98aa8325c7f47183b45588af9c434533196e241be0a4e4ae2190b06d17675c02"}, + {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9e658d1373c424457ddf6d55ec1db93c280b8579276bebd1f72f113072df8a5d"}, + {file = "multidict-6.2.0-cp313-cp313-win32.whl", hash = "sha256:3157126b028c074951839233647bd0e30df77ef1fedd801b48bdcad242a60f4e"}, + {file = "multidict-6.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:2e87f1926e91855ae61769ba3e3f7315120788c099677e0842e697b0bfb659f2"}, + {file = "multidict-6.2.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:2529ddbdaa424b2c6c2eb668ea684dd6b75b839d0ad4b21aad60c168269478d7"}, + {file = "multidict-6.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:13551d0e2d7201f0959725a6a769b6f7b9019a168ed96006479c9ac33fe4096b"}, + {file = "multidict-6.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d1996ee1330e245cd3aeda0887b4409e3930524c27642b046e4fae88ffa66c5e"}, + {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c537da54ce4ff7c15e78ab1292e5799d0d43a2108e006578a57f531866f64025"}, + {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f249badb360b0b4d694307ad40f811f83df4da8cef7b68e429e4eea939e49dd"}, + {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48d39b1824b8d6ea7de878ef6226efbe0773f9c64333e1125e0efcfdd18a24c7"}, + {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b99aac6bb2c37db336fa03a39b40ed4ef2818bf2dfb9441458165ebe88b793af"}, + {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07bfa8bc649783e703263f783f73e27fef8cd37baaad4389816cf6a133141331"}, + {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b2c00ad31fbc2cbac85d7d0fcf90853b2ca2e69d825a2d3f3edb842ef1544a2c"}, + {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d57a01a2a9fa00234aace434d8c131f0ac6e0ac6ef131eda5962d7e79edfb5b"}, + {file = 
"multidict-6.2.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:abf5b17bc0cf626a8a497d89ac691308dbd825d2ac372aa990b1ca114e470151"}, + {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:f7716f7e7138252d88607228ce40be22660d6608d20fd365d596e7ca0738e019"}, + {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d5a36953389f35f0a4e88dc796048829a2f467c9197265504593f0e420571547"}, + {file = "multidict-6.2.0-cp313-cp313t-win32.whl", hash = "sha256:e653d36b1bf48fa78c7fcebb5fa679342e025121ace8c87ab05c1cefd33b34fc"}, + {file = "multidict-6.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ca23db5fb195b5ef4fd1f77ce26cadefdf13dba71dab14dadd29b34d457d7c44"}, + {file = "multidict-6.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b4f3d66dd0354b79761481fc15bdafaba0b9d9076f1f42cc9ce10d7fcbda205a"}, + {file = "multidict-6.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e2a2d6749e1ff2c9c76a72c6530d5baa601205b14e441e6d98011000f47a7ac"}, + {file = "multidict-6.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cca83a629f77402cfadd58352e394d79a61c8015f1694b83ab72237ec3941f88"}, + {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:781b5dd1db18c9e9eacc419027b0acb5073bdec9de1675c0be25ceb10e2ad133"}, + {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cf8d370b2fea27fb300825ec3984334f7dd54a581bde6456799ba3776915a656"}, + {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:25bb96338512e2f46f615a2bb7c6012fe92a4a5ebd353e5020836a7e33120349"}, + {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19e2819b0b468174de25c0ceed766606a07cedeab132383f1e83b9a4e96ccb4f"}, + {file = "multidict-6.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6aed763b6a1b28c46c055692836879328f0b334a6d61572ee4113a5d0c859872"}, + {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a1133414b771619aa3c3000701c11b2e4624a7f492f12f256aedde97c28331a2"}, + {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:639556758c36093b35e2e368ca485dada6afc2bd6a1b1207d85ea6dfc3deab27"}, + {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:163f4604e76639f728d127293d24c3e208b445b463168af3d031b92b0998bb90"}, + {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2325105e16d434749e1be8022f942876a936f9bece4ec41ae244e3d7fae42aaf"}, + {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e4371591e621579cb6da8401e4ea405b33ff25a755874a3567c4075ca63d56e2"}, + {file = "multidict-6.2.0-cp39-cp39-win32.whl", hash = "sha256:d1175b0e0d6037fab207f05774a176d71210ebd40b1c51f480a04b65ec5c786d"}, + {file = "multidict-6.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad81012b24b88aad4c70b2cbc2dad84018783221b7f923e926f4690ff8569da3"}, + {file = "multidict-6.2.0-py3-none-any.whl", hash = "sha256:5d26547423e5e71dcc562c4acdc134b900640a39abd9066d7326a7cc2324c530"}, + {file = "multidict-6.2.0.tar.gz", hash = "sha256:0085b0afb2446e57050140240a8595846ed64d1cbd26cef936bfab3192c673b8"}, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +summary = "Node.js virtual environment builder" +groups = ["dev"] +files = [ + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = 
"sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + +[[package]] +name = "numcodecs" +version = "0.12.1" +requires_python = ">=3.8" +summary = "A Python package providing buffer compression and transformation codecs for use in data storage and communication applications." +groups = ["default"] +dependencies = [ + "numpy>=1.7", +] +files = [ + {file = "numcodecs-0.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d37f628fe92b3699e65831d5733feca74d2e33b50ef29118ffd41c13c677210e"}, + {file = "numcodecs-0.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:941b7446b68cf79f089bcfe92edaa3b154533dcbcd82474f994b28f2eedb1c60"}, + {file = "numcodecs-0.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e79bf9d1d37199ac00a60ff3adb64757523291d19d03116832e600cac391c51"}, + {file = "numcodecs-0.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:82d7107f80f9307235cb7e74719292d101c7ea1e393fe628817f0d635b7384f5"}, + {file = "numcodecs-0.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eeaf42768910f1c6eebf6c1bb00160728e62c9343df9e2e315dc9fe12e3f6071"}, + {file = "numcodecs-0.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:135b2d47563f7b9dc5ee6ce3d1b81b0f1397f69309e909f1a35bb0f7c553d45e"}, + {file = "numcodecs-0.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a191a8e347ecd016e5c357f2bf41fbcb026f6ffe78fff50c77ab12e96701d155"}, + {file = "numcodecs-0.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:21d8267bd4313f4d16f5b6287731d4c8ebdab236038f29ad1b0e93c9b2ca64ee"}, + {file = "numcodecs-0.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2f84df6b8693206365a5b37c005bfa9d1be486122bde683a7b6446af4b75d862"}, + {file = "numcodecs-0.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:760627780a8b6afdb7f942f2a0ddaf4e31d3d7eea1d8498cf0fd3204a33c4618"}, + {file = "numcodecs-0.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c258bd1d3dfa75a9b708540d23b2da43d63607f9df76dfa0309a7597d1de3b73"}, + {file = "numcodecs-0.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:e04649ea504aff858dbe294631f098fbfd671baf58bfc04fc48d746554c05d67"}, + {file = "numcodecs-0.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2fbb12a6a1abe95926f25c65e283762d63a9bf9e43c0de2c6a1a798347dfcb40"}, + {file = "numcodecs-0.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f2207871868b2464dc11c513965fd99b958a9d7cde2629be7b2dc84fdaab013b"}, + {file = "numcodecs-0.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abff3554a6892a89aacf7b642a044e4535499edf07aeae2f2e6e8fc08c9ba07f"}, + {file = "numcodecs-0.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:ef964d4860d3e6b38df0633caf3e51dc850a6293fd8e93240473642681d95136"}, + {file = "numcodecs-0.12.1.tar.gz", hash = "sha256:05d91a433733e7eef268d7e80ec226a0232da244289614a8f3826901aec1098e"}, +] + +[[package]] +name = "numpy" +version = "2.0.2" +requires_python = ">=3.9" +summary = "Fundamental package for array computing in Python" +groups = ["default"] +files = [ + {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_arm64.whl", hash = 
"sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b"}, + {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd"}, + {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318"}, + {file = "numpy-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8"}, + {file = "numpy-2.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326"}, + {file = "numpy-2.0.2-cp310-cp310-win32.whl", hash = "sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97"}, + {file = "numpy-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a"}, + {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669"}, + {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951"}, + {file = "numpy-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9"}, + {file = "numpy-2.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15"}, + {file = "numpy-2.0.2-cp311-cp311-win32.whl", hash = "sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4"}, + {file = "numpy-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c"}, + {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692"}, + {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a"}, + {file = "numpy-2.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c"}, + {file = "numpy-2.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded"}, + {file = "numpy-2.0.2-cp312-cp312-win32.whl", hash = "sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5"}, + {file = "numpy-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729"}, + {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1"}, + {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd"}, + {file = "numpy-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d"}, + {file = "numpy-2.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d"}, + {file = "numpy-2.0.2-cp39-cp39-win32.whl", hash = "sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa"}, + {file = "numpy-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385"}, + {file = "numpy-2.0.2.tar.gz", hash = "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"}, +] + +[[package]] +name = "orderly-set" +version = "5.3.0" +requires_python = ">=3.8" +summary = "Orderly set" +groups = ["default"] +files = [ + {file = "orderly_set-5.3.0-py3-none-any.whl", hash = "sha256:c2c0bfe604f5d3d9b24e8262a06feb612594f37aa3845650548befd7772945d1"}, + {file = "orderly_set-5.3.0.tar.gz", hash = "sha256:80b3d8fdd3d39004d9aad389eaa0eab02c71f0a0511ba3a6d54a935a6c6a0acc"}, +] + +[[package]] +name = "packaging" +version = "24.2" +requires_python = ">=3.8" +summary = "Core utilities for Python packages" +groups = ["default", "dev"] +files = [ + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, +] + +[[package]] +name = "pandas" +version = "2.2.3" +requires_python = ">=3.9" +summary = 
"Powerful data structures for data analysis, time series, and statistics" +groups = ["default"] +dependencies = [ + "numpy>=1.22.4; python_version < \"3.11\"", + "numpy>=1.23.2; python_version == \"3.11\"", + "numpy>=1.26.0; python_version >= \"3.12\"", + "python-dateutil>=2.8.2", + "pytz>=2020.1", + "tzdata>=2022.7", +] +files = [ + {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, + {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, + {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, + {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", 
hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, + {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, + {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, + {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, +] + +[[package]] +name = "parso" +version = "0.8.4" +requires_python = ">=3.6" +summary = "A Python Parser" +groups = ["dev"] +marker = "python_version > \"3.6\"" +files = [ + {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, + {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, +] + +[[package]] +name = "partd" +version = "1.4.2" 
+requires_python = ">=3.9" +summary = "Appendable key-value storage" +groups = ["default"] +dependencies = [ + "locket", + "toolz", +] +files = [ + {file = "partd-1.4.2-py3-none-any.whl", hash = "sha256:978e4ac767ec4ba5b86c6eaa52e5a2a3bc748a2ca839e8cc798f1cc6ce6efb0f"}, + {file = "partd-1.4.2.tar.gz", hash = "sha256:d022c33afbdc8405c226621b015e8067888173d85f7f5ecebb3cafed9a20f02c"}, +] + +[[package]] +name = "pexpect" +version = "4.9.0" +summary = "Pexpect allows easy control of interactive console applications." +groups = ["dev"] +marker = "sys_platform != \"win32\" and python_version > \"3.6\"" +dependencies = [ + "ptyprocess>=0.5", +] +files = [ + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, +] + +[[package]] +name = "platformdirs" +version = "4.3.7" +requires_python = ">=3.9" +summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +groups = ["dev"] +files = [ + {file = "platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94"}, + {file = "platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351"}, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +requires_python = ">=3.8" +summary = "plugin and hook calling mechanisms for python" +groups = ["dev"] +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[[package]] +name = "pre-commit" +version = "4.2.0" +requires_python = ">=3.9" +summary = "A framework for managing and maintaining multi-language pre-commit hooks." 
+groups = ["dev"] +dependencies = [ + "cfgv>=2.0.0", + "identify>=1.0.0", + "nodeenv>=0.11.1", + "pyyaml>=5.1", + "virtualenv>=20.10.0", +] +files = [ + {file = "pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd"}, + {file = "pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146"}, +] + +[[package]] +name = "prompt-toolkit" +version = "3.0.50" +requires_python = ">=3.8.0" +summary = "Library for building powerful interactive command lines in Python" +groups = ["dev"] +marker = "python_version > \"3.6\"" +dependencies = [ + "wcwidth", +] +files = [ + {file = "prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198"}, + {file = "prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab"}, +] + +[[package]] +name = "propcache" +version = "0.3.1" +requires_python = ">=3.9" +summary = "Accelerated property cache" +groups = ["default"] +files = [ + {file = "propcache-0.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f27785888d2fdd918bc36de8b8739f2d6c791399552333721b58193f68ea3e98"}, + {file = "propcache-0.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4e89cde74154c7b5957f87a355bb9c8ec929c167b59c83d90654ea36aeb6180"}, + {file = "propcache-0.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:730178f476ef03d3d4d255f0c9fa186cb1d13fd33ffe89d39f2cda4da90ceb71"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967a8eec513dbe08330f10137eacb427b2ca52118769e82ebcfcab0fba92a649"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b9145c35cc87313b5fd480144f8078716007656093d23059e8993d3a8fa730f"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e64e948ab41411958670f1093c0a57acfdc3bee5cf5b935671bbd5313bcf229"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:319fa8765bfd6a265e5fa661547556da381e53274bc05094fc9ea50da51bfd46"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66d8ccbc902ad548312b96ed8d5d266d0d2c6d006fd0f66323e9d8f2dd49be7"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2d219b0dbabe75e15e581fc1ae796109b07c8ba7d25b9ae8d650da582bed01b0"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:cd6a55f65241c551eb53f8cf4d2f4af33512c39da5d9777694e9d9c60872f519"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9979643ffc69b799d50d3a7b72b5164a2e97e117009d7af6dfdd2ab906cb72cd"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4cf9e93a81979f1424f1a3d155213dc928f1069d697e4353edb8a5eba67c6259"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2fce1df66915909ff6c824bbb5eb403d2d15f98f1518e583074671a30fe0c21e"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4d0dfdd9a2ebc77b869a0b04423591ea8823f791293b527dc1bb896c1d6f1136"}, + {file = "propcache-0.3.1-cp310-cp310-win32.whl", hash = "sha256:1f6cc0ad7b4560e5637eb2c994e97b4fa41ba8226069c9277eb5ea7101845b42"}, + {file = "propcache-0.3.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:47ef24aa6511e388e9894ec16f0fbf3313a53ee68402bc428744a367ec55b833"}, + {file = "propcache-0.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7f30241577d2fef2602113b70ef7231bf4c69a97e04693bde08ddab913ba0ce5"}, + {file = "propcache-0.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:43593c6772aa12abc3af7784bff4a41ffa921608dd38b77cf1dfd7f5c4e71371"}, + {file = "propcache-0.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a75801768bbe65499495660b777e018cbe90c7980f07f8aa57d6be79ea6f71da"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6f1324db48f001c2ca26a25fa25af60711e09b9aaf4b28488602776f4f9a744"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cdb0f3e1eb6dfc9965d19734d8f9c481b294b5274337a8cb5cb01b462dcb7e0"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1eb34d90aac9bfbced9a58b266f8946cb5935869ff01b164573a7634d39fbcb5"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f35c7070eeec2cdaac6fd3fe245226ed2a6292d3ee8c938e5bb645b434c5f256"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b23c11c2c9e6d4e7300c92e022046ad09b91fd00e36e83c44483df4afa990073"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3e19ea4ea0bf46179f8a3652ac1426e6dcbaf577ce4b4f65be581e237340420d"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:bd39c92e4c8f6cbf5f08257d6360123af72af9f4da75a690bef50da77362d25f"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b0313e8b923b3814d1c4a524c93dfecea5f39fa95601f6a9b1ac96cd66f89ea0"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e861ad82892408487be144906a368ddbe2dc6297074ade2d892341b35c59844a"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:61014615c1274df8da5991a1e5da85a3ccb00c2d4701ac6f3383afd3ca47ab0a"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:71ebe3fe42656a2328ab08933d420df5f3ab121772eef78f2dc63624157f0ed9"}, + {file = "propcache-0.3.1-cp311-cp311-win32.whl", hash = "sha256:58aa11f4ca8b60113d4b8e32d37e7e78bd8af4d1a5b5cb4979ed856a45e62005"}, + {file = "propcache-0.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:9532ea0b26a401264b1365146c440a6d78269ed41f83f23818d4b79497aeabe7"}, + {file = "propcache-0.3.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f78eb8422acc93d7b69964012ad7048764bb45a54ba7a39bb9e146c72ea29723"}, + {file = "propcache-0.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:89498dd49c2f9a026ee057965cdf8192e5ae070ce7d7a7bd4b66a8e257d0c976"}, + {file = "propcache-0.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09400e98545c998d57d10035ff623266927cb784d13dd2b31fd33b8a5316b85b"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa8efd8c5adc5a2c9d3b952815ff8f7710cefdcaf5f2c36d26aff51aeca2f12f"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2fe5c910f6007e716a06d269608d307b4f36e7babee5f36533722660e8c4a70"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a0ab8cf8cdd2194f8ff979a43ab43049b1df0b37aa64ab7eca04ac14429baeb7"}, + {file = 
"propcache-0.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:563f9d8c03ad645597b8d010ef4e9eab359faeb11a0a2ac9f7b4bc8c28ebef25"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb6e0faf8cb6b4beea5d6ed7b5a578254c6d7df54c36ccd3d8b3eb00d6770277"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1c5c7ab7f2bb3f573d1cb921993006ba2d39e8621019dffb1c5bc94cdbae81e8"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:050b571b2e96ec942898f8eb46ea4bfbb19bd5502424747e83badc2d4a99a44e"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e1c4d24b804b3a87e9350f79e2371a705a188d292fd310e663483af6ee6718ee"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e4fe2a6d5ce975c117a6bb1e8ccda772d1e7029c1cca1acd209f91d30fa72815"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:feccd282de1f6322f56f6845bf1207a537227812f0a9bf5571df52bb418d79d5"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ec314cde7314d2dd0510c6787326bbffcbdc317ecee6b7401ce218b3099075a7"}, + {file = "propcache-0.3.1-cp312-cp312-win32.whl", hash = "sha256:7d2d5a0028d920738372630870e7d9644ce437142197f8c827194fca404bf03b"}, + {file = "propcache-0.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:88c423efef9d7a59dae0614eaed718449c09a5ac79a5f224a8b9664d603f04a3"}, + {file = "propcache-0.3.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f1528ec4374617a7a753f90f20e2f551121bb558fcb35926f99e3c42367164b8"}, + {file = "propcache-0.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dc1915ec523b3b494933b5424980831b636fe483d7d543f7afb7b3bf00f0c10f"}, + {file = "propcache-0.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a110205022d077da24e60b3df8bcee73971be9575dec5573dd17ae5d81751111"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d249609e547c04d190e820d0d4c8ca03ed4582bcf8e4e160a6969ddfb57b62e5"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ced33d827625d0a589e831126ccb4f5c29dfdf6766cac441d23995a65825dcb"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4114c4ada8f3181af20808bedb250da6bae56660e4b8dfd9cd95d4549c0962f7"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:975af16f406ce48f1333ec5e912fe11064605d5c5b3f6746969077cc3adeb120"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a34aa3a1abc50740be6ac0ab9d594e274f59960d3ad253cd318af76b996dd654"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9cec3239c85ed15bfaded997773fdad9fb5662b0a7cbc854a43f291eb183179e"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:05543250deac8e61084234d5fc54f8ebd254e8f2b39a16b1dce48904f45b744b"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5cb5918253912e088edbf023788de539219718d3b10aef334476b62d2b53de53"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f3bbecd2f34d0e6d3c543fdb3b15d6b60dd69970c2b4c822379e5ec8f6f621d5"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:aca63103895c7d960a5b9b044a83f544b233c95e0dcff114389d64d762017af7"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a0a9898fdb99bf11786265468571e628ba60af80dc3f6eb89a3545540c6b0ef"}, + {file = "propcache-0.3.1-cp313-cp313-win32.whl", hash = "sha256:3a02a28095b5e63128bcae98eb59025924f121f048a62393db682f049bf4ac24"}, + {file = "propcache-0.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:813fbb8b6aea2fc9659815e585e548fe706d6f663fa73dff59a1677d4595a037"}, + {file = "propcache-0.3.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a444192f20f5ce8a5e52761a031b90f5ea6288b1eef42ad4c7e64fef33540b8f"}, + {file = "propcache-0.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0fbe94666e62ebe36cd652f5fc012abfbc2342de99b523f8267a678e4dfdee3c"}, + {file = "propcache-0.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f011f104db880f4e2166bcdcf7f58250f7a465bc6b068dc84c824a3d4a5c94dc"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e584b6d388aeb0001d6d5c2bd86b26304adde6d9bb9bfa9c4889805021b96de"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a17583515a04358b034e241f952f1715243482fc2c2945fd99a1b03a0bd77d6"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5aed8d8308215089c0734a2af4f2e95eeb360660184ad3912686c181e500b2e7"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d8e309ff9a0503ef70dc9a0ebd3e69cf7b3894c9ae2ae81fc10943c37762458"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b655032b202028a582d27aeedc2e813299f82cb232f969f87a4fde491a233f11"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9f64d91b751df77931336b5ff7bafbe8845c5770b06630e27acd5dbb71e1931c"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:19a06db789a4bd896ee91ebc50d059e23b3639c25d58eb35be3ca1cbe967c3bf"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:bef100c88d8692864651b5f98e871fb090bd65c8a41a1cb0ff2322db39c96c27"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:87380fb1f3089d2a0b8b00f006ed12bd41bd858fabfa7330c954c70f50ed8757"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e474fc718e73ba5ec5180358aa07f6aded0ff5f2abe700e3115c37d75c947e18"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:17d1c688a443355234f3c031349da69444be052613483f3e4158eef751abcd8a"}, + {file = "propcache-0.3.1-cp313-cp313t-win32.whl", hash = "sha256:359e81a949a7619802eb601d66d37072b79b79c2505e6d3fd8b945538411400d"}, + {file = "propcache-0.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e7fb9a84c9abbf2b2683fa3e7b0d7da4d8ecf139a1c635732a8bda29c5214b0e"}, + {file = "propcache-0.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ed5f6d2edbf349bd8d630e81f474d33d6ae5d07760c44d33cd808e2f5c8f4ae6"}, + {file = "propcache-0.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:668ddddc9f3075af019f784456267eb504cb77c2c4bd46cc8402d723b4d200bf"}, + {file = "propcache-0.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0c86e7ceea56376216eba345aa1fc6a8a6b27ac236181f840d1d7e6a1ea9ba5c"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:83be47aa4e35b87c106fc0c84c0fc069d3f9b9b06d3c494cd404ec6747544894"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:27c6ac6aa9fc7bc662f594ef380707494cb42c22786a558d95fcdedb9aa5d035"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a956dff37080b352c1c40b2966b09defb014347043e740d420ca1eb7c9b908"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82de5da8c8893056603ac2d6a89eb8b4df49abf1a7c19d536984c8dd63f481d5"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c3c3a203c375b08fd06a20da3cf7aac293b834b6f4f4db71190e8422750cca5"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b303b194c2e6f171cfddf8b8ba30baefccf03d36a4d9cab7fd0bb68ba476a3d7"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:916cd229b0150129d645ec51614d38129ee74c03293a9f3f17537be0029a9641"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a461959ead5b38e2581998700b26346b78cd98540b5524796c175722f18b0294"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:069e7212890b0bcf9b2be0a03afb0c2d5161d91e1bf51569a64f629acc7defbf"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ef2e4e91fb3945769e14ce82ed53007195e616a63aa43b40fb7ebaaf907c8d4c"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8638f99dca15b9dff328fb6273e09f03d1c50d9b6512f3b65a4154588a7595fe"}, + {file = "propcache-0.3.1-cp39-cp39-win32.whl", hash = "sha256:6f173bbfe976105aaa890b712d1759de339d8a7cef2fc0a1714cc1a1e1c47f64"}, + {file = "propcache-0.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:603f1fe4144420374f1a69b907494c3acbc867a581c2d49d4175b0de7cc64566"}, + {file = "propcache-0.3.1-py3-none-any.whl", hash = "sha256:9a8ecf38de50a7f518c21568c80f985e776397b902f1ce0b01f799aba1608b40"}, + {file = "propcache-0.3.1.tar.gz", hash = "sha256:40d980c33765359098837527e18eddefc9a24cea5b45e078a7f3bb5b032c6ecf"}, +] + +[[package]] +name = "psutil" +version = "7.0.0" +requires_python = ">=3.6" +summary = "Cross-platform lib for process and system monitoring in Python. NOTE: the syntax of this script MUST be kept compatible with Python 2.7." 
+groups = ["default"] +files = [ + {file = "psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25"}, + {file = "psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993"}, + {file = "psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99"}, + {file = "psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553"}, + {file = "psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456"}, +] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +summary = "Run a subprocess in a pseudo terminal" +groups = ["dev"] +marker = "sys_platform != \"win32\" and python_version > \"3.6\"" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.3" +summary = "Safely evaluate AST nodes without side effects" +groups = ["dev"] +marker = "python_version > \"3.6\"" +files = [ + {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, + {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, +] + +[[package]] +name = "pygments" +version = "2.19.1" +requires_python = ">=3.8" +summary = "Pygments is a syntax highlighting package written in Python." 
+groups = ["default", "dev"] +files = [ + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, +] + +[[package]] +name = "pytest" +version = "8.3.5" +requires_python = ">=3.8" +summary = "pytest: simple powerful testing with Python" +groups = ["dev"] +dependencies = [ + "colorama; sys_platform == \"win32\"", + "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"", + "iniconfig", + "packaging", + "pluggy<2,>=1.5", + "tomli>=1; python_version < \"3.11\"", +] +files = [ + {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, + {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +summary = "Extensions to the standard Python datetime module" +groups = ["default"] +dependencies = [ + "six>=1.5", +] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[[package]] +name = "pytz" +version = "2025.2" +summary = "World timezone definitions, modern and historical" +groups = ["default"] +files = [ + {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, + {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +requires_python = ">=3.8" +summary = "YAML parser and emitter for Python" +groups = ["default", "dev"] +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash 
= "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +requires_python = ">=3.8" +summary = "Python HTTP for Humans." 
+groups = ["default"] +dependencies = [ + "certifi>=2017.4.17", + "charset-normalizer<4,>=2", + "idna<4,>=2.5", + "urllib3<3,>=1.21.1", +] +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[[package]] +name = "rich" +version = "13.9.4" +requires_python = ">=3.8.0" +summary = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +groups = ["default"] +dependencies = [ + "markdown-it-py>=2.2.0", + "pygments<3.0.0,>=2.13.0", + "typing-extensions<5.0,>=4.0.0; python_version < \"3.11\"", +] +files = [ + {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, + {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, +] + +[[package]] +name = "semver" +version = "3.0.4" +requires_python = ">=3.7" +summary = "Python helper for Semantic Versioning (https://semver.org)" +groups = ["default"] +files = [ + {file = "semver-3.0.4-py3-none-any.whl", hash = "sha256:9c824d87ba7f7ab4a1890799cec8596f15c1241cb473404ea1cb0c55e4b04746"}, + {file = "semver-3.0.4.tar.gz", hash = "sha256:afc7d8c584a5ed0a11033af086e8af226a9c0b206f313e0301f8dd7b6b589602"}, +] + +[[package]] +name = "six" +version = "1.17.0" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +summary = "Python 2 and 3 compatibility utilities" +groups = ["default"] +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "stack-data" +version = "0.6.3" +summary = "Extract data from python stack frames and tracebacks for informative displays" +groups = ["dev"] +marker = "python_version > \"3.6\"" +dependencies = [ + "asttokens>=2.1.0", + "executing>=1.2.0", + "pure-eval", +] +files = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + +[[package]] +name = "tomli" +version = "2.2.1" +requires_python = ">=3.8" +summary = "A lil' TOML parser" +groups = ["dev"] +marker = "python_version < \"3.11\"" +files = [ + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + 
{file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = 
"sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, +] + +[[package]] +name = "toolz" +version = "1.0.0" +requires_python = ">=3.8" +summary = "List processing tools and functional utilities" +groups = ["default"] +files = [ + {file = "toolz-1.0.0-py3-none-any.whl", hash = "sha256:292c8f1c4e7516bf9086f8850935c799a874039c8bcf959d47b600e4c44a6236"}, + {file = "toolz-1.0.0.tar.gz", hash = "sha256:2c86e3d9a04798ac556793bced838816296a2f085017664e4995cb40a1047a02"}, +] + +[[package]] +name = "traitlets" +version = "5.14.3" +requires_python = ">=3.8" +summary = "Traitlets Python configuration system" +groups = ["dev"] +marker = "python_version > \"3.6\"" +files = [ + {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, + {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, +] + +[[package]] +name = "typing-extensions" +version = "4.13.0" +requires_python = ">=3.8" +summary = "Backported and Experimental Type Hints for Python 3.8+" +groups = ["default", "dev"] +marker = "python_version <= \"3.12\"" +files = [ + {file = "typing_extensions-4.13.0-py3-none-any.whl", hash = "sha256:c8dd92cc0d6425a97c18fbb9d1954e5ff92c1ca881a309c45f06ebc0b79058e5"}, + {file = "typing_extensions-4.13.0.tar.gz", hash = "sha256:0a4ac55a5820789d87e297727d229866c9650f6521b64206413c4fbada24d95b"}, +] + +[[package]] +name = "tzdata" +version = "2025.2" +requires_python = ">=2" +summary = "Provider of IANA time zone data" +groups = ["default"] +files = [ + {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, + {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, +] + +[[package]] +name = "urllib3" +version = "2.3.0" +requires_python = ">=3.9" +summary = "HTTP library with thread-safe connection pooling, file post, and more." 
+groups = ["default"] +files = [ + {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, + {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, +] + +[[package]] +name = "virtualenv" +version = "20.29.3" +requires_python = ">=3.8" +summary = "Virtual Python Environment builder" +groups = ["dev"] +dependencies = [ + "distlib<1,>=0.3.7", + "filelock<4,>=3.12.2", + "importlib-metadata>=6.6; python_version < \"3.8\"", + "platformdirs<5,>=3.9.1", +] +files = [ + {file = "virtualenv-20.29.3-py3-none-any.whl", hash = "sha256:3e3d00f5807e83b234dfb6122bf37cfadf4be216c53a49ac059d02414f819170"}, + {file = "virtualenv-20.29.3.tar.gz", hash = "sha256:95e39403fcf3940ac45bc717597dba16110b74506131845d9b687d5e73d947ac"}, +] + +[[package]] +name = "wcwidth" +version = "0.2.13" +summary = "Measures the displayed width of unicode strings in a terminal" +groups = ["dev"] +marker = "python_version > \"3.6\"" +dependencies = [ + "backports-functools-lru-cache>=1.2.1; python_version < \"3.2\"", +] +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + +[[package]] +name = "win32-setctime" +version = "1.2.0" +requires_python = ">=3.5" +summary = "A small Python utility to set file creation time on Windows" +groups = ["default"] +marker = "sys_platform == \"win32\"" +files = [ + {file = "win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390"}, + {file = "win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0"}, +] + +[[package]] +name = "xarray" +version = "2024.7.0" +requires_python = ">=3.9" +summary = "N-D labeled arrays and datasets in Python" +groups = ["default"] +dependencies = [ + "numpy>=1.23", + "packaging>=23.1", + "pandas>=2.0", +] +files = [ + {file = "xarray-2024.7.0-py3-none-any.whl", hash = "sha256:1b0fd51ec408474aa1f4a355d75c00cc1c02bd425d97b2c2e551fd21810e7f64"}, + {file = "xarray-2024.7.0.tar.gz", hash = "sha256:4cae512d121a8522d41e66d942fb06c526bc1fd32c2c181d5fe62fe65b671638"}, +] + +[[package]] +name = "yarl" +version = "1.18.3" +requires_python = ">=3.9" +summary = "Yet another URL library" +groups = ["default"] +dependencies = [ + "idna>=2.0", + "multidict>=4.0", + "propcache>=0.2.0", +] +files = [ + {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, + {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, + {file = "yarl-1.18.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5"}, + {file = 
"yarl-1.18.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690"}, + {file = "yarl-1.18.3-cp310-cp310-win32.whl", hash = "sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6"}, + {file = "yarl-1.18.3-cp310-cp310-win_amd64.whl", hash = "sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e"}, + {file = 
"yarl-1.18.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a"}, + {file = "yarl-1.18.3-cp311-cp311-win32.whl", hash = "sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1"}, + {file = "yarl-1.18.3-cp311-cp311-win_amd64.whl", hash = "sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5"}, + {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50"}, + {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576"}, + {file = "yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285"}, + {file = "yarl-1.18.3-cp312-cp312-win32.whl", hash = "sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2"}, + {file = "yarl-1.18.3-cp312-cp312-win_amd64.whl", hash = "sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477"}, + {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb"}, + {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa"}, + {file = "yarl-1.18.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8"}, + {file = "yarl-1.18.3-cp313-cp313-win32.whl", hash = "sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d"}, + {file = "yarl-1.18.3-cp313-cp313-win_amd64.whl", hash = "sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = 
"sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1"}, + {file = "yarl-1.18.3-cp39-cp39-win32.whl", hash = "sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5"}, + {file = "yarl-1.18.3-cp39-cp39-win_amd64.whl", hash = "sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9"}, + {file = "yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b"}, + {file = "yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1"}, +] + +[[package]] +name = "zarr" +version = "2.18.2" +requires_python = ">=3.9" +summary = "An implementation of chunked, compressed, N-dimensional arrays for Python" +groups = ["default"] +dependencies = [ + "asciitree", + "fasteners; sys_platform != \"emscripten\"", + "numcodecs>=0.10.0", + "numpy>=1.23", +] +files = [ + {file = "zarr-2.18.2-py3-none-any.whl", hash = "sha256:a638754902f97efa99b406083fdc807a0e2ccf12a949117389d2a4ba9b05df38"}, + {file = "zarr-2.18.2.tar.gz", hash = "sha256:9bb393b8a0a38fb121dbb913b047d75db28de9890f6d644a217a73cf4ae74f47"}, +] + +[[package]] +name = "zipp" +version = "3.21.0" +requires_python = ">=3.9" +summary = "Backport of pathlib-compatible object wrapper for zip files" +groups = ["default"] +marker = "python_version < \"3.12\"" +files = [ + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, +] diff --git a/pyproject.toml b/pyproject.toml index 9778172..b665ce0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,58 +1,58 @@ -[project] -name = "mllam-data-prep" -version = "0.6.1" -description = "dataset preparation for data-driven weather models" -authors = [ - {name = "Leif Denby", email = "lcd@dmi.dk"}, - {name = "Hauke Schulz", email = "has@dmi.dk"}, - {name = "Emy Alerskans", email = "ea@dmi.dk"}, - {name = "Eleni Briola", email = "elb@dmi.dk"}, - {name = "Joel Oskarsson", email = "joel.oskarsson@liu.se"}, - {name = "Kashif Rasul", email = "kashif.rasul@gmail.com"}, - {name = "Jordan Matelsky", email = "opensource@matelsky.com"}, - {name = "Martin Frølund", email = "maf@dmi.dk"}, - {name = "Simon Kamuk Christiansen", email = "skc@dmi.dk"}, -] -dependencies = [ - "xarray>=2024.2.0", - "zarr>=2.17.0", - "pyyaml>=6.0.1", - "loguru>=0.7.2", - "isodate>=0.6.1", - "requests>=2.31.0", - "aiohttp>=3.9.3", - "dataclass-wizard>=0.29.2", - "semver>=3.0.2", - "rich>=13.7.1", - "dask>=2024.2.1", - "psutil>=5.7.2", - "packaging>=23.1", - "deepdiff>=8.2.0", -] -requires-python = ">=3.9" -readme = "README.md" -license = {text = "MIT"} - -[project.optional-dependencies] -dask-distributed = [ - "dask[distributed]>=2024.7.1", - "bokeh!=3.0.*,>=2.4.2", -] -[build-system] -requires = ["pdm-backend"] -build-backend = "pdm.backend" - - -[tool.isort] -profile = "black" - -[tool.pdm] -distribution = true -[tool.pdm.dev-dependencies] -dev = [ - "pytest>=8.0.2", - "ipdb>=0.13.13", - "pre-commit>=3.7.1", -] -[project.scripts] -mllam_data_prep = "mllam_data_prep:cli.call" +[project] +name = "mllam-data-prep" +version = "0.6.1" +description = 
"dataset preparation for data-driven weather models" +authors = [ + {name = "Leif Denby", email = "lcd@dmi.dk"}, + {name = "Hauke Schulz", email = "has@dmi.dk"}, + {name = "Emy Alerskans", email = "ea@dmi.dk"}, + {name = "Eleni Briola", email = "elb@dmi.dk"}, + {name = "Joel Oskarsson", email = "joel.oskarsson@liu.se"}, + {name = "Kashif Rasul", email = "kashif.rasul@gmail.com"}, + {name = "Jordan Matelsky", email = "opensource@matelsky.com"}, + {name = "Martin Frølund", email = "maf@dmi.dk"}, + {name = "Simon Kamuk Christiansen", email = "skc@dmi.dk"}, +] +dependencies = [ + "xarray>=2024.2.0", + "zarr>=2.17.0", + "pyyaml>=6.0.1", + "loguru>=0.7.2", + "isodate>=0.6.1", + "requests>=2.31.0", + "aiohttp>=3.9.3", + "dataclass-wizard>=0.29.2", + "semver>=3.0.2", + "rich>=13.7.1", + "dask>=2024.2.1", + "psutil>=5.7.2", + "packaging>=23.1", + "deepdiff>=8.2.0", +] +requires-python = ">=3.9" +readme = "README.md" +license = {text = "MIT"} + +[project.optional-dependencies] +dask-distributed = [ + "dask[distributed]>=2024.7.1", + "bokeh!=3.0.*,>=2.4.2", +] +[build-system] +requires = ["pdm-backend"] +build-backend = "pdm.backend" + + +[tool.isort] +profile = "black" + +[tool.pdm] +distribution = true +[tool.pdm.dev-dependencies] +dev = [ + "pytest>=8.0.2", + "ipdb>=0.13.13", + "pre-commit>=3.7.1", +] +[project.scripts] +mllam_data_prep = "mllam_data_prep:cli.call" diff --git a/tests/data.py b/tests/data.py index 78739ee..38db738 100644 --- a/tests/data.py +++ b/tests/data.py @@ -1,281 +1,281 @@ -import uuid - -import isodate -import numpy as np -import pandas as pd -import xarray as xr - -SCHEMA_VERSION = "v0.5.0" - -NX, NY = 10, 8 -NT_ANALYSIS, NT_FORECAST = 5, 12 -NZ = 3 -DT_ANALYSIS = isodate.parse_duration("PT6H") -DT_FORECAST = isodate.parse_duration("PT1H") -T_START = isodate.parse_datetime("2000-01-01T00:00") -T_END_ANALYSIS = T_START + (NT_ANALYSIS - 1) * DT_ANALYSIS -T_END_FORECAST = T_START + (NT_FORECAST - 1) * DT_FORECAST -DEFAULT_FORECAST_VARS = ["u", "v", "t", "precip"] -DEFAULT_ATMOSPHERIC_ANALYSIS_VARS = ["u", "v", "t"] -DEFAULT_SURFACE_ANALYSIS_VARS = [ - "pres_seasurface", -] -DEFAULT_STATIC_VARS = ["topography_height", "land_area_fraction"] -ALL_DATA_KINDS = [ - "surface_forecast", - "surface_analysis", - "analysis_on_levels", - "forecast_on_levels", - "static", -] - - -def create_surface_forecast_dataset( - nt_analysis, nt_forecast, nx, ny, var_names=DEFAULT_FORECAST_VARS -): - """ - Create a fake forecast dataset with `nt_analysis` analysis times, `nt_forecast` - forecast times, `nx` grid points in x-direction and `ny` grid points in y-direction. - """ - ts_analysis = pd.date_range( - T_START, periods=nt_analysis, freq=DT_ANALYSIS - ).tz_localize(None) - ts_forecast = pd.date_range( - T_START, periods=nt_forecast, freq=DT_FORECAST - ).tz_localize(None) - - x = np.arange(nx) - y = np.arange(ny) - - dataarrays = {} - for var_name in var_names: - da = xr.DataArray( - np.random.random((nt_analysis, nt_forecast, nx, ny)), - dims=["analysis_time", "forecast_time", "x", "y"], - coords={ - "analysis_time": ts_analysis, - "forecast_time": ts_forecast, - "x": x, - "y": y, - }, - ) - dataarrays[var_name] = da - - ds = xr.Dataset(dataarrays) - return ds - - -def create_surface_analysis_dataset( - nt_analysis, nx, ny, var_names=DEFAULT_SURFACE_ANALYSIS_VARS -): - """ - Create a fake analysis dataset with `nt_analysis` analysis times, `nx` grid points - in x-direction and `ny` grid points in y-direction. 
- """ - ts_analysis = pd.date_range( - T_START, periods=nt_analysis, freq=DT_ANALYSIS - ).tz_localize(None) - - x = np.arange(nx) - y = np.arange(ny) - - dataarrays = {} - for var_name in var_names: - da = xr.DataArray( - np.random.random((nt_analysis, nx, ny)), - dims=["analysis_time", "x", "y"], - coords={ - "analysis_time": ts_analysis, - "x": x, - "y": y, - }, - ) - dataarrays[var_name] = da - - ds = xr.Dataset(dataarrays) - return ds - - -def create_analysis_dataset_on_levels( - nt_analysis, - nx, - ny, - nz, - level_dim="altitude", - var_names=DEFAULT_ATMOSPHERIC_ANALYSIS_VARS, -): - """ - Create a fake analysis dataset with `nt_analysis` analysis times, `nx` grid points in x-direction, - `ny` grid points in y-direction and `nz` levels, using level dimension `level_dim`. - - Parameters - ---------- - nt_analysis : int - Number of analysis times - nx : int - Number of grid points in x-direction - ny : int - Number of grid points in y-direction - nz : int - Number of levels - level_dim : str, optional - Name of the level dimension, by default "altitude" - """ - ts_analysis = pd.date_range( - T_START, periods=nt_analysis, freq=DT_ANALYSIS - ).tz_localize(None) - - x = np.arange(nx) - y = np.arange(ny) - z = np.arange(nz) - - dataarrays = {} - for var_name in var_names: - da = xr.DataArray( - np.random.random((nt_analysis, nz, nx, ny)), - dims=["analysis_time", level_dim, "x", "y"], - coords={ - "analysis_time": ts_analysis, - level_dim: z, - "x": x, - "y": y, - }, - ) - dataarrays[var_name] = da - - ds = xr.Dataset(dataarrays) - return ds - - -def create_forecast_dataset_on_levels( - nt_analysis, - nt_forecast, - nx, - ny, - nz, - level_dim="altitude", - var_names=DEFAULT_FORECAST_VARS, -): - """ - Create a fake forecast dataset with `nt_analysis` analysis times, `nt_forecast` - forecast times, `nx` grid points in x-direction, `ny` grid points in y-direction - and `nz` levels, using level dimension `level_dim`. - - Parameters - ---------- - nt_analysis : int - Number of analysis times - nt_forecast : int - Number of forecast times - nx : int - Number of grid points in x-direction - ny : int - Number of grid points in y-direction - nz : int - Number of levels - level_dim : str, optional - Name of the level dimension, by default "altitude" - """ - - ts_analysis = pd.date_range( - T_START, periods=nt_analysis, freq=DT_ANALYSIS - ).tz_localize(None) - ts_forecast = pd.date_range( - T_START, periods=nt_forecast, freq=DT_FORECAST - ).tz_localize(None) - - x = np.arange(nx) - y = np.arange(ny) - z = np.arange(nz) - - dataarrays = {} - for var_name in var_names: - da = xr.DataArray( - np.random.random((nt_analysis, nt_forecast, nz, nx, ny)), - dims=["analysis_time", "forecast_time", level_dim, "x", "y"], - coords={ - "analysis_time": ts_analysis, - "forecast_time": ts_forecast, - level_dim: z, - "x": x, - "y": y, - }, - ) - dataarrays[var_name] = da - - ds = xr.Dataset(dataarrays) - return ds - - -def create_static_dataset(nx, ny, var_names=DEFAULT_STATIC_VARS): - """ - Create a fake static dataset with `nx` grid points in x-direction and `ny` grid points in y-direction. 
- """ - x = np.arange(nx) - y = np.arange(ny) - - dataarrays = {} - for var_name in var_names: - da = xr.DataArray( - np.random.random((nx, ny)), - dims=["x", "y"], - coords={ - "x": x, - "y": y, - }, - ) - dataarrays[var_name] = da - - ds = xr.Dataset(dataarrays) - return ds - - -def create_data_collection(data_kinds, fp_root): - """ - Create a fake data collection with the given `data_kinds` and save it to `fp_root`, with - each dataset having the `data_kind` name with a unique suffix and saved in `.zarr` format. - - - Parameters - ---------- - data_kinds : list - List of data kinds to create, e.g. ["surface_forecast", "static"] - fp_root : str - Root directory to save the data collection - - Returns - ------- - dict - Dictionary of the created datasets with the key being the data_kind - and value being the path to the saved dataset - """ - datasets = {} - - # check that non of the data_kinds are repeated - if len(data_kinds) != len(set(data_kinds)): - raise ValueError( - "Data kinds should be unique, you're welcome to call this function twice :)" - ) - - for data_kind in data_kinds: - if data_kind == "surface_forecast": - ds = create_surface_forecast_dataset(NT_ANALYSIS, NT_FORECAST, NX, NY) - elif data_kind == "surface_analysis": - ds = create_surface_analysis_dataset(NT_ANALYSIS, NX, NY) - elif data_kind == "analysis_on_levels": - ds = create_analysis_dataset_on_levels(NT_ANALYSIS, NX, NY, NZ) - elif data_kind == "forecast_on_levels": - ds = create_forecast_dataset_on_levels(NT_ANALYSIS, NT_FORECAST, NX, NY, NZ) - elif data_kind == "static": - ds = create_static_dataset(NX, NY) - else: - raise ValueError(f"Unknown data kind: {data_kind}") - - identifier = str(uuid.uuid4()) - dataset_name = f"{data_kind}_{identifier}" - - fp = f"{fp_root}/{dataset_name}.zarr" - ds.to_zarr(fp, mode="w") - datasets[data_kind] = fp - - return datasets +import uuid + +import isodate +import numpy as np +import pandas as pd +import xarray as xr + +SCHEMA_VERSION = "v0.5.0" + +NX, NY = 10, 8 +NT_ANALYSIS, NT_FORECAST = 5, 12 +NZ = 3 +DT_ANALYSIS = isodate.parse_duration("PT6H") +DT_FORECAST = isodate.parse_duration("PT1H") +T_START = isodate.parse_datetime("2000-01-01T00:00") +T_END_ANALYSIS = T_START + (NT_ANALYSIS - 1) * DT_ANALYSIS +T_END_FORECAST = T_START + (NT_FORECAST - 1) * DT_FORECAST +DEFAULT_FORECAST_VARS = ["u", "v", "t", "precip"] +DEFAULT_ATMOSPHERIC_ANALYSIS_VARS = ["u", "v", "t"] +DEFAULT_SURFACE_ANALYSIS_VARS = [ + "pres_seasurface", +] +DEFAULT_STATIC_VARS = ["topography_height", "land_area_fraction"] +ALL_DATA_KINDS = [ + "surface_forecast", + "surface_analysis", + "analysis_on_levels", + "forecast_on_levels", + "static", +] + + +def create_surface_forecast_dataset( + nt_analysis, nt_forecast, nx, ny, var_names=DEFAULT_FORECAST_VARS +): + """ + Create a fake forecast dataset with `nt_analysis` analysis times, `nt_forecast` + forecast times, `nx` grid points in x-direction and `ny` grid points in y-direction. 
+ """ + ts_analysis = pd.date_range( + T_START, periods=nt_analysis, freq=DT_ANALYSIS + ).tz_localize(None) + ts_forecast = pd.date_range( + T_START, periods=nt_forecast, freq=DT_FORECAST + ).tz_localize(None) + + x = np.arange(nx) + y = np.arange(ny) + + dataarrays = {} + for var_name in var_names: + da = xr.DataArray( + np.random.random((nt_analysis, nt_forecast, nx, ny)), + dims=["analysis_time", "forecast_time", "x", "y"], + coords={ + "analysis_time": ts_analysis, + "forecast_time": ts_forecast, + "x": x, + "y": y, + }, + ) + dataarrays[var_name] = da + + ds = xr.Dataset(dataarrays) + return ds + + +def create_surface_analysis_dataset( + nt_analysis, nx, ny, var_names=DEFAULT_SURFACE_ANALYSIS_VARS +): + """ + Create a fake analysis dataset with `nt_analysis` analysis times, `nx` grid points + in x-direction and `ny` grid points in y-direction. + """ + ts_analysis = pd.date_range( + T_START, periods=nt_analysis, freq=DT_ANALYSIS + ).tz_localize(None) + + x = np.arange(nx) + y = np.arange(ny) + + dataarrays = {} + for var_name in var_names: + da = xr.DataArray( + np.random.random((nt_analysis, nx, ny)), + dims=["analysis_time", "x", "y"], + coords={ + "analysis_time": ts_analysis, + "x": x, + "y": y, + }, + ) + dataarrays[var_name] = da + + ds = xr.Dataset(dataarrays) + return ds + + +def create_analysis_dataset_on_levels( + nt_analysis, + nx, + ny, + nz, + level_dim="altitude", + var_names=DEFAULT_ATMOSPHERIC_ANALYSIS_VARS, +): + """ + Create a fake analysis dataset with `nt_analysis` analysis times, `nx` grid points in x-direction, + `ny` grid points in y-direction and `nz` levels, using level dimension `level_dim`. + + Parameters + ---------- + nt_analysis : int + Number of analysis times + nx : int + Number of grid points in x-direction + ny : int + Number of grid points in y-direction + nz : int + Number of levels + level_dim : str, optional + Name of the level dimension, by default "altitude" + """ + ts_analysis = pd.date_range( + T_START, periods=nt_analysis, freq=DT_ANALYSIS + ).tz_localize(None) + + x = np.arange(nx) + y = np.arange(ny) + z = np.arange(nz) + + dataarrays = {} + for var_name in var_names: + da = xr.DataArray( + np.random.random((nt_analysis, nz, nx, ny)), + dims=["analysis_time", level_dim, "x", "y"], + coords={ + "analysis_time": ts_analysis, + level_dim: z, + "x": x, + "y": y, + }, + ) + dataarrays[var_name] = da + + ds = xr.Dataset(dataarrays) + return ds + + +def create_forecast_dataset_on_levels( + nt_analysis, + nt_forecast, + nx, + ny, + nz, + level_dim="altitude", + var_names=DEFAULT_FORECAST_VARS, +): + """ + Create a fake forecast dataset with `nt_analysis` analysis times, `nt_forecast` + forecast times, `nx` grid points in x-direction, `ny` grid points in y-direction + and `nz` levels, using level dimension `level_dim`. 
+ + Parameters + ---------- + nt_analysis : int + Number of analysis times + nt_forecast : int + Number of forecast times + nx : int + Number of grid points in x-direction + ny : int + Number of grid points in y-direction + nz : int + Number of levels + level_dim : str, optional + Name of the level dimension, by default "altitude" + """ + + ts_analysis = pd.date_range( + T_START, periods=nt_analysis, freq=DT_ANALYSIS + ).tz_localize(None) + ts_forecast = pd.date_range( + T_START, periods=nt_forecast, freq=DT_FORECAST + ).tz_localize(None) + + x = np.arange(nx) + y = np.arange(ny) + z = np.arange(nz) + + dataarrays = {} + for var_name in var_names: + da = xr.DataArray( + np.random.random((nt_analysis, nt_forecast, nz, nx, ny)), + dims=["analysis_time", "forecast_time", level_dim, "x", "y"], + coords={ + "analysis_time": ts_analysis, + "forecast_time": ts_forecast, + level_dim: z, + "x": x, + "y": y, + }, + ) + dataarrays[var_name] = da + + ds = xr.Dataset(dataarrays) + return ds + + +def create_static_dataset(nx, ny, var_names=DEFAULT_STATIC_VARS): + """ + Create a fake static dataset with `nx` grid points in x-direction and `ny` grid points in y-direction. + """ + x = np.arange(nx) + y = np.arange(ny) + + dataarrays = {} + for var_name in var_names: + da = xr.DataArray( + np.random.random((nx, ny)), + dims=["x", "y"], + coords={ + "x": x, + "y": y, + }, + ) + dataarrays[var_name] = da + + ds = xr.Dataset(dataarrays) + return ds + + +def create_data_collection(data_kinds, fp_root): + """ + Create a fake data collection with the given `data_kinds` and save it to `fp_root`, with + each dataset having the `data_kind` name with a unique suffix and saved in `.zarr` format. + + + Parameters + ---------- + data_kinds : list + List of data kinds to create, e.g. 
["surface_forecast", "static"] + fp_root : str + Root directory to save the data collection + + Returns + ------- + dict + Dictionary of the created datasets with the key being the data_kind + and value being the path to the saved dataset + """ + datasets = {} + + # check that non of the data_kinds are repeated + if len(data_kinds) != len(set(data_kinds)): + raise ValueError( + "Data kinds should be unique, you're welcome to call this function twice :)" + ) + + for data_kind in data_kinds: + if data_kind == "surface_forecast": + ds = create_surface_forecast_dataset(NT_ANALYSIS, NT_FORECAST, NX, NY) + elif data_kind == "surface_analysis": + ds = create_surface_analysis_dataset(NT_ANALYSIS, NX, NY) + elif data_kind == "analysis_on_levels": + ds = create_analysis_dataset_on_levels(NT_ANALYSIS, NX, NY, NZ) + elif data_kind == "forecast_on_levels": + ds = create_forecast_dataset_on_levels(NT_ANALYSIS, NT_FORECAST, NX, NY, NZ) + elif data_kind == "static": + ds = create_static_dataset(NX, NY) + else: + raise ValueError(f"Unknown data kind: {data_kind}") + + identifier = str(uuid.uuid4()) + dataset_name = f"{data_kind}_{identifier}" + + fp = f"{fp_root}/{dataset_name}.zarr" + ds.to_zarr(fp, mode="w") + datasets[data_kind] = fp + + return datasets diff --git a/tests/derive_variable/conftest.py b/tests/derive_variable/conftest.py index a81a3c3..20cc18e 100644 --- a/tests/derive_variable/conftest.py +++ b/tests/derive_variable/conftest.py @@ -1,34 +1,34 @@ -"""Fixtures for the derive_variable module tests.""" - -import datetime -from typing import List, Union - -import isodate -import numpy as np -import pandas as pd -import pytest -import xarray as xr - - -@pytest.fixture(name="time") -def fixture_time( - request, -) -> List[Union[np.datetime64, datetime.datetime, xr.DataArray]]: - """Fixture that returns test time data - - The fixture has to be indirectly parametrized with the number of time steps. - """ - ntime = request.param - return [ - np.datetime64("2004-06-11T00:00:00"), # invalid type - isodate.parse_datetime("1999-03-21T00:00"), - xr.DataArray( - pd.date_range( - start=isodate.parse_datetime("1999-03-21T00:00"), - periods=ntime, - freq=isodate.parse_duration("PT1H"), - ), - dims=["time"], - name="time", - ), - ] +"""Fixtures for the derive_variable module tests.""" + +import datetime +from typing import List, Union + +import isodate +import numpy as np +import pandas as pd +import pytest +import xarray as xr + + +@pytest.fixture(name="time") +def fixture_time( + request, +) -> List[Union[np.datetime64, datetime.datetime, xr.DataArray]]: + """Fixture that returns test time data + + The fixture has to be indirectly parametrized with the number of time steps. 
+ """ + ntime = request.param + return [ + np.datetime64("2004-06-11T00:00:00"), # invalid type + isodate.parse_datetime("1999-03-21T00:00"), + xr.DataArray( + pd.date_range( + start=isodate.parse_datetime("1999-03-21T00:00"), + periods=ntime, + freq=isodate.parse_duration("PT1H"), + ), + dims=["time"], + name="time", + ), + ] diff --git a/tests/derive_variable/test_main.py b/tests/derive_variable/test_main.py index 804213a..3cee755 100644 --- a/tests/derive_variable/test_main.py +++ b/tests/derive_variable/test_main.py @@ -1,115 +1,115 @@ -"""Unit tests for the main module of the derive_variable operations.""" - -import sys -from types import ModuleType -from typing import Generator -from unittest.mock import MagicMock, patch - -import pytest -import xarray as xr - -from mllam_data_prep.ops.derive_variable.main import ( - _check_and_get_required_attributes, - _get_derived_variable_function, -) - - -@pytest.fixture(name="mock_import_module") -def fixture_mock_import_module() -> Generator[MagicMock, None, None]: - """Fixture to mock importlib.import_module.""" - with patch("importlib.import_module") as mock: - yield mock - - -@pytest.fixture() -def fixture_mock_sys_modules() -> Generator[None, None, None]: - """Fixture to mock sys.modules.""" - with patch.dict("sys.modules", {}): - yield - - -class TestGetDerivedVariableFunction: - """Tests for the _get_derived_variable_function.""" - - @pytest.mark.usefixtures("fixture_mock_sys_modules") - def test_function_in_sys_modules(self, mock_import_module: MagicMock) -> None: - """Test when the function to import is already in sys.modules.""" - # Mock the module and function - mock_module: ModuleType = MagicMock() - mock_function: MagicMock = MagicMock() - sys.modules["mock_module"] = mock_module - mock_module.mock_function = mock_function - - # Call the function - result = _get_derived_variable_function("mock_module.mock_function") - - # Assert the function is returned correctly - assert result == mock_function - - # Assert the module was not imported - mock_import_module.assert_not_called() - - def test_function_not_in_sys_modules(self, mock_import_module: MagicMock) -> None: - """Test when the function to import is not in sys.modules.""" - # Mock the module and function - mock_module: ModuleType = MagicMock() - mock_function: MagicMock = MagicMock() - mock_import_module.return_value = mock_module - mock_module.mock_function = mock_function - - # Call the function - result = _get_derived_variable_function("mock_module.mock_function") - - # Assert the function is returned correctly - assert result == mock_function - - -@patch( - "mllam_data_prep.ops.derive_variable.main.REQUIRED_FIELD_ATTRIBUTES", - ["units", "long_name"], -) -class TestCheckAndGetRequiredAttributes: - """Tests for the _check_and_get_required_attributes function.""" - - @pytest.mark.parametrize( - ["field_attrs", "expected_attributes", "expected_result"], - [ - [ - {"units": "m", "long_name": "test"}, - {"units": "m", "long_name": "test"}, - {"units": "m", "long_name": "test"}, - ], - [ - {"units": "m", "long_name": "test"}, - {}, - {"units": "m", "long_name": "test"}, - ], - [ - {"units": "m"}, - {"units": "m", "long_name": "test"}, - {"units": "m", "long_name": "test"}, - ], - [ - {"units": "m", "long_name": "old_name"}, - {"units": "m", "long_name": "new_name"}, - {"units": "m", "long_name": "new_name"}, - ], - ], - ) - def test_valid_input( - self, field_attrs, expected_attributes, expected_result - ) -> None: - """Test that the function returns the correct attributes with 
valid input.""" - field = xr.DataArray([1, 2, 3], attrs=field_attrs) - - result = _check_and_get_required_attributes(field, expected_attributes) - - assert result == expected_result - - def test_missing_attributes_raises_key_error(self) -> None: - """Test when required attributes are missing and not in expected attributes.""" - field = xr.DataArray([1, 2, 3], attrs={"units": "m"}) - expected_attributes = {"units": "m"} - - with pytest.raises(KeyError): - _check_and_get_required_attributes(field, expected_attributes) +"""Unit tests for the main module of the derive_variable operations.""" + +import sys +from types import ModuleType +from typing import Generator +from unittest.mock import MagicMock, patch + +import pytest +import xarray as xr + +from mllam_data_prep.ops.derive_variable.main import ( + _check_and_get_required_attributes, + _get_derived_variable_function, +) + + +@pytest.fixture(name="mock_import_module") +def fixture_mock_import_module() -> Generator[MagicMock, None, None]: + """Fixture to mock importlib.import_module.""" + with patch("importlib.import_module") as mock: + yield mock + + +@pytest.fixture() +def fixture_mock_sys_modules() -> Generator[None, None, None]: + """Fixture to mock sys.modules.""" + with patch.dict("sys.modules", {}): + yield + + +class TestGetDerivedVariableFunction: + """Tests for the _get_derived_variable_function.""" + + @pytest.mark.usefixtures("fixture_mock_sys_modules") + def test_function_in_sys_modules(self, mock_import_module: MagicMock) -> None: + """Test when the function to import is already in sys.modules.""" + # Mock the module and function + mock_module: ModuleType = MagicMock() + mock_function: MagicMock = MagicMock() + sys.modules["mock_module"] = mock_module + mock_module.mock_function = mock_function + + # Call the function + result = _get_derived_variable_function("mock_module.mock_function") + + # Assert the function is returned correctly + assert result == mock_function + + # Assert the module was not imported + mock_import_module.assert_not_called() + + def test_function_not_in_sys_modules(self, mock_import_module: MagicMock) -> None: + """Test when the function to import is not in sys.modules.""" + # Mock the module and function + mock_module: ModuleType = MagicMock() + mock_function: MagicMock = MagicMock() + mock_import_module.return_value = mock_module + mock_module.mock_function = mock_function + + # Call the function + result = _get_derived_variable_function("mock_module.mock_function") + + # Assert the function is returned correctly + assert result == mock_function + + +@patch( + "mllam_data_prep.ops.derive_variable.main.REQUIRED_FIELD_ATTRIBUTES", + ["units", "long_name"], +) +class TestCheckAndGetRequiredAttributes: + """Tests for the _check_and_get_required_attributes function.""" + + @pytest.mark.parametrize( + ["field_attrs", "expected_attributes", "expected_result"], + [ + [ + {"units": "m", "long_name": "test"}, + {"units": "m", "long_name": "test"}, + {"units": "m", "long_name": "test"}, + ], + [ + {"units": "m", "long_name": "test"}, + {}, + {"units": "m", "long_name": "test"}, + ], + [ + {"units": "m"}, + {"units": "m", "long_name": "test"}, + {"units": "m", "long_name": "test"}, + ], + [ + {"units": "m", "long_name": "old_name"}, + {"units": "m", "long_name": "new_name"}, + {"units": "m", "long_name": "new_name"}, + ], + ], + ) + def test_valid_input( + self, field_attrs, expected_attributes, expected_result + ) -> None: + """Test that the function returns the correct attributes with valid input.""" + field = 
xr.DataArray([1, 2, 3], attrs=field_attrs) + + result = _check_and_get_required_attributes(field, expected_attributes) + + assert result == expected_result + + def test_missing_attributes_raises_key_error(self) -> None: + """Test when required attributes are missing and not in expected attributes.""" + field = xr.DataArray([1, 2, 3], attrs={"units": "m"}) + expected_attributes = {"units": "m"} + + with pytest.raises(KeyError): + _check_and_get_required_attributes(field, expected_attributes) diff --git a/tests/derive_variable/test_physical_field.py b/tests/derive_variable/test_physical_field.py index 9258d65..2e83b96 100644 --- a/tests/derive_variable/test_physical_field.py +++ b/tests/derive_variable/test_physical_field.py @@ -1,77 +1,77 @@ -"""Unit tests for the `mllam_data_prep.ops.derive_variable.physical_field` module.""" - -import datetime -from typing import List, Union - -import numpy as np -import pytest -import xarray as xr - -from mllam_data_prep.ops.derive_variable.physical_field import calculate_toa_radiation - - -@pytest.fixture(name="lat") -def fixture_lat(request) -> List[Union[float, xr.DataArray]]: - """Fixture that returns test latitude data - - The fixture has to be indirectly parametrized with the number of coordinates, - the minimum and maximum latitude values. - """ - ncoord, lat_min, lat_max = request.param - return [ - 55.711, - xr.DataArray( - np.random.uniform(lat_min, lat_max, size=(ncoord, ncoord)), - dims=["x", "y"], - coords={"x": np.arange(ncoord), "y": np.arange(ncoord)}, - name="lat", - ), - ] - - -@pytest.fixture(name="lon") -def fixture_lon(request) -> List[Union[float, xr.DataArray]]: - """Fixture that returns test longitude data - - The fixture has to be indirectly parametrized with the number of coordinates, - the minimum and maximum longitude values. - """ - ncoord, lon_min, lon_max = request.param - return [ - 12.564, - xr.DataArray( - np.random.uniform(lon_min, lon_max, size=(ncoord, ncoord)), - dims=["x", "y"], - coords={"x": np.arange(ncoord), "y": np.arange(ncoord)}, - name="lon", - ), - ] - - -@pytest.mark.parametrize( - "lat", - # Format: (ncoord, lat_min, lat_max) - [(10, -90, 90), (10, -40, 40), (10, 40, -40), (10, -10, 10), (1000, -40, 40)], - indirect=True, -) -@pytest.mark.parametrize( - "lon", - # Format: (ncoord, lon_min, lon_max) - [(10, 0, 360), (10, -180, 180), (10, -90, 90), (10, 100, 110), (1000, -180, 180)], - indirect=True, -) -@pytest.mark.parametrize("time", [1, 10, 100], indirect=True) -def test_toa_radiation( - lat: Union[float, xr.DataArray], - lon: Union[float, xr.DataArray], - time: Union[np.datetime64, datetime.datetime, xr.DataArray], -): - """Test the `calculate_toa_radiation` function. - - Function from mllam_data_prep.ops.derive_variable.physical_field. - """ - if isinstance(time, (xr.DataArray, datetime.datetime)): - calculate_toa_radiation(lat, lon, time) - else: - with pytest.raises(TypeError): - calculate_toa_radiation(lat, lon, time) +"""Unit tests for the `mllam_data_prep.ops.derive_variable.physical_field` module.""" + +import datetime +from typing import List, Union + +import numpy as np +import pytest +import xarray as xr + +from mllam_data_prep.ops.derive_variable.physical_field import calculate_toa_radiation + + +@pytest.fixture(name="lat") +def fixture_lat(request) -> List[Union[float, xr.DataArray]]: + """Fixture that returns test latitude data + + The fixture has to be indirectly parametrized with the number of coordinates, + the minimum and maximum latitude values. 
+ """ + ncoord, lat_min, lat_max = request.param + return [ + 55.711, + xr.DataArray( + np.random.uniform(lat_min, lat_max, size=(ncoord, ncoord)), + dims=["x", "y"], + coords={"x": np.arange(ncoord), "y": np.arange(ncoord)}, + name="lat", + ), + ] + + +@pytest.fixture(name="lon") +def fixture_lon(request) -> List[Union[float, xr.DataArray]]: + """Fixture that returns test longitude data + + The fixture has to be indirectly parametrized with the number of coordinates, + the minimum and maximum longitude values. + """ + ncoord, lon_min, lon_max = request.param + return [ + 12.564, + xr.DataArray( + np.random.uniform(lon_min, lon_max, size=(ncoord, ncoord)), + dims=["x", "y"], + coords={"x": np.arange(ncoord), "y": np.arange(ncoord)}, + name="lon", + ), + ] + + +@pytest.mark.parametrize( + "lat", + # Format: (ncoord, lat_min, lat_max) + [(10, -90, 90), (10, -40, 40), (10, 40, -40), (10, -10, 10), (1000, -40, 40)], + indirect=True, +) +@pytest.mark.parametrize( + "lon", + # Format: (ncoord, lon_min, lon_max) + [(10, 0, 360), (10, -180, 180), (10, -90, 90), (10, 100, 110), (1000, -180, 180)], + indirect=True, +) +@pytest.mark.parametrize("time", [1, 10, 100], indirect=True) +def test_toa_radiation( + lat: Union[float, xr.DataArray], + lon: Union[float, xr.DataArray], + time: Union[np.datetime64, datetime.datetime, xr.DataArray], +): + """Test the `calculate_toa_radiation` function. + + Function from mllam_data_prep.ops.derive_variable.physical_field. + """ + if isinstance(time, (xr.DataArray, datetime.datetime)): + calculate_toa_radiation(lat, lon, time) + else: + with pytest.raises(TypeError): + calculate_toa_radiation(lat, lon, time) diff --git a/tests/derive_variable/test_time_components.py b/tests/derive_variable/test_time_components.py index 8a5e216..4883b8d 100644 --- a/tests/derive_variable/test_time_components.py +++ b/tests/derive_variable/test_time_components.py @@ -1,58 +1,58 @@ -"""Unit tests for the `mllam_data_prep.ops.derive_variable.time_components` module.""" - -import datetime -from typing import Union - -import numpy as np -import pytest -import xarray as xr - -from mllam_data_prep.ops.derive_variable.time_components import ( - calculate_day_of_year, - calculate_hour_of_day, -) - - -@pytest.mark.parametrize("time", [1, 10, 1000], indirect=True) -@pytest.mark.parametrize( - "component", - [ - "cos", - "sin", - ], -) -def test_hour_of_day( - time: Union[np.datetime64, datetime.datetime, xr.DataArray], component: str -): - """Test the `calculate_hour_of_day` function. - - Function from mllam_data_prep.ops.derive_variable.time_components. - """ - if isinstance(time, (xr.DataArray, datetime.datetime)): - calculate_hour_of_day(time, component=component) - else: - with pytest.raises(TypeError): - calculate_hour_of_day(time, component=component) - - -@pytest.mark.parametrize("time", [1, 10, 1000], indirect=True) -@pytest.mark.parametrize( - "component", - [ - "cos", - "sin", - ], -) -def test_day_of_year( - time: Union[np.datetime64, datetime.datetime, xr.DataArray], component: str -): - """Test the `calculate_day_of_year` function. - - Function from mllam_data_prep.ops.derive_variable.time_components. 
- """ - - if isinstance(time, (xr.DataArray, datetime.datetime)): - calculate_day_of_year(time, component=component) - else: - with pytest.raises(TypeError): - calculate_day_of_year(time, component=component) +"""Unit tests for the `mllam_data_prep.ops.derive_variable.time_components` module.""" + +import datetime +from typing import Union + +import numpy as np +import pytest +import xarray as xr + +from mllam_data_prep.ops.derive_variable.time_components import ( + calculate_day_of_year, + calculate_hour_of_day, +) + + +@pytest.mark.parametrize("time", [1, 10, 1000], indirect=True) +@pytest.mark.parametrize( + "component", + [ + "cos", + "sin", + ], +) +def test_hour_of_day( + time: Union[np.datetime64, datetime.datetime, xr.DataArray], component: str +): + """Test the `calculate_hour_of_day` function. + + Function from mllam_data_prep.ops.derive_variable.time_components. + """ + if isinstance(time, (xr.DataArray, datetime.datetime)): + calculate_hour_of_day(time, component=component) + else: + with pytest.raises(TypeError): + calculate_hour_of_day(time, component=component) + + +@pytest.mark.parametrize("time", [1, 10, 1000], indirect=True) +@pytest.mark.parametrize( + "component", + [ + "cos", + "sin", + ], +) +def test_day_of_year( + time: Union[np.datetime64, datetime.datetime, xr.DataArray], component: str +): + """Test the `calculate_day_of_year` function. + + Function from mllam_data_prep.ops.derive_variable.time_components. + """ + + if isinstance(time, (xr.DataArray, datetime.datetime)): + calculate_day_of_year(time, component=component) + else: + with pytest.raises(TypeError): + calculate_day_of_year(time, component=component) diff --git a/tests/old_config_schema_examples/README.md b/tests/old_config_schema_examples/README.md index 3e1bed6..bbbbe4b 100644 --- a/tests/old_config_schema_examples/README.md +++ b/tests/old_config_schema_examples/README.md @@ -1,3 +1,3 @@ -This directoy should contain config examples with schema versions which are still supported by the current version. - -The folder structure is expected to be `{schema_version}/{config_name}.yaml`. +This directoy should contain config examples with schema versions which are still supported by the current version. + +The folder structure is expected to be `{schema_version}/{config_name}.yaml`. 
diff --git a/tests/old_config_schema_examples/v0.2.0/example.danra.yaml b/tests/old_config_schema_examples/v0.2.0/example.danra.yaml index 73aa0df..afff418 100644 --- a/tests/old_config_schema_examples/v0.2.0/example.danra.yaml +++ b/tests/old_config_schema_examples/v0.2.0/example.danra.yaml @@ -1,88 +1,88 @@ -schema_version: v0.2.0 -dataset_version: v0.1.0 - -output: - variables: - static: [grid_index, static_feature] - state: [time, grid_index, state_feature] - forcing: [time, grid_index, forcing_feature] - coord_ranges: - time: - start: 1990-09-03T00:00 - end: 1990-09-09T00:00 - step: PT3H - chunking: - time: 1 - splitting: - dim: time - splits: - train: - start: 1990-09-03T00:00 - end: 1990-09-06T00:00 - compute_statistics: - ops: [mean, std, diff_mean, diff_std] - dims: [grid_index, time] - val: - start: 1990-09-06T00:00 - end: 1990-09-07T00:00 - test: - start: 1990-09-07T00:00 - end: 1990-09-09T00:00 - -inputs: - danra_height_levels: - path: https://mllam-test-data.s3.eu-north-1.amazonaws.com/height_levels.zarr - dims: [time, x, y, altitude] - variables: - u: - altitude: - values: [100,] - units: m - v: - altitude: - values: [100, ] - units: m - dim_mapping: - time: - method: rename - dim: time - state_feature: - method: stack_variables_by_var_name - dims: [altitude] - name_format: f"{var_name}{altitude}m" - grid_index: - method: stack - dims: [x, y] - target_output_variable: state - - danra_surface: - path: https://mllam-test-data.s3.eu-north-1.amazonaws.com/single_levels.zarr - dims: [time, x, y] - variables: - # use surface incoming shortwave radiation as forcing - - swavr0m - dim_mapping: - time: - method: rename - dim: time - grid_index: - method: stack - dims: [x, y] - forcing_feature: - method: stack_variables_by_var_name - name_format: f"{var_name}" - target_output_variable: forcing - - danra_lsm: - path: https://mllam-test-data.s3.eu-north-1.amazonaws.com/lsm.zarr - dims: [x, y] - variables: - - lsm - dim_mapping: - grid_index: - method: stack - dims: [x, y] - static_feature: - method: stack_variables_by_var_name - name_format: f"{var_name}" - target_output_variable: static +schema_version: v0.2.0 +dataset_version: v0.1.0 + +output: + variables: + static: [grid_index, static_feature] + state: [time, grid_index, state_feature] + forcing: [time, grid_index, forcing_feature] + coord_ranges: + time: + start: 1990-09-03T00:00 + end: 1990-09-09T00:00 + step: PT3H + chunking: + time: 1 + splitting: + dim: time + splits: + train: + start: 1990-09-03T00:00 + end: 1990-09-06T00:00 + compute_statistics: + ops: [mean, std, diff_mean, diff_std] + dims: [grid_index, time] + val: + start: 1990-09-06T00:00 + end: 1990-09-07T00:00 + test: + start: 1990-09-07T00:00 + end: 1990-09-09T00:00 + +inputs: + danra_height_levels: + path: https://mllam-test-data.s3.eu-north-1.amazonaws.com/height_levels.zarr + dims: [time, x, y, altitude] + variables: + u: + altitude: + values: [100,] + units: m + v: + altitude: + values: [100, ] + units: m + dim_mapping: + time: + method: rename + dim: time + state_feature: + method: stack_variables_by_var_name + dims: [altitude] + name_format: f"{var_name}{altitude}m" + grid_index: + method: stack + dims: [x, y] + target_output_variable: state + + danra_surface: + path: https://mllam-test-data.s3.eu-north-1.amazonaws.com/single_levels.zarr + dims: [time, x, y] + variables: + # use surface incoming shortwave radiation as forcing + - swavr0m + dim_mapping: + time: + method: rename + dim: time + grid_index: + method: stack + dims: [x, y] + forcing_feature: + method: 
stack_variables_by_var_name + name_format: f"{var_name}" + target_output_variable: forcing + + danra_lsm: + path: https://mllam-test-data.s3.eu-north-1.amazonaws.com/lsm.zarr + dims: [x, y] + variables: + - lsm + dim_mapping: + grid_index: + method: stack + dims: [x, y] + static_feature: + method: stack_variables_by_var_name + name_format: f"{var_name}" + target_output_variable: static diff --git a/tests/old_config_schema_examples/v0.5.0/example.danra.yaml b/tests/old_config_schema_examples/v0.5.0/example.danra.yaml index 3edf126..4882845 100644 --- a/tests/old_config_schema_examples/v0.5.0/example.danra.yaml +++ b/tests/old_config_schema_examples/v0.5.0/example.danra.yaml @@ -1,99 +1,99 @@ -schema_version: v0.5.0 -dataset_version: v0.1.0 - -output: - variables: - static: [grid_index, static_feature] - state: [time, grid_index, state_feature] - forcing: [time, grid_index, forcing_feature] - coord_ranges: - time: - start: 1990-09-03T00:00 - end: 1990-09-09T00:00 - step: PT3H - chunking: - time: 1 - splitting: - dim: time - splits: - train: - start: 1990-09-03T00:00 - end: 1990-09-06T00:00 - compute_statistics: - ops: [mean, std, diff_mean, diff_std] - dims: [grid_index, time] - val: - start: 1990-09-06T00:00 - end: 1990-09-07T00:00 - test: - start: 1990-09-07T00:00 - end: 1990-09-09T00:00 - -inputs: - danra_height_levels: - path: https://mllam-test-data.s3.eu-north-1.amazonaws.com/height_levels.zarr - dims: [time, x, y, altitude] - variables: - u: - altitude: - values: [100,] - units: m - v: - altitude: - values: [100, ] - units: m - dim_mapping: - time: - method: rename - dim: time - state_feature: - method: stack_variables_by_var_name - dims: [altitude] - name_format: "{var_name}{altitude}m" - grid_index: - method: stack - dims: [x, y] - target_output_variable: state - - danra_surface: - path: https://mllam-test-data.s3.eu-north-1.amazonaws.com/single_levels.zarr - dims: [time, x, y] - variables: - # use surface incoming shortwave radiation as forcing - - swavr0m - dim_mapping: - time: - method: rename - dim: time - grid_index: - method: stack - dims: [x, y] - forcing_feature: - method: stack_variables_by_var_name - name_format: "{var_name}" - target_output_variable: forcing - - danra_lsm: - path: https://mllam-test-data.s3.eu-north-1.amazonaws.com/lsm.zarr - dims: [x, y] - variables: - - lsm - dim_mapping: - grid_index: - method: stack - dims: [x, y] - static_feature: - method: stack_variables_by_var_name - name_format: "{var_name}" - target_output_variable: static - -extra: - projection: - class_name: LambertConformal - kwargs: - central_longitude: 25.0 - central_latitude: 56.7 - standard_parallels: [56.7, 56.7] - globe: - semimajor_axis: 6367470.0 - semiminor_axis: 6367470.0 +schema_version: v0.5.0 +dataset_version: v0.1.0 + +output: + variables: + static: [grid_index, static_feature] + state: [time, grid_index, state_feature] + forcing: [time, grid_index, forcing_feature] + coord_ranges: + time: + start: 1990-09-03T00:00 + end: 1990-09-09T00:00 + step: PT3H + chunking: + time: 1 + splitting: + dim: time + splits: + train: + start: 1990-09-03T00:00 + end: 1990-09-06T00:00 + compute_statistics: + ops: [mean, std, diff_mean, diff_std] + dims: [grid_index, time] + val: + start: 1990-09-06T00:00 + end: 1990-09-07T00:00 + test: + start: 1990-09-07T00:00 + end: 1990-09-09T00:00 + +inputs: + danra_height_levels: + path: https://mllam-test-data.s3.eu-north-1.amazonaws.com/height_levels.zarr + dims: [time, x, y, altitude] + variables: + u: + altitude: + values: [100,] + units: m + v: + 
altitude: + values: [100, ] + units: m + dim_mapping: + time: + method: rename + dim: time + state_feature: + method: stack_variables_by_var_name + dims: [altitude] + name_format: "{var_name}{altitude}m" + grid_index: + method: stack + dims: [x, y] + target_output_variable: state + + danra_surface: + path: https://mllam-test-data.s3.eu-north-1.amazonaws.com/single_levels.zarr + dims: [time, x, y] + variables: + # use surface incoming shortwave radiation as forcing + - swavr0m + dim_mapping: + time: + method: rename + dim: time + grid_index: + method: stack + dims: [x, y] + forcing_feature: + method: stack_variables_by_var_name + name_format: "{var_name}" + target_output_variable: forcing + + danra_lsm: + path: https://mllam-test-data.s3.eu-north-1.amazonaws.com/lsm.zarr + dims: [x, y] + variables: + - lsm + dim_mapping: + grid_index: + method: stack + dims: [x, y] + static_feature: + method: stack_variables_by_var_name + name_format: "{var_name}" + target_output_variable: static + +extra: + projection: + class_name: LambertConformal + kwargs: + central_longitude: 25.0 + central_latitude: 56.7 + standard_parallels: [56.7, 56.7] + globe: + semimajor_axis: 6367470.0 + semiminor_axis: 6367470.0 diff --git a/tests/resources/sliced_example.danra.yaml b/tests/resources/sliced_example.danra.yaml index 6d60d85..5dd4dd5 100644 --- a/tests/resources/sliced_example.danra.yaml +++ b/tests/resources/sliced_example.danra.yaml @@ -1,62 +1,62 @@ -schema_version: v0.6.0 -dataset_version: v0.1.0 - -output: - variables: - state: [time, grid_index, state_feature] - coord_ranges: - time: - start: 1990-09-03T00:00 - end: 1990-09-09T00:00 - step: PT3H - chunking: - time: 1 - splitting: - dim: time - splits: - train: - start: 1990-09-03T00:00 - end: 1990-09-06T00:00 - compute_statistics: - ops: [mean, std, diff_mean, diff_std] - dims: [grid_index, time] - val: - start: 1990-09-06T00:00 - end: 1990-09-07T00:00 - test: - start: 1990-09-07T00:00 - end: 1990-09-09T00:00 - -inputs: - danra_height_levels: - path: https://mllam-test-data.s3.eu-north-1.amazonaws.com/height_levels.zarr - dims: [time, x, y, altitude] - variables: - u: - altitude: - values: [100,] - units: m - v: - altitude: - values: [100, ] - units: m - dim_mapping: - time: - method: rename - dim: time - state_feature: - method: stack_variables_by_var_name - dims: [altitude] - name_format: "{var_name}{altitude}m" - grid_index: - method: stack - dims: [x, y] - coord_ranges: - x: - start: -50000 - end: -40000 - y: - start: -50000 - end: -40000 - - target_output_variable: state +schema_version: v0.6.0 +dataset_version: v0.1.0 + +output: + variables: + state: [time, grid_index, state_feature] + coord_ranges: + time: + start: 1990-09-03T00:00 + end: 1990-09-09T00:00 + step: PT3H + chunking: + time: 1 + splitting: + dim: time + splits: + train: + start: 1990-09-03T00:00 + end: 1990-09-06T00:00 + compute_statistics: + ops: [mean, std, diff_mean, diff_std] + dims: [grid_index, time] + val: + start: 1990-09-06T00:00 + end: 1990-09-07T00:00 + test: + start: 1990-09-07T00:00 + end: 1990-09-09T00:00 + +inputs: + danra_height_levels: + path: https://mllam-test-data.s3.eu-north-1.amazonaws.com/height_levels.zarr + dims: [time, x, y, altitude] + variables: + u: + altitude: + values: [100,] + units: m + v: + altitude: + values: [100, ] + units: m + dim_mapping: + time: + method: rename + dim: time + state_feature: + method: stack_variables_by_var_name + dims: [altitude] + name_format: "{var_name}{altitude}m" + grid_index: + method: stack + dims: [x, y] + coord_ranges: + 
x: + start: -50000 + end: -40000 + y: + start: -50000 + end: -40000 + + target_output_variable: state diff --git a/tests/test_cli.py b/tests/test_cli.py index 064bbba..95a024a 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -1,14 +1,14 @@ -import tempfile - -import pytest -import xarray as xr - -from mllam_data_prep.cli import call - - -@pytest.mark.parametrize("args", [["example.danra.yaml"]]) -def test_call(args): - with tempfile.TemporaryDirectory(suffix=".zarr") as tmpdir: - args.extend(["--output", tmpdir]) - call(args) - _ = xr.open_zarr(tmpdir) +import tempfile + +import pytest +import xarray as xr + +from mllam_data_prep.cli import call + + +@pytest.mark.parametrize("args", [["example.danra.yaml"]]) +def test_call(args): + with tempfile.TemporaryDirectory(suffix=".zarr") as tmpdir: + args.extend(["--output", tmpdir]) + call(args) + _ = xr.open_zarr(tmpdir) diff --git a/tests/test_config.py b/tests/test_config.py index 5f7896a..7fd82fa 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -1,131 +1,131 @@ -import pytest -from dataclass_wizard.errors import MissingFields, UnknownJSONKey - -import mllam_data_prep as mdp - -INVALID_EXTRA_FIELDS_CONFIG_YAML = """ -schema_version: v0.1.0 -dataset_version: v0.1.0 - -output: - variables: - static: [grid_index, feature] - coord_ranges: - time: - start: 1990-09-03T00:00 - end: 1990-09-04T00:00 - step: PT3H - -inputs: {} -foobar: 42 -""" - -MISSING_FIELDS_CONFIG_YAML = """ -schema_version: v0.1.0 -dataset_version: v0.1.0 -""" - - -def test_get_config_issues(): - """Test that the Config class raises the correct exceptions when the YAML file is invalid.""" - with pytest.raises(UnknownJSONKey): - mdp.Config.from_yaml(INVALID_EXTRA_FIELDS_CONFIG_YAML) - - with pytest.raises(MissingFields): - mdp.Config.from_yaml(MISSING_FIELDS_CONFIG_YAML) - - -VALID_EXAMPLE_CONFIG_YAML = """ -schema_version: v0.1.0 -dataset_version: v0.1.0 - -output: - variables: - static: [grid_index, feature] - state: [time, grid_index, state_feature] - forcing: [time, grid_index, forcing_feature] - coord_ranges: - time: - start: 1990-09-03T00:00 - end: 1990-09-04T00:00 - step: PT3H - splitting: - dim: time - splits: - train: - start: 1990-09-03T00:00 - end: 1990-09-06T00:00 - compute_statistics: - ops: [mean, std] - dims: [grid_index, time] - validation: - start: 1990-09-06T00:00 - end: 1990-09-07T00:00 - test: - start: 1990-09-07T00:00 - end: 1990-09-09T00:00 - -inputs: - danra_height_levels: - path: ~/Desktop/mldev/height_levels.zarr - dims: [time, x, y, altitude] - variables: - u: - altitude: - values: [100, ] - units: m - v: - altitude: - values: [100, ] - units: m - dim_mapping: - time: - method: rename - dim: time - state_feature: - method: stack_variables_by_var_name - dims: [altitude] - name_format: f"{var_name}{altitude}m" - grid_index: - method: flatten - dims: [x, y] - target_output_variable: state - - danra_surface: - path: ~/Desktop/mldev/single_levels.zarr - dims: [time, x, y] - variables: - - pres_seasurface - dim_mapping: - time: - method: rename - dim: time - grid_index: - method: flatten - dims: [x, y] - forcing_feature: - method: stack_variables_by_var_name - name_format: f"{var_name}" - target_output_variable: forcing -""" - - -def test_get_config_nested(): - config = mdp.Config.from_yaml(VALID_EXAMPLE_CONFIG_YAML) - - for dataset_name, input_config in config.inputs.items(): - assert input_config.path is not None - assert input_config.variables is not None - assert input_config.target_output_variable is not None - with 
pytest.raises(AttributeError): - input_config.foobarfield - - -def test_config_roundtrip(): - original_config = mdp.Config.from_yaml(VALID_EXAMPLE_CONFIG_YAML) - roundtrip_config_dict = mdp.Config.from_dict(original_config.to_dict()) - roundtrip_config_yaml = mdp.Config.from_yaml(original_config.to_yaml()) - roundtrip_config_json = mdp.Config.from_json(original_config.to_json()) - assert original_config == roundtrip_config_dict - assert original_config == roundtrip_config_yaml - assert original_config == roundtrip_config_json +import pytest +from dataclass_wizard.errors import MissingFields, UnknownJSONKey + +import mllam_data_prep as mdp + +INVALID_EXTRA_FIELDS_CONFIG_YAML = """ +schema_version: v0.1.0 +dataset_version: v0.1.0 + +output: + variables: + static: [grid_index, feature] + coord_ranges: + time: + start: 1990-09-03T00:00 + end: 1990-09-04T00:00 + step: PT3H + +inputs: {} +foobar: 42 +""" + +MISSING_FIELDS_CONFIG_YAML = """ +schema_version: v0.1.0 +dataset_version: v0.1.0 +""" + + +def test_get_config_issues(): + """Test that the Config class raises the correct exceptions when the YAML file is invalid.""" + with pytest.raises(UnknownJSONKey): + mdp.Config.from_yaml(INVALID_EXTRA_FIELDS_CONFIG_YAML) + + with pytest.raises(MissingFields): + mdp.Config.from_yaml(MISSING_FIELDS_CONFIG_YAML) + + +VALID_EXAMPLE_CONFIG_YAML = """ +schema_version: v0.1.0 +dataset_version: v0.1.0 + +output: + variables: + static: [grid_index, feature] + state: [time, grid_index, state_feature] + forcing: [time, grid_index, forcing_feature] + coord_ranges: + time: + start: 1990-09-03T00:00 + end: 1990-09-04T00:00 + step: PT3H + splitting: + dim: time + splits: + train: + start: 1990-09-03T00:00 + end: 1990-09-06T00:00 + compute_statistics: + ops: [mean, std] + dims: [grid_index, time] + validation: + start: 1990-09-06T00:00 + end: 1990-09-07T00:00 + test: + start: 1990-09-07T00:00 + end: 1990-09-09T00:00 + +inputs: + danra_height_levels: + path: ~/Desktop/mldev/height_levels.zarr + dims: [time, x, y, altitude] + variables: + u: + altitude: + values: [100, ] + units: m + v: + altitude: + values: [100, ] + units: m + dim_mapping: + time: + method: rename + dim: time + state_feature: + method: stack_variables_by_var_name + dims: [altitude] + name_format: f"{var_name}{altitude}m" + grid_index: + method: flatten + dims: [x, y] + target_output_variable: state + + danra_surface: + path: ~/Desktop/mldev/single_levels.zarr + dims: [time, x, y] + variables: + - pres_seasurface + dim_mapping: + time: + method: rename + dim: time + grid_index: + method: flatten + dims: [x, y] + forcing_feature: + method: stack_variables_by_var_name + name_format: f"{var_name}" + target_output_variable: forcing +""" + + +def test_get_config_nested(): + config = mdp.Config.from_yaml(VALID_EXAMPLE_CONFIG_YAML) + + for dataset_name, input_config in config.inputs.items(): + assert input_config.path is not None + assert input_config.variables is not None + assert input_config.target_output_variable is not None + with pytest.raises(AttributeError): + input_config.foobarfield + + +def test_config_roundtrip(): + original_config = mdp.Config.from_yaml(VALID_EXAMPLE_CONFIG_YAML) + roundtrip_config_dict = mdp.Config.from_dict(original_config.to_dict()) + roundtrip_config_yaml = mdp.Config.from_yaml(original_config.to_yaml()) + roundtrip_config_json = mdp.Config.from_json(original_config.to_json()) + assert original_config == roundtrip_config_dict + assert original_config == roundtrip_config_yaml + assert original_config == roundtrip_config_json 
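Reviewer note: the round-trip test above relies on `Config` supporting symmetric (de)serialization through dicts, YAML, and JSON. A minimal usage sketch of that API, assuming the repository's `example.danra.yaml` is present in the working directory and parses against the current schema:

```python
# Usage sketch of the Config round trip exercised by test_config_roundtrip.
import mllam_data_prep as mdp

with open("example.danra.yaml", "r") as f:
    config = mdp.Config.from_yaml(f.read())

# each serialization path should reconstruct an equal Config object
assert config == mdp.Config.from_dict(config.to_dict())
assert config == mdp.Config.from_yaml(config.to_yaml())
assert config == mdp.Config.from_json(config.to_json())
```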
diff --git a/tests/test_dataset.py b/tests/test_dataset.py index e78a93b..cf3ca9a 100644 --- a/tests/test_dataset.py +++ b/tests/test_dataset.py @@ -1,280 +1,280 @@ -"""Tests for the output dataset created by `mllam-data-prep`.""" -import pytest -import yaml - -import mllam_data_prep as mdp - -with open("example.danra.yaml", "r") as file: - BASE_CONFIG = file.read() - -HEIGHT_LEVEL_TEST_SECTION = """\ -inputs: - danra_height_levels: - path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/height_levels.zarr - dims: [time, x, y, altitude] - variables: - u: - altitude: - values: [100, 50,] - units: m - v: - altitude: - values: [100, 50, ] - units: m - dim_mapping: - time: - method: rename - dim: time - state_feature: - method: stack_variables_by_var_name - dims: [altitude] - name_format: "{var_name}{altitude}m" - grid_index: - method: stack - dims: [x, y] - target_output_variable: state -""" - -PRESSURE_LEVEL_TEST_SECTION = """\ -inputs: - danra_pressure_levels: - path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/pressure_levels.zarr - dims: [time, x, y, pressure] - variables: - u: - pressure: - values: [1000,] - units: hPa - v: - pressure: - values: [1000, ] - units: hPa - dim_mapping: - time: - method: rename - dim: time - state_feature: - method: stack_variables_by_var_name - dims: [pressure] - name_format: "{var_name}{pressure}m" - grid_index: - method: stack - dims: [x, y] - target_output_variable: state -""" - -SINGLE_LEVEL_SELECTED_VARIABLES_TEST_SECTION = """\ -inputs: - danra_single_levels: - path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/single_levels.zarr - dims: [time, x, y] - variables: - - t2m - - pres_seasurface - dim_mapping: - time: - method: rename - dim: time - state_feature: - method: stack_variables_by_var_name - name_format: "{var_name}" - grid_index: - method: stack - dims: [x, y] - target_output_variable: state -""" - -SINGLE_LEVEL_DERIVED_VARIABLES_TEST_SECTION = """\ -inputs: - danra_single_levels: - path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/single_levels.zarr - dims: [time, x, y] - derived_variables: - # derive variables to be used as forcings - toa_radiation: - kwargs: - time: ds_input.time - lat: ds_input.lat - lon: ds_input.lon - function: mllam_data_prep.ops.derive_variable.physical_field.calculate_toa_radiation - dim_mapping: - time: - method: rename - dim: time - state_feature: - method: stack_variables_by_var_name - name_format: "{var_name}" - grid_index: - method: stack - dims: [x, y] - target_output_variable: state -""" - -INVALID_PRESSURE_LEVEL_TEST_SECTION = """\ -inputs: - danra_pressure_levels: - path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/pressure_levels.zarr - dims: [time, x, y, pressure] - variables: - z: - pressure: - values: [1000,] - units: hPa - t: - pressure: - values: [800, ] - units: hPa - dim_mapping: - time: - method: rename - dim: time - state_feature: - method: stack_variables_by_var_name - dims: [pressure] - name_format: "{var_name}{pressure}m" - grid_index: - method: stack - dims: [x, y] - target_output_variable: state -""" - - -def update_config(config: str, update: str): - """ - Update provided config. 
- - Parameters - ---------- - config: str - String with config in yaml format - update: str - String with the update in yaml format - - Returns - ------- - config: Config - Updated config - """ - original_config = mdp.Config.from_yaml(config) - update = yaml.safe_load(update) - modified_config = original_config.to_dict() - modified_config.update(update) - modified_config = mdp.Config.from_dict(modified_config) - - return modified_config - - -@pytest.mark.parametrize( - "base_config, new_inputs_section", - [ - (BASE_CONFIG, "{}"), # Does not modify the example config - (BASE_CONFIG, PRESSURE_LEVEL_TEST_SECTION), - (BASE_CONFIG, HEIGHT_LEVEL_TEST_SECTION), - (BASE_CONFIG, SINGLE_LEVEL_SELECTED_VARIABLES_TEST_SECTION), - (BASE_CONFIG, SINGLE_LEVEL_DERIVED_VARIABLES_TEST_SECTION), - ], -) -def test_selected_output_variables(base_config, new_inputs_section): - """ - Test that the variables specified in each input dataset are - present in the output dataset. - """ - # Modify the example config - config = update_config(base_config, new_inputs_section) - - # Create the dataset - ds = mdp.create_dataset(config=config) - - # Check that the output variables are the ones selected - for _, input_config in config.inputs.items(): - target_output_variable = input_config.target_output_variable - - # Get the expected selected variable names - selected_variables = input_config.variables or [] - if isinstance(selected_variables, dict): - selected_var_names = list(selected_variables.keys()) - elif isinstance(selected_variables, list): - selected_var_names = selected_variables - else: - pytest.fail( - "Expected either 'list' or 'dict' but got" - f" type {type(selected_variables)} for 'variables'." - ) - - # Get the expected derived variable names - derived_variables = input_config.derived_variables or [] - if isinstance(derived_variables, dict): - derived_var_names = list(derived_variables.keys()) - elif isinstance(derived_variables, list): - derived_var_names = derived_variables - else: - pytest.fail( - "Expected either 'list' or 'dict' but got" - f" type {type(derived_variables)} for 'derived_variables'." - ) - - dim_mapping = input_config.dim_mapping[target_output_variable + "_feature"] - dims = dim_mapping.dims or [] - name_format = dim_mapping.name_format - - if len(dims) == 0: - selected_vars = selected_var_names - derived_vars = derived_var_names - elif len(dims) == 1: - coord = dims[0] - # Stack the variable names by coordinates, as is done in - # mdp.ops.stacking.stack_variables_by_coord_values - selected_vars = [] - for var_name in selected_var_names: - coord_values = selected_variables[var_name][coord].values - formatted_var_names = [ - name_format.format(var_name=var_name, **{coord: val}) - for val in coord_values - ] - selected_vars += formatted_var_names - # We currently do not support stacking of variables by coordinates - # for the derived variables - derived_vars = [] - - expected_variables = selected_vars + derived_vars - output_variables = ds[target_output_variable + "_feature"].values - - if set(expected_variables) != set(output_variables): - # Check if there are missing or extra variable - missing_vars = list(set(expected_variables) - set(output_variables)) - extra_vars = list(set(output_variables) - set(expected_variables)) - - error_message = ( - f"Expected {expected_variables}, but got {output_variables}." 
- ) - if missing_vars: - error_message += f"\nMissing variables: {missing_vars}" - if extra_vars: - error_message += f"\nExtra variables: {extra_vars}" - - pytest.fail(error_message) - - -@pytest.mark.parametrize( - "base_config, update, expected_result", - [ - ( - BASE_CONFIG, - "{}", - False, - ), # Do not modify the example config - should return False since we're expecting no nans - ( - BASE_CONFIG, - INVALID_PRESSURE_LEVEL_TEST_SECTION, - True, - ), # Dataset with nans - should return True - ], -) -def test_output_dataset_for_nans(base_config, update, expected_result): - """ - Test that the output dataset does not contain any nan values. - """ - config = update_config(base_config, update) - ds = mdp.create_dataset(config=config) - nan_in_ds = any(ds.isnull().any().compute().to_array()) - assert nan_in_ds == expected_result +"""Tests for the output dataset created by `mllam-data-prep`.""" +import pytest +import yaml + +import mllam_data_prep as mdp + +with open("example.danra.yaml", "r") as file: + BASE_CONFIG = file.read() + +HEIGHT_LEVEL_TEST_SECTION = """\ +inputs: + danra_height_levels: + path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/height_levels.zarr + dims: [time, x, y, altitude] + variables: + u: + altitude: + values: [100, 50,] + units: m + v: + altitude: + values: [100, 50, ] + units: m + dim_mapping: + time: + method: rename + dim: time + state_feature: + method: stack_variables_by_var_name + dims: [altitude] + name_format: "{var_name}{altitude}m" + grid_index: + method: stack + dims: [x, y] + target_output_variable: state +""" + +PRESSURE_LEVEL_TEST_SECTION = """\ +inputs: + danra_pressure_levels: + path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/pressure_levels.zarr + dims: [time, x, y, pressure] + variables: + u: + pressure: + values: [1000,] + units: hPa + v: + pressure: + values: [1000, ] + units: hPa + dim_mapping: + time: + method: rename + dim: time + state_feature: + method: stack_variables_by_var_name + dims: [pressure] + name_format: "{var_name}{pressure}m" + grid_index: + method: stack + dims: [x, y] + target_output_variable: state +""" + +SINGLE_LEVEL_SELECTED_VARIABLES_TEST_SECTION = """\ +inputs: + danra_single_levels: + path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/single_levels.zarr + dims: [time, x, y] + variables: + - t2m + - pres_seasurface + dim_mapping: + time: + method: rename + dim: time + state_feature: + method: stack_variables_by_var_name + name_format: "{var_name}" + grid_index: + method: stack + dims: [x, y] + target_output_variable: state +""" + +SINGLE_LEVEL_DERIVED_VARIABLES_TEST_SECTION = """\ +inputs: + danra_single_levels: + path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/single_levels.zarr + dims: [time, x, y] + derived_variables: + # derive variables to be used as forcings + toa_radiation: + kwargs: + time: ds_input.time + lat: ds_input.lat + lon: ds_input.lon + function: mllam_data_prep.ops.derive_variable.physical_field.calculate_toa_radiation + dim_mapping: + time: + method: rename + dim: time + state_feature: + method: stack_variables_by_var_name + name_format: "{var_name}" + grid_index: + method: stack + dims: [x, y] + target_output_variable: state +""" + +INVALID_PRESSURE_LEVEL_TEST_SECTION = """\ +inputs: + danra_pressure_levels: + path: https://object-store.os-api.cci1.ecmwf.int/mllam-testdata/danra_cropped/v0.2.0/pressure_levels.zarr + dims: [time, x, y, pressure] + variables: + 
z: + pressure: + values: [1000,] + units: hPa + t: + pressure: + values: [800, ] + units: hPa + dim_mapping: + time: + method: rename + dim: time + state_feature: + method: stack_variables_by_var_name + dims: [pressure] + name_format: "{var_name}{pressure}m" + grid_index: + method: stack + dims: [x, y] + target_output_variable: state +""" + + +def update_config(config: str, update: str): + """ + Update provided config. + + Parameters + ---------- + config: str + String with config in yaml format + update: str + String with the update in yaml format + + Returns + ------- + config: Config + Updated config + """ + original_config = mdp.Config.from_yaml(config) + update = yaml.safe_load(update) + modified_config = original_config.to_dict() + modified_config.update(update) + modified_config = mdp.Config.from_dict(modified_config) + + return modified_config + + +@pytest.mark.parametrize( + "base_config, new_inputs_section", + [ + (BASE_CONFIG, "{}"), # Does not modify the example config + (BASE_CONFIG, PRESSURE_LEVEL_TEST_SECTION), + (BASE_CONFIG, HEIGHT_LEVEL_TEST_SECTION), + (BASE_CONFIG, SINGLE_LEVEL_SELECTED_VARIABLES_TEST_SECTION), + (BASE_CONFIG, SINGLE_LEVEL_DERIVED_VARIABLES_TEST_SECTION), + ], +) +def test_selected_output_variables(base_config, new_inputs_section): + """ + Test that the variables specified in each input dataset are + present in the output dataset. + """ + # Modify the example config + config = update_config(base_config, new_inputs_section) + + # Create the dataset + ds = mdp.create_dataset(config=config) + + # Check that the output variables are the ones selected + for _, input_config in config.inputs.items(): + target_output_variable = input_config.target_output_variable + + # Get the expected selected variable names + selected_variables = input_config.variables or [] + if isinstance(selected_variables, dict): + selected_var_names = list(selected_variables.keys()) + elif isinstance(selected_variables, list): + selected_var_names = selected_variables + else: + pytest.fail( + "Expected either 'list' or 'dict' but got" + f" type {type(selected_variables)} for 'variables'." + ) + + # Get the expected derived variable names + derived_variables = input_config.derived_variables or [] + if isinstance(derived_variables, dict): + derived_var_names = list(derived_variables.keys()) + elif isinstance(derived_variables, list): + derived_var_names = derived_variables + else: + pytest.fail( + "Expected either 'list' or 'dict' but got" + f" type {type(derived_variables)} for 'derived_variables'." 
+            )
+
+        dim_mapping = input_config.dim_mapping[target_output_variable + "_feature"]
+        dims = dim_mapping.dims or []
+        name_format = dim_mapping.name_format
+
+        if len(dims) == 0:
+            selected_vars = selected_var_names
+            derived_vars = derived_var_names
+        elif len(dims) == 1:
+            coord = dims[0]
+            # Stack the variable names by coordinates, as is done in
+            # mdp.ops.stacking.stack_variables_by_coord_values
+            selected_vars = []
+            for var_name in selected_var_names:
+                coord_values = selected_variables[var_name][coord].values
+                formatted_var_names = [
+                    name_format.format(var_name=var_name, **{coord: val})
+                    for val in coord_values
+                ]
+                selected_vars += formatted_var_names
+            # We currently do not support stacking of variables by coordinates
+            # for the derived variables
+            derived_vars = []
+
+        expected_variables = selected_vars + derived_vars
+        output_variables = ds[target_output_variable + "_feature"].values
+
+        if set(expected_variables) != set(output_variables):
+            # Check if there are missing or extra variables
+            missing_vars = list(set(expected_variables) - set(output_variables))
+            extra_vars = list(set(output_variables) - set(expected_variables))
+
+            error_message = (
+                f"Expected {expected_variables}, but got {output_variables}."
+            )
+            if missing_vars:
+                error_message += f"\nMissing variables: {missing_vars}"
+            if extra_vars:
+                error_message += f"\nExtra variables: {extra_vars}"
+
+            pytest.fail(error_message)
+
+
+@pytest.mark.parametrize(
+    "base_config, update, expected_result",
+    [
+        (
+            BASE_CONFIG,
+            "{}",
+            False,
+        ),  # Do not modify the example config - should return False since we're expecting no nans
+        (
+            BASE_CONFIG,
+            INVALID_PRESSURE_LEVEL_TEST_SECTION,
+            True,
+        ),  # Dataset with nans - should return True
+    ],
+)
+def test_output_dataset_for_nans(base_config, update, expected_result):
+    """
+    Test whether the output dataset contains any NaN values, matching the expected result.
+ """ + config = update_config(base_config, update) + ds = mdp.create_dataset(config=config) + nan_in_ds = any(ds.isnull().any().compute().to_array()) + assert nan_in_ds == expected_result diff --git a/tests/test_distributed.py b/tests/test_distributed.py index 727f871..1be056a 100644 --- a/tests/test_distributed.py +++ b/tests/test_distributed.py @@ -1,50 +1,50 @@ -import importlib -import tempfile - -import pytest -import xarray as xr - -from mllam_data_prep.cli import call - - -def call_wrapper(args): - with tempfile.TemporaryDirectory(suffix=".zarr") as tmpdir: - args.extend(["--output", tmpdir]) - call(args) - _ = xr.open_zarr(tmpdir) - - -def distributed(): - """Check if dask.distributed is installed""" - try: - importlib.import_module("dask.distributed") - - return True - except (ModuleNotFoundError, ImportError): - return False - - -@pytest.mark.parametrize( - "args", - [ - ["example.danra.yaml", "--dask-distributed-local-core-fraction", "1.0"], - ["example.danra.yaml", "--dask-distributed-local-core-fraction", "0.0"], - ["example.danra.yaml"], - ], -) -def test_run_distributed(args): - if distributed(): - call_wrapper(args) - elif not distributed() and "--dask-distributed-local-core-fraction" in args: - index = args.index("--dask-distributed-local-core-fraction") - core_fraction = float(args[index + 1]) - if core_fraction > 0: - pytest.raises( - ModuleNotFoundError, - call_wrapper, - args=args, - ) - else: - call_wrapper(args) - else: - call_wrapper(args) +import importlib +import tempfile + +import pytest +import xarray as xr + +from mllam_data_prep.cli import call + + +def call_wrapper(args): + with tempfile.TemporaryDirectory(suffix=".zarr") as tmpdir: + args.extend(["--output", tmpdir]) + call(args) + _ = xr.open_zarr(tmpdir) + + +def distributed(): + """Check if dask.distributed is installed""" + try: + importlib.import_module("dask.distributed") + + return True + except (ModuleNotFoundError, ImportError): + return False + + +@pytest.mark.parametrize( + "args", + [ + ["example.danra.yaml", "--dask-distributed-local-core-fraction", "1.0"], + ["example.danra.yaml", "--dask-distributed-local-core-fraction", "0.0"], + ["example.danra.yaml"], + ], +) +def test_run_distributed(args): + if distributed(): + call_wrapper(args) + elif not distributed() and "--dask-distributed-local-core-fraction" in args: + index = args.index("--dask-distributed-local-core-fraction") + core_fraction = float(args[index + 1]) + if core_fraction > 0: + pytest.raises( + ModuleNotFoundError, + call_wrapper, + args=args, + ) + else: + call_wrapper(args) + else: + call_wrapper(args) diff --git a/tests/test_existing_output.py b/tests/test_existing_output.py index 0a70ae8..a1adc90 100644 --- a/tests/test_existing_output.py +++ b/tests/test_existing_output.py @@ -1,122 +1,122 @@ -import shutil -from copy import deepcopy -from unittest.mock import patch - -import pytest -import xarray as xr - -import mllam_data_prep.config as mdp_config -from mllam_data_prep.create_dataset import create_dataset_zarr - - -@pytest.fixture -def mock_config(): - return mdp_config.Config( - schema_version="v0.6.0", - dataset_version="v1.0.0", - inputs={}, - output=mdp_config.Output(variables={}), - ) - - -@pytest.fixture -def mock_zarr_path(tmp_path): - return tmp_path / "test.zarr" - - -def test_handle_existing_dataset_always_overwrite(mock_config, mock_zarr_path): - config_path = mock_zarr_path.parent / "config.yaml" - mock_config.to_yaml_file(config_path) - ds = xr.Dataset( - attrs={"mdp_version": "v0.6.0", "creation_config": 
mock_config.to_yaml()} - ) - ds.to_zarr(str(mock_zarr_path)) - fn_rmtree = shutil.rmtree - with patch("shutil.rmtree") as mock_rmtree: - # ensure that the rmtree function is called, otherwise xarray won't be - # able to write to the zarr path - mock_rmtree.side_effect = fn_rmtree - create_dataset_zarr( - fp_config=config_path, fp_zarr=str(mock_zarr_path), overwrite="always" - ) - mock_rmtree.assert_called_once_with(mock_zarr_path) - - -def test_handle_existing_dataset_never_overwrite(mock_config, mock_zarr_path): - config_path = mock_zarr_path.parent / "config.yaml" - mock_config.to_yaml_file(config_path) - ds = xr.Dataset( - attrs={"mdp_version": "v0.6.0", "creation_config": mock_config.to_yaml()} - ) - ds.to_zarr(str(mock_zarr_path)) - with patch("shutil.rmtree") as mock_rmtree: - with pytest.raises(FileExistsError, match="There already exists a dataset at"): - create_dataset_zarr( - fp_config=config_path, fp_zarr=str(mock_zarr_path), overwrite="never" - ) - mock_rmtree.assert_not_called() - - -def test_handle_existing_dataset_on_config_change_same_config( - mock_config, mock_zarr_path -): - """ - Test that when the existing dataset has the same config as the current config, the zarr dataset is not deleted. - """ - config_path = mock_zarr_path.parent / "config.yaml" - mock_config.to_yaml_file(config_path) - ds = xr.Dataset( - attrs={"mdp_version": "v0.6.0", "creation_config": mock_config.to_yaml()} - ) - ds.to_zarr(str(mock_zarr_path)) - with patch("shutil.rmtree") as mock_rmtree: - create_dataset_zarr( - fp_config=config_path, - fp_zarr=str(mock_zarr_path), - overwrite="on_config_change", - ) - mock_rmtree.assert_not_called() - - -def test_handle_existing_dataset_on_config_change_different_config( - mock_config, mock_zarr_path -): - """ - Test that when the existing dataset has a different config than the current config, the zarr dataset is deleted. - """ - different_config = deepcopy(mock_config) - different_config.dataset_version = "2.0.0" - config_path = mock_zarr_path.parent / "config.yaml" - mock_config.to_yaml_file(config_path) - ds = xr.Dataset( - attrs={"mdp_version": "0.6.0", "creation_config": different_config.to_yaml()} - ) - ds.to_zarr(str(mock_zarr_path)) - fn_rmtree = shutil.rmtree - with patch("shutil.rmtree") as mock_rmtree: - # ensure that the rmtree function is called, otherwise xarray won't be - # able to write to the zarr path - mock_rmtree.side_effect = fn_rmtree - create_dataset_zarr( - fp_config=config_path, - fp_zarr=str(mock_zarr_path), - overwrite="on_config_change", - ) - mock_rmtree.assert_called_once_with(mock_zarr_path) - - -def test_handle_existing_dataset_older_version(mock_config, mock_zarr_path): - """ - Test that when the existing dataset was created with an older version of mllam-data-prep, an exception is raised. - Since for older versions we do not have the creation_config attribute, we cannot compare the configs. 
- """ - config_path = mock_zarr_path.parent / "config.yaml" - mock_config.to_yaml_file(config_path) - ds = xr.Dataset(attrs={"mdp_version": "0.5.0"}) - ds.to_zarr(str(mock_zarr_path)) - with pytest.raises(FileExistsError, match="older version of mllam-data-prep"): - create_dataset_zarr( - fp_config=config_path, - fp_zarr=str(mock_zarr_path), - overwrite="on_config_change", - ) +import shutil +from copy import deepcopy +from unittest.mock import patch + +import pytest +import xarray as xr + +import mllam_data_prep.config as mdp_config +from mllam_data_prep.create_dataset import create_dataset_zarr + + +@pytest.fixture +def mock_config(): + return mdp_config.Config( + schema_version="v0.6.0", + dataset_version="v1.0.0", + inputs={}, + output=mdp_config.Output(variables={}), + ) + + +@pytest.fixture +def mock_zarr_path(tmp_path): + return tmp_path / "test.zarr" + + +def test_handle_existing_dataset_always_overwrite(mock_config, mock_zarr_path): + config_path = mock_zarr_path.parent / "config.yaml" + mock_config.to_yaml_file(config_path) + ds = xr.Dataset( + attrs={"mdp_version": "v0.6.0", "creation_config": mock_config.to_yaml()} + ) + ds.to_zarr(str(mock_zarr_path)) + fn_rmtree = shutil.rmtree + with patch("shutil.rmtree") as mock_rmtree: + # ensure that the rmtree function is called, otherwise xarray won't be + # able to write to the zarr path + mock_rmtree.side_effect = fn_rmtree + create_dataset_zarr( + fp_config=config_path, fp_zarr=str(mock_zarr_path), overwrite="always" + ) + mock_rmtree.assert_called_once_with(mock_zarr_path) + + +def test_handle_existing_dataset_never_overwrite(mock_config, mock_zarr_path): + config_path = mock_zarr_path.parent / "config.yaml" + mock_config.to_yaml_file(config_path) + ds = xr.Dataset( + attrs={"mdp_version": "v0.6.0", "creation_config": mock_config.to_yaml()} + ) + ds.to_zarr(str(mock_zarr_path)) + with patch("shutil.rmtree") as mock_rmtree: + with pytest.raises(FileExistsError, match="There already exists a dataset at"): + create_dataset_zarr( + fp_config=config_path, fp_zarr=str(mock_zarr_path), overwrite="never" + ) + mock_rmtree.assert_not_called() + + +def test_handle_existing_dataset_on_config_change_same_config( + mock_config, mock_zarr_path +): + """ + Test that when the existing dataset has the same config as the current config, the zarr dataset is not deleted. + """ + config_path = mock_zarr_path.parent / "config.yaml" + mock_config.to_yaml_file(config_path) + ds = xr.Dataset( + attrs={"mdp_version": "v0.6.0", "creation_config": mock_config.to_yaml()} + ) + ds.to_zarr(str(mock_zarr_path)) + with patch("shutil.rmtree") as mock_rmtree: + create_dataset_zarr( + fp_config=config_path, + fp_zarr=str(mock_zarr_path), + overwrite="on_config_change", + ) + mock_rmtree.assert_not_called() + + +def test_handle_existing_dataset_on_config_change_different_config( + mock_config, mock_zarr_path +): + """ + Test that when the existing dataset has a different config than the current config, the zarr dataset is deleted. 
+ """ + different_config = deepcopy(mock_config) + different_config.dataset_version = "2.0.0" + config_path = mock_zarr_path.parent / "config.yaml" + mock_config.to_yaml_file(config_path) + ds = xr.Dataset( + attrs={"mdp_version": "0.6.0", "creation_config": different_config.to_yaml()} + ) + ds.to_zarr(str(mock_zarr_path)) + fn_rmtree = shutil.rmtree + with patch("shutil.rmtree") as mock_rmtree: + # ensure that the rmtree function is called, otherwise xarray won't be + # able to write to the zarr path + mock_rmtree.side_effect = fn_rmtree + create_dataset_zarr( + fp_config=config_path, + fp_zarr=str(mock_zarr_path), + overwrite="on_config_change", + ) + mock_rmtree.assert_called_once_with(mock_zarr_path) + + +def test_handle_existing_dataset_older_version(mock_config, mock_zarr_path): + """ + Test that when the existing dataset was created with an older version of mllam-data-prep, an exception is raised. + Since for older versions we do not have the creation_config attribute, we cannot compare the configs. + """ + config_path = mock_zarr_path.parent / "config.yaml" + mock_config.to_yaml_file(config_path) + ds = xr.Dataset(attrs={"mdp_version": "0.5.0"}) + ds.to_zarr(str(mock_zarr_path)) + with pytest.raises(FileExistsError, match="older version of mllam-data-prep"): + create_dataset_zarr( + fp_config=config_path, + fp_zarr=str(mock_zarr_path), + overwrite="on_config_change", + ) diff --git a/tests/test_from_config.py b/tests/test_from_config.py index 1a89361..4517666 100644 --- a/tests/test_from_config.py +++ b/tests/test_from_config.py @@ -1,378 +1,378 @@ -import shutil -import tempfile -from pathlib import Path - -import isodate -import pytest -import yaml - -import mllam_data_prep as mdp -import tests.data as testdata - - -def test_gen_data(): - tmpdir = tempfile.TemporaryDirectory() - testdata.create_data_collection( - data_kinds=testdata.ALL_DATA_KINDS, fp_root=tmpdir.name - ) - - -def test_merging_static_and_surface_analysis(): - tmpdir = tempfile.TemporaryDirectory() - datasets = testdata.create_data_collection( - data_kinds=["surface_analysis", "static"], fp_root=tmpdir.name - ) - - # use 80% for training and 20% for testing - t_train_start = testdata.T_START - t_train_end = testdata.T_START + 0.8 * (testdata.T_END_ANALYSIS - testdata.T_START) - t_test_start = t_train_end + testdata.DT_ANALYSIS - t_test_end = testdata.T_END_ANALYSIS - - config = dict( - schema_version=testdata.SCHEMA_VERSION, - dataset_version="v0.1.0", - output=dict( - variables=dict( - static=["grid_index", "static_feature"], - state=["time", "grid_index", "state_feature"], - forcing=["time", "grid_index", "forcing_feature"], - ), - coord_ranges=dict( - time=dict( - start=testdata.T_START.isoformat(), - end=testdata.T_END_ANALYSIS.isoformat(), - step=isodate.duration_isoformat(testdata.DT_ANALYSIS), - ) - ), - splitting=dict( - dim="time", - splits=dict( - train=dict( - start=t_train_start.isoformat(), - end=t_train_end.isoformat(), - compute_statistics=dict( - ops=["mean", "std"], - dims=["time", "grid_index"], - ), - ), - test=dict( - start=t_test_start.isoformat(), - end=t_test_end.isoformat(), - ), - ), - ), - ), - inputs=dict( - danra_surface=dict( - path=datasets["surface_analysis"], - dims=["analysis_time", "x", "y"], - variables=testdata.DEFAULT_SURFACE_ANALYSIS_VARS, - dim_mapping=dict( - time=dict( - method="rename", - dim="analysis_time", - ), - grid_index=dict( - method="stack", - dims=["x", "y"], - ), - forcing_feature=dict( - method="stack_variables_by_var_name", - name_format="{var_name}", - ), - 
), - target_output_variable="forcing", - ), - danra_static=dict( - path=datasets["static"], - dims=["x", "y"], - variables=testdata.DEFAULT_STATIC_VARS, - dim_mapping=dict( - grid_index=dict( - method="stack", - dims=["x", "y"], - ), - static_feature=dict( - method="stack_variables_by_var_name", - name_format="{var_name}", - ), - ), - target_output_variable="static", - ), - ), - ) - - # write yaml config to file - fn_config = "config.yaml" - fp_config = Path(tmpdir.name) / fn_config - with open(fp_config, "w") as f: - yaml.dump(config, f) - - mdp.create_dataset_zarr(fp_config=fp_config) - - -@pytest.mark.parametrize("source_data_contains_time_range", [True, False]) -@pytest.mark.parametrize( - "time_stepsize", - [testdata.DT_ANALYSIS, testdata.DT_ANALYSIS * 2, testdata.DT_ANALYSIS / 2], -) -def test_time_selection(source_data_contains_time_range, time_stepsize): - """ - Check that time selection works as expected, so that when source - data doesn't contain the time range specified in the config and exception - is raised, and otherwise that the correct timesteps are in the output - """ - - tmpdir = tempfile.TemporaryDirectory() - datasets = testdata.create_data_collection( - data_kinds=["surface_analysis", "static"], fp_root=tmpdir.name - ) - - t_start_dataset = testdata.T_START - t_end_dataset = t_start_dataset + (testdata.NT_ANALYSIS - 1) * testdata.DT_ANALYSIS - - if source_data_contains_time_range: - t_start_config = t_start_dataset - t_end_config = t_end_dataset - else: - t_start_config = t_start_dataset - testdata.DT_ANALYSIS - t_end_config = t_end_dataset + testdata.DT_ANALYSIS - - config = dict( - schema_version=testdata.SCHEMA_VERSION, - dataset_version="v0.1.0", - output=dict( - variables=dict( - static=["grid_index", "feature"], - state=["time", "grid_index", "feature"], - forcing=["time", "grid_index", "feature"], - ), - coord_ranges=dict( - time=dict( - start=t_start_config.isoformat(), - end=t_end_config.isoformat(), - step=isodate.duration_isoformat(time_stepsize), - ) - ), - ), - inputs=dict( - danra_surface=dict( - path=datasets["surface_analysis"], - dims=["analysis_time", "x", "y"], - variables=testdata.DEFAULT_SURFACE_ANALYSIS_VARS, - dim_mapping=dict( - time=dict( - method="rename", - dim="analysis_time", - ), - grid_index=dict( - method="stack", - dims=["x", "y"], - ), - feature=dict( - method="stack_variables_by_var_name", - name_format="{var_name}", - ), - ), - target_output_variable="forcing", - ), - ), - ) - - # write yaml config to file - fn_config = "config.yaml" - fp_config = Path(tmpdir.name) / fn_config - with open(fp_config, "w") as f: - yaml.dump(config, f) - - # run the main function - if source_data_contains_time_range and time_stepsize == testdata.DT_ANALYSIS: - mdp.create_dataset_zarr(fp_config=fp_config) - else: - print( - f"Expecting ValueError for source_data_contains_time_range={source_data_contains_time_range} and time_stepsize={time_stepsize}" - ) - with pytest.raises(ValueError): - mdp.create_dataset_zarr(fp_config=fp_config) - - -@pytest.mark.parametrize("use_common_feature_var_name", [True, False]) -def test_feature_collision(use_common_feature_var_name): - """ - Use to arch target_output_variable variables which have a different number of features and - therefore need a unique feature dimension for each target_output_variable. 
This should raise - a ValueError if the feature coordinates have the same name - """ - tmpdir = tempfile.TemporaryDirectory() - datasets = testdata.create_data_collection( - data_kinds=["surface_analysis", "static"], fp_root=tmpdir.name - ) - - if use_common_feature_var_name: - static_feature_var_name = state_feature_var_name = "feature" - else: - static_feature_var_name = "static_feature" - state_feature_var_name = "state_feature" - - config = dict( - schema_version=testdata.SCHEMA_VERSION, - dataset_version="v0.1.0", - output=dict( - variables=dict( - static=["grid_index", static_feature_var_name], - state=["time", "grid_index", state_feature_var_name], - ), - ), - inputs=dict( - danra_surface=dict( - path=datasets["surface_analysis"], - dims=["analysis_time", "x", "y"], - variables=testdata.DEFAULT_SURFACE_ANALYSIS_VARS, - dim_mapping={ - "time": dict( - method="rename", - dim="analysis_time", - ), - "grid_index": dict( - method="stack", - dims=["x", "y"], - ), - state_feature_var_name: dict( - method="stack_variables_by_var_name", - name_format="{var_name}", - ), - }, - target_output_variable="state", - ), - danra_static=dict( - path=datasets["static"], - dims=["x", "y"], - variables=testdata.DEFAULT_STATIC_VARS, - dim_mapping={ - "grid_index": dict( - dims=["x", "y"], - method="stack", - ), - static_feature_var_name: dict( - method="stack_variables_by_var_name", - name_format="{var_name}", - ), - }, - target_output_variable="static", - ), - ), - ) - - # write yaml config to file - fn_config = "config.yaml" - fp_config = Path(tmpdir.name) / fn_config - with open(fp_config, "w") as f: - yaml.dump(config, f) - - if use_common_feature_var_name: - with pytest.raises(mdp.InvalidConfigException): - mdp.create_dataset_zarr(fp_config=fp_config) - else: - mdp.create_dataset_zarr(fp_config=fp_config) - - -@pytest.mark.slow -def test_danra_example(): - fp_config = Path(__file__).parent.parent / "example.danra.yaml" - with tempfile.TemporaryDirectory(suffix=".zarr") as tmpdir: - mdp.create_dataset_zarr(fp_config=fp_config, fp_zarr=tmpdir) - - -@pytest.mark.parametrize("extra_content", [None, {"foobar": {"baz": 42}}]) -def test_optional_extra_section(extra_content): - """ - Test to ensure that the optional `extra` section of the config can contain - arbitrary information and is not required for the config to be valid - """ - tmpdir = tempfile.TemporaryDirectory() - datasets = testdata.create_data_collection( - data_kinds=["static"], fp_root=tmpdir.name - ) - - config_dict = dict( - schema_version=testdata.SCHEMA_VERSION, - dataset_version="v0.1.0", - output=dict( - variables=dict( - static=["grid_index", "static_feature"], - ), - ), - inputs=dict( - danra_static=dict( - path=datasets["static"], - dims=["x", "y"], - variables=testdata.DEFAULT_STATIC_VARS, - dim_mapping=dict( - grid_index=dict( - method="stack", - dims=["x", "y"], - ), - static_feature=dict( - method="stack_variables_by_var_name", - name_format="{var_name}", - ), - ), - target_output_variable="static", - ), - ), - ) - - if extra_content is not None: - config_dict["extra"] = extra_content - - # write yaml config to file - fn_config = "config.yaml" - fp_config = Path(tmpdir.name) / fn_config - with open(fp_config, "w") as f: - yaml.dump(config_dict, f) - - mdp.create_dataset_zarr(fp_config=fp_config) - - -CONFIG_REVISION_EXAMPLES_PATH = Path(__file__).parent / "old_config_schema_examples" - - -def find_config_revision_examples(): - """ - Build a dictionary of examples for each revision of the config schema - so that we can check that 
the examples are valid and up-to-date - """ - examples = {} - for fp in CONFIG_REVISION_EXAMPLES_PATH.rglob("*.yaml"): - revision = fp.parent.name - examples[revision] = fp - - return examples.values() - - -@pytest.mark.slow -@pytest.mark.parametrize("fp_example", find_config_revision_examples()) -def test_config_revision_examples(fp_example): - """ - Ensure that all the examples (which may be using different config schema - versions)in the `config_examples` directory are valid - """ - tmpdir = tempfile.TemporaryDirectory() - - # copy example to tempdir - fp_config_copy = Path(tmpdir.name) / fp_example.name - shutil.copy(fp_example, fp_config_copy) - - mdp.create_dataset_zarr(fp_config=fp_config_copy) - - -def test_sliced_dataset_can_instantiate_with_right_dimensions(): - """ - The sliced example has a 10x10 km slice, so there should be 4x4 = 16 points herekj. - """ - fp = "tests/resources/sliced_example.danra.yaml" - config = mdp.Config.from_yaml(open(fp)) - ds = mdp.create_dataset(config) - # We pick a 10x10km slice of the data which should result in 16 grid points. - assert ds.state.shape == (2, 49, 16) +import shutil +import tempfile +from pathlib import Path + +import isodate +import pytest +import yaml + +import mllam_data_prep as mdp +import tests.data as testdata + + +def test_gen_data(): + tmpdir = tempfile.TemporaryDirectory() + testdata.create_data_collection( + data_kinds=testdata.ALL_DATA_KINDS, fp_root=tmpdir.name + ) + + +def test_merging_static_and_surface_analysis(): + tmpdir = tempfile.TemporaryDirectory() + datasets = testdata.create_data_collection( + data_kinds=["surface_analysis", "static"], fp_root=tmpdir.name + ) + + # use 80% for training and 20% for testing + t_train_start = testdata.T_START + t_train_end = testdata.T_START + 0.8 * (testdata.T_END_ANALYSIS - testdata.T_START) + t_test_start = t_train_end + testdata.DT_ANALYSIS + t_test_end = testdata.T_END_ANALYSIS + + config = dict( + schema_version=testdata.SCHEMA_VERSION, + dataset_version="v0.1.0", + output=dict( + variables=dict( + static=["grid_index", "static_feature"], + state=["time", "grid_index", "state_feature"], + forcing=["time", "grid_index", "forcing_feature"], + ), + coord_ranges=dict( + time=dict( + start=testdata.T_START.isoformat(), + end=testdata.T_END_ANALYSIS.isoformat(), + step=isodate.duration_isoformat(testdata.DT_ANALYSIS), + ) + ), + splitting=dict( + dim="time", + splits=dict( + train=dict( + start=t_train_start.isoformat(), + end=t_train_end.isoformat(), + compute_statistics=dict( + ops=["mean", "std"], + dims=["time", "grid_index"], + ), + ), + test=dict( + start=t_test_start.isoformat(), + end=t_test_end.isoformat(), + ), + ), + ), + ), + inputs=dict( + danra_surface=dict( + path=datasets["surface_analysis"], + dims=["analysis_time", "x", "y"], + variables=testdata.DEFAULT_SURFACE_ANALYSIS_VARS, + dim_mapping=dict( + time=dict( + method="rename", + dim="analysis_time", + ), + grid_index=dict( + method="stack", + dims=["x", "y"], + ), + forcing_feature=dict( + method="stack_variables_by_var_name", + name_format="{var_name}", + ), + ), + target_output_variable="forcing", + ), + danra_static=dict( + path=datasets["static"], + dims=["x", "y"], + variables=testdata.DEFAULT_STATIC_VARS, + dim_mapping=dict( + grid_index=dict( + method="stack", + dims=["x", "y"], + ), + static_feature=dict( + method="stack_variables_by_var_name", + name_format="{var_name}", + ), + ), + target_output_variable="static", + ), + ), + ) + + # write yaml config to file + fn_config = "config.yaml" + fp_config = 
Path(tmpdir.name) / fn_config + with open(fp_config, "w") as f: + yaml.dump(config, f) + + mdp.create_dataset_zarr(fp_config=fp_config) + + +@pytest.mark.parametrize("source_data_contains_time_range", [True, False]) +@pytest.mark.parametrize( + "time_stepsize", + [testdata.DT_ANALYSIS, testdata.DT_ANALYSIS * 2, testdata.DT_ANALYSIS / 2], +) +def test_time_selection(source_data_contains_time_range, time_stepsize): + """ + Check that time selection works as expected, so that when source + data doesn't contain the time range specified in the config and exception + is raised, and otherwise that the correct timesteps are in the output + """ + + tmpdir = tempfile.TemporaryDirectory() + datasets = testdata.create_data_collection( + data_kinds=["surface_analysis", "static"], fp_root=tmpdir.name + ) + + t_start_dataset = testdata.T_START + t_end_dataset = t_start_dataset + (testdata.NT_ANALYSIS - 1) * testdata.DT_ANALYSIS + + if source_data_contains_time_range: + t_start_config = t_start_dataset + t_end_config = t_end_dataset + else: + t_start_config = t_start_dataset - testdata.DT_ANALYSIS + t_end_config = t_end_dataset + testdata.DT_ANALYSIS + + config = dict( + schema_version=testdata.SCHEMA_VERSION, + dataset_version="v0.1.0", + output=dict( + variables=dict( + static=["grid_index", "feature"], + state=["time", "grid_index", "feature"], + forcing=["time", "grid_index", "feature"], + ), + coord_ranges=dict( + time=dict( + start=t_start_config.isoformat(), + end=t_end_config.isoformat(), + step=isodate.duration_isoformat(time_stepsize), + ) + ), + ), + inputs=dict( + danra_surface=dict( + path=datasets["surface_analysis"], + dims=["analysis_time", "x", "y"], + variables=testdata.DEFAULT_SURFACE_ANALYSIS_VARS, + dim_mapping=dict( + time=dict( + method="rename", + dim="analysis_time", + ), + grid_index=dict( + method="stack", + dims=["x", "y"], + ), + feature=dict( + method="stack_variables_by_var_name", + name_format="{var_name}", + ), + ), + target_output_variable="forcing", + ), + ), + ) + + # write yaml config to file + fn_config = "config.yaml" + fp_config = Path(tmpdir.name) / fn_config + with open(fp_config, "w") as f: + yaml.dump(config, f) + + # run the main function + if source_data_contains_time_range and time_stepsize == testdata.DT_ANALYSIS: + mdp.create_dataset_zarr(fp_config=fp_config) + else: + print( + f"Expecting ValueError for source_data_contains_time_range={source_data_contains_time_range} and time_stepsize={time_stepsize}" + ) + with pytest.raises(ValueError): + mdp.create_dataset_zarr(fp_config=fp_config) + + +@pytest.mark.parametrize("use_common_feature_var_name", [True, False]) +def test_feature_collision(use_common_feature_var_name): + """ + Use to arch target_output_variable variables which have a different number of features and + therefore need a unique feature dimension for each target_output_variable. 
This should raise + a ValueError if the feature coordinates have the same name + """ + tmpdir = tempfile.TemporaryDirectory() + datasets = testdata.create_data_collection( + data_kinds=["surface_analysis", "static"], fp_root=tmpdir.name + ) + + if use_common_feature_var_name: + static_feature_var_name = state_feature_var_name = "feature" + else: + static_feature_var_name = "static_feature" + state_feature_var_name = "state_feature" + + config = dict( + schema_version=testdata.SCHEMA_VERSION, + dataset_version="v0.1.0", + output=dict( + variables=dict( + static=["grid_index", static_feature_var_name], + state=["time", "grid_index", state_feature_var_name], + ), + ), + inputs=dict( + danra_surface=dict( + path=datasets["surface_analysis"], + dims=["analysis_time", "x", "y"], + variables=testdata.DEFAULT_SURFACE_ANALYSIS_VARS, + dim_mapping={ + "time": dict( + method="rename", + dim="analysis_time", + ), + "grid_index": dict( + method="stack", + dims=["x", "y"], + ), + state_feature_var_name: dict( + method="stack_variables_by_var_name", + name_format="{var_name}", + ), + }, + target_output_variable="state", + ), + danra_static=dict( + path=datasets["static"], + dims=["x", "y"], + variables=testdata.DEFAULT_STATIC_VARS, + dim_mapping={ + "grid_index": dict( + dims=["x", "y"], + method="stack", + ), + static_feature_var_name: dict( + method="stack_variables_by_var_name", + name_format="{var_name}", + ), + }, + target_output_variable="static", + ), + ), + ) + + # write yaml config to file + fn_config = "config.yaml" + fp_config = Path(tmpdir.name) / fn_config + with open(fp_config, "w") as f: + yaml.dump(config, f) + + if use_common_feature_var_name: + with pytest.raises(mdp.InvalidConfigException): + mdp.create_dataset_zarr(fp_config=fp_config) + else: + mdp.create_dataset_zarr(fp_config=fp_config) + + +@pytest.mark.slow +def test_danra_example(): + fp_config = Path(__file__).parent.parent / "example.danra.yaml" + with tempfile.TemporaryDirectory(suffix=".zarr") as tmpdir: + mdp.create_dataset_zarr(fp_config=fp_config, fp_zarr=tmpdir) + + +@pytest.mark.parametrize("extra_content", [None, {"foobar": {"baz": 42}}]) +def test_optional_extra_section(extra_content): + """ + Test to ensure that the optional `extra` section of the config can contain + arbitrary information and is not required for the config to be valid + """ + tmpdir = tempfile.TemporaryDirectory() + datasets = testdata.create_data_collection( + data_kinds=["static"], fp_root=tmpdir.name + ) + + config_dict = dict( + schema_version=testdata.SCHEMA_VERSION, + dataset_version="v0.1.0", + output=dict( + variables=dict( + static=["grid_index", "static_feature"], + ), + ), + inputs=dict( + danra_static=dict( + path=datasets["static"], + dims=["x", "y"], + variables=testdata.DEFAULT_STATIC_VARS, + dim_mapping=dict( + grid_index=dict( + method="stack", + dims=["x", "y"], + ), + static_feature=dict( + method="stack_variables_by_var_name", + name_format="{var_name}", + ), + ), + target_output_variable="static", + ), + ), + ) + + if extra_content is not None: + config_dict["extra"] = extra_content + + # write yaml config to file + fn_config = "config.yaml" + fp_config = Path(tmpdir.name) / fn_config + with open(fp_config, "w") as f: + yaml.dump(config_dict, f) + + mdp.create_dataset_zarr(fp_config=fp_config) + + +CONFIG_REVISION_EXAMPLES_PATH = Path(__file__).parent / "old_config_schema_examples" + + +def find_config_revision_examples(): + """ + Build a dictionary of examples for each revision of the config schema + so that we can check that 
the examples are valid and up-to-date + """ + examples = {} + for fp in CONFIG_REVISION_EXAMPLES_PATH.rglob("*.yaml"): + revision = fp.parent.name + examples[revision] = fp + + return examples.values() + + +@pytest.mark.slow +@pytest.mark.parametrize("fp_example", find_config_revision_examples()) +def test_config_revision_examples(fp_example): + """ + Ensure that all the examples (which may be using different config schema + versions)in the `config_examples` directory are valid + """ + tmpdir = tempfile.TemporaryDirectory() + + # copy example to tempdir + fp_config_copy = Path(tmpdir.name) / fp_example.name + shutil.copy(fp_example, fp_config_copy) + + mdp.create_dataset_zarr(fp_config=fp_config_copy) + + +def test_sliced_dataset_can_instantiate_with_right_dimensions(): + """ + The sliced example has a 10x10 km slice, so there should be 4x4 = 16 points herekj. + """ + fp = "tests/resources/sliced_example.danra.yaml" + config = mdp.Config.from_yaml(open(fp)) + ds = mdp.create_dataset(config) + # We pick a 10x10km slice of the data which should result in 16 grid points. + assert ds.state.shape == (2, 49, 16) diff --git a/tests/test_selection.py b/tests/test_selection.py index 044b66e..c84b499 100644 --- a/tests/test_selection.py +++ b/tests/test_selection.py @@ -1,51 +1,51 @@ -import pytest -import xarray as xr - -import mllam_data_prep as mdp - - -@pytest.fixture -def ds(): - """ - Load the height_levels.zarr dataset - """ - fp = "https://mllam-test-data.s3.eu-north-1.amazonaws.com/height_levels.zarr" - return xr.open_zarr(fp) - - -def test_range_slice_within_range(ds): - """ - test if the slice is within the specified range - """ - x_start = -50000 - x_end = -40000 - y_start = -600000 - y_end = -590000 - coord_ranges = { - "x": mdp.config.Range(start=x_start, end=x_end), - "y": mdp.config.Range(start=y_start, end=y_end), - } - - ds = mdp.ops.selection.select_by_kwargs(ds, **coord_ranges) - assert ds.x.min() >= x_start - assert ds.x.max() <= x_end - assert ds.y.min() >= y_start - assert ds.y.max() <= y_end - - ds - - -@pytest.mark.parametrize("x_start, x_end", ([-50000, -51000], [0, 500000])) -def test_error_on_empty_range(ds, x_start, x_end): - """ - Test if an error is thrown if the chosen range is empty - """ - y_start = -600000 - y_end = -590000 - coord_ranges = { - "x": mdp.config.Range(start=x_start, end=x_end), - "y": mdp.config.Range(start=y_start, end=y_end), - } - - with pytest.raises(AssertionError): - ds = mdp.ops.selection.select_by_kwargs(ds, **coord_ranges) +import pytest +import xarray as xr + +import mllam_data_prep as mdp + + +@pytest.fixture +def ds(): + """ + Load the height_levels.zarr dataset + """ + fp = "https://mllam-test-data.s3.eu-north-1.amazonaws.com/height_levels.zarr" + return xr.open_zarr(fp) + + +def test_range_slice_within_range(ds): + """ + test if the slice is within the specified range + """ + x_start = -50000 + x_end = -40000 + y_start = -600000 + y_end = -590000 + coord_ranges = { + "x": mdp.config.Range(start=x_start, end=x_end), + "y": mdp.config.Range(start=y_start, end=y_end), + } + + ds = mdp.ops.selection.select_by_kwargs(ds, **coord_ranges) + assert ds.x.min() >= x_start + assert ds.x.max() <= x_end + assert ds.y.min() >= y_start + assert ds.y.max() <= y_end + + ds + + +@pytest.mark.parametrize("x_start, x_end", ([-50000, -51000], [0, 500000])) +def test_error_on_empty_range(ds, x_start, x_end): + """ + Test if an error is thrown if the chosen range is empty + """ + y_start = -600000 + y_end = -590000 + coord_ranges = { + "x": 
mdp.config.Range(start=x_start, end=x_end), + "y": mdp.config.Range(start=y_start, end=y_end), + } + + with pytest.raises(AssertionError): + ds = mdp.ops.selection.select_by_kwargs(ds, **coord_ranges) diff --git a/tests/test_stacking.py b/tests/test_stacking.py index d8fa859..d291889 100644 --- a/tests/test_stacking.py +++ b/tests/test_stacking.py @@ -1,87 +1,87 @@ -import numpy as np -import xarray as xr - -from mllam_data_prep.config import DimMapping -from mllam_data_prep.ops import mapping as mdp_mapping -from mllam_data_prep.ops import stacking as mdp_stacking - - -def test_stack_variables_along_coord(): - """ - Test the stacking of variables along a coordinate - - i.e. from variables [var1, var2] with levels [1, 2, 3] - to a single variable with levels [var1_l1, var1_l2, var1_l3, var2_l1, var2_l2, var2_l3] - """ - name_format = "{var_name}_l{level}" - nx, ny, nz = 10, 6, 3 - dims = ["x", "y", "level"] - ds = xr.Dataset( - { - "var1": xr.DataArray(np.random.random((nx, ny, nz)), dims=dims), - "var2": xr.DataArray(np.random.random((nx, ny, nz)), dims=dims), - }, - coords={"level": np.arange(nz)}, - ) - - combined_dim_name = "feature" - da_stacked = mdp_stacking.stack_variables_by_coord_values( - ds=ds, - coord="level", - name_format=name_format, - combined_dim_name=combined_dim_name, - ) - expected_coord_values = [ - name_format.format(var_name=v, level=level) - for v in ["var1", "var2"] - for level in range(nz) - ] - - assert da_stacked.dims == ("x", "y", "feature") - assert da_stacked.coords[combined_dim_name].values.tolist() == expected_coord_values - for v in expected_coord_values: - assert da_stacked.sel({combined_dim_name: v}).shape == (nx, ny) - - # check that the values are the same - for v in ["var1", "var2"]: - for level in [1, 2]: - expected_values = ds[v].sel(level=level).values - actual_values = da_stacked.sel( - {combined_dim_name: name_format.format(var_name=v, level=level)} - ).values - assert np.all(expected_values == actual_values) - - -def test_stack_xy_coords(): - """ - Test stacking two (or more) coordinates to create a single coordinate, for - example (x, y) grid coordinates to a single grid_index coordinate - """ - nx, ny, nz = 10, 6, 3 - dims = ["x", "y", "level"] - - ds = xr.Dataset( - { - "var1": xr.DataArray(np.random.random((nx, ny, nz)), dims=dims), - "var2": xr.DataArray(np.random.random((nx, ny, nz)), dims=dims), - }, - coords={"level": np.arange(nz)}, - ) - dim_mapping = dict( - grid_index=DimMapping( - method="stack", - dims=["x", "y"], - ), - feature=DimMapping( - method="stack_variables_by_var_name", - dims=["level"], - name_format="{level}_{var_name}", - ), - ) - - da_stacked = mdp_mapping.map_dims_and_variables( - ds=ds, dim_mapping=dim_mapping, expected_input_var_dims=dims - ) - - assert set(da_stacked.dims) == set(("grid_index", "feature")) - assert da_stacked.coords["grid_index"].shape == (nx * ny,) +import numpy as np +import xarray as xr + +from mllam_data_prep.config import DimMapping +from mllam_data_prep.ops import mapping as mdp_mapping +from mllam_data_prep.ops import stacking as mdp_stacking + + +def test_stack_variables_along_coord(): + """ + Test the stacking of variables along a coordinate + + i.e. 
from variables [var1, var2] with levels [1, 2, 3] + to a single variable with levels [var1_l1, var1_l2, var1_l3, var2_l1, var2_l2, var2_l3] + """ + name_format = "{var_name}_l{level}" + nx, ny, nz = 10, 6, 3 + dims = ["x", "y", "level"] + ds = xr.Dataset( + { + "var1": xr.DataArray(np.random.random((nx, ny, nz)), dims=dims), + "var2": xr.DataArray(np.random.random((nx, ny, nz)), dims=dims), + }, + coords={"level": np.arange(nz)}, + ) + + combined_dim_name = "feature" + da_stacked = mdp_stacking.stack_variables_by_coord_values( + ds=ds, + coord="level", + name_format=name_format, + combined_dim_name=combined_dim_name, + ) + expected_coord_values = [ + name_format.format(var_name=v, level=level) + for v in ["var1", "var2"] + for level in range(nz) + ] + + assert da_stacked.dims == ("x", "y", "feature") + assert da_stacked.coords[combined_dim_name].values.tolist() == expected_coord_values + for v in expected_coord_values: + assert da_stacked.sel({combined_dim_name: v}).shape == (nx, ny) + + # check that the values are the same + for v in ["var1", "var2"]: + for level in [1, 2]: + expected_values = ds[v].sel(level=level).values + actual_values = da_stacked.sel( + {combined_dim_name: name_format.format(var_name=v, level=level)} + ).values + assert np.all(expected_values == actual_values) + + +def test_stack_xy_coords(): + """ + Test stacking two (or more) coordinates to create a single coordinate, for + example (x, y) grid coordinates to a single grid_index coordinate + """ + nx, ny, nz = 10, 6, 3 + dims = ["x", "y", "level"] + + ds = xr.Dataset( + { + "var1": xr.DataArray(np.random.random((nx, ny, nz)), dims=dims), + "var2": xr.DataArray(np.random.random((nx, ny, nz)), dims=dims), + }, + coords={"level": np.arange(nz)}, + ) + dim_mapping = dict( + grid_index=DimMapping( + method="stack", + dims=["x", "y"], + ), + feature=DimMapping( + method="stack_variables_by_var_name", + dims=["level"], + name_format="{level}_{var_name}", + ), + ) + + da_stacked = mdp_mapping.map_dims_and_variables( + ds=ds, dim_mapping=dim_mapping, expected_input_var_dims=dims + ) + + assert set(da_stacked.dims) == set(("grid_index", "feature")) + assert da_stacked.coords["grid_index"].shape == (nx * ny,)