diff --git a/.env.example b/.env.example deleted file mode 100644 index b6318c0..0000000 --- a/.env.example +++ /dev/null @@ -1,16 +0,0 @@ -# API Settings -API_TITLE="Zonos API" -API_DESCRIPTION="API for Zonos Text-to-Speech Model" -API_VERSION="0.1.0" - -# Server Settings -PORT=8000 -WORKERS=4 - -# Model Settings -MODEL_NAME="Zyphra/Zonos-v0.1-hybrid" -MODEL_CACHE_DIR="./model_cache" - -# Audio Settings -SAMPLE_RATE=44100 -MAX_TEXT_LENGTH=1000 \ No newline at end of file diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2db8971..c187663 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,49 +1,106 @@ -name: CI/CD +name: Build and Push Docker Image on: push: - branches: [ '*' ] + branches: + - '**' + tags: ["v*.*.*"] pull_request: - branches: [ '*' ] + branches: + - '**' + +env: + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} + CACHE_FROM: type=registry,ref=${{ github.repository }}:buildcache + CACHE_TO: type=registry,ref=${{ github.repository }}:buildcache,mode=max jobs: - docker: + free-disk-space: runs-on: ubuntu-latest - if: github.event_name == 'push' + steps: + - name: Free Disk Space (Ubuntu) + uses: jlumbroso/free-disk-space@main + with: + tool-cache: false + android: true + dotnet: true + haskell: true + large-packages: true + docker-images: true + swap-storage: true + + build-and-push: + runs-on: ubuntu-latest + needs: free-disk-space # Ensure this job runs after freeing disk space permissions: contents: read packages: write - + id-token: write + steps: - - uses: actions/checkout@v3 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - - name: Log in to GitHub Container Registry - uses: docker/login-action@v2 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Extract metadata (tags, labels) for Docker - id: meta - uses: docker/metadata-action@v4 - with: - images: ghcr.io/${{ github.repository }} - tags: | - type=semver,pattern={{version}} - type=sha,format=long - type=ref,event=branch - type=raw,value=latest,enable={{is_default_branch}} - - - name: Build and push - uses: docker/build-push-action@v4 - with: - context: . 
- push: true - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} - cache-from: type=gha - cache-to: type=gha,mode=max \ No newline at end of file + - name: Checkout repository + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + with: + version: latest + driver-opts: | + image=moby/buildkit:latest + + - name: Log into registry ${{ env.REGISTRY }} + if: github.event_name != 'pull_request' + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + # Cache Python dependencies + - name: Cache pip packages + uses: actions/cache@v3 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt', '**/pyproject.toml') }} + restore-keys: | + ${{ runner.os }}-pip- + + # Set image name based on branch + - name: Set image name and tags + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + tags: | + # For main branch + type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }} + type=sha,format=short,prefix=,enable=${{ github.ref == 'refs/heads/main' }} + # For other branches + type=raw,value=dev-latest,enable=${{ github.ref != 'refs/heads/main' }} + type=sha,format=short,prefix=dev-,enable=${{ github.ref != 'refs/heads/main' }} + # For tags + type=ref,event=tag + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + type=semver,pattern={{major}} + + - name: Build and push Docker image + id: build-and-push + uses: docker/build-push-action@v5 + with: + context: . + push: ${{ github.event_name != 'pull_request' }} + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + cache-from: | + type=gha + type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:buildcache + cache-to: | + type=gha,mode=max + type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:buildcache,mode=max + platforms: linux/amd64 + build-args: | + BUILDKIT_INLINE_CACHE=1 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml deleted file mode 100644 index e8a2d8e..0000000 --- a/.pre-commit-config.yaml +++ /dev/null @@ -1,24 +0,0 @@ -repos: -- repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 - hooks: - - id: trailing-whitespace - - id: end-of-file-fixer - - id: check-yaml - - id: check-added-large-files - - id: debug-statements - - id: requirements-txt-fixer - -- repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.6 - hooks: - - id: ruff - args: [--fix] - - id: ruff-format - -- repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.7.1 - hooks: - - id: mypy - additional_dependencies: [types-all] - exclude: ^tests/ \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md deleted file mode 100644 index 2914316..0000000 --- a/CHANGELOG.md +++ /dev/null @@ -1,44 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), -and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
- -## [1.1.0] - 2024-02-10 - -### Added -- Git submodule support for Zonos repository -- Automatic submodule updates in Docker builds -- Documentation for submodule management - -### Changed -- Updated Docker build process to use submodules -- Improved installation instructions for submodule handling -- Reorganized dependency installation process - -### Security -- Improved version control with git submodules -- Better dependency tracking and updates - -## [1.0.0] - 2025-02-11 - Initial Release (Unstable) - -### Added -- Initial development setup -- Basic FastAPI structure -- Docker configuration -- CI/CD pipeline -- Initial release of Zonos API -- Support for both Transformer and Hybrid model variants -- FastAPI implementation with comprehensive API endpoints -- Docker and docker-compose deployment with NVIDIA GPU support -- Prometheus and Grafana monitoring integration -- Voice cloning capabilities -- Audio continuation support -- Fine-grained emotion control -- Health checks and logging -- CORS support -- Swagger documentation -- Production-ready configurations -- GPU optimizations with flash-attention and mamba-ssm -- Comprehensive documentation diff --git a/Dockerfile b/Dockerfile index 65df327..269ff76 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,60 +1,68 @@ -FROM pytorch/pytorch:2.6.0-cuda12.4-cudnn9-devel +FROM pytorch/pytorch:2.1.0-cuda12.1-cudnn8-devel -WORKDIR /app +# Set Zonos working directory +WORKDIR /app/zonos -# Install system dependencies -RUN apt-get update && apt-get install -y \ - build-essential \ +# System packages +RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ + --mount=type=cache,target=/var/lib/apt,sharing=locked \ + apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ + ffmpeg \ libsndfile1 \ - espeak-ng \ - curl \ git \ + espeak-ng \ + && apt-get clean \ && rm -rf /var/lib/apt/lists/* -# Install uv -RUN pip install -U uv - -# Copy application code and submodules -COPY . . - -# Initialize and update submodules -RUN git submodule update --init --recursive --remote - -# Install dependencies with optimizations -RUN uv pip install --system --no-build-isolation -e .[compile] && \ - # Install flash-attention and other optimizations - pip install --no-build-isolation \ - flash-attn \ - mamba-ssm \ - causal-conv1d - -# Create a non-root user and setup directories -RUN useradd -m -u 1000 appuser && \ - mkdir -p /home/appuser/.cache/huggingface && \ - chown -R appuser:appuser /home/appuser/.cache && \ - mkdir -p /app/uploads && \ - chown -R appuser:appuser /app - -USER appuser - -# Set environment variables -ENV PORT=8000 -ENV WORKERS=4 -ENV MODEL_TYPE="Transformer" -ENV MODEL_CACHE_DIR="/home/appuser/.cache/huggingface" -ENV PYTHONUNBUFFERED=1 -# CUDA optimization settings -ENV CUDA_LAUNCH_BLOCKING=0 +# Install uv package manager +RUN --mount=type=cache,target=/root/.cache/pip \ + pip3 install --no-cache-dir uv + +# Clone Zonos directly into working directory +RUN git clone --depth 1 https://github.com/Zyphra/Zonos.git . 
\ + && git submodule update --init --recursive + +# Copy dependency specs and application code +COPY requirements.txt pyproject.toml ./ +COPY app/ app/ + +# Install basic Python dependencies first +RUN --mount=type=cache,target=/root/.cache/pip \ + uv pip install --system -r requirements.txt -e .[compile] + +# Install Flash Attention with specific compiler flags ENV TORCH_CUDA_ARCH_LIST="7.0;7.5;8.0;8.6+PTX" -ENV CUDA_HOME="/usr/local/cuda" -ENV MAX_JOBS=4 +ENV FLASH_ATTENTION_FORCE_BUILD=1 +RUN --mount=type=cache,target=/root/.cache/pip \ + uv pip install --system --no-build-isolation \ + git+https://github.com/Dao-AILab/flash-attention.git@v2.5.6 + +# Install remaining ML dependencies +RUN --mount=type=cache,target=/root/.cache/pip \ + uv pip install --system --no-build-isolation \ + mamba-ssm==2.2.4 \ + causal-conv1d==1.5.0.post8 + +RUN --mount=type=cache,target=/root/.cache/pip \ + uv pip install --system \ + kanjize>=1.5.0 \ + inflect>=7.5.0 \ + && rm -rf /root/.cache/pip/* + +RUN --mount=type=cache,target=/root/.cache/pip \ + uv pip install --system \ + phonemizer>=3.3.0 \ + sudachidict-full>=20241021 \ + sudachipy>=0.6.10 \ + && rm -rf /root/.cache/pip/* -# Expose the port -EXPOSE $PORT +# Copy application code last +COPY app/ app/ -# Add healthcheck -HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \ - CMD curl -f http://localhost:$PORT/health || exit 1 +# Environment variables +ENV PYTHONPATH=/app:/app/zonos \ + USE_GPU=true \ + PYTHONUNBUFFERED=1 -# Run the application with Gunicorn -CMD ["sh", "-c", "gunicorn main:app --workers $WORKERS --worker-class uvicorn.workers.UvicornWorker --bind 0.0.0.0:$PORT --timeout 300 --worker-tmp-dir /dev/shm"] \ No newline at end of file +# Run the application +CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 7a4a3ea..0000000 --- a/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. \ No newline at end of file diff --git a/README.md b/README.md index e72b724..890cd13 100644 --- a/README.md +++ b/README.md @@ -1,172 +1,129 @@ # Zonos API -> ⚠️ **WARNING: UNSTABLE API - INITIAL RELEASE** ⚠️ -> -> This API is currently in its initial release phase (v1.0.0) and is considered unstable. -> Breaking changes may occur without notice. Use in production at your own risk. -> For development and testing purposes only. +A FastAPI-based REST API for the Zonos text-to-speech model. This API provides endpoints for generating high-quality speech from text using state-of-the-art machine learning models. -A production-grade FastAPI implementation of the Zonos Text-to-Speech model. - -## Credits - -This API is built on top of the [Zonos-v0.1-hybrid](https://huggingface.co/Zyphra/Zonos-v0.1-hybrid) and [Zonos-v0.1-transformer](https://huggingface.co/Zyphra/Zonos-v0.1-transformer) models created by [Zyphra](https://huggingface.co/Zyphra). 
The models feature: - -- Zero-shot TTS with voice cloning capabilities -- Support for multiple languages (100+ languages via eSpeak-ng) -- High-quality 44kHz audio output -- Fine-grained control over speaking rate, pitch, audio quality, and emotions -- Real-time performance (~2x real-time on RTX 4090) +## Features -For more information, visit the model cards on Hugging Face: [Hybrid](https://huggingface.co/Zyphra/Zonos-v0.1-hybrid) | [Transformer](https://huggingface.co/Zyphra/Zonos-v0.1-transformer). +- Text-to-speech generation using Zonos models +- Support for multiple languages +- Voice cloning capabilities +- Emotion control +- Various audio quality parameters +- GPU acceleration support -## Features +## Prerequisites -- FastAPI-based REST API for Zonos Text-to-Speech model -- Support for both Transformer and Hybrid model variants -- Docker and docker-compose support with NVIDIA GPU acceleration -- Production-ready with Gunicorn workers and optimizations -- Prometheus and Grafana monitoring integration -- Health checks and comprehensive logging -- CORS support and Swagger documentation -- Voice cloning and audio continuation support -- Fine-grained emotion and audio quality control +- Docker and Docker Compose +- NVIDIA GPU with CUDA support +- NVIDIA Container Toolkit installed -## Quick Start +## Installation -### Using Docker Compose (Recommended) +1. Clone the repository: ```bash -# Clone the repository with submodules -git clone --recursive https://github.com/manascb1344/zonos-api +git clone https://github.com/yourusername/zonos-api.git cd zonos-api - -# Or if you already cloned without --recursive: -git submodule update --init --recursive - -# Start the services (API, Prometheus, Grafana) -docker-compose up -d - -# The services will be available at: -# - API: http://localhost:8000 -# - Swagger docs: http://localhost:8000/docs -# - Prometheus: http://localhost:9090 -# - Grafana: http://localhost:3000 (admin/admin) - -# To update submodules to latest version: -git submodule update --remote -docker-compose up -d --build ``` -### Manual Installation - -1. Clone the repository with submodules: +2. Start the API using Docker Compose: ```bash -git clone --recursive https://github.com/manascb1344/zonos-api -cd zonos-api +docker-compose up --build ``` -2. Install system dependencies: +The API will be available at `http://localhost:8000` + +## Running with Docker + +1. Build the container: ```bash -apt-get update && apt-get install -y \ - build-essential \ - libsndfile1 \ - espeak-ng \ - curl +docker build -t zonos-api . ``` -3. Install Python dependencies: +2. Run the container: ```bash -# Install dependencies -pip install -r requirements.txt -pip install --no-build-isolation -e .[compile] # For GPU optimizations - -# Install Zonos from submodule -cd Zonos -pip install -e . -cd .. +docker run -d \ + --name zonos-api \ + --gpus all \ + -p 8000:8000 \ + -e CUDA_VISIBLE_DEVICES=0 \ + zonos-api ``` -4. 
Run the application: -```bash -# Development -uvicorn app.main:app --reload +## Environment Variables + +- `CUDA_VISIBLE_DEVICES`: Specify which GPU(s) to use (default: 0) +- `USE_GPU`: Enable/disable GPU usage (default: true) + +## Requirements + +- Docker with NVIDIA Container Toolkit installed +- NVIDIA GPU with CUDA support +- At least 8GB of GPU memory recommended -# Production -gunicorn app.main:app --workers 4 --worker-class uvicorn.workers.UvicornWorker --bind 0.0.0.0:8000 +## Verifying the Installation + +Check if the API is running: +```bash +curl http://localhost:8000/health ``` ## API Endpoints ### GET / -Root endpoint that returns API status and available models +Root endpoint that returns basic API information + +### GET /models +Returns a list of available TTS models -### GET /health -Health check endpoint that returns current model status +### GET /languages +Returns a list of supported languages -### POST /tts -Text-to-speech conversion endpoint +### GET /model/{model_name}/conditioners +Returns available conditioners for a specific model + +### POST /synthesize +Generate speech from text. Example request: -Request body: ```json { - "text": "Text to convert to speech", - "model_type": "Transformer", // or "Hybrid" - "language": "en-us", - "emotion1": 0.6, // Happiness - "emotion2": 0.05, // Sadness - "emotion3": 0.05, // Disgust - "emotion4": 0.05, // Fear - "emotion5": 0.05, // Surprise - "emotion6": 0.05, // Anger - "emotion7": 0.5, // Other - "emotion8": 0.6, // Neutral - "speaker_audio": null, // Optional: Path to reference voice - "prefix_audio": null, // Optional: Path to continue from - "cfg_scale": 2.0, - "min_p": 0.1, - "seed": 420 + "model_choice": "Zyphra/Zonos-v0.1-transformer", + "text": "Hello, this is a test.", + "language": "en-us", + "emotion_values": [1.0, 0.05, 0.05, 0.05, 0.05, 0.05, 0.1, 0.2], + "vq_score": 0.78, + "cfg_scale": 2.0, + "min_p": 0.15 } ``` -Response: Audio file (WAV format, 44.1kHz) - ## Environment Variables -- `PORT`: Server port (default: 8000) -- `WORKERS`: Number of Gunicorn workers (default: 4) -- `MODEL_TYPE`: Model variant to use (default: "Transformer") -- `MODEL_CACHE_DIR`: Directory for model caching -- Various CUDA optimization settings (see Dockerfile) +- `USE_GPU`: Set to "true" to enable GPU acceleration (default: true) +- `PYTHONPATH`: Set to the application root directory -## Production Deployment +## GPU Support -The application is containerized and optimized for production use. Features include: - -- NVIDIA GPU support with CUDA optimizations -- Resource limits and monitoring -- Automatic model caching -- Health checks and automatic restarts -- Prometheus metrics and Grafana dashboards -- Proper logging with rotation -- Shared memory optimization -- Security considerations (non-root user, proper permissions) +The API uses NVIDIA GPU acceleration by default. Make sure you have: +1. NVIDIA GPU with CUDA support +2. NVIDIA drivers installed +3. NVIDIA Container Toolkit installed and configured ## Development -### Prerequisites -- Python 3.9+ -- NVIDIA GPU with CUDA support (recommended) -- Docker and docker-compose (for containerized deployment) +To run the API in development mode: -### Local Development ```bash -# Start in development mode -uvicorn app.main:app --reload - -# Or with docker-compose docker-compose up --build ``` +The API will reload automatically when code changes are detected. 
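+
+To exercise the `/synthesize` endpoint described above from a running instance, a minimal request looks roughly like this (assuming the API is reachable on `localhost:8000`; every field other than `model_choice` and `text` falls back to its default from `app/models.py`):
+
+```bash
+curl -X POST http://localhost:8000/synthesize \
+  -H "Content-Type: application/json" \
+  -d '{"model_choice": "Zyphra/Zonos-v0.1-transformer", "text": "Hello from Zonos.", "language": "en-us"}' \
+  --output speech.wav
+```
+
+The response is streamed as WAV audio, and the seed actually used for generation is returned in the `x-seed` response header.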
+ +## API Documentation + +Once the API is running, you can access: +- Swagger UI documentation at `http://localhost:8000/docs` +- ReDoc documentation at `http://localhost:8000/redoc` + ## License This project is licensed under the Apache License 2.0 - see the LICENSE file for details. diff --git a/app/__init__.py b/app/__init__.py new file mode 100644 index 0000000..774141d --- /dev/null +++ b/app/__init__.py @@ -0,0 +1,5 @@ +""" +Zonos API package. +""" + +__version__ = "0.1.0" \ No newline at end of file diff --git a/app/config.py b/app/config.py index 2dd98f2..117bae2 100644 --- a/app/config.py +++ b/app/config.py @@ -1,34 +1,16 @@ -import torch -from pydantic_settings import BaseSettings -from typing import Optional +import os -class Settings(BaseSettings): - # API Settings - API_TITLE: str = "Zonos API" - API_DESCRIPTION: str = "API for Zonos Text-to-Speech Model" - API_VERSION: str = "0.1.0" - - # Server Settings - PORT: int = 8000 - WORKERS: int = 4 - - # Model Settings - MODEL_TYPE: str = "Transformer" # "Transformer" or "Hybrid" - MODEL_NAMES: dict = { - "Transformer": "Zyphra/Zonos-v0.1-transformer", - "Hybrid": "Zyphra/Zonos-v0.1-hybrid" - } - DEVICE: str = "cuda" if torch.cuda.is_available() else "cpu" - - # Audio Settings - SAMPLE_RATE: int = 44100 - MAX_TEXT_LENGTH: int = 1000 - MAX_NEW_TOKENS: int = 86 * 30 - - # Cache Settings - MODEL_CACHE_DIR: Optional[str] = None - - class Config: - env_file = ".env" +# Configuration +IS_HF_SPACES = os.getenv("SYSTEM", "") == "spaces" +MAX_CHARACTERS = 2000 -settings = Settings() \ No newline at end of file +HEADER_MARKDOWN = """ +# Zonos v0.1 +State of the art text-to-speech model [[model]](https://huggingface.co/collections/Zyphra/zonos-v01-67ac661c85e1898670823b4f). [[blog]](https://www.zyphra.com/post/beta-release-of-zonos-v0-1), [[Zyphra Audio (hosted service)]](https://maia.zyphra.com/sign-in?redirect_url=https%3A%2F%2Fmaia.zyphra.com%2Faudio) +## Unleashed +Use this space to generate long-form speech up to around ~2 minutes in length. To generate an unlimited length, clone this space and run it locally. +### Tips +- When providing prefix audio, include the text of the prefix audio in your speech text to ensure a smooth transition. +- The appropriate range of Speaking Rate and Pitch STD are highly dependent on the speaker audio. Start with the defaults and adjust as needed. +- Emotion sliders do not completely function intuitively, and require some experimentation to get the desired effect. 
+""".strip() \ No newline at end of file diff --git a/app/main.py b/app/main.py index ae06053..759a830 100644 --- a/app/main.py +++ b/app/main.py @@ -1,140 +1,147 @@ from fastapi import FastAPI, HTTPException -from fastapi.middleware.cors import CORSMiddleware from fastapi.responses import StreamingResponse -from pydantic import BaseModel, Field -import soundfile as sf import io -from typing import Optional +import wave +import numpy as np +from typing import List, Optional import logging +import sys -from .config import settings -from .model import model +from .models import TTSRequest +from .services.tts import TTSService +from .config import MAX_CHARACTERS # Configure logging logging.basicConfig( level=logging.INFO, - format='%(asctime)s - %(name)s - %(levelname)s - %(message)s' + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', + handlers=[ + logging.StreamHandler(sys.stdout) + ] ) -logger = logging.getLogger(__name__) +logger = logging.getLogger("zonos-tts-api") +# Initialize FastAPI app app = FastAPI( - title=settings.API_TITLE, - description=settings.API_DESCRIPTION, - version=settings.API_VERSION, + title="Zonos Text-to-Speech API", + description="API for generating high-quality speech using Zonos models", + version="0.1.0" ) -# Add CORS middleware -app.add_middleware( - CORSMiddleware, - allow_origins=["*"], - allow_credentials=True, - allow_methods=["*"], - allow_headers=["*"], -) - -class TextToSpeechRequest(BaseModel): - text: str = Field(..., min_length=1, max_length=settings.MAX_TEXT_LENGTH) - model_type: str = Field(default="Transformer", pattern="^(Transformer|Hybrid)$") - language: str = Field(default="en-us", pattern=r"^[a-z]{2}-[A-Z]{2}$") - speaker_audio: Optional[str] = None - prefix_audio: Optional[str] = None - skip_speaker: bool = False - skip_emotion: bool = False - emotion1: float = Field(0.6, ge=0.0, le=1.0, description="Happiness") - emotion2: float = Field(0.05, ge=0.0, le=1.0, description="Sadness") - emotion3: float = Field(0.05, ge=0.0, le=1.0, description="Disgust") - emotion4: float = Field(0.05, ge=0.0, le=1.0, description="Fear") - emotion5: float = Field(0.05, ge=0.0, le=1.0, description="Surprise") - emotion6: float = Field(0.05, ge=0.0, le=1.0, description="Anger") - emotion7: float = Field(0.5, ge=0.0, le=1.0, description="Other") - emotion8: float = Field(0.6, ge=0.0, le=1.0, description="Neutral") - skip_vqscore_8: bool = True - vq_single: float = Field(0.78, ge=0.5, le=0.8) - fmax: int = Field(22050, ge=0, le=24000) - skip_fmax: bool = False - pitch_std: float = Field(20.0, ge=0.0, le=400.0) - skip_pitch_std: bool = False - speaking_rate: float = Field(15.0, ge=0.0, le=40.0) - skip_speaking_rate: bool = False - dnsmos_ovrl: float = Field(4.0, ge=1.0, le=5.0) - skip_dnsmos_ovrl: bool = True - speaker_noised: bool = False - skip_speaker_noised: bool = False - cfg_scale: float = Field(2.0, ge=1.0, le=5.0) - min_p: float = Field(0.1, ge=0.0, le=1.0) - seed: int = Field(420, ge=0) +# TTS service instance +_tts_service: Optional[TTSService] = None - class Config: - json_schema_extra = { - "example": { - "text": "Hello, this is a test of the Zonos text-to-speech system.", - "model_type": "Transformer", - "language": "en-us", - "cfg_scale": 2.0, - "min_p": 0.1, - "seed": 420 - } - } - -@app.on_event("startup") -async def startup_event(): - """Initialize the model on startup.""" - try: - await model.load() - except Exception as e: - logger.error(f"Failed to load model: {str(e)}") - raise RuntimeError("Failed to initialize the application") 
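+# Note: TTSService loads the Zonos model weights in its constructor, so the
+# service is created lazily on the first request rather than at import time.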
+def get_tts_service() -> TTSService: + """Get or initialize TTS service singleton.""" + global _tts_service + if _tts_service is None: + logger.info("Initializing TTS service") + _tts_service = TTSService() + return _tts_service @app.get("/") async def root(): - """Root endpoint returning API information.""" - return { - "message": "Welcome to Zonos API", - "version": settings.API_VERSION, - "status": "active", - "available_models": list(settings.MODEL_NAMES.keys()) - } + """Root endpoint.""" + logger.debug("Root endpoint accessed") + return {"message": "Zonos Text-to-Speech API"} + +@app.get("/models") +async def get_models(): + """Get available TTS models.""" + logger.debug("Fetching available models") + service = get_tts_service() + models = service.get_model_names() + logger.info(f"Retrieved {len(models)} models") + return {"models": models} -@app.get("/health") -async def health_check(): - """Health check endpoint.""" - if not model.is_loaded(): - raise HTTPException(status_code=503, detail="Model not loaded") - return {"status": "healthy", "current_model": model.current_model_type} +@app.get("/languages") +async def get_languages(): + """Get supported languages.""" + logger.debug("Fetching supported languages") + service = get_tts_service() + languages = service.get_supported_languages() + logger.info(f"Retrieved {len(languages)} supported languages") + return {"languages": languages} -@app.post("/tts") -async def text_to_speech(request: TextToSpeechRequest): +@app.get("/model/conditioners") +async def get_model_conditioners(model_name: str): + """Get available conditioners for a specific model.""" + logger.debug(f"Fetching conditioners for model: {model_name}") + service = get_tts_service() + logger.info(f"Requested model: {model_name}") + logger.debug(f"Available models: {service.get_model_names()}") + if model_name not in service.get_model_names(): + logger.warning(f"Model not found: {model_name}") + raise HTTPException(status_code=404, detail="Model not found") + conditioners = service.get_model_conditioners(model_name) + logger.info(f"Retrieved {len(conditioners)} conditioners for model {model_name}") + return {"conditioners": conditioners} + +@app.post("/synthesize") +async def synthesize_speech(request: TTSRequest): """Generate speech from text.""" + logger.info(f"Speech synthesis requested for text of length {len(request.text)} chars using model {request.model_choice}") + + if len(request.text) > MAX_CHARACTERS: + logger.warning(f"Text length ({len(request.text)}) exceeds maximum of {MAX_CHARACTERS} characters") + raise HTTPException( + status_code=400, + detail=f"Text length exceeds maximum of {MAX_CHARACTERS} characters" + ) + + service = get_tts_service() + try: - logger.info(f"Processing TTS request for text: {request.text[:50]}... 
using {request.model_type} model") + logger.debug(f"Starting audio generation with params: language={request.language}, " + f"speaking_rate={request.speaking_rate}, seed={request.seed}") - # Generate audio - audio_data, sample_rate = await model.generate_speech( - **request.dict() + # Generate audio using TTS service + (sample_rate, audio_data), seed = service.generate_audio( + model_choice=request.model_choice, + text=request.text, + language=request.language, + speaker_audio=request.speaker_audio, + prefix_audio=request.prefix_audio, + emotion_values=request.emotion_values, + vq_score=request.vq_score, + fmax=request.fmax, + pitch_std=request.pitch_std, + speaking_rate=request.speaking_rate, + dnsmos_ovrl=request.dnsmos_ovrl, + speaker_noised=request.speaker_noised, + cfg_scale=request.cfg_scale, + min_p=request.min_p, + seed=request.seed, + randomize_seed=request.randomize_seed, + unconditional_keys=request.unconditional_keys, + top_p=request.top_p, + top_k=request.top_k, + linear=request.linear, + confidence=request.confidence, + quadratic=request.quadratic, ) - + + logger.info(f"Successfully generated audio with sample rate {sample_rate}Hz, seed {seed}") + # Convert to WAV format - buffer = io.BytesIO() - sf.write(buffer, audio_data, sample_rate, format='WAV') - buffer.seek(0) - - return StreamingResponse( - buffer, + wav_buffer = io.BytesIO() + with wave.open(wav_buffer, 'wb') as wav_file: + wav_file.setnchannels(1) + wav_file.setsampwidth(2) # 16-bit audio + wav_file.setframerate(sample_rate) + wav_file.writeframes((audio_data * 32767).astype(np.int16).tobytes()) + + # Prepare response + wav_buffer.seek(0) + response = StreamingResponse( + wav_buffer, media_type="audio/wav", - headers={ - "Content-Disposition": f"attachment; filename=speech_{hash(request.text)[:8]}.wav" - } + headers={"x-seed": str(seed)} ) + + logger.debug("Returning WAV audio stream response") + return response except Exception as e: - logger.error(f"Error processing TTS request: {str(e)}") - raise HTTPException(status_code=500, detail=str(e)) - -if __name__ == "__main__": - import uvicorn - uvicorn.run( - "main:app", - host="0.0.0.0", - port=settings.PORT, - reload=True - ) \ No newline at end of file + logger.error(f"Error during speech synthesis: {str(e)}", exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) \ No newline at end of file diff --git a/app/model.py b/app/model.py deleted file mode 100644 index 6e02c72..0000000 --- a/app/model.py +++ /dev/null @@ -1,198 +0,0 @@ -import torch -import torchaudio -from transformers import AutoTokenizer -from Zonos.zonos.model import Zonos -from Zonos.zonos.conditioning import make_cond_dict -import logging -from typing import Tuple, Optional -import numpy as np -from .config import settings - -logger = logging.getLogger(__name__) - -class ZonosModel: - def __init__(self): - self.device = settings.DEVICE - self.model = None - self.tokenizer = None - self.sample_rate = settings.SAMPLE_RATE - self.current_model_type = None - - async def load(self, model_type: str = None) -> None: - """Load the model and tokenizer.""" - if model_type is None: - model_type = settings.MODEL_TYPE - - if self.current_model_type == model_type and self.is_loaded(): - logger.info(f"{model_type} model is already loaded") - return - - try: - logger.info(f"Loading Zonos {model_type} model on {self.device}...") - - # Clear CUDA cache if switching models - if self.model is not None: - del self.model - if torch.cuda.is_available(): - torch.cuda.empty_cache() - - model_name = 
settings.MODEL_NAMES[model_type] - self.tokenizer = AutoTokenizer.from_pretrained( - model_name, - cache_dir=settings.MODEL_CACHE_DIR - ) - self.model = Zonos.from_pretrained( - repo_id=model_name, - device=self.device - ) - - # Move model to device and optimize - self.model.to(self.device) - if self.device == "cuda": - self.model.bfloat16() - self.model.eval() - - self.current_model_type = model_type - logger.info(f"{model_type} model loaded successfully") - - except Exception as e: - logger.error(f"Error loading model: {str(e)}") - raise RuntimeError(f"Failed to load model: {str(e)}") - - def is_loaded(self) -> bool: - """Check if model and tokenizer are loaded.""" - return self.model is not None and self.tokenizer is not None - - @torch.no_grad() - async def generate_speech( - self, - text: str, - model_type: str = None, - language: str = "en-us", - speaker_audio: Optional[str] = None, - prefix_audio: Optional[str] = None, - skip_speaker: bool = False, - skip_emotion: bool = False, - emotion1: float = 0.6, - emotion2: float = 0.05, - emotion3: float = 0.05, - emotion4: float = 0.05, - emotion5: float = 0.05, - emotion6: float = 0.05, - emotion7: float = 0.5, - emotion8: float = 0.6, - skip_vqscore_8: bool = True, - vq_single: float = 0.78, - fmax: int = 22050, - skip_fmax: bool = False, - pitch_std: float = 20.0, - skip_pitch_std: bool = False, - speaking_rate: float = 15.0, - skip_speaking_rate: bool = False, - dnsmos_ovrl: float = 4.0, - skip_dnsmos_ovrl: bool = True, - speaker_noised: bool = False, - skip_speaker_noised: bool = False, - cfg_scale: float = 2.0, - min_p: float = 0.1, - seed: int = 420, - ) -> Tuple[np.ndarray, int]: - """Generate speech from text.""" - # Load or switch model if needed - await self.load(model_type) - - if not self.is_loaded(): - raise RuntimeError("Model not loaded") - - try: - # Handle speaker embedding - speaker_embedding = None - if speaker_audio and not skip_speaker: - wav, sr = torchaudio.load(speaker_audio) - speaker_embedding = self.model.make_speaker_embedding(wav, sr) - speaker_embedding = speaker_embedding.to(self.device, dtype=torch.bfloat16) - - # Handle audio prefix - audio_prefix_codes = None - if prefix_audio: - wav_prefix, sr_prefix = torchaudio.load(prefix_audio) - wav_prefix = wav_prefix.mean(0, keepdim=True) - wav_prefix = torchaudio.functional.resample(wav_prefix, sr_prefix, - self.model.autoencoder.sampling_rate) - wav_prefix = wav_prefix.to(self.device, dtype=torch.float32) - with torch.autocast(self.device, dtype=torch.float32): - audio_prefix_codes = self.model.autoencoder.encode(wav_prefix.unsqueeze(0)) - - # Prepare conditioning - uncond_keys = [] - if skip_speaker: uncond_keys.append("speaker") - if skip_emotion: uncond_keys.append("emotion") - if skip_vqscore_8: uncond_keys.append("vqscore_8") - if skip_fmax: uncond_keys.append("fmax") - if skip_pitch_std: uncond_keys.append("pitch_std") - if skip_speaking_rate: uncond_keys.append("speaking_rate") - if skip_dnsmos_ovrl: uncond_keys.append("dnsmos_ovrl") - if skip_speaker_noised: uncond_keys.append("speaker_noised") - - emotion_tensor = torch.tensor( - [[emotion1, emotion2, emotion3, emotion4, - emotion5, emotion6, emotion7, emotion8]], - device=self.device - ) - - vq_tensor = torch.tensor([vq_single] * 8, device=self.device).unsqueeze(0) - - cond_dict = make_cond_dict( - text=text, - language=language, - speaker=speaker_embedding, - emotion=emotion_tensor, - vqscore_8=vq_tensor, - fmax=float(fmax), - pitch_std=float(pitch_std), - speaking_rate=float(speaking_rate), - 
dnsmos_ovrl=float(dnsmos_ovrl), - speaker_noised=speaker_noised, - device=self.device, - unconditional_keys=uncond_keys, - ) - - # Prepare generation parameters - torch.manual_seed(seed) - conditioning = self.model.prepare_conditioning(cond_dict) - - # Generate audio - codes = self.model.generate( - prefix_conditioning=conditioning, - audio_prefix_codes=audio_prefix_codes, - max_new_tokens=settings.MAX_NEW_TOKENS, - cfg_scale=cfg_scale, - batch_size=1, - sampling_params=dict(min_p=min_p), - ) - - # Decode and return audio - wav_out = self.model.autoencoder.decode(codes).cpu().detach() - if wav_out.dim() == 2 and wav_out.size(0) > 1: - wav_out = wav_out[0:1, :] - - return wav_out.squeeze().numpy(), self.model.autoencoder.sampling_rate - - except Exception as e: - logger.error(f"Error generating speech: {str(e)}") - raise RuntimeError(f"Speech generation failed: {str(e)}") - - def _adjust_speed(self, audio: np.ndarray, speed: float) -> np.ndarray: - """Adjust the speed of the audio.""" - if speed == 1.0: - return audio - - # This is a simple implementation - you might want to use a more sophisticated method - return np.interp( - np.arange(0, len(audio) / speed), - np.arange(0, len(audio)), - audio - ) - -# Create a global instance -model = ZonosModel() \ No newline at end of file diff --git a/app/models.py b/app/models.py new file mode 100644 index 0000000..2e46622 --- /dev/null +++ b/app/models.py @@ -0,0 +1,37 @@ +from typing import List, Optional +from pydantic import BaseModel, Field + +class TTSRequest(BaseModel): + model_choice: str = Field(description="Model variant to use") + text: str = Field(description="Text to convert to speech") + language: str = Field(description="Language code", default="en-us") + speaker_audio: Optional[str] = Field(default=None, description="Path to speaker audio file") + prefix_audio: Optional[str] = Field(default=None, description="Path to prefix audio file") + emotion_values: List[float] = Field( + default=[1.0, 0.05, 0.05, 0.05, 0.05, 0.05, 0.1, 0.2], + description="List of 8 emotion values: happiness, sadness, disgust, fear, surprise, anger, other, neutral" + ) + vq_score: float = Field(default=0.78, description="VQ Score value") + fmax: float = Field(default=24000, description="Maximum frequency") + pitch_std: float = Field(default=45.0, description="Pitch standard deviation") + speaking_rate: float = Field(default=15.0, description="Speaking rate") + dnsmos_ovrl: float = Field(default=4.0, description="DNSMOS overall score") + speaker_noised: bool = Field(default=False, description="Whether to denoise speaker audio") + cfg_scale: float = Field(default=2.0, description="CFG scale value") + min_p: float = Field(default=0.15, description="Minimum probability") + seed: int = Field(default=420, description="Random seed") + randomize_seed: bool = Field(default=True, description="Whether to randomize seed") + unconditional_keys: List[str] = Field( + default=["emotion"], + description="List of conditioning keys to make unconditional" + ) + top_p: float = Field(default=0.95, description="Top-p sampling value") + top_k: int = Field(default=50, description="Top-k sampling value") + linear: float = Field(default=1.0, description="Linear scaling factor") + confidence: float = Field(default=0.1, description="Confidence scaling factor") + quadratic: float = Field(default=1.0, description="Quadratic scaling factor") + +class AudioResponse(BaseModel): + sample_rate: int + audio_data: bytes + seed: int \ No newline at end of file diff --git a/app/services/tts.py 
b/app/services/tts.py new file mode 100644 index 0000000..2f3137a --- /dev/null +++ b/app/services/tts.py @@ -0,0 +1,156 @@ +import torch +import torchaudio +import logging +from typing import List, Tuple, Optional, Dict, Any +import numpy as np +from zonos.model import Zonos +from zonos.conditioning import make_cond_dict, supported_language_codes + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +class TTSService: + def __init__(self, device: str = "cuda"): + self.device = device + self.model_names = ["Zyphra/Zonos-v0.1-transformer"] + self.models = { + name: Zonos.from_pretrained(name, device=device, backbone="torch") + for name in self.model_names + } + + # Debugging: Print loaded models + print(f"Loaded models: {self.models.keys()}") + + # Set models to eval mode + for model in self.models.values(): + model.requires_grad_(False).eval() + + def get_model_names(self) -> List[str]: + logger.info("Retrieving model names") + return self.model_names + + def get_supported_languages(self) -> List[str]: + logger.info("Retrieving supported languages") + return supported_language_codes + + def get_model_conditioners(self, model_choice: str) -> List[str]: + """Get list of conditioner names for a model""" + logger.info(f"Retrieving conditioners for model: {model_choice}") + model = self.models[model_choice] + return [c.name for c in model.prefix_conditioner.conditioners] + + def generate_audio( + self, + model_choice: str, + text: str, + language: str = "en-us", + speaker_audio: Optional[str] = None, + prefix_audio: Optional[str] = None, + emotion_values: List[float] = [1.0, 0.05, 0.05, 0.05, 0.05, 0.05, 0.1, 0.2], + vq_score: float = 0.78, + fmax: float = 24000, + pitch_std: float = 45.0, + speaking_rate: float = 15.0, + dnsmos_ovrl: float = 4.0, + speaker_noised: bool = False, + cfg_scale: float = 2.0, + min_p: float = 0.15, + seed: int = 420, + randomize_seed: bool = True, + unconditional_keys: List[str] = ["emotion"], + top_p: float = 0.95, + top_k: int = 50, + linear: float = 1.0, + confidence: float = 0.1, + quadratic: float = 1.0, + ) -> Tuple[Tuple[int, np.ndarray], int]: + """ + Generate audio using the specified model and parameters. + Returns a tuple of ((sample_rate, audio_data), seed). 
+ """ + logger.info(f"Generating audio for model: {model_choice}") + selected_model = self.models[model_choice] + + if randomize_seed: + seed = torch.randint(0, 2**32 - 1, (1,)).item() + torch.manual_seed(seed) + + # Process speaker audio if provided + speaker_embedding = None + if speaker_audio is not None and "speaker" not in unconditional_keys: + logger.info(f"Processing speaker audio: {speaker_audio}") + wav, sr = torchaudio.load(speaker_audio) + speaker_embedding = selected_model.make_speaker_embedding(wav, sr) + speaker_embedding = speaker_embedding.to(self.device, dtype=torch.bfloat16) + + # Process prefix audio if provided + audio_prefix_codes = None + if prefix_audio is not None: + logger.info(f"Processing prefix audio: {prefix_audio}") + wav_prefix, sr_prefix = torchaudio.load(prefix_audio) + wav_prefix = wav_prefix.mean(0, keepdim=True) + wav_prefix = torchaudio.functional.resample( + wav_prefix, + sr_prefix, + selected_model.autoencoder.sampling_rate + ) + wav_prefix = wav_prefix.to(self.device, dtype=torch.float32) + with torch.autocast(self.device, dtype=torch.float32): + audio_prefix_codes = selected_model.autoencoder.encode(wav_prefix.unsqueeze(0)) + + # Prepare emotion tensor + emotion_tensor = torch.tensor(emotion_values, device=self.device) + + # Prepare VQ score tensor + vq_tensor = torch.tensor([vq_score] * 8, device=self.device).unsqueeze(0) + + # Create conditioning dictionary + logger.info("Creating conditioning dictionary") + cond_dict = make_cond_dict( + text=text, + language=language, + speaker=speaker_embedding, + emotion=emotion_tensor, + vqscore_8=vq_tensor, + fmax=float(fmax), + pitch_std=float(pitch_std), + speaking_rate=float(speaking_rate), + dnsmos_ovrl=float(dnsmos_ovrl), + speaker_noised=bool(speaker_noised), + device=self.device, + unconditional_keys=unconditional_keys, + ) + conditioning = selected_model.prepare_conditioning(cond_dict) + + # sampling parameters + sampling_params = { + "top_p": float(top_p), + "top_k": int(top_k), + "min_p": float(min_p), + "linear": float(linear), + "conf": float(confidence), + "quad": float(quadratic) + } + + # Generate audio + logger.info("Generating audio") + max_new_tokens = 86 * 60 # ~30 seconds of audio + codes = selected_model.generate( + prefix_conditioning=conditioning, + audio_prefix_codes=audio_prefix_codes, + max_new_tokens=max_new_tokens, + cfg_scale=float(cfg_scale), + batch_size=1, + sampling_params=sampling_params, + ) + + # Decode generated codes to waveform + logger.info("Decoding generated audio to waveform") + wav_out = selected_model.autoencoder.decode(codes).cpu().detach() + sr_out = selected_model.autoencoder.sampling_rate + if wav_out.dim() == 2 and wav_out.size(0) > 1: + wav_out = wav_out[0:1, :] + + logger.info("Audio generation complete") + return (sr_out, wav_out.squeeze().numpy()), seed \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 6e18df2..da31ff1 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,89 +1,18 @@ version: '3.8' services: - api: - build: - context: . - dockerfile: Dockerfile - args: - BUILDKIT_INLINE_CACHE: 1 - GIT_SUBMODULE_UPDATE: 1 # Enable submodule update during build - image: zonos-api:latest + zonos: + build: . 
+ image: zonos-local ports: - "8000:8000" environment: - - PORT=8000 - - WORKERS=4 - - MODEL_TYPE=Transformer - - MODEL_CACHE_DIR=/home/appuser/.cache/huggingface - - PYTHONUNBUFFERED=1 + - CUDA_VISIBLE_DEVICES=0 - NVIDIA_VISIBLE_DEVICES=all - - NVIDIA_DRIVER_CAPABILITIES=all - - CUDA_LAUNCH_BLOCKING=0 - - TORCH_CUDA_ARCH_LIST=7.0;7.5;8.0;8.6+PTX - - CUDA_HOME=/usr/local/cuda - - MAX_JOBS=4 - volumes: - - model_cache:/home/appuser/.cache/huggingface - - audio_uploads:/app/uploads - - /tmp/.X11-unix:/tmp/.X11-unix # For GPU monitoring deploy: resources: - limits: - memory: 16G - cpus: '8' + reservations: devices: - driver: nvidia - count: all - capabilities: [gpu] - healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:8000/health"] - interval: 30s - timeout: 10s - retries: 3 - start_period: 40s - restart: unless-stopped - shm_size: '4gb' # Increased shared memory for better performance - ulimits: - memlock: -1 # Unlimited memory lock for GPU operations - runtime: nvidia # Enable NVIDIA runtime for GPU support - logging: - driver: "json-file" - options: - max-size: "200m" - max-file: "10" - - prometheus: - image: prom/prometheus:latest - volumes: - - ./prometheus.yml:/etc/prometheus/prometheus.yml - - prometheus_data:/prometheus - command: - - '--config.file=/etc/prometheus/prometheus.yml' - - '--storage.tsdb.path=/prometheus' - ports: - - "9090:9090" - depends_on: - - api - - grafana: - image: grafana/grafana:latest - volumes: - - grafana_data:/var/lib/grafana - environment: - - GF_SECURITY_ADMIN_PASSWORD=admin - - GF_USERS_ALLOW_SIGN_UP=false - ports: - - "3000:3000" - depends_on: - - prometheus - -volumes: - model_cache: - driver: local - audio_uploads: - driver: local - prometheus_data: - driver: local - grafana_data: - driver: local \ No newline at end of file + count: 1 + capabilities: [gpu] \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..951268c --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,36 @@ +[project] +name = "zonos-api" +version = "0.1.0" +description = "API for Zonos project" +readme = "README.md" +requires-python = ">=3.10" +dependencies = [ + "fastapi>=0.109.0", + "uvicorn>=0.27.0", + "pydantic>=2.6.0", + "python-multipart>=0.0.9", + "python-dotenv>=1.0.0" +] + +[project.optional-dependencies] +dev = [ + "black>=24.1.0", + "isort>=5.13.0", + "pytest>=8.0.0", + "pytest-asyncio>=0.23.0" +] + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["app"] + +[tool.black] +line-length = 88 +target-version = ["py310"] + +[tool.isort] +profile = "black" +multi_line_output = 3 diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..f3c6235 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +testpaths = tests +python_files = test_*.py +python_functions = test_* +addopts = -v --cov=app --cov-report=term-missing \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 325abc6..9bade7d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,24 +1,16 @@ -fastapi==0.104.1 -uvicorn==0.24.0 -python-multipart==0.0.6 -transformers==4.35.2 -torch==2.1.1 -torchaudio==2.1.1 -numpy==1.26.2 -pydantic==2.5.2 -pydantic-settings==2.1.0 -python-dotenv==1.0.0 -gunicorn==21.2.0 -soundfile==0.12.1 -inflect>=7.5.0 -kanjize>=1.5.0 -phonemizer>=3.3.0 -sudachidict-full>=20241021 -sudachipy>=0.6.10 -huggingface-hub>=0.28.1 +# Runtime dependencies +torch +torchaudio +fastapi +uvicorn +python-multipart +numpy +soundfile 
+gradio +pydantic -# Optional dependencies - requires CUDA -# Uncomment if you have CUDA installed and want to use GPU acceleration -# flash-attn>=2.7.3 -mamba-ssm>=2.2.4 -causal-conv1d>=1.5.0.post8 \ No newline at end of file +# Test dependencies +pytest +pytest-cov +httpx +pytest-asyncio \ No newline at end of file diff --git a/tests/test_api.py b/tests/test_api.py new file mode 100644 index 0000000..2bc51ec --- /dev/null +++ b/tests/test_api.py @@ -0,0 +1,65 @@ +from fastapi.testclient import TestClient +from unittest.mock import Mock, patch +import numpy as np + +from app.main import app +from app.services.tts import TTSService + +client = TestClient(app) + +def test_root(): + response = client.get("/") + assert response.status_code == 200 + assert response.json()["message"] == "Zonos Text-to-Speech API" + +def test_get_models(): + with patch('app.main.get_tts_service') as mock_get_service: + mock_service = Mock(spec=TTSService) + mock_service.get_model_names.return_value = [ + "Zyphra/Zonos-v0.1-transformer", + "Zyphra/Zonos-v0.1-hybrid" + ] + mock_get_service.return_value = mock_service + + response = client.get("/models") + assert response.status_code == 200 + assert "models" in response.json() + assert len(response.json()["models"]) == 2 + +def test_get_languages(): + with patch('app.main.get_tts_service') as mock_get_service: + mock_service = Mock(spec=TTSService) + mock_service.get_supported_languages.return_value = ["en-us", "es-es"] + mock_get_service.return_value = mock_service + + response = client.get("/languages") + assert response.status_code == 200 + assert "languages" in response.json() + assert len(response.json()["languages"]) == 2 + +def test_synthesize_speech(): + with patch('app.main.get_tts_service') as mock_get_service: + mock_service = Mock(spec=TTSService) + # Mock audio generation + mock_service.generate_audio.return_value = ( + (44100, np.zeros(44100).astype(np.float32)), # 1 second of silence + 42 # seed + ) + mock_get_service.return_value = mock_service + + test_request = { + "model_choice": "Zyphra/Zonos-v0.1-transformer", + "text": "Test speech", + "language": "en-us", + "emotion_values": [1.0, 0.05, 0.05, 0.05, 0.05, 0.05, 0.1, 0.2], + "vq_score": 0.78, + "cfg_scale": 2.0, + "min_p": 0.15, + "randomize_seed": False, + "seed": 42 + } + + response = client.post("/synthesize", json=test_request) + assert response.status_code == 200 + assert response.headers["content-type"] == "audio/wav" + assert response.headers["x-seed"] == "42" \ No newline at end of file diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..c83bfd1 --- /dev/null +++ b/uv.lock @@ -0,0 +1,439 @@ +version = 1 +revision = 1 +requires-python = ">=3.10" + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, +] + +[[package]] +name = "anyio" +version = "4.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = 
"idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/73/199a98fc2dae33535d6b8e8e6ec01f8c1d76c9adb096c6b7d64823038cde/anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a", size = 181126 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/eb/e7f063ad1fec6b3178a3cd82d1a3c4de82cccf283fc42746168188e1cdd5/anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a", size = 96041 }, +] + +[[package]] +name = "black" +version = "25.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "mypy-extensions" }, + { name = "packaging" }, + { name = "pathspec" }, + { name = "platformdirs" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/94/49/26a7b0f3f35da4b5a65f081943b7bcd22d7002f5f0fb8098ec1ff21cb6ef/black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666", size = 649449 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/3b/4ba3f93ac8d90410423fdd31d7541ada9bcee1df32fb90d26de41ed40e1d/black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32", size = 1629419 }, + { url = "https://files.pythonhosted.org/packages/b4/02/0bde0485146a8a5e694daed47561785e8b77a0466ccc1f3e485d5ef2925e/black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da", size = 1461080 }, + { url = "https://files.pythonhosted.org/packages/52/0e/abdf75183c830eaca7589144ff96d49bce73d7ec6ad12ef62185cc0f79a2/black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7", size = 1766886 }, + { url = "https://files.pythonhosted.org/packages/dc/a6/97d8bb65b1d8a41f8a6736222ba0a334db7b7b77b8023ab4568288f23973/black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9", size = 1419404 }, + { url = "https://files.pythonhosted.org/packages/7e/4f/87f596aca05c3ce5b94b8663dbfe242a12843caaa82dd3f85f1ffdc3f177/black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0", size = 1614372 }, + { url = "https://files.pythonhosted.org/packages/e7/d0/2c34c36190b741c59c901e56ab7f6e54dad8df05a6272a9747ecef7c6036/black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299", size = 1442865 }, + { url = "https://files.pythonhosted.org/packages/21/d4/7518c72262468430ead45cf22bd86c883a6448b9eb43672765d69a8f1248/black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096", size = 1749699 }, + { url = "https://files.pythonhosted.org/packages/58/db/4f5beb989b547f79096e035c4981ceb36ac2b552d0ac5f2620e941501c99/black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2", size = 1428028 }, + { url = 
"https://files.pythonhosted.org/packages/83/71/3fe4741df7adf015ad8dfa082dd36c94ca86bb21f25608eb247b4afb15b2/black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b", size = 1650988 }, + { url = "https://files.pythonhosted.org/packages/13/f3/89aac8a83d73937ccd39bbe8fc6ac8860c11cfa0af5b1c96d081facac844/black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc", size = 1453985 }, + { url = "https://files.pythonhosted.org/packages/6f/22/b99efca33f1f3a1d2552c714b1e1b5ae92efac6c43e790ad539a163d1754/black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f", size = 1783816 }, + { url = "https://files.pythonhosted.org/packages/18/7e/a27c3ad3822b6f2e0e00d63d58ff6299a99a5b3aee69fa77cd4b0076b261/black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba", size = 1440860 }, + { url = "https://files.pythonhosted.org/packages/98/87/0edf98916640efa5d0696e1abb0a8357b52e69e82322628f25bf14d263d1/black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f", size = 1650673 }, + { url = "https://files.pythonhosted.org/packages/52/e5/f7bf17207cf87fa6e9b676576749c6b6ed0d70f179a3d812c997870291c3/black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3", size = 1453190 }, + { url = "https://files.pythonhosted.org/packages/e3/ee/adda3d46d4a9120772fae6de454c8495603c37c4c3b9c60f25b1ab6401fe/black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171", size = 1782926 }, + { url = "https://files.pythonhosted.org/packages/cc/64/94eb5f45dcb997d2082f097a3944cfc7fe87e071907f677e80788a2d7b7a/black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18", size = 1442613 }, + { url = "https://files.pythonhosted.org/packages/09/71/54e999902aed72baf26bca0d50781b01838251a462612966e9fc4891eadd/black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717", size = 207646 }, +] + +[[package]] +name = "click" +version = "8.1.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188 }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/09/35/2495c4ac46b980e4ca1f6ad6db102322ef3ad2410b79fdde159a4b0f3b92/exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc", size = 28883 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", size = 16453 }, +] + +[[package]] +name = "fastapi" +version = "0.115.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "starlette" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a2/b2/5a5dc4affdb6661dea100324e19a7721d5dc524b464fe8e366c093fd7d87/fastapi-0.115.8.tar.gz", hash = "sha256:0ce9111231720190473e222cdf0f07f7206ad7e53ea02beb1d2dc36e2f0741e9", size = 295403 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/7d/2d6ce181d7a5f51dedb8c06206cbf0ec026a99bf145edd309f9e17c3282f/fastapi-0.115.8-py3-none-any.whl", hash = "sha256:753a96dd7e036b34eeef8babdfcfe3f28ff79648f86551eb36bfc1b0bf4a8cbf", size = 94814 }, +] + +[[package]] +name = "h11" +version = "0.14.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259 }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 }, +] + +[[package]] +name = "isort" +version = "6.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/1c/28/b382d1656ac0ee4cef4bf579b13f9c6c813bff8a5cb5996669592c8c75fa/isort-6.0.0.tar.gz", hash = "sha256:75d9d8a1438a9432a7d7b54f2d3b45cad9a4a0fdba43617d9873379704a8bdf1", size = 828356 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c7/d6017f09ae5b1206fbe531f7af3b6dac1f67aedcbd2e79f3b386c27955d6/isort-6.0.0-py3-none-any.whl", hash = "sha256:567954102bb47bb12e0fae62606570faacddd441e45683968c8d1734fb1af892", size = 94053 }, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 }, +] + +[[package]] +name = "packaging" +version = "24.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191 }, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439 }, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = 
"sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, +] + +[[package]] +name = "pydantic" +version = "2.10.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b7/ae/d5220c5c52b158b1de7ca89fc5edb72f304a70a4c540c84c8844bf4008de/pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236", size = 761681 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/3c/8cc1cc84deffa6e25d2d0c688ebb80635dfdbf1dbea3e30c541c8cf4d860/pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584", size = 431696 }, +] + +[[package]] +name = "pydantic-core" +version = "2.27.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/01/f3e5ac5e7c25833db5eb555f7b7ab24cd6f8c322d3a3ad2d67a952dc0abc/pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39", size = 413443 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/bc/fed5f74b5d802cf9a03e83f60f18864e90e3aed7223adaca5ffb7a8d8d64/pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa", size = 1895938 }, + { url = "https://files.pythonhosted.org/packages/71/2a/185aff24ce844e39abb8dd680f4e959f0006944f4a8a0ea372d9f9ae2e53/pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c", size = 1815684 }, + { url = "https://files.pythonhosted.org/packages/c3/43/fafabd3d94d159d4f1ed62e383e264f146a17dd4d48453319fd782e7979e/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a", size = 1829169 }, + { url = "https://files.pythonhosted.org/packages/a2/d1/f2dfe1a2a637ce6800b799aa086d079998959f6f1215eb4497966efd2274/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5", size = 1867227 }, + { url = "https://files.pythonhosted.org/packages/7d/39/e06fcbcc1c785daa3160ccf6c1c38fea31f5754b756e34b65f74e99780b5/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c", size = 2037695 }, + { url = "https://files.pythonhosted.org/packages/7a/67/61291ee98e07f0650eb756d44998214231f50751ba7e13f4f325d95249ab/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7", size = 2741662 }, + { url = "https://files.pythonhosted.org/packages/32/90/3b15e31b88ca39e9e626630b4c4a1f5a0dfd09076366f4219429e6786076/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a", size = 1993370 }, + { url = "https://files.pythonhosted.org/packages/ff/83/c06d333ee3a67e2e13e07794995c1535565132940715931c1c43bfc85b11/pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236", size = 1996813 }, + { url = "https://files.pythonhosted.org/packages/7c/f7/89be1c8deb6e22618a74f0ca0d933fdcb8baa254753b26b25ad3acff8f74/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962", size = 2005287 }, + { url = "https://files.pythonhosted.org/packages/b7/7d/8eb3e23206c00ef7feee17b83a4ffa0a623eb1a9d382e56e4aa46fd15ff2/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9", size = 2128414 }, + { url = "https://files.pythonhosted.org/packages/4e/99/fe80f3ff8dd71a3ea15763878d464476e6cb0a2db95ff1c5c554133b6b83/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af", size = 2155301 }, + { url = "https://files.pythonhosted.org/packages/2b/a3/e50460b9a5789ca1451b70d4f52546fa9e2b420ba3bfa6100105c0559238/pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4", size = 1816685 }, + { url = "https://files.pythonhosted.org/packages/57/4c/a8838731cb0f2c2a39d3535376466de6049034d7b239c0202a64aaa05533/pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31", size = 1982876 }, + { url = "https://files.pythonhosted.org/packages/c2/89/f3450af9d09d44eea1f2c369f49e8f181d742f28220f88cc4dfaae91ea6e/pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc", size = 1893421 }, + { url = "https://files.pythonhosted.org/packages/9e/e3/71fe85af2021f3f386da42d291412e5baf6ce7716bd7101ea49c810eda90/pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7", size = 1814998 }, + { url = "https://files.pythonhosted.org/packages/a6/3c/724039e0d848fd69dbf5806894e26479577316c6f0f112bacaf67aa889ac/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15", size = 1826167 }, + { url = "https://files.pythonhosted.org/packages/2b/5b/1b29e8c1fb5f3199a9a57c1452004ff39f494bbe9bdbe9a81e18172e40d3/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306", size = 1865071 }, + { url = "https://files.pythonhosted.org/packages/89/6c/3985203863d76bb7d7266e36970d7e3b6385148c18a68cc8915fd8c84d57/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99", size = 2036244 }, + { url = "https://files.pythonhosted.org/packages/0e/41/f15316858a246b5d723f7d7f599f79e37493b2e84bfc789e58d88c209f8a/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459", size = 2737470 }, + { url = "https://files.pythonhosted.org/packages/a8/7c/b860618c25678bbd6d1d99dbdfdf0510ccb50790099b963ff78a124b754f/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048", size = 1992291 }, + { url = 
"https://files.pythonhosted.org/packages/bf/73/42c3742a391eccbeab39f15213ecda3104ae8682ba3c0c28069fbcb8c10d/pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d", size = 1994613 }, + { url = "https://files.pythonhosted.org/packages/94/7a/941e89096d1175d56f59340f3a8ebaf20762fef222c298ea96d36a6328c5/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b", size = 2002355 }, + { url = "https://files.pythonhosted.org/packages/6e/95/2359937a73d49e336a5a19848713555605d4d8d6940c3ec6c6c0ca4dcf25/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474", size = 2126661 }, + { url = "https://files.pythonhosted.org/packages/2b/4c/ca02b7bdb6012a1adef21a50625b14f43ed4d11f1fc237f9d7490aa5078c/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6", size = 2153261 }, + { url = "https://files.pythonhosted.org/packages/72/9d/a241db83f973049a1092a079272ffe2e3e82e98561ef6214ab53fe53b1c7/pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c", size = 1812361 }, + { url = "https://files.pythonhosted.org/packages/e8/ef/013f07248041b74abd48a385e2110aa3a9bbfef0fbd97d4e6d07d2f5b89a/pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc", size = 1982484 }, + { url = "https://files.pythonhosted.org/packages/10/1c/16b3a3e3398fd29dca77cea0a1d998d6bde3902fa2706985191e2313cc76/pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4", size = 1867102 }, + { url = "https://files.pythonhosted.org/packages/d6/74/51c8a5482ca447871c93e142d9d4a92ead74de6c8dc5e66733e22c9bba89/pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0", size = 1893127 }, + { url = "https://files.pythonhosted.org/packages/d3/f3/c97e80721735868313c58b89d2de85fa80fe8dfeeed84dc51598b92a135e/pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef", size = 1811340 }, + { url = "https://files.pythonhosted.org/packages/9e/91/840ec1375e686dbae1bd80a9e46c26a1e0083e1186abc610efa3d9a36180/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7", size = 1822900 }, + { url = "https://files.pythonhosted.org/packages/f6/31/4240bc96025035500c18adc149aa6ffdf1a0062a4b525c932065ceb4d868/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934", size = 1869177 }, + { url = "https://files.pythonhosted.org/packages/fa/20/02fbaadb7808be578317015c462655c317a77a7c8f0ef274bc016a784c54/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6", size = 2038046 }, + { url = 
"https://files.pythonhosted.org/packages/06/86/7f306b904e6c9eccf0668248b3f272090e49c275bc488a7b88b0823444a4/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c", size = 2685386 }, + { url = "https://files.pythonhosted.org/packages/8d/f0/49129b27c43396581a635d8710dae54a791b17dfc50c70164866bbf865e3/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2", size = 1997060 }, + { url = "https://files.pythonhosted.org/packages/0d/0f/943b4af7cd416c477fd40b187036c4f89b416a33d3cc0ab7b82708a667aa/pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4", size = 2004870 }, + { url = "https://files.pythonhosted.org/packages/35/40/aea70b5b1a63911c53a4c8117c0a828d6790483f858041f47bab0b779f44/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3", size = 1999822 }, + { url = "https://files.pythonhosted.org/packages/f2/b3/807b94fd337d58effc5498fd1a7a4d9d59af4133e83e32ae39a96fddec9d/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4", size = 2130364 }, + { url = "https://files.pythonhosted.org/packages/fc/df/791c827cd4ee6efd59248dca9369fb35e80a9484462c33c6649a8d02b565/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57", size = 2158303 }, + { url = "https://files.pythonhosted.org/packages/9b/67/4e197c300976af185b7cef4c02203e175fb127e414125916bf1128b639a9/pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc", size = 1834064 }, + { url = "https://files.pythonhosted.org/packages/1f/ea/cd7209a889163b8dcca139fe32b9687dd05249161a3edda62860430457a5/pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9", size = 1989046 }, + { url = "https://files.pythonhosted.org/packages/bc/49/c54baab2f4658c26ac633d798dab66b4c3a9bbf47cff5284e9c182f4137a/pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b", size = 1885092 }, + { url = "https://files.pythonhosted.org/packages/41/b1/9bc383f48f8002f99104e3acff6cba1231b29ef76cfa45d1506a5cad1f84/pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b", size = 1892709 }, + { url = "https://files.pythonhosted.org/packages/10/6c/e62b8657b834f3eb2961b49ec8e301eb99946245e70bf42c8817350cbefc/pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154", size = 1811273 }, + { url = "https://files.pythonhosted.org/packages/ba/15/52cfe49c8c986e081b863b102d6b859d9defc63446b642ccbbb3742bf371/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9", size = 1823027 }, + { url = 
"https://files.pythonhosted.org/packages/b1/1c/b6f402cfc18ec0024120602bdbcebc7bdd5b856528c013bd4d13865ca473/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9", size = 1868888 }, + { url = "https://files.pythonhosted.org/packages/bd/7b/8cb75b66ac37bc2975a3b7de99f3c6f355fcc4d89820b61dffa8f1e81677/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1", size = 2037738 }, + { url = "https://files.pythonhosted.org/packages/c8/f1/786d8fe78970a06f61df22cba58e365ce304bf9b9f46cc71c8c424e0c334/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a", size = 2685138 }, + { url = "https://files.pythonhosted.org/packages/a6/74/d12b2cd841d8724dc8ffb13fc5cef86566a53ed358103150209ecd5d1999/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e", size = 1997025 }, + { url = "https://files.pythonhosted.org/packages/a0/6e/940bcd631bc4d9a06c9539b51f070b66e8f370ed0933f392db6ff350d873/pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4", size = 2004633 }, + { url = "https://files.pythonhosted.org/packages/50/cc/a46b34f1708d82498c227d5d80ce615b2dd502ddcfd8376fc14a36655af1/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27", size = 1999404 }, + { url = "https://files.pythonhosted.org/packages/ca/2d/c365cfa930ed23bc58c41463bae347d1005537dc8db79e998af8ba28d35e/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee", size = 2130130 }, + { url = "https://files.pythonhosted.org/packages/f4/d7/eb64d015c350b7cdb371145b54d96c919d4db516817f31cd1c650cae3b21/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1", size = 2157946 }, + { url = "https://files.pythonhosted.org/packages/a4/99/bddde3ddde76c03b65dfd5a66ab436c4e58ffc42927d4ff1198ffbf96f5f/pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130", size = 1834387 }, + { url = "https://files.pythonhosted.org/packages/71/47/82b5e846e01b26ac6f1893d3c5f9f3a2eb6ba79be26eef0b759b4fe72946/pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee", size = 1990453 }, + { url = "https://files.pythonhosted.org/packages/51/b2/b2b50d5ecf21acf870190ae5d093602d95f66c9c31f9d5de6062eb329ad1/pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b", size = 1885186 }, + { url = "https://files.pythonhosted.org/packages/46/72/af70981a341500419e67d5cb45abe552a7c74b66326ac8877588488da1ac/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e", size = 1891159 }, + { url = 
"https://files.pythonhosted.org/packages/ad/3d/c5913cccdef93e0a6a95c2d057d2c2cba347815c845cda79ddd3c0f5e17d/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8", size = 1768331 }, + { url = "https://files.pythonhosted.org/packages/f6/f0/a3ae8fbee269e4934f14e2e0e00928f9346c5943174f2811193113e58252/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3", size = 1822467 }, + { url = "https://files.pythonhosted.org/packages/d7/7a/7bbf241a04e9f9ea24cd5874354a83526d639b02674648af3f350554276c/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f", size = 1979797 }, + { url = "https://files.pythonhosted.org/packages/4f/5f/4784c6107731f89e0005a92ecb8a2efeafdb55eb992b8e9d0a2be5199335/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133", size = 1987839 }, + { url = "https://files.pythonhosted.org/packages/6d/a7/61246562b651dff00de86a5f01b6e4befb518df314c54dec187a78d81c84/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc", size = 1998861 }, + { url = "https://files.pythonhosted.org/packages/86/aa/837821ecf0c022bbb74ca132e117c358321e72e7f9702d1b6a03758545e2/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50", size = 2116582 }, + { url = "https://files.pythonhosted.org/packages/81/b0/5e74656e95623cbaa0a6278d16cf15e10a51f6002e3ec126541e95c29ea3/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9", size = 2151985 }, + { url = "https://files.pythonhosted.org/packages/63/37/3e32eeb2a451fddaa3898e2163746b0cffbbdbb4740d38372db0490d67f3/pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151", size = 2004715 }, +] + +[[package]] +name = "pytest" +version = "8.3.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/05/35/30e0d83068951d90a01852cb1cef56e5d8a09d20c7f511634cc2f7e0372a/pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761", size = 1445919 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/92/76a1c94d3afee238333bc0a42b82935dd8f9cf8ce9e336ff87ee14d9e1cf/pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6", size = 343083 }, +] + +[[package]] +name = "pytest-asyncio" +version = "0.25.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f2/a8/ecbc8ede70921dd2f544ab1cadd3ff3bf842af27f87bbdea774c7baa1d38/pytest_asyncio-0.25.3.tar.gz", hash = 
"sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a", size = 54239 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/67/17/3493c5624e48fd97156ebaec380dcaafee9506d7e2c46218ceebbb57d7de/pytest_asyncio-0.25.3-py3-none-any.whl", hash = "sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3", size = 19467 }, +] + +[[package]] +name = "python-dotenv" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863 }, +] + +[[package]] +name = "python-multipart" +version = "0.0.20" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546 }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, +] + +[[package]] +name = "starlette" +version = "0.45.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ff/fb/2984a686808b89a6781526129a4b51266f678b2d2b97ab2d325e56116df8/starlette-0.45.3.tar.gz", hash = "sha256:2cbcba2a75806f8a41c722141486f37c28e30a0921c5f6fe4346cb0dcee1302f", size = 2574076 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/61/f2b52e107b1fc8944b33ef56bf6ac4ebbe16d91b94d2b87ce013bf63fb84/starlette-0.45.3-py3-none-any.whl", hash = "sha256:dfb6d332576f136ec740296c7e8bb8c8a7125044e7c6da30744718880cdd059d", size = 71507 }, +] + +[[package]] +name = "tomli" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077 }, + { url = 
"https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429 }, + { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067 }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030 }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898 }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894 }, + { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319 }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273 }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310 }, + { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309 }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762 }, + { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453 }, + { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486 }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349 }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159 }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243 }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645 }, + { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584 }, + { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875 }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418 }, + { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708 }, + { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582 }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543 }, + { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691 }, + { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170 }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530 }, + { url = 
"https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666 }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954 }, + { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724 }, + { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383 }, + { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257 }, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, +] + +[[package]] +name = "uvicorn" +version = "0.34.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4b/4d/938bd85e5bf2edeec766267a5015ad969730bb91e31b44021dfe8b22df6c/uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9", size = 76568 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4", size = 62315 }, +] + +[[package]] +name = "zonos-api" +version = "0.1.0" +source = { editable = "." 
} +dependencies = [ + { name = "fastapi" }, + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "python-multipart" }, + { name = "uvicorn" }, +] + +[package.optional-dependencies] +dev = [ + { name = "black" }, + { name = "isort" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, +] + +[package.metadata] +requires-dist = [ + { name = "black", marker = "extra == 'dev'", specifier = ">=24.1.0" }, + { name = "fastapi", specifier = ">=0.109.0" }, + { name = "isort", marker = "extra == 'dev'", specifier = ">=5.13.0" }, + { name = "pydantic", specifier = ">=2.6.0" }, + { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.0.0" }, + { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.23.0" }, + { name = "python-dotenv", specifier = ">=1.0.0" }, + { name = "python-multipart", specifier = ">=0.0.9" }, + { name = "uvicorn", specifier = ">=0.27.0" }, +] +provides-extras = ["dev"]
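
The tests above pin down the public surface of the new API: GET / returns a greeting, GET /models and GET /languages list what the service exposes, and POST /synthesize returns raw audio/wav bytes with the generation seed echoed back in an X-Seed response header. As a quick end-to-end smoke test against a running container, a minimal sketch could look like the following — assuming the service from docker-compose.yml is reachable on http://localhost:8000, and reusing the request fields from tests/test_api.py (httpx is already listed under the test dependencies); the base URL, timeout, and output path are assumptions, not part of this diff.

    # smoke_test_synthesize.py -- hypothetical helper, not included in this change.
    # Request fields are copied from tests/test_api.py; everything else is assumed.
    import httpx

    payload = {
        "model_choice": "Zyphra/Zonos-v0.1-transformer",
        "text": "Test speech",
        "language": "en-us",
        "emotion_values": [1.0, 0.05, 0.05, 0.05, 0.05, 0.05, 0.1, 0.2],
        "vq_score": 0.78,
        "cfg_scale": 2.0,
        "min_p": 0.15,
        "randomize_seed": False,
        "seed": 42,
    }

    with httpx.Client(base_url="http://localhost:8000", timeout=300.0) as client:
        response = client.post("/synthesize", json=payload)
        response.raise_for_status()
        # The tests assert an audio/wav body and an X-Seed header on success.
        with open("synthesized.wav", "wb") as f:
            f.write(response.content)
        print("content-type:", response.headers.get("content-type"))
        print("seed used:", response.headers.get("x-seed"))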