From 7e5854d3f80d748d1b963463824ee313a5bf48fa Mon Sep 17 00:00:00 2001 From: Sean Stewart Date: Thu, 22 Feb 2024 16:46:33 -0500 Subject: [PATCH 01/15] Implement asyncio support, plus session optimizations for more efficient calls. --- .circleci/config.yml | 2 +- contentful/client.py | 625 ------------------------ contentful/client/__init__.py | 17 + contentful/client/base.py | 379 ++++++++++++++ contentful/client/impl.py | 416 ++++++++++++++++ contentful/client/queries.py | 92 ++++ contentful/client/transport/__init__.py | 0 contentful/client/transport/abstract.py | 221 +++++++++ contentful/client/transport/aio.py | 108 ++++ contentful/client/transport/errors.py | 219 +++++++++ contentful/client/transport/retry.py | 128 +++++ contentful/client/transport/sio.py | 110 +++++ contentful/errors.py | 213 +------- contentful/utils.py | 83 +--- requirements.txt | 4 +- 15 files changed, 1698 insertions(+), 919 deletions(-) delete mode 100644 contentful/client.py create mode 100644 contentful/client/__init__.py create mode 100644 contentful/client/base.py create mode 100644 contentful/client/impl.py create mode 100644 contentful/client/queries.py create mode 100644 contentful/client/transport/__init__.py create mode 100644 contentful/client/transport/abstract.py create mode 100644 contentful/client/transport/aio.py create mode 100644 contentful/client/transport/errors.py create mode 100644 contentful/client/transport/retry.py create mode 100644 contentful/client/transport/sio.py diff --git a/.circleci/config.yml b/.circleci/config.yml index 1c895aa..c802d93 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -19,4 +19,4 @@ workflows: - test: matrix: parameters: - python-version: ["2.7", "3.7", "3.8", "3.9"] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] diff --git a/contentful/client.py b/contentful/client.py deleted file mode 100644 index d04c325..0000000 --- a/contentful/client.py +++ /dev/null @@ -1,625 +0,0 @@ -import requests -import platform -from re import sub -from .utils import ConfigurationException -from .utils import retry_request, string_class -from .errors import get_error, RateLimitExceededError, EntryNotFoundError -from .resource_builder import ResourceBuilder -from .content_type_cache import ContentTypeCache - - -""" -contentful.client -~~~~~~~~~~~~~~~~~ - -This module implements the Contentful Delivery API Client, -allowing interaction with every method present in it. - -Complete API Documentation: https://www.contentful.com/developers/docs/references/content-delivery-api/ - -:copyright: (c) 2016 by Contentful GmbH. -:license: MIT, see LICENSE for more details. -""" - - -class Client(object): - """Constructs the API Client. - - :param space_id: Space ID of your target space. - :param access_token: API Access Token (Delivery by default, - Preview if overriding api_url). - :param api_url: (optional) URL of the Contentful Target API, - defaults to Delivery API (can be overriden for Preview API). - :param api_version: (optional) Target version of the Contentful API. - :param default_locale: (optional) Default Locale for your Space, - defaults to 'en-US'. - :param environment: (optional) Default Environment for client, defaults - to 'master'. - :param https: (optional) Boolean determining wether to use https - or http, defaults to True. - :param authorization_as_header: (optional) Boolean determining wether - to send access_token through a header or via GET params, - defaults to True. 
- :param raw_mode: (optional) Boolean determining wether to process the - response or return it raw after each API call, defaults to False. - :param gzip_encoded: (optional) Boolean determining wether to accept - gzip encoded results, defaults to True. - :param raise_errors: (optional) Boolean determining wether to raise - an exception on requests that aren't successful, defaults to True. - :param content_type_cache: (optional) Boolean determining wether to - store a Cache of the Content Types in order to properly coerce - Entry fields, defaults to True. - :param reuse_entries: (optional) Boolean determining wether to reuse - hydrated Entry and Asset objects within the same request when possible. - Defaults to False - :param timeout_s: (optional) Max time allowed for each API call, in seconds. - Defaults to 1s. - :param proxy_host: (optional) URL for Proxy, defaults to None. - :param proxy_port: (optional) Port for Proxy, defaults to None. - :param proxy_username: (optional) Username for Proxy, defaults to None. - :param proxy_password: (optional) Password for Proxy, defaults to None. - :param max_rate_limit_retries: (optional) Maximum amount of retries - after RateLimitError, defaults to 1. - :param max_rate_limit_wait: (optional) Timeout (in seconds) for waiting - for retry after RateLimitError, defaults to 60. - :param max_include_resolution_depth: (optional) Maximum include resolution - level for Resources, defaults to 20 (max include level * 2). - :param application_name: (optional) User application name, defaults to None. - :param application_version: (optional) User application version, defaults to None. - :param integration_name: (optional) Integration name, defaults to None. - :param integration_version: (optional) Integration version, defaults to None. - :return: :class:`Client ` object. 
- :rtype: contentful.Client - - Usage: - - >>> import contentful - >>> client = contentful.Client('cfexampleapi', 'b4c0n73n7fu1') - - """ - - def __init__( - self, - space_id, - access_token, - api_url='cdn.contentful.com', - api_version=1, - default_locale='en-US', - environment='master', - https=True, - authorization_as_header=True, - raw_mode=False, - gzip_encoded=True, - raise_errors=True, - content_type_cache=True, - reuse_entries=False, - timeout_s=1, - proxy_host=None, - proxy_port=None, - proxy_username=None, - proxy_password=None, - max_rate_limit_retries=1, - max_rate_limit_wait=60, - max_include_resolution_depth=20, - application_name=None, - application_version=None, - integration_name=None, - integration_version=None): - self.space_id = space_id - self.access_token = access_token - self.api_url = api_url - self.api_version = api_version - self.default_locale = default_locale - self.environment = environment - self.https = https - self.authorization_as_header = authorization_as_header - self.raw_mode = raw_mode - self.gzip_encoded = gzip_encoded - self.raise_errors = raise_errors - self.content_type_cache = content_type_cache - self.reuse_entries = reuse_entries - self.timeout_s = timeout_s - self.proxy_host = proxy_host - self.proxy_port = proxy_port - self.proxy_username = proxy_username - self.proxy_password = proxy_password - self.max_rate_limit_retries = max_rate_limit_retries - self.max_rate_limit_wait = max_rate_limit_wait - self.max_include_resolution_depth = max_include_resolution_depth - self.application_name = application_name - self.application_version = application_version - self.integration_name = integration_name - self.integration_version = integration_version - - self._validate_configuration() - if self.content_type_cache: - self._cache_content_types() - - def space(self, query=None): - """Fetches the current Space. - - API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/spaces/get-a-space - - :param query: (optional) Dict with API options. - :return: :class:`Space ` object. - :rtype: contentful.space.Space - - Usage: - - >>> space = client.space() - - """ - - return self._get('', query) - - def content_type(self, content_type_id, query=None): - """Fetches a Content Type by ID. - - API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/content-types/content-type/get-a-single-content-type - - :param content_type_id: The ID of the target Content Type. - :param query: (optional) Dict with API options. - :return: :class:`ContentType ` object. - :rtype: contentful.content_type.ContentType - - Usage: - >>> cat_content_type = client.content_type('cat') - - """ - - return self._get( - self.environment_url( - '/content_types/{0}'.format(content_type_id) - ), - query - ) - - def content_types(self, query=None): - """Fetches all Content Types from the Space. - - API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/content-types/content-model/get-the-content-model-of-a-space - - :param query: (optional) Dict with API options. - :return: List of :class:`ContentType ` objects. - :rtype: List of contentful.content_type.ContentType - - Usage: - >>> content_types = client.content_types() - [, - , - , - ] - """ - - return self._get( - self.environment_url('/content_types'), - query - ) - - def entry(self, entry_id, query=None): - """Fetches an Entry by ID. 
- - API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/entries/entry/get-a-single-entry - - :param entry_id: The ID of the target Entry. - :param query: (optional) Dict with API options. - :return: :class:`Entry ` object. - :rtype: contentful.entry.Entry - - Usage: - >>> nyancat_entry = client.entry('nyancat') - - """ - - if query is None: - query = {} - self._normalize_select(query) - - try: - query.update({'sys.id': entry_id}) - response = self._get( - self.environment_url('/entries'), - query - ) - - if self.raw_mode: - return response - return response[0] - except IndexError: - raise EntryNotFoundError( - "Entry not found for ID: '{0}'".format(entry_id) - ) - - def entries(self, query=None): - """Fetches all Entries from the Space (up to the set limit, can be modified in `query`). - - API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/entries/entries-collection/get-all-entries-of-a-space - - :param query: (optional) Dict with API options. - :return: List of :class:`Entry ` objects. - :rtype: List of contentful.entry.Entry - - Usage: - >>> entries = client.entries() - [, - , - , - , - , - , - , - , - , - ] - """ - - if query is None: - query = {} - self._normalize_select(query) - - return self._get( - self.environment_url('/entries'), - query - ) - - def asset(self, asset_id, query=None): - """Fetches an Asset by ID. - - API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/assets/asset/get-a-single-asset - - :param asset_id: The ID of the target Asset. - :param query: (optional) Dict with API options. - :return: :class:`Asset ` object. - :rtype: contentful.asset.Asset - - Usage: - >>> nyancat_asset = client.asset('nyancat') - - """ - - return self._get( - self.environment_url( - '/assets/{0}'.format(asset_id) - ), - query - ) - - def assets(self, query=None): - """Fetches all Assets from the Space (up to the set limit, can be modified in `query`). - - API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/assets/assets-collection/get-all-assets-of-a-space - - :param query: (optional) Dict with API options. - :return: List of :class:`Asset ` objects. - :rtype: List of contentful.asset.Asset - - Usage: - >>> assets = client.assets() - [, - , - , - ] - """ - - if query is None: - query = {} - self._normalize_select(query) - - return self._get( - self.environment_url('/assets'), - query - ) - - def locales(self, query=None): - """Fetches all Locales from the Environment (up to the set limit, can be modified in `query`). - - # TODO: fix url - API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/assets/assets-collection/get-all-assets-of-a-space - - :param query: (optional) Dict with API options. - :return: List of :class:`Locale ` objects. - :rtype: List of contentful.locale.Locale - - Usage: - >>> locales = client.locales() - [] - """ - - if query is None: - query = {} - - return self._get( - self.environment_url('/locales'), - query - ) - - def sync(self, query=None): - """Fetches content from the Sync API. - - API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/synchronization/initial-synchronization/query-entries - - :param query: (optional) Dict with API options. - :return: :class:`SyncPage ` object. 
- :rtype: contentful.sync_page.SyncPage - - Usage: - >>> sync_page = client.sync({'initial': True}) - - """ - - if query is None: - query = {} - self._normalize_sync(query) - - return self._get( - self.environment_url('/sync'), - query - ) - - def environment_url(self, url): - """Formats the URL with the environment.""" - - return "/environments/{0}{1}".format( - self.environment, - url - ) - - def _normalize_select(self, query): - """ - If the query contains the :select operator, we enforce :sys properties. - The SDK requires sys.type to function properly, but as other of our - SDKs require more parts of the :sys properties, we decided that every - SDK should include the complete :sys block to provide consistency - accross our SDKs. - """ - - if 'select' not in query: - return - - if isinstance( - query['select'], - string_class()): - query['select'] = [s.strip() for s in query['select'].split(',')] - - query['select'] = [s for s - in query['select'] - if not s.startswith('sys.')] - - if 'sys' not in query['select']: - query['select'].append('sys') - - def _normalize_sync(self, query): - """ - Booleans are not properly serialized for GET params, - therefore we enforce it to a truthy value. - """ - - if 'initial' in query: - query['initial'] = 'true' - - def _validate_configuration(self): - """ - Validates that required parameters are present. - """ - - if not self.space_id: - raise ConfigurationException( - 'You will need to initialize a client with a Space ID' - ) - if not self.access_token: - raise ConfigurationException( - 'You will need to initialize a client with an Access Token' - ) - if not self.api_url: - raise ConfigurationException( - 'The client configuration needs to contain an API URL' - ) - if not self.default_locale: - raise ConfigurationException( - 'The client configuration needs to contain a Default Locale' - ) - if not self.api_version or self.api_version < 1: - raise ConfigurationException( - 'The API Version must be a positive number' - ) - - def _cache_content_types(self): - """ - Updates the Content Type Cache. - """ - - ContentTypeCache.update_cache(self) - - def _contentful_user_agent(self): - """ - Sets the X-Contentful-User-Agent header. - """ - header = {} - from . import __version__ - header['sdk'] = { - 'name': 'contentful.py', - 'version': __version__ - } - header['app'] = { - 'name': self.application_name, - 'version': self.application_version - } - header['integration'] = { - 'name': self.integration_name, - 'version': self.integration_version - } - header['platform'] = { - 'name': 'python', - 'version': platform.python_version() - } - - os_name = platform.system() - if os_name == 'Darwin': - os_name = 'macOS' - elif not os_name or os_name == 'Java': - os_name = None - elif os_name and os_name not in ['macOS', 'Windows']: - os_name = 'Linux' - header['os'] = { - 'name': os_name, - 'version': platform.release() - } - - def format_header(key, values): - header = "{0} {1}".format(key, values['name']) - if values['version'] is not None: - header = "{0}/{1}".format(header, values['version']) - return "{0};".format(header) - - result = [] - for k, values in header.items(): - if not values['name']: - continue - result.append(format_header(k, values)) - - return ' '.join(result) - - def _request_headers(self): - """ - Sets the default Request Headers. 
- """ - - headers = { - 'X-Contentful-User-Agent': self._contentful_user_agent(), - 'Content-Type': 'application/vnd.contentful.delivery.v{0}+json'.format( # noqa: E501 - self.api_version - ) - } - - if self.authorization_as_header: - headers['Authorization'] = 'Bearer {0}'.format(self.access_token) - - headers['Accept-Encoding'] = 'gzip' if self.gzip_encoded else 'identity' - - return headers - - def _url(self, url): - """ - Creates the Request URL. - """ - - protocol = 'https' if self.https else 'http' - return '{0}://{1}/spaces/{2}{3}'.format( - protocol, - self.api_url, - self.space_id, - url - ) - - def _normalize_query(self, query): - """ - Converts Arrays in the query to comma - separaters lists for proper API handling. - """ - - for k, v in query.items(): - if isinstance(v, list): - query[k] = ','.join([str(e) for e in v]) - - def _http_get(self, url, query): - """ - Performs the HTTP GET Request. - """ - - if not self.authorization_as_header: - query.update({'access_token': self.access_token}) - - response = None - - self._normalize_query(query) - - kwargs = { - 'params': query, - 'headers': self._request_headers(), - 'timeout': self.timeout_s - } - - if self._has_proxy(): - kwargs['proxies'] = self._proxy_parameters() - - response = requests.get( - self._url(url), - **kwargs - ) - - if response.status_code == 429: - raise RateLimitExceededError(response) - - return response - - def _get(self, url, query=None): - """ - Wrapper for the HTTP Request, - Rate Limit Backoff is handled here, - Responses are Processed with ResourceBuilder. - """ - - if query is None: - query = {} - - response = retry_request(self)(self._http_get)(url, query=query) - - if self.raw_mode: - return response - - if response.status_code != 200: - error = get_error(response) - if self.raise_errors: - raise error - return error - - localized = query.get('locale', '') == '*' - return ResourceBuilder( - self.default_locale, - localized, - response.json(), - max_depth=self.max_include_resolution_depth, - reuse_entries=self.reuse_entries - ).build() - - def _has_proxy(self): - """ - Checks if a Proxy was set. - """ - - return self.proxy_host - - def _proxy_parameters(self): - """ - Builds Proxy parameters Dict from - client options. - """ - - proxy_protocol = '' - if self.proxy_host.startswith('https'): - proxy_protocol = 'https' - else: - proxy_protocol = 'http' - - proxy = '{0}://'.format(proxy_protocol) - if self.proxy_username and self.proxy_password: - proxy += '{0}:{1}@'.format(self.proxy_username, self.proxy_password) - - proxy += sub(r'https?(://)?', '', self.proxy_host) - - if self.proxy_port: - proxy += ':{0}'.format(self.proxy_port) - - return { - 'http': proxy, - 'https': proxy - } - - def __repr__(self): - return ''.format( # noqa: E501 - self.space_id, - self.access_token, - self.default_locale - ) diff --git a/contentful/client/__init__.py b/contentful/client/__init__.py new file mode 100644 index 0000000..0572c2b --- /dev/null +++ b/contentful/client/__init__.py @@ -0,0 +1,17 @@ +# flake8: noqa +from contentful.client.transport.errors import * +from contentful.client.base import * +from contentful.client.impl import * + +""" +contentful.client +~~~~~~~~~~~~~~~~~ + +This module implements the Contentful Delivery API Client, +allowing interaction with every method present in it. + +Complete API Documentation: https://www.contentful.com/developers/docs/references/content-delivery-api/ + +:copyright: (c) 2016 by Contentful GmbH. +:license: MIT, see LICENSE for more details. 
+""" diff --git a/contentful/client/base.py b/contentful/client/base.py new file mode 100644 index 0000000..f517553 --- /dev/null +++ b/contentful/client/base.py @@ -0,0 +1,379 @@ +from __future__ import annotations + +import platform +import re +from typing import TypedDict, TYPE_CHECKING, ClassVar + +from contentful import resource_builder, __version__ +from contentful.client import queries +from contentful.client.transport import abstract + +if TYPE_CHECKING: + from typing import TypeAlias, Any + from contentful.resource import Resource + + QueryT: TypeAlias = dict[str, Any] + + +""" +contentful.client.base +~~~~~~~~~~~~~~~~~~~~~~ + +This module provides the base implementation for the Contentful API Client. + +Complete API Documentation: https://www.contentful.com/developers/docs/references/content-delivery-api/ + +:copyright: (c) 2016 by Contentful GmbH. +:license: MIT, see LICENSE for more details. +""" + +__all__ = ("BaseClient", "ConfigurationException") + + +class BaseClient: + """Constructs the API Client. + + :param space_id: Space ID of your target space. + :param access_token: API Access Token (Delivery by default, + Preview if overriding api_url). + :param api_url: (optional) URL of the Contentful Target API, + defaults to Delivery API (can be overriden for Preview API). + :param api_version: (optional) Target version of the Contentful API. + :param default_locale: (optional) Default Locale for your Space, + defaults to 'en-US'. + :param environment: (optional) Default Environment for client, defaults + to 'master'. + :param https: (optional) Boolean determining wether to use https + or http, defaults to True. + :param authorization_as_header: (optional) Boolean determining wether + to send access_token through a header or via GET params, + defaults to True. + :param raw_mode: (optional) Boolean determining wether to process the + response or return it raw after each API call, defaults to False. + :param gzip_encoded: (optional) Boolean determining wether to accept + gzip encoded results, defaults to True. + :param raise_errors: (optional) Boolean determining wether to raise + an exception on requests that aren't successful, defaults to True. + :param content_type_cache: (optional) Boolean determining wether to + store a Cache of the Content Types in order to properly coerce + Entry fields, defaults to True. + :param reuse_entries: (optional) Boolean determining wether to reuse + hydrated Entry and Asset objects within the same request when possible. + Defaults to False + :param timeout_s: (optional) Max time allowed for each API call, in seconds. + Defaults to 1s. + :param proxy_host: (optional) URL for Proxy, defaults to None. + :param proxy_port: (optional) Port for Proxy, defaults to None. + :param proxy_username: (optional) Username for Proxy, defaults to None. + :param proxy_password: (optional) Password for Proxy, defaults to None. + :param max_rate_limit_retries: (optional) Maximum amount of retries + after RateLimitError, defaults to 1. + :param max_rate_limit_wait: (optional) Timeout (in seconds) for waiting + for retry after RateLimitError, defaults to 60. + :param max_include_resolution_depth: (optional) Maximum include resolution + level for Resources, defaults to 20 (max include level * 2). + :param application_name: (optional) User application name, defaults to None. + :param application_version: (optional) User application version, defaults to None. + :param integration_name: (optional) Integration name, defaults to None. 
+ :param integration_version: (optional) Integration version, defaults to None. + :return: :class:`Client ` object. + :rtype: contentful.Client + + Usage: + + >>> import contentful + >>> client = contentful.Client('cfexampleapi', 'b4c0n73n7fu1') + + """ + + transport_cls: ClassVar[type[abstract.AbstractTransport]] + + def __init__( + self, + space_id: str, + access_token: str, + api_url: str = "cdn.contentful.com", + api_version: int = 1, + default_locale: str = "en-US", + environment: str = "master", + https: bool = True, + authorization_as_header: bool = True, + raw_mode: bool = False, + gzip_encoded: bool = True, + raise_errors: bool = True, + content_type_cache: bool = True, + reuse_entries: bool = False, + timeout_s: int = 1, + proxy_host: str | None = None, + proxy_port: str | None = None, + proxy_username: str | None = None, + proxy_password: str | None = None, + max_rate_limit_retries: str | None = 1, + max_rate_limit_wait: int = 60, + max_include_resolution_depth: int = 20, + application_name: str | None = None, + application_version: str | None = None, + integration_name: str | None = None, + integration_version: str | None = None, + ): + self.space_id = space_id + self.access_token = access_token + self.api_url = api_url + self.api_version = api_version + self.default_locale = default_locale + self.environment = environment + self.https = https + self.authorization_as_header = authorization_as_header + self.raw_mode = raw_mode + self.gzip_encoded = gzip_encoded + self.raise_errors = raise_errors + self.content_type_cache = content_type_cache + self.reuse_entries = reuse_entries + self.timeout_s = timeout_s + self.proxy_host = proxy_host + self.proxy_port = proxy_port + self.proxy_username = proxy_username + self.proxy_password = proxy_password + self.max_rate_limit_retries = max_rate_limit_retries + self.max_rate_limit_wait = max_rate_limit_wait + self.max_include_resolution_depth = max_include_resolution_depth + self.application_name = application_name + self.application_version = application_version + self.integration_name = integration_name + self.integration_version = integration_version + self._validate_configuration() + self._headers = None + self._client_info = None + self._proxy_info = None + self._transport = None + + def initialize(self): + raise NotImplementedError() + + def _get(self, url: str, query: QueryT | None = None): + """ + Wrapper for the HTTP Request, + Rate Limit Backoff is handled here, + Responses are Processed with ResourceBuilder. + """ + raise NotImplementedError() + + def _cache_content_types(self): + """ + Updates the Content Type Cache. 
+ """ + + raise NotImplementedError() + + @property + def client_info(self) -> ClientInfo: + if self._client_info is None: + self._client_info = self._get_client_info() + + return self._client_info + + @property + def headers(self) -> dict[str, str]: + if self._headers is None: + self._headers = self._request_headers() + + return self._headers + + @property + def proxy_info(self) -> abstract.ProxyInfo: + if self._proxy_info is None: + self._proxy_info = self._proxy_parameters() + + return self._proxy_info + + @property + def transport(self) -> abstract.AbstractTransport: + if self._transport is None: + self._transport = self._get_transport() + + return self._transport + + def qualified_url(self) -> str: + scheme = "https" if self.https else "http" + hostname = self.api_url + if hostname.startswith("http"): + scheme = "" + + path = f"/spaces/{self.space_id}/environments/{self.environment}/" + url = f"{scheme}://{hostname}{path}" + return url + + def _get_transport(self) -> abstract.AbstractTransport: + base_url = self.qualified_url() + transport = self.transport_cls( + base_url=base_url, + timeout_s=self.timeout_s, + proxy_info=self.proxy_info, + default_headers=self.headers, + max_retries=self.max_rate_limit_retries, + max_retry_wait_seconds=self.max_rate_limit_wait, + ) + return transport + + def _get_client_info(self) -> ClientInfo: + os_name = platform.system() + if os_name == "Darwin": + os_name = "macOS" + elif not os_name or os_name == "Java": + os_name = None + elif os_name and os_name not in ("macOS", "Windows"): + os_name = "Linux" + + return ClientInfo( + sdk=VersionInfo(name="contentful.py", version=__version__), + platform=VersionInfo(name="python", version=platform.python_version()), + app=VersionInfo( + name=self.application_name, version=self.application_version + ), + integration=VersionInfo( + name=self.integration_name, version=self.integration_version + ), + os=VersionInfo(name=os_name, version=platform.release()), + ) + + def _validate_configuration(self): + """ + Validates that required parameters are present. + """ + + if not self.space_id: + raise ConfigurationException( + "You will need to initialize a client with a Space ID" + ) + if not self.access_token: + raise ConfigurationException( + "You will need to initialize a client with an Access Token" + ) + if not self.api_url: + raise ConfigurationException( + "The client configuration needs to contain an API URL" + ) + if not self.default_locale: + raise ConfigurationException( + "The client configuration needs to contain a Default Locale" + ) + if not self.api_version or self.api_version < 1: + raise ConfigurationException("The API Version must be a positive number") + + def _contentful_user_agent(self): + """ + Sets the X-Contentful-User-Agent header. + """ + + header_encoded = "" + for key, values in self.client_info.items(): + name = f"{key} {values['name']}" + if values["version"]: + name = f"{name}/{values['version']}" + header_encoded += f"{name}; " + + return header_encoded.rstrip(" ") + + def _request_headers(self): + """ + Sets the default Request Headers. + """ + + headers = { + "X-Contentful-User-Agent": self._contentful_user_agent(), + "Content-Type": f"application/vnd.contentful.delivery.v{self.api_version}+json", + "Accept-Encoding": "gzip" if self.gzip_encoded else "identity", + } + + if self.authorization_as_header: + headers["Authorization"] = f"Bearer {self.access_token}" + + return headers + + def _url(self, url): + """ + Creates the Request URL. 
+ """ + + protocol = "https" if self.https else "http" + return "{0}://{1}/spaces/{2}{3}".format( + protocol, self.api_url, self.space_id, url + ) + + def _format_params(self, query: QueryT | None) -> RequestParams: + query = query or {} + params = queries.normalize(**query) + if not self.authorization_as_header: + params["access_token"] = self.access_token + + return params + + def _format_response(self, response: dict[str, Any], localized: bool) -> Resource: + builder = resource_builder.ResourceBuilder( + default_locale=self.default_locale, + localized=localized, + json=response, + max_depth=self.max_include_resolution_depth, + reuse_entries=self.reuse_entries, + ) + return builder.build() + + def _has_proxy(self) -> bool: + """ + Checks if a Proxy was set. + """ + + return self.proxy_host is not None + + def _proxy_parameters(self) -> abstract.ProxyInfo: + """ + Builds Proxy parameters Dict from + client options. + """ + if not self._has_proxy(): + return {"http": None, "https": None} + + proxy_protocol = "https" if self.proxy_host.startswith("https") else "http" + proxy = f"{proxy_protocol}://" + if self.proxy_username and self.proxy_password: + proxy += f"{self.proxy_username}:{self.proxy_password}@" + + proxy += re.sub(r"https?(://)?", "", self.proxy_host) + + if self.proxy_port: + proxy += f":{self.proxy_port}" + + return {"http": proxy, "https": proxy} + + def __repr__(self): + return ( + f"<{self.__class__.__name__} " + f"space_id={self.space_id!r} " + f"access_token={self.access_token!r} " + f"default_locale={self.default_locale!r}>" + ) + + +class RequestParams(TypedDict): + params: QueryT + + +class ClientInfo(TypedDict): + sdk: VersionInfo + platform: VersionInfo + app: VersionInfo + integration: VersionInfo + os: VersionInfo + + +class VersionInfo(TypedDict): + name: str + version: str | int | None + + +class ConfigurationException(Exception): + """Configuration Error Class""" + + pass diff --git a/contentful/client/impl.py b/contentful/client/impl.py new file mode 100644 index 0000000..48d1641 --- /dev/null +++ b/contentful/client/impl.py @@ -0,0 +1,416 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from contentful.client import base +from contentful.client.transport import aio, sio, errors + +if TYPE_CHECKING: + from contentful import ( + Asset, + ContentType, + Entry, + Locale, + Space, + ) + from contentful.sync_page import SyncPage + from contentful.client.base import QueryT + +""" +contentful.client.impl +~~~~~~~~~~~~~~~~~~~~~~ + +This module implements the Contentful Delivery API Client, +allowing interaction with every method present in it. + +Complete API Documentation: https://www.contentful.com/developers/docs/references/content-delivery-api/ + +:copyright: (c) 2016 by Contentful GmbH. +:license: MIT, see LICENSE for more details. +""" + +__all__ = ("Client", "AsyncClient") + + +class Client(base.BaseClient): + transport_cls = sio.SyncTransport + + def space(self, query: QueryT | None = None) -> Space: + """Fetches the current Space. + + API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/spaces/get-a-space + + :param query: (optional) Dict with API options. + :return: :class:`Space ` object. + :rtype: contentful.space.Space + + Usage: + + >>> space = client.space() + + """ + + return self._get("", query) + + def content_type( + self, content_type_id: str, query: QueryT | None = None + ) -> ContentType: + """Fetches a Content Type by ID. 
+ + API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/content-types/content-type/get-a-single-content-type + + :param content_type_id: The ID of the target Content Type. + :param query: (optional) Dict with API options. + :return: :class:`ContentType ` object. + :rtype: contentful.content_type.ContentType + + Usage: + >>> cat_content_type = client.content_type('cat') + + """ + + return self._get(f"content_types/{content_type_id}", query) + + def content_types(self, query: QueryT | None = None) -> list[ContentType]: + """Fetches all Content Types from the Space. + + API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/content-types/content-model/get-the-content-model-of-a-space + + :param query: (optional) Dict with API options. + :return: List of :class:`ContentType ` objects. + :rtype: List of contentful.content_type.ContentType + + Usage: + >>> content_types = client.content_types() + [, + , + , + ] + """ + + return self._get("content_types", query) + + def entry(self, entry_id: str, query: QueryT | None = None) -> Entry: + """Fetches an Entry by ID. + + API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/entries/entry/get-a-single-entry + + :param entry_id: The ID of the target Entry. + :param query: (optional) Dict with API options. + :return: :class:`Entry ` object. + :rtype: contentful.entry.Entry + + Usage: + >>> nyancat_entry = client.entry('nyancat') + + """ + if query is None: + query = {} + + query = {**query, "sys.id": entry_id} + response = self._get("entries", query) + if self.raw_mode: + return response + return self._entry_callback(response, entry_id=entry_id) + + @staticmethod + def _entry_callback(entries: list[Entry], *, entry_id: str) -> Entry: + if not entries: + raise errors.EntryNotFoundError(f"Entry not found for ID: {entry_id!r}") + return entries[0] + + def entries(self, query: QueryT | None = None) -> list[Entry]: + """Fetches all Entries from the Space (up to the set limit, can be modified in `query`). + + API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/entries/entries-collection/get-all-entries-of-a-space + + :param query: (optional) Dict with API options. + :return: List of :class:`Entry ` objects. + :rtype: List of contentful.entry.Entry + + Usage: + >>> entries = client.entries() + [, + , + , + , + , + , + , + , + , + ] + """ + return self._get("entries", query) + + def asset(self, asset_id: str, query: QueryT | None = None) -> Asset: + """Fetches an Asset by ID. + + API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/assets/asset/get-a-single-asset + + :param asset_id: The ID of the target Asset. + :param query: (optional) Dict with API options. + :return: :class:`Asset ` object. + :rtype: contentful.asset.Asset + + Usage: + >>> nyancat_asset = client.asset('nyancat') + + """ + return self._get(f"assets/{asset_id}", query) + + def assets(self, query: QueryT | None = None) -> list[Asset]: + """Fetches all Assets from the Space (up to the set limit, can be modified in `query`). + + API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/assets/assets-collection/get-all-assets-of-a-space + + :param query: (optional) Dict with API options. + :return: List of :class:`Asset ` objects. 
+ :rtype: List of contentful.asset.Asset + + Usage: + >>> assets = client.assets() + [, + , + , + ] + """ + return self._get("assets", query) + + def locales(self, query: QueryT | None = None) -> list[Locale]: + """Fetches all Locales from the Environment (up to the set limit, can be modified in `query`). + + # TODO: fix url + API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/assets/assets-collection/get-all-assets-of-a-space + + :param query: (optional) Dict with API options. + :return: List of :class:`Locale ` objects. + :rtype: List of contentful.locale.Locale + + Usage: + >>> locales = client.locales() + [] + """ + + return self._get("locales", query) + + def sync(self, query: QueryT | None = None) -> SyncPage: + """Fetches content from the Sync API. + + API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/synchronization/initial-synchronization/query-entries + + :param query: (optional) Dict with API options. + :return: :class:`SyncPage ` object. + :rtype: contentful.sync_page.SyncPage + + Usage: + >>> sync_page = client.sync({'initial': True}) + + """ + return self._get("sync", query) + + def initialize(self): + self.transport.initialize() + if self.content_type_cache: + self._cache_content_types() + + def _cache_content_types(self): ... + + def _get(self, url: str, query: QueryT | None = None): + params = self._format_params(query) + response = self.transport.get(url, params=params, raw_mode=self.raw_mode) + if self.raw_mode: + return response + return self._format_response(response) + + +class AsyncClient(base.BaseClient): + transport_cls = aio.AsyncTransport + + async def initialize(self): + await self.transport.initialize() + if self.content_type_cache: + await self._cache_content_types() + + async def _cache_content_types(self): ... + + async def _get(self, url: str, query: QueryT | None = None): + params = self._format_params(query) + response = await self.transport.get(url, params=params, raw_mode=self.raw_mode) + if self.raw_mode: + return response + return self._format_response(response) + + async def space(self, query: QueryT | None = None) -> Space: + """Fetches the current Space. + + API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/spaces/get-a-space + + :param query: (optional) Dict with API options. + :return: :class:`Space ` object. + :rtype: contentful.space.Space + + Usage: + + >>> space = await client.space() + + """ + + return await self._get("", query) + + async def content_type( + self, content_type_id: str, query: QueryT | None = None + ) -> ContentType: + """Fetches a Content Type by ID. + + API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/content-types/content-type/get-a-single-content-type + + :param content_type_id: The ID of the target Content Type. + :param query: (optional) Dict with API options. + :return: :class:`ContentType ` object. + :rtype: contentful.content_type.ContentType + + Usage: + >>> cat_content_type = await client.content_type('cat') + + """ + + return await self._get(f"content_types/{content_type_id}", query) + + async def content_types(self, query: QueryT | None = None) -> list[ContentType]: + """Fetches all Content Types from the Space. 
+ + API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/content-types/content-model/get-the-content-model-of-a-space + + :param query: (optional) Dict with API options. + :return: List of :class:`ContentType ` objects. + :rtype: List of contentful.content_type.ContentType + + Usage: + >>> content_types = await client.content_types() + [, + , + , + ] + """ + + return await self._get("content_types", query) + + async def entry(self, entry_id: str, query: QueryT | None = None) -> Entry: + """Fetches an Entry by ID. + + API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/entries/entry/get-a-single-entry + + :param entry_id: The ID of the target Entry. + :param query: (optional) Dict with API options. + :return: :class:`Entry ` object. + :rtype: contentful.entry.Entry + + Usage: + >>> nyancat_entry = await client.entry('nyancat') + + """ + if query is None: + query = {} + + query = {**query, "sys.id": entry_id} + response = await self._get("entries", query) + if self.raw_mode: + return response + return self._entry_callback(response, entry_id=entry_id) + + @staticmethod + def _entry_callback(entries: list[Entry], *, entry_id: str) -> Entry: + if not entries: + raise errors.EntryNotFoundError(f"Entry not found for ID: {entry_id!r}") + return entries[0] + + async def entries(self, query: QueryT | None = None) -> list[Entry]: + """Fetches all Entries from the Space (up to the set limit, can be modified in `query`). + + API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/entries/entries-collection/get-all-entries-of-a-space + + :param query: (optional) Dict with API options. + :return: List of :class:`Entry ` objects. + :rtype: List of contentful.entry.Entry + + Usage: + >>> entries = await client.entries() + [, + , + , + , + , + , + , + , + , + ] + """ + return await self._get("entries", query) + + async def asset(self, asset_id: str, query: QueryT | None = None) -> Asset: + """Fetches an Asset by ID. + + API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/assets/asset/get-a-single-asset + + :param asset_id: The ID of the target Asset. + :param query: (optional) Dict with API options. + :return: :class:`Asset ` object. + :rtype: contentful.asset.Asset + + Usage: + >>> nyancat_asset = await client.asset('nyancat') + + """ + return await self._get(f"assets/{asset_id}", query) + + async def assets(self, query: QueryT | None = None) -> list[Asset]: + """Fetches all Assets from the Space (up to the set limit, can be modified in `query`). + + API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/assets/assets-collection/get-all-assets-of-a-space + + :param query: (optional) Dict with API options. + :return: List of :class:`Asset ` objects. + :rtype: List of contentful.asset.Asset + + Usage: + >>> assets = await client.assets() + [, + , + , + ] + """ + return await self._get("assets", query) + + async def locales(self, query: QueryT | None = None) -> list[Locale]: + """Fetches all Locales from the Environment (up to the set limit, can be modified in `query`). + + # TODO: fix url + API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/assets/assets-collection/get-all-assets-of-a-space + + :param query: (optional) Dict with API options. + :return: List of :class:`Locale ` objects. 
+ :rtype: List of contentful.locale.Locale + + Usage: + >>> locales = await client.locales() + [] + """ + + return await self._get("locales", query) + + async def sync(self, query: QueryT | None = None) -> SyncPage: + """Fetches content from the Sync API. + + API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/synchronization/initial-synchronization/query-entries + + :param query: (optional) Dict with API options. + :return: :class:`SyncPage ` object. + :rtype: contentful.sync_page.SyncPage + + Usage: + >>> sync_page = await client.sync({'initial': True}) + + """ + return await self._get("sync", query) diff --git a/contentful/client/queries.py b/contentful/client/queries.py new file mode 100644 index 0000000..4ea987e --- /dev/null +++ b/contentful/client/queries.py @@ -0,0 +1,92 @@ +from __future__ import annotations + +from typing import Dict, Any, Iterator, Callable + +""" +contentful.client.queries +~~~~~~~~~~~~~~~~~~~~~~~~~ + +This module implements normalization for query parameters. + +Complete API Documentation: https://www.contentful.com/developers/docs/references/content-delivery-api/ + +:copyright: (c) 2016 by Contentful GmbH. +:license: MIT, see LICENSE for more details. +""" + + +__all__ = ("normalize",) + + +def normalize(**query: Any) -> NormalizedQueryT: + if "initial" in query: + query["initial"] = normalize_nonstring(query["initial"]) + + if "select" in query: + query["select"] = normalize_select(query["select"]) + + normalized = { + k: ( + ",".join(iternormalize(*v)) + if isinstance(v, (list, tuple, set, frozenset)) + else v + ) + for k, v in query.items() + } + return normalized + + +def normalize_select(select: str | list[str]) -> str: + """ + + If the query contains the :select operator, we enforce :sys properties. + The SDK requires sys.type to function properly, but as other of our + SDKs require more parts of the :sys properties, we decided that every + SDK should include the complete :sys block to provide consistency + accross our SDKs. + """ + + q: list[str] | str = select + if isinstance(q, str): + q = q.split(",") + + filtered = ",".join(iternormalize(*q, filter=_filter_sys)) + normalized = ",".join(filtered, "sys") + return normalized + + +def _filter_sys(string: str) -> bool: + if string.startswith("sys.") or string == "sys": + return False + return True + + +def iternormalize( + *items: list[Any], filter: Callable[[str], bool] | None = None +) -> Iterator[str]: + gen = ( + # Always remove whitespace from strings, + # Always normalize non-strings into strings. + normalize_string(item) if isinstance(item, str) else normalize_nonstring(item) + for item in items + ) + if filter is not None: + yield from (it for it in gen if filter(it)) + return + yield from gen + + +def normalize_string(item: str) -> str: + """Remove whitespace padding from strings.""" + + return item.strip() + + +def normalize_nonstring(item: Any) -> str: + """Convert int, bool to string and lowercase bools.""" + + # add a .strip() to be consistent, this is a fall-through. 
+ return str(item).lower().strip() + + +NormalizedQueryT = Dict[str, str] diff --git a/contentful/client/transport/__init__.py b/contentful/client/transport/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/contentful/client/transport/abstract.py b/contentful/client/transport/abstract.py new file mode 100644 index 0000000..6655254 --- /dev/null +++ b/contentful/client/transport/abstract.py @@ -0,0 +1,221 @@ +from __future__ import annotations + +import abc +import types +from typing import ( + Generic, + Protocol, + TypeVar, + overload, + Any, + Literal, + TypedDict, + Mapping, + ClassVar, +) + +import orjson + +from contentful.client.transport import errors, retry + +""" +contentful.client.transport.abstract +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This module provides the abstract interface for communicating with the Contentful API over HTTP. + +Complete API Documentation: https://www.contentful.com/developers/docs/references/content-delivery-api/ + +:copyright: (c) 2016 by Contentful GmbH. +:license: MIT, see LICENSE for more details. +""" + + +SessionT = TypeVar("SessionT") +ResponseT = TypeVar("ResponseT") + + +class AbstractTransport(abc.ABC, Generic[SessionT, ResponseT]): + retry_cls: ClassVar[retry.BaseRetry] + + def __init__( + self, + *, + base_url: str, + timeout_s: int, + proxy_info: ProxyInfo | None = None, + default_headers: dict[str, str] | None = None, + max_retries: int = 1, + max_retry_wait_seconds: int = 60, + ) -> None: + self.base_url = base_url + self.timeout_s = timeout_s + self.proxy_info = proxy_info + self.default_headers = default_headers + self.max_retries = max_retries + self.max_retry_wait_seconds = max_retry_wait_seconds + self.retry = self.retry_cls( + max_retries=max_retries, max_wait_seconds=max_retry_wait_seconds + ) + self._session = None + + @abc.abstractmethod + def initialize(self) -> SessionT: ... + + @abc.abstractmethod + def close(self) -> None: ... + + @overload + def get( + self, + url: str, + *, + query: dict[str, Any] | None = None, + session: SessionT | None = None, + raw_mode: Literal[False], + **headers: str, + ) -> dict[str, Any]: ... + + @overload + def get( + self, + url: str, + *, + query: dict[str, Any] | None = None, + session: SessionT | None = None, + raw_mode: Literal[True], + **headers: str, + ) -> ResponseT: ... + + @abc.abstractmethod + def get( + self, + url: str, + *, + query: dict[str, Any] | None = None, + session: SessionT | None = None, + raw_mode: bool = False, + **headers: str, + ) -> dict[str, Any] | ResponseT: ... + + +class AbstractAsyncTransport(AbstractTransport[SessionT], abc.ABC, Generic[SessionT]): + + @abc.abstractmethod + def session( + self, *, session: SessionT | None = None + ) -> AsyncSessionContext[SessionT]: ... + + +class AbstractSyncTransport(AbstractTransport[SessionT], abc.ABC, Generic[SessionT]): + + @abc.abstractmethod + def session( + self, *, session: SessionT | None = None + ) -> SyncSessionContext[SessionT]: ... + + +class AsyncSessionContext(Protocol[SessionT]): + async def __aenter__(self) -> SessionT: ... + + @overload + async def __aexit__(self, exc_type: None, exc_val: None, exc_tb: None): ... + + @overload + async def __aexit__( + self, + exc_type: type[BaseException], + exc_val: BaseException, + exc_tb: types.TracebackType, + ): ... + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ): ... 
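# Note: these session-context protocols are what enable session reuse across
# calls. ``get()`` accepts an existing session from the caller; otherwise the
# transport lazily creates one via ``initialize()`` and keeps it for
# subsequent requests (see the concrete ``session()`` implementations,
# e.g. in ``aio.py``).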
+ + +class SyncSessionContext(Protocol[SessionT]): + + def __enter__(self) -> SessionT: ... + + @overload + def __exit__(self, exc_type: None, exc_val: None, exc_tb: None): ... + + @overload + def __exit__( + self, + exc_type: type[BaseException], + exc_val: BaseException, + exc_tb: types.TracebackType, + ): ... + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ): ... + + +class ProxyInfo(TypedDict): + http: str | None + https: str | None + + +@overload +def parse_response( + *, + status_code: int, + reason: str | None, + content: bytes, + headers: Mapping[str, str], + raw: ResponseT, + raw_mode: Literal[True], +) -> ResponseT: ... + + +@overload +def parse_response( + *, + status_code: int, + reason: str | None, + content: bytes, + headers: Mapping[str, str], + raw: ResponseT, + raw_mode: Literal[False], +) -> dict[str, Any]: ... + + +def parse_response( + *, + status_code: int, + reason: str | None, + content: bytes, + headers: Mapping[str, str], + raw: ResponseT, + raw_mode: bool, +) -> ResponseT | dict[str, Any]: + """Parse the received response, raising an error if necessary.""" + if status_code >= 400: + err_cls = errors.get_error_for_status_code(status_code) + body = orjson.loads(content) + info = errors.ErrorResponseInfo( + status_code=status_code, + reason=reason, + headers=headers, + content=content.decode(), + body=body, + ) + raise err_cls(reason, response=info) + + if raw_mode: + # Read the data from the fd before closing the connection. + return raw + + # Don't bother with .text/.json() since we know this is JSON. + # Passing the raw bytes to orjson will be much more efficient. + body = orjson.loads(content) + return body diff --git a/contentful/client/transport/aio.py b/contentful/client/transport/aio.py new file mode 100644 index 0000000..150630f --- /dev/null +++ b/contentful/client/transport/aio.py @@ -0,0 +1,108 @@ +from __future__ import annotations + +import contextlib +from typing import AsyncIterator, Iterator, Any + +import aiohttp + +from contentful.client.transport import errors, abstract, retry + +""" +contentful.client.transport.aio +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This module provides the implementation for communicating with the Contentful API over HTTP using asynchronous IO. + +Complete API Documentation: https://www.contentful.com/developers/docs/references/content-delivery-api/ + +:copyright: (c) 2016 by Contentful GmbH. +:license: MIT, see LICENSE for more details. 
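Usage (an illustrative sketch only; the base URL shown here is an assumption
matching the shape produced by ``BaseClient.qualified_url()``):

    >>> transport = AsyncTransport(
    ...     base_url='https://cdn.contentful.com/spaces/cfexampleapi/environments/master/',
    ...     timeout_s=1,
    ... )
    >>> data = await transport.get('entries', query={'limit': 1})
    >>> await transport.close()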
+""" + + +class AsyncTransport( + abstract.AbstractAsyncTransport[aiohttp.ClientSession, aiohttp.ClientResponse] +): + retry_cls = retry.AsyncRetry + + async def initialize(self) -> aiohttp.ClientSession: + if self._session is None: + self._session = aiohttp.ClientSession( + base_url=self.base_url, + timeout=aiohttp.ClientTimeout(total=self.timeout_s), + headers=self.default_headers, + ) + + return self._session + + async def close(self) -> None: + if self._session is None: + return + try: + await self._session.close() + finally: + self._session = None + + async def get( + self, + url: str, + *, + query: dict[str, Any] | None = None, + session: aiohttp.ClientSession | None = None, + raw_mode: bool = False, + **headers: str, + ) -> dict[str, Any] | aiohttp.ClientResponse: + response = await self.retry( + self._get, query=query, session=session, raw_mode=raw_mode, **headers + ) + return response + + async def _get( + self, + url: str, + *, + query: dict[str, Any] | None = None, + session: aiohttp.ClientSession | None = None, + raw_mode: bool = False, + **headers: str, + ) -> dict[str, Any] | aiohttp.ClientResponse: + async with self.session(session=session) as sess: + response: aiohttp.ClientResponse + async with sess.get(url, params=query, headers=headers) as response: + content = await response.read() + status_code = response.status + headers = response.headers + reason = response.reason + parsed = abstract.parse_response( + status_code=status_code, + reason=reason, + content=content, + headers=headers, + raw=response, + raw_mode=raw_mode, + ) + return parsed + + @contextlib.asynccontextmanager + async def session( + self, *, session: aiohttp.ClientSession | None = None + ) -> AsyncIterator[aiohttp.ClientSession]: + with translate_async_transport_errors(): + if session is not None: + yield session + return + + session = await self.initialize() + yield session + + +@contextlib.contextmanager +def translate_async_transport_errors() -> Iterator[None]: + try: + yield + # Can't connect to the Server + except aiohttp.ClientConnectionError as e: + raise errors.TransientHTTPError(str(e)) from e + # Malformed request, etc. + except (aiohttp.ClientError, ValueError) as e: + raise errors.PermanentHTTPError(e) from e diff --git a/contentful/client/transport/errors.py b/contentful/client/transport/errors.py new file mode 100644 index 0000000..67f8f79 --- /dev/null +++ b/contentful/client/transport/errors.py @@ -0,0 +1,219 @@ +from __future__ import annotations + +import dataclasses + +""" +contentful.client.transport.errors +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This module implements the Error classes. + +API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/introduction/errors + +:copyright: (c) 2016 by Contentful GmbH. +:license: MIT, see LICENSE for more details. 
+""" + + +__all__ = ( + "get_error_for_status_code", + "HTTPError", + "PermanentHTTPError", + "BadRequestError", + "UnauthorizedError", + "AccessDeniedError", + "NotFoundError", + "EntryNotFoundError", + "TransientHTTPError", + "RateLimitExceededError", + "ServerError", + "BadGatewayError", + "ServiceUnavailableError", + "ErrorResponseInfo", +) + + +def get_error_for_status_code( + status_code, *, default: type[HTTPError] | None = None +) -> type[HTTPError]: + default = default or PermanentHTTPError + return _HTTP_STATUS_TO_ERROR_MAP.get(status_code, default) + + +class HTTPError(Exception): + response: ErrorResponseInfo + + def __init__( + self, message: str | None = None, *, response: ErrorResponseInfo = None + ): + self.response = response or ErrorResponseInfo() + message = message or self._best_available_message() + super().__init__(message) + + def _default_error_message(self) -> str: + return f"The following error was received: {self.response.content}" + + def _handle_details(self, details: list[dict] | str) -> str: + return f"{details}" + + def _has_additional_error_info(self): + return False + + def _additional_error_into(self) -> list[str]: + return [] + + def _best_available_message(self) -> str: + message = self.response.body.get("message") + details = self.response.body.get("details") + request_id = self.response.body.get("requestId") + status_str = ( + f"HTTP status code: {self.response.status_code}" + if self.response.status_code + else None + ) + message_str = f"Message: {message or self._default_error_message()}" + details_str = f"Details: {self._handle_details(details)}" if details else None + request_id_str = f"RequestId: {request_id}" if request_id else None + + messages = ( + status_str, + message_str, + details_str, + request_id_str, + *self._additional_error_into(), + ) + error_message = "\n".join(s for s in messages if s is not None) + return error_message + + +class PermanentHTTPError(HTTPError): ... + + +class BadRequestError(PermanentHTTPError): + + def _default_error_message(self) -> str: + return "The request was malformed or missing a required parameter." + + def _handle_details(self, details: list[dict | str] | str) -> str: + if isinstance(details, str): + return details + + gen = ( + s + for d in details + if (s := (d if isinstance(d, str) else d.get("details"))) is not None + ) + formatted = "\n\t".join(gen) + return formatted + + +class UnauthorizedError(PermanentHTTPError): + + def _default_error_message(self) -> str: + return "The authorization token was invalid" + + +class AccessDeniedError(PermanentHTTPError): + def _default_error_message(self) -> str: + return "The specified token does not have access to the requested resource." + + def _handle_details(self, details: dict) -> str: + return "\n\tReasons:\n\t\t{0}".format("\n\t\t".join(details["reasons"])) + + +class NotFoundError(PermanentHTTPError): + def _default_error_message(self) -> str: + return "The requested resource or endpoint could not be found" + + def _handle_details(self, details: dict | str) -> str: + if isinstance(details, str): + return details + + if "sys" in details: + resource_type = details["sys"].get("type", None) + resource_id = details["sys"].get("id", None) + else: + resource_type = details["type"] + resource_id = details.get("id", None) + + message = f"The requested {resource_type} could not be found." + if resource_id is not None: + message += f" ID: {resource_id}." + + return message + + +class EntryNotFoundError(NotFoundError): ... 
+ + +class TransientHTTPError(HTTPError): + + def reset_time(self) -> int: + return 1 + + +class RateLimitExceededError(TransientHTTPError): + + RATE_LIMIT_RESET_HEADER_KEY = "x-contentful-ratelimit-reset" + + def reset_time(self) -> int: + """Returns the reset time in seconds until next available request.""" + if not self._has_reset_time(): + return super().reset_time() + + return int(self.response.headers[self.RATE_LIMIT_RESET_HEADER_KEY]) + + def _has_reset_time(self) -> bool: + return self.RATE_LIMIT_RESET_HEADER_KEY in self.response.headers + + def _has_additional_error_info(self) -> bool: + return self._has_reset_time() + + def _additional_error_info(self) -> bool: + if not self._has_additional_error_info(): + return [] + + return [f"Time until reset (seconds): {self.reset_time()}"] + + def _default_error_message(self): + return "Rate limit exceeded. Too many requests." + + +class ServerError(TransientHTTPError): + def _default_error_message(self) -> str: + return "Internal server error." + + +class BadGatewayError(TransientHTTPError): + + def _default_error_message(self) -> str: + return "The requested space is hibernated" + + +class ServiceUnavailableError(TransientHTTPError): + def _default_error_message(self) -> str: + return "The request was malformed or missing a required parameter." + + +@dataclasses.dataclass +class ErrorResponseInfo: + status_code: int = 0 + reason: str = "Unknown error" + headers: dict = dataclasses.field(default_factory=dict) + content: str = "" + body: dict = dataclasses.field(default_factory=dict) + + +_HTTP_STATUS_TO_ERROR_MAP = { + # User error, treat as "permanent" to not trigger unnecessary retries. + 400: BadRequestError, + 401: UnauthorizedError, + 403: AccessDeniedError, + 404: NotFoundError, + # Server errors, treat as "transient" to allow for retries. + 429: RateLimitExceededError, + 499: ServiceUnavailableError, + 500: ServerError, + 502: BadGatewayError, + 503: ServiceUnavailableError, +} diff --git a/contentful/client/transport/retry.py b/contentful/client/transport/retry.py new file mode 100644 index 0000000..158907a --- /dev/null +++ b/contentful/client/transport/retry.py @@ -0,0 +1,128 @@ +from __future__ import annotations + +import functools +import logging +import random +import time +from typing import Callable + +from contentful.client.transport import errors + +""" +contentful.client.transport.retry +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This module implements automatic retry with jitter and backoff when receiving errors from the Contentful API. + +Complete API Documentation: https://www.contentful.com/developers/docs/references/content-delivery-api/ + +:copyright: (c) 2016 by Contentful GmbH. +:license: MIT, see LICENSE for more details. +""" + + +logger = logging.getLogger(__name__) +logger.addHandler(logging.NullHandler()) + + +__all__ = ("BaseRetry", "Retry", "AsyncRetry") + + +class BaseRetry: + def __init__( + self, + *, + max_retries: int = 1, + max_wait_seconds: int = 60, + ): + self.max_retries = max_retries + self.max_wait_seconds = max_wait_seconds + + def __call__( + self, + func: Callable, + *args, + **kwargs, + ): + raise NotImplementedError() + + def _report_error( + self, error: errors.TransientHTTPError, *, tries: int, reset_time: int + ) -> None: + prefix = ( + "Contentful API Rate Limit Hit! " + if isinstance(error, errors.RateLimitExceededError) + else "Contentful API Server Error! 
" + ) + retry_message = ( + f"Retrying - Retries left: {self.max_retries - tries} " + f"- Time until reset (seconds: {reset_time})" + ) + logger.debug( + f"{prefix}{retry_message}", + extra={"tries": tries, "reset_time": reset_time}, + ) + + +class Retry(BaseRetry): + """ + Decorator to retry function calls in case they raise transient exceptions + """ + + def __call__( + self, + func: Callable, + *args, + **kwargs, + ): + call = functools.partial(func, *args, **kwargs) + try: + return call() + except errors.TransientHTTPError as error: + tries = 1 + while tries < self.max_retries: + reset_time = error.reset_time() + if reset_time > self.max_wait_seconds: + raise + + self._report_error(error, tries=tries, reset_time=reset_time) + real_reset_time = reset_time * random.uniform(1.0, 1.2) + time.sleep(real_reset_time) + try: + return call() + except errors.TransientHTTPError: + tries += 1 + + raise + + +class AsyncRetry(BaseRetry): + """ + Decorator to retry async function calls in case they raise transient exceptions + """ + + async def __call__( + self, + func: Callable, + *args, + **kwargs, + ): + call = functools.partial(func, *args, **kwargs) + try: + return await call() + except errors.TransientHTTPError as error: + tries = 1 + while tries < self.max_retries: + reset_time = error.reset_time() + if reset_time > self.max_wait_seconds: + raise + + self._report_error(error, tries=tries, reset_time=reset_time) + real_reset_time = reset_time * random.uniform(1.0, 1.2) + time.sleep(real_reset_time) + try: + return await call() + except errors.TransientHTTPError: + tries += 1 + + raise diff --git a/contentful/client/transport/sio.py b/contentful/client/transport/sio.py new file mode 100644 index 0000000..49e38c5 --- /dev/null +++ b/contentful/client/transport/sio.py @@ -0,0 +1,110 @@ +from __future__ import annotations + +import contextlib +import urllib.parse +from typing import Iterator, Any + +import requests + +from contentful.client.transport import errors, abstract, retry + +""" +contentful.client.transport.sio +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This module provides the implementation for communicating with the Contentful API over HTTP using synchronous IO. + +Complete API Documentation: https://www.contentful.com/developers/docs/references/content-delivery-api/ + +:copyright: (c) 2016 by Contentful GmbH. +:license: MIT, see LICENSE for more details. 
+""" + + +class SyncTransport( + abstract.AbstractSyncTransport[requests.Session, requests.Response] +): + retry_cls = retry.Retry + + def initialize(self) -> requests.Session: + if self._session is None: + self._session = requests.Session() + + return self._session + + def close(self) -> None: + if self._session is None: + return + try: + self._session.close() + finally: + self._session = None + + def get( + self, + url: str, + *, + query: dict[str, Any] | None = None, + session: requests.Response | None = None, + raw_mode: bool = False, + **headers: str, + ) -> dict[str, Any] | requests.Response: + response = self.retry( + self._get, query=query, session=session, raw_mode=raw_mode, **headers + ) + return response + + def _get( + self, + url: str, + *, + query: dict[str, Any] | None = None, + session: requests.Response | None = None, + raw_mode: bool = False, + **headers: str, + ) -> dict[str, Any] | requests.Response: + qualified_url = urllib.parse.urljoin(self.base_url, url) + sess: requests.Session + with self.session(session=session) as sess: + response: requests.Response + with sess.get(qualified_url, params=query, headers=headers) as response: + content = response.content + status_code = response.status_code + headers = response.headers + reason = response.reason + parsed = abstract.parse_response( + status_code=status_code, + reason=reason, + content=content, + headers=headers, + raw=response, + raw_mode=raw_mode, + ) + return parsed + + @contextlib.contextmanager + def session( + self, *, session: requests.Session | None = None + ) -> Iterator[requests.Session]: + with translate_sync_transport_errors(): + if session is not None: + yield session + return + + session = self.initialize() + yield session + + +@contextlib.contextmanager +def translate_sync_transport_errors() -> Iterator[None]: + try: + yield + # Can't connect to the server. + except ( + requests.exceptions.ConnectionError, + requests.exceptions.Timeout, + ) as e: + raise errors.TransientHTTPError(e) from e + # Malformed request, etc. + except requests.exceptions.RequestException as e: + raise errors.PermanentHTTPError(e) from e diff --git a/contentful/errors.py b/contentful/errors.py index 506fd10..b017993 100644 --- a/contentful/errors.py +++ b/contentful/errors.py @@ -1,3 +1,4 @@ +# flake8: noqa """ contentful.errors ~~~~~~~~~~~~~~~~~ @@ -9,213 +10,5 @@ :copyright: (c) 2016 by Contentful GmbH. :license: MIT, see LICENSE for more details. 
""" - - -class HTTPError(Exception): - """ - Base HTTP Error Class - """ - - def __init__(self, response): - self.response = response - self.status_code = response.status_code - - message = self._best_available_message(response) - super(HTTPError, self).__init__(message) - - def _default_error_message(self): - return "The following error was received: {0}".format(self.response.text) - - def _handle_details(self, details): - return "{0}".format(details) - - def _has_additional_error_info(self): - return False - - def _additional_error_info(self): - return [] - - def _best_available_message(self, response): - from .utils import json_error_class - - response_json = None - error_message = [ - "HTTP status code: {0}".format(self.status_code), - ] - try: - response_json = response.json() - - message = response_json.get('message', None) - details = response_json.get('details', None) - request_id = response_json.get('requestId', None) - - if message is not None: - error_message.append("Message: {0}".format(message)) - else: - error_message.append("Message: {0}".format(self._default_error_message())) - if details is not None: - error_message.append("Details: {0}".format(self._handle_details(details))) - if request_id is not None: - error_message.append("Request ID: {0}".format(request_id)) - except json_error_class(): - error_message.append("Message: {0}".format(self._default_error_message())) - - if self._has_additional_error_info(): - error_message += self._additional_error_info() - - return "\n".join(error_message) - - -class BadRequestError(HTTPError): - """ - 400 - """ - - def _default_error_message(self): - return "The request was malformed or missing a required parameter." - - def _handle_details(self, details): - from .utils import string_class - if isinstance(details, string_class()): - return details - - def _handle_detail(detail): - if isinstance(detail, string_class()): - return detail - return detail.get('details', None) - - inner_details = [_handle_detail(detail) for detail in details['errors']] - inner_details = [detail for detail in inner_details if detail is not None] # This works in both Py2 and Py3 - return "\n\t".join(inner_details) - - -class UnauthorizedError(HTTPError): - """ - 401 - """ - - def _default_error_message(self): - return "The authorization token was invalid." - - -class AccessDeniedError(HTTPError): - """ - 403 - """ - - def _default_error_message(self): - return "The specified token does not have access to the requested resource." - - def _handle_details(self, details): - return "\n\tReasons:\n\t\t{0}".format("\n\t\t".join(details['reasons'])) - - -class NotFoundError(HTTPError): - """ - 404 - """ - - def _default_error_message(self): - return "The requested resource or endpoint could not be found." 
- - def _handle_details(self, details): - from .utils import string_class - if isinstance(details, string_class()): - return details - - resource_type = None - resource_id = None - if 'sys' in details: - resource_type = details['sys'].get('type', None) - resource_id = details['sys'].get('id', None) - else: - resource_type = details['type'] - resource_id = details.get('id', None) - - message = "The requested {0} could not be found.".format(resource_type) - if resource_id is not None: - message += " ID: {0}.".format(resource_id) - - return message - - -class RateLimitExceededError(HTTPError): - """ - 429 - """ - - RATE_LIMIT_RESET_HEADER_KEY = 'x-contentful-ratelimit-reset' - - def _has_reset_time(self): - return self.RATE_LIMIT_RESET_HEADER_KEY in self.response.headers - - def reset_time(self): - """Returns the reset time in seconds until next available request.""" - - return int(self.response.headers[ - self.RATE_LIMIT_RESET_HEADER_KEY - ]) - - def _has_additional_error_info(self): - return self._has_reset_time() - - def _additional_error_info(self): - return ["Time until reset (seconds): {0}".format(self.reset_time())] - - def _default_error_message(self): - return "Rate limit exceeded. Too many requests." - - -class ServerError(HTTPError): - """ - 500 - """ - - def _default_error_message(self): - return "Internal server error." - - -class BadGatewayError(HTTPError): - """ - 502 - """ - - def _default_error_message(self): - return "The requested space is hibernated." - - -class ServiceUnavailableError(HTTPError): - """ - 503 - """ - - def _default_error_message(self): - return "The request was malformed or missing a required parameter." - - -class EntryNotFoundError(Exception): - """ - Error for entry not found. - """ - pass - - -def get_error(response): - """Gets Error by HTTP Status Code""" - - errors = { - 400: BadRequestError, - 401: UnauthorizedError, - 403: AccessDeniedError, - 404: NotFoundError, - 429: RateLimitExceededError, - 500: ServerError, - 502: BadGatewayError, - 503: ServiceUnavailableError - } - - error_class = HTTPError - if response.status_code in errors: - error_class = errors[response.status_code] - - return error_class(response) +# Backwards compat +from contentful.client.transport.errors import * diff --git a/contentful/utils.py b/contentful/utils.py index 258f1f0..8dfe378 100644 --- a/contentful/utils.py +++ b/contentful/utils.py @@ -1,17 +1,6 @@ import re -import sys -import time -import json -from random import uniform -from .errors import RateLimitExceededError import logging -try: # Python 2.7+ - from logging import NullHandler -except ImportError: - class NullHandler(logging.Handler): - def emit(self, record): - pass """ contentful.utils @@ -24,35 +13,10 @@ def emit(self, record): """ -logging.getLogger(__name__).addHandler(NullHandler()) +logging.getLogger(__name__).addHandler(logging.NullHandler()) log = logging.getLogger(__name__) -def unicode_class(): - """Returns the class that allows for unicode encoded strings - depends on the Python version.""" - - if sys.version_info[0] >= 3: - return str - return unicode # noqa: F821 - - -def string_class(): - """Returns the parent class for strings - depends on the Python version.""" - if sys.version_info[0] >= 3: - return str - return basestring # noqa: F821 - - -def json_error_class(): - """Returns the class for JSON decode errors - depends on the Python version.""" - if sys.version_info[0] >= 3 and sys.version_info[1] >= 5: - return json.JSONDecodeError - return ValueError - - def snake_case(a_string): 
"""Returns a snake cased version of a string. @@ -135,48 +99,3 @@ def resource_for_link(link, includes, resources=None, locale=None): i['sys']['type'] == link['sys']['linkType']): return i return None - - -class ConfigurationException(Exception): - """Configuration Error Class""" - pass - - -class NotSupportedException(Exception): - """This exception is thrown when something is not supported by the API.""" - pass - - -class retry_request(object): - """ - Decorator to retry function calls in case they raise rate limit exceptions - """ - - def __init__(self, client): - self.client = client - - def __call__(self, http_call): - def wrapper(url, query=None): - exception = None - for i in range(self.client.max_rate_limit_retries + 1): - try: - return http_call(url, query) - except RateLimitExceededError as error: - exception = error - reset_time = error.reset_time() - - if reset_time > self.client.max_rate_limit_wait: - raise error - - retry_message = 'Contentful API Rate Limit Hit! ' - retry_message += "Retrying - Retries left: {0} ".format( - self.client.max_rate_limit_retries - i - ) - retry_message += "- Time until reset (seconds): {0}".format( - reset_time - ) - log.debug(retry_message) - time.sleep(reset_time * uniform(1.0, 1.2)) - if exception is not None: - raise exception - return wrapper diff --git a/requirements.txt b/requirements.txt index 451206a..24001bd 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,7 @@ python-dateutil==2.8.1 -requests>=2.20.0,<3.0 +requests>=2.16,<3 +aiohttp>=3.9,<4.0 +orjson>=3.9,<4.0 vcrpy==1.10.3 coverage==4.3.4 From 37c1c59b6b891f420dc34785d4ed3acd1b0aeafe Mon Sep 17 00:00:00 2001 From: Sean Stewart Date: Thu, 22 Feb 2024 18:26:17 -0500 Subject: [PATCH 02/15] Implement content-type caching. --- contentful/client/impl.py | 13 +++++++++++-- contentful/content_type_cache.py | 11 +++++++---- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/contentful/client/impl.py b/contentful/client/impl.py index 48d1641..b70eb5b 100644 --- a/contentful/client/impl.py +++ b/contentful/client/impl.py @@ -4,6 +4,7 @@ from contentful.client import base from contentful.client.transport import aio, sio, errors +from contentful.content_type_cache import ContentTypeCache if TYPE_CHECKING: from contentful import ( @@ -214,7 +215,11 @@ def initialize(self): if self.content_type_cache: self._cache_content_types() - def _cache_content_types(self): ... + def _cache_content_types(self): + content_types = self.content_types() + ContentTypeCache.update_cache( + space_id=self.space_id, content_types=content_types + ) def _get(self, url: str, query: QueryT | None = None): params = self._format_params(query) @@ -232,7 +237,11 @@ async def initialize(self): if self.content_type_cache: await self._cache_content_types() - async def _cache_content_types(self): ... 
+ async def _cache_content_types(self): + content_types = await self.content_types() + ContentTypeCache.update_cache( + space_id=self.space_id, content_types=content_types + ) async def _get(self, url: str, query: QueryT | None = None): params = self._format_params(query) diff --git a/contentful/content_type_cache.py b/contentful/content_type_cache.py index 5a068b0..c2b0e7a 100644 --- a/contentful/content_type_cache.py +++ b/contentful/content_type_cache.py @@ -1,3 +1,6 @@ +from __future__ import annotations + +from contentful.content_type import ContentType """ contentful.content_type_cache ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -9,7 +12,7 @@ """ -class ContentTypeCache(object): +class ContentTypeCache: """ Cache for Content Types. @@ -19,7 +22,7 @@ class ContentTypeCache(object): __CACHE__ = {} @classmethod - def get(cls, space_id, content_type_id): + def get(cls, space_id: str, content_type_id: str) -> ContentType | None: """ Fetches a Content Type from the Cache. """ @@ -30,9 +33,9 @@ def get(cls, space_id, content_type_id): return None @classmethod - def update_cache(cls, client): + def update_cache(cls, *, space_id: str, content_types: list[ContentType]): """ Updates the Cache with all Content Types from the Space. """ - cls.__CACHE__[client.space_id] = client.content_types() + cls.__CACHE__[space_id] = content_types From 13bccfc8a16637a2c9bcf0ad1d24c538a8d636da Mon Sep 17 00:00:00 2001 From: Sean Stewart Date: Thu, 22 Feb 2024 18:26:29 -0500 Subject: [PATCH 03/15] Fix requests declaration --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 24001bd..e0e7f9f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ python-dateutil==2.8.1 -requests>=2.16,<3 +requests>=2.20,<3 aiohttp>=3.9,<4.0 orjson>=3.9,<4.0 From b74fb0e0c99767de19ae24052bad3404bf670aab Mon Sep 17 00:00:00 2001 From: Sean Stewart Date: Fri, 23 Feb 2024 13:02:15 -0500 Subject: [PATCH 04/15] Start getting tests working with modern toolchain --- Makefile | 2 +- contentful/__init__.py | 2 +- contentful/client/base.py | 20 +- contentful/client/impl.py | 10 +- contentful/client/queries.py | 2 +- contentful/client/transport/abstract.py | 13 +- contentful/client/transport/aio.py | 3 + contentful/client/transport/sio.py | 13 +- contentful/content_type_field_types.py | 8 +- requirements.txt | 20 +- tests/client_test.py | 481 -------------- tests/content_type_cache_test.py | 23 - tests/errors_test.py | 371 ----------- tests/resource_test.py | 220 ------- tests/{array_test.py => test_array.py} | 0 tests/{asset_test.py => test_asset.py} | 0 tests/test_client.py | 607 ++++++++++++++++++ ...tent_type_test.py => test_content_type.py} | 0 tests/test_content_type_cache.py | 25 + ...eld_test.py => test_content_type_field.py} | 0 ...st.py => test_content_type_field_types.py} | 0 ...ed_asset_test.py => test_deleted_asset.py} | 0 ...ed_entry_test.py => test_deleted_entry.py} | 0 tests/{entry_test.py => test_entry.py} | 0 tests/test_errors.py | 387 +++++++++++ tests/{locale_test.py => test_locale.py} | 0 tests/test_resource.py | 186 ++++++ ...ilder_test.py => test_resource_builder.py} | 0 tests/{space_test.py => test_space.py} | 0 .../{sync_page_test.py => test_sync_page.py} | 0 tests/{utils_test.py => test_utils.py} | 0 31 files changed, 1262 insertions(+), 1131 deletions(-) delete mode 100644 tests/client_test.py delete mode 100644 tests/content_type_cache_test.py delete mode 100644 tests/errors_test.py delete mode 100644 tests/resource_test.py rename 
tests/{array_test.py => test_array.py} (100%) rename tests/{asset_test.py => test_asset.py} (100%) create mode 100644 tests/test_client.py rename tests/{content_type_test.py => test_content_type.py} (100%) create mode 100644 tests/test_content_type_cache.py rename tests/{content_type_field_test.py => test_content_type_field.py} (100%) rename tests/{content_type_field_types_test.py => test_content_type_field_types.py} (100%) rename tests/{deleted_asset_test.py => test_deleted_asset.py} (100%) rename tests/{deleted_entry_test.py => test_deleted_entry.py} (100%) rename tests/{entry_test.py => test_entry.py} (100%) create mode 100644 tests/test_errors.py rename tests/{locale_test.py => test_locale.py} (100%) create mode 100644 tests/test_resource.py rename tests/{resource_builder_test.py => test_resource_builder.py} (100%) rename tests/{space_test.py => test_space.py} (100%) rename tests/{sync_page_test.py => test_sync_page.py} (100%) rename tests/{utils_test.py => test_utils.py} (100%) diff --git a/Makefile b/Makefile index b469058..1695e15 100644 --- a/Makefile +++ b/Makefile @@ -48,7 +48,7 @@ lint: flake8 contentful test: - python setup.py test + python -m unittest discover tests test-all: tox diff --git a/contentful/__init__.py b/contentful/__init__.py index ea6d7e8..3f7cd5d 100644 --- a/contentful/__init__.py +++ b/contentful/__init__.py @@ -1,4 +1,4 @@ -from .client import Client # noqa: F401 +from .client import Client, AsyncClient # noqa: F401 from .entry import Entry # noqa: F401 from .asset import Asset # noqa: F401 from .space import Space # noqa: F401 diff --git a/contentful/client/base.py b/contentful/client/base.py index f517553..d25a18f 100644 --- a/contentful/client/base.py +++ b/contentful/client/base.py @@ -4,7 +4,7 @@ import re from typing import TypedDict, TYPE_CHECKING, ClassVar -from contentful import resource_builder, __version__ +from contentful import resource_builder from contentful.client import queries from contentful.client.transport import abstract @@ -218,6 +218,8 @@ def _get_transport(self) -> abstract.AbstractTransport: return transport def _get_client_info(self) -> ClientInfo: + from contentful import __version__ + os_name = platform.system() if os_name == "Darwin": os_name = "macOS" @@ -269,6 +271,9 @@ def _contentful_user_agent(self): header_encoded = "" for key, values in self.client_info.items(): + if values["name"] is None: + continue + name = f"{key} {values['name']}" if values["version"]: name = f"{name}/{values['version']}" @@ -302,7 +307,7 @@ def _url(self, url): protocol, self.api_url, self.space_id, url ) - def _format_params(self, query: QueryT | None) -> RequestParams: + def _format_params(self, query: QueryT | None) -> dict[str, str]: query = query or {} params = queries.normalize(**query) if not self.authorization_as_header: @@ -310,7 +315,10 @@ def _format_params(self, query: QueryT | None) -> RequestParams: return params - def _format_response(self, response: dict[str, Any], localized: bool) -> Resource: + def _format_response( + self, *, response: dict[str, Any], query: dict[str, str] + ) -> Resource: + localized = query.get("locale", "") == "*" builder = resource_builder.ResourceBuilder( default_locale=self.default_locale, localized=localized, @@ -349,17 +357,13 @@ def _proxy_parameters(self) -> abstract.ProxyInfo: def __repr__(self): return ( - f"<{self.__class__.__name__} " + f"" ) -class RequestParams(TypedDict): - params: QueryT - - class ClientInfo(TypedDict): sdk: VersionInfo platform: VersionInfo diff --git a/contentful/client/impl.py 
b/contentful/client/impl.py index b70eb5b..a1d3a54 100644 --- a/contentful/client/impl.py +++ b/contentful/client/impl.py @@ -51,7 +51,7 @@ def space(self, query: QueryT | None = None) -> Space: """ - return self._get("", query) + return self._get(f"/spaces/{self.space_id}", query) def content_type( self, content_type_id: str, query: QueryT | None = None @@ -223,10 +223,10 @@ def _cache_content_types(self): def _get(self, url: str, query: QueryT | None = None): params = self._format_params(query) - response = self.transport.get(url, params=params, raw_mode=self.raw_mode) + response = self.transport.get(url, query=params, raw_mode=self.raw_mode) if self.raw_mode: return response - return self._format_response(response) + return self._format_response(response=response, query=params) class AsyncClient(base.BaseClient): @@ -248,7 +248,7 @@ async def _get(self, url: str, query: QueryT | None = None): response = await self.transport.get(url, params=params, raw_mode=self.raw_mode) if self.raw_mode: return response - return self._format_response(response) + return self._format_response(response=response, query=params) async def space(self, query: QueryT | None = None) -> Space: """Fetches the current Space. @@ -265,7 +265,7 @@ async def space(self, query: QueryT | None = None) -> Space: """ - return await self._get("", query) + return await self._get(f"/spaces/{self.space_id}", query) async def content_type( self, content_type_id: str, query: QueryT | None = None diff --git a/contentful/client/queries.py b/contentful/client/queries.py index 4ea987e..dec84d2 100644 --- a/contentful/client/queries.py +++ b/contentful/client/queries.py @@ -51,7 +51,7 @@ def normalize_select(select: str | list[str]) -> str: q = q.split(",") filtered = ",".join(iternormalize(*q, filter=_filter_sys)) - normalized = ",".join(filtered, "sys") + normalized = ",".join((filtered, "sys")) return normalized diff --git a/contentful/client/transport/abstract.py b/contentful/client/transport/abstract.py index 6655254..6b3400e 100644 --- a/contentful/client/transport/abstract.py +++ b/contentful/client/transport/abstract.py @@ -99,7 +99,9 @@ def get( ) -> dict[str, Any] | ResponseT: ... -class AbstractAsyncTransport(AbstractTransport[SessionT], abc.ABC, Generic[SessionT]): +class AbstractAsyncTransport( + AbstractTransport[SessionT, ResponseT], abc.ABC, Generic[SessionT, ResponseT] +): @abc.abstractmethod def session( @@ -107,7 +109,9 @@ def session( ) -> AsyncSessionContext[SessionT]: ... -class AbstractSyncTransport(AbstractTransport[SessionT], abc.ABC, Generic[SessionT]): +class AbstractSyncTransport( + AbstractTransport[SessionT, ResponseT], abc.ABC, Generic[SessionT, ResponseT] +): @abc.abstractmethod def session( @@ -201,7 +205,10 @@ def parse_response( """Parse the received response, raising an error if necessary.""" if status_code >= 400: err_cls = errors.get_error_for_status_code(status_code) - body = orjson.loads(content) + try: + body = orjson.loads(content) + except orjson.JSONDecodeError: + body = {} info = errors.ErrorResponseInfo( status_code=status_code, reason=reason, diff --git a/contentful/client/transport/aio.py b/contentful/client/transport/aio.py index 150630f..35e7af5 100644 --- a/contentful/client/transport/aio.py +++ b/contentful/client/transport/aio.py @@ -106,3 +106,6 @@ def translate_async_transport_errors() -> Iterator[None]: # Malformed request, etc. 
except (aiohttp.ClientError, ValueError) as e: raise errors.PermanentHTTPError(e) from e + except Exception as e: + print(e) + raise diff --git a/contentful/client/transport/sio.py b/contentful/client/transport/sio.py index 49e38c5..c60a27b 100644 --- a/contentful/client/transport/sio.py +++ b/contentful/client/transport/sio.py @@ -29,6 +29,7 @@ class SyncTransport( def initialize(self) -> requests.Session: if self._session is None: self._session = requests.Session() + self._session.headers.update(self.default_headers) return self._session @@ -50,7 +51,7 @@ def get( **headers: str, ) -> dict[str, Any] | requests.Response: response = self.retry( - self._get, query=query, session=session, raw_mode=raw_mode, **headers + self._get, url, query=query, session=session, raw_mode=raw_mode, **headers ) return response @@ -67,7 +68,13 @@ def _get( sess: requests.Session with self.session(session=session) as sess: response: requests.Response - with sess.get(qualified_url, params=query, headers=headers) as response: + with sess.get( + qualified_url, + params=query, + headers=headers, + timeout=self.timeout_s, + proxies=self.proxy_info, + ) as response: content = response.content status_code = response.status_code headers = response.headers @@ -106,5 +113,5 @@ def translate_sync_transport_errors() -> Iterator[None]: ) as e: raise errors.TransientHTTPError(e) from e # Malformed request, etc. - except requests.exceptions.RequestException as e: + except (requests.exceptions.RequestException, ValueError) as e: raise errors.PermanentHTTPError(e) from e diff --git a/contentful/content_type_field_types.py b/contentful/content_type_field_types.py index 8b8edf9..52bbbbe 100644 --- a/contentful/content_type_field_types.py +++ b/contentful/content_type_field_types.py @@ -1,13 +1,13 @@ # -*- coding: utf-8 -*- try: - import simplejson as json + import orjson as json except ImportError: import json import dateutil.parser from collections import namedtuple -from .utils import unicode_class, resource_for_link, unresolvable +from .utils import resource_for_link, unresolvable from .resource import FieldsResource, Link, Resource """ @@ -44,7 +44,7 @@ class SymbolField(BasicField): def coerce(self, value, **kwargs): """Coerces value to str""" - return unicode_class()(value) + return str(value) class TextField(BasicField): @@ -53,7 +53,7 @@ class TextField(BasicField): def coerce(self, value, **kwargs): """Coerces value to str""" - return unicode_class()(value) + return str(value) class IntegerField(BasicField): diff --git a/requirements.txt b/requirements.txt index e0e7f9f..ae07a7c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,14 +1,14 @@ -python-dateutil==2.8.1 -requests>=2.20,<3 +python-dateutil>=2.8,<3 +requests>=2.20,<3.0 aiohttp>=3.9,<4.0 orjson>=3.9,<4.0 -vcrpy==1.10.3 -coverage==4.3.4 -flake8==3.3.0 -tox==2.5.0 -virtualenv==15.1.0 -requests-mock==1.5.2 +vcrpy>=6.0,<7.0 +coverage>=7,<8 +flake8>=7,<8 +tox>=4,<5 +virtualenv>=20,<21 +requests-mock>=1.5,<2 -Sphinx==1.6.3 -sphinxcontrib-websupport==1.0.1 +Sphinx>=7,<8 +sphinxcontrib-websupport>=1,<2 diff --git a/tests/client_test.py b/tests/client_test.py deleted file mode 100644 index 09ab5fb..0000000 --- a/tests/client_test.py +++ /dev/null @@ -1,481 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import unicode_literals - -import requests_mock -import vcr -import re -from unittest import TestCase - -from requests_mock import ANY - -from contentful.client import Client -from contentful.content_type_cache import ContentTypeCache -from contentful.errors 
import EntryNotFoundError -from contentful.errors import HTTPError -from contentful.utils import ConfigurationException -from contentful.entry import Entry - - -class ClientTest(TestCase): - def setUp(self): - ContentTypeCache.__CACHE__ = {} - - def test_client_repr(self): - self.assertEqual( - '', - str(Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False)) - ) - - def test_client_validations(self): - with self.assertRaises(ConfigurationException): - Client(None, 'foo') - with self.assertRaises(ConfigurationException): - Client('foo', None) - with self.assertRaises(ConfigurationException): - Client('foo', 'bar', api_url=None) - with self.assertRaises(ConfigurationException): - Client('foo', 'bar', default_locale=None) - with self.assertRaises(ConfigurationException): - Client('foo', 'bar', api_version=None) - - def test_uses_timeouts(self): - c = Client('cfexampleapi', 'b4c0n73n7fu1') - with requests_mock.mock() as m: - m.register_uri('GET', ANY, status_code=500) - self.assertRaises(HTTPError, c.entries) - self.assertEqual(m.call_count, 1) - self.assertEqual(m.request_history[0].timeout, 1) - - c = Client('cfexampleapi', 'b4c0n73n7fu1', timeout_s=0.1231570235) - with requests_mock.mock() as m: - m.register_uri('GET', ANY, status_code=500) - self.assertRaises(HTTPError, c.entries) - self.assertEqual(m.call_count, 1) - self.assertEqual(m.request_history[0].timeout, c.timeout_s) - - @vcr.use_cassette('fixtures/client/content_type_cache.yaml') - def test_client_creates_a_content_type_cache(self): - Client('cfexampleapi', 'b4c0n73n7fu1') - - self.assertTrue(len(ContentTypeCache.__CACHE__) > 0) - - def test_client_can_avoid_caching_content_types(self): - Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - - self.assertFalse(len(ContentTypeCache.__CACHE__) > 0) - - @vcr.use_cassette('fixtures/client/space.yaml') - def test_client_get_space(self): - client = Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - space = client.space() - - self.assertEqual(str(space), "") - - @vcr.use_cassette('fixtures/client/content_type.yaml') - def test_client_get_content_type(self): - client = Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - ct = client.content_type('cat') - - self.assertEqual(str(ct), "") - - @vcr.use_cassette('fixtures/client/content_types.yaml') - def test_client_get_content_types(self): - client = Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - cts = client.content_types() - - self.assertEqual(str(cts[0]), "") - - @vcr.use_cassette('fixtures/client/entry.yaml') - def test_client_entry(self): - client = Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - entry = client.entry('nyancat') - - self.assertEqual(str(entry), "") - self.assertEqual(str(entry.best_friend), "") - - @vcr.use_cassette('fixtures/client/entry_not_found.yaml') - def test_client_entry_not_found(self): - client = Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - self.assertRaises(EntryNotFoundError, client.entry, 'foobar') - - @vcr.use_cassette('fixtures/client/entries.yaml') - def test_client_entries(self): - client = Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - entries = client.entries() - - self.assertEqual(str(entries[0]), "") - - @vcr.use_cassette('fixtures/client/entries_select.yaml') - def test_client_entries_select(self): - client = Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - entries = client.entries({'content_type': 'cat', 'sys.id': 'nyancat', 'select': 
['fields.name']}) - - self.assertEqual(str(entries[0]), "") - self.assertEqual(entries[0].fields(), {'name': 'Nyan Cat'}) - - @vcr.use_cassette('fixtures/client/entries_links_to_entry.yaml') - def test_client_entries_links_to_entry(self): - client = Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - entries = client.entries({'links_to_entry': 'nyancat'}) - self.assertEqual(len(entries), 1) - self.assertEqual(str(entries[0]), "") - - @vcr.use_cassette('fixtures/client/entry_incoming_references.yaml') - def test_entry_incoming_references(self): - client = Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - entry = client.entry('nyancat') - entries = entry.incoming_references(client) - self.assertEqual(len(entries), 1) - self.assertEqual(str(entries[0]), "") - - @vcr.use_cassette('fixtures/client/entry_incoming_references_with_query.yaml') - def test_entry_incoming_references_with_query(self): - client = Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - entry = client.entry('nyancat') - entries = entry.incoming_references(client, {'content_type': 'cat', 'select': ['fields.name']}) - self.assertEqual(len(entries), 1) - self.assertEqual(str(entries[0]), "") - self.assertEqual(entries[0].fields(), {'name': 'Happy Cat'}) - - @vcr.use_cassette('fixtures/client/entries_links_to_asset.yaml') - def test_client_entries_links_to_asset(self): - client = Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - entries = client.entries({'links_to_asset': 'nyancat'}) - self.assertEqual(len(entries), 1) - self.assertEqual(str(entries[0]), "") - - @vcr.use_cassette('fixtures/client/asset_incoming_references.yaml') - def test_asset_incoming_references(self): - client = Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - asset = client.asset('nyancat') - entries = asset.incoming_references(client) - self.assertEqual(len(entries), 1) - self.assertEqual(str(entries[0]), "") - - @vcr.use_cassette('fixtures/client/asset.yaml') - def test_client_asset(self): - client = Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - asset = client.asset('nyancat') - - self.assertEqual( - str(asset), - "" - ) - - @vcr.use_cassette('fixtures/client/locales_on_environment.yaml') - def test_client_locales_on_environment(self): - client = Client('facgnwwgj5fe', '', environment='testing', content_type_cache=False) - locales = client.locales() - - self.assertEqual(str(locales), "") - self.assertEqual(str(locales[0]), "") - - @vcr.use_cassette('fixtures/client/assets.yaml') - def test_client_assets(self): - client = Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - assets = client.assets() - - self.assertEqual( - str(assets[0]), - "" - ) - - @vcr.use_cassette('fixtures/client/sync.yaml') - def test_client_sync(self): - client = Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - sync = client.sync({'initial': True}) - - self.assertEqual( - str(sync), - "".format( - 'w5ZGw6JFwqZmVcKsE8Kow4grw45QdybCnV_Cg8OASMKpwo1UY8K8bsKFwqJrw7DDhcKnM2RDOVbDt1E-wo7CnDjChMKKGsK1wrzCrBzCqMOpZAwOOcOvCcOAwqHDv0XCiMKaOcOxZA8BJUzDr8K-wo1lNx7DnHE' - ) - ) - self.assertEqual(str(sync.items[0]), "") - - @vcr.use_cassette('fixtures/client/sync_environments.yaml') - def test_client_sync_with_environments(self): - client = Client('a22o2qgm356c', 'bfbc63cf745a037125dbcc64f716a9a0e9d091df1a79e84920b890f87a6e7ab9', environment='staging', content_type_cache=False) - sync = client.sync({'initial': True}) - - 
self.assertEqual(sync.items[0].environment.id, 'staging') - - @vcr.use_cassette('fixtures/client/array_endpoints.yaml') - def test_client_creates_wrapped_arrays(self): - client = Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - self.assertEqual(str(client.content_types()), "") - self.assertEqual(str(client.entries()), "") - self.assertEqual(str(client.assets()), "") - - # X-Contentful-User-Agent Headers - - def test_client_default_contentful_user_agent_headers(self): - client = Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - - from contentful import __version__ - import platform - expected = [ - 'sdk contentful.py/{0};'.format(__version__), - 'platform python/{0};'.format(platform.python_version()) - ] - header = client._contentful_user_agent() - for e in expected: - self.assertTrue(e in header) - - self.assertTrue(re.search('os (Windows|macOS|Linux)(\/.*)?;', header)) - - self.assertTrue('integration' not in header) - self.assertTrue('app' not in header) - - def test_client_with_integration_name_only_headers(self): - client = Client( - 'cfexampleapi', - 'b4c0n73n7fu1', - content_type_cache=False, - integration_name='foobar') - - header = client._contentful_user_agent() - self.assertTrue('integration foobar;' in header) - self.assertFalse('integration foobar/;' in header) - - def test_client_with_integration_headers(self): - client = Client( - 'cfexampleapi', - 'b4c0n73n7fu1', - content_type_cache=False, - integration_name='foobar', - integration_version='0.1.0') - - header = client._contentful_user_agent() - self.assertTrue('integration foobar/0.1.0;' in header) - - def test_client_with_application_name_only_headers(self): - client = Client( - 'cfexampleapi', - 'b4c0n73n7fu1', - content_type_cache=False, - application_name='foobar') - - header = client._contentful_user_agent() - self.assertTrue('app foobar;' in header) - self.assertFalse('app foobar/;' in header) - - def test_client_with_application_headers(self): - client = Client( - 'cfexampleapi', - 'b4c0n73n7fu1', - content_type_cache=False, - application_name='foobar', - application_version='0.1.0') - - header = client._contentful_user_agent() - self.assertTrue('app foobar/0.1.0;' in header) - - def test_client_with_integration_version_only_does_not_include_integration_in_header(self): - client = Client( - 'cfexampleapi', - 'b4c0n73n7fu1', - content_type_cache=False, - integration_version='0.1.0') - - header = client._contentful_user_agent() - self.assertFalse('integration /0.1.0' in header) - - def test_client_with_application_version_only_does_not_include_integration_in_header(self): - client = Client( - 'cfexampleapi', - 'b4c0n73n7fu1', - content_type_cache=False, - application_version='0.1.0') - - header = client._contentful_user_agent() - self.assertFalse('app /0.1.0;' in header) - - def test_client_with_all_headers(self): - client = Client( - 'cfexampleapi', - 'b4c0n73n7fu1', - content_type_cache=False, - application_name='foobar_app', - application_version='1.1.0', - integration_name='foobar integ', - integration_version='0.1.0') - - from contentful import __version__ - import platform - expected = [ - 'sdk contentful.py/{0};'.format(__version__), - 'platform python/{0};'.format(platform.python_version()), - 'app foobar_app/1.1.0;', - 'integration foobar integ/0.1.0;' - ] - header = client._contentful_user_agent() - for e in expected: - self.assertTrue(e in header) - - self.assertTrue(re.search('os (Windows|macOS|Linux)(\/.*)?;', header)) - - def test_client_headers(self): - client = 
Client( - 'cfexampleapi', - 'b4c0n73n7fu1', - content_type_cache=False, - application_name='foobar_app', - application_version='1.1.0', - integration_name='foobar integ', - integration_version='0.1.0') - - from contentful import __version__ - import platform - expected = [ - 'sdk contentful.py/{0};'.format(__version__), - 'platform python/{0};'.format(platform.python_version()), - 'app foobar_app/1.1.0;', - 'integration foobar integ/0.1.0;' - ] - header = client._request_headers()['X-Contentful-User-Agent'] - for e in expected: - self.assertTrue(e in header) - - self.assertTrue(re.search('os (Windows|macOS|Linux)(\/.*)?;', header)) - - # Integration Tests - - @vcr.use_cassette('fixtures/integration/issue-4.yaml') - def test_entries_dont_fail_with_unicode_characters(self): - client = Client('wltm0euukdog', 'bbe871957bb60f988af6cbeeccbb178c36cae09e36e8098357e27b51dd38d88e', content_type_cache=True) - entries = client.entries() - self.assertEqual(entries[0].name, '😅') - - @vcr.use_cassette('fixtures/integration/json-arrays.yaml') - def test_entries_dont_fail_with_arrays_as_json_root(self): - client = Client('4int1zgmkwcf', 'd2ac2076019bd4a8357811cbdd5563bb7186d90d77e53c265a1bafd9f83439e8', content_type_cache=True) - entries = client.entries() - self.assertEqual(entries[0].json, [{'foo': 'bar'}, {'baz': 'qux'}]) - - @vcr.use_cassette('fixtures/integration/issue-11.yaml') - def test_entries_with_none_values_on_all_fields(self): - client = Client('rtx5c7z0zbas', 'a6c8dc438d470c51d1094dad146a1f20fcdba41e21f4e263af6c3f70d8583634', content_type_cache=True) - entry = client.entries()[0] - self.assertEqual(entry.symbol, None) - self.assertEqual(entry.text, None) - self.assertEqual(entry.integer, None) - self.assertEqual(entry.number, None) - self.assertEqual(entry.date, None) - self.assertEqual(entry.location, None) - self.assertEqual(entry.asset, None) - self.assertEqual(entry.bool, None) - self.assertEqual(entry.json, None) - self.assertEqual(entry.link, None) - - @vcr.use_cassette('fixtures/integration/circular-references.yaml') - def test_circular_references_default_depth(self): - client = Client('rk19fq93y3vw', '821aa502a7ce820e46adb30fa6942889619aac4342a7021cfe15197c52a593cc', content_type_cache=True) - a = client.entry('6kdfS7uMs8owuEIoSaOcQk') - self.assertEqual(str(a), "") - self.assertEqual(str(a.b), "") - self.assertEqual(str(a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a), "") - self.assertEqual(str(a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b), "") - - @vcr.use_cassette('fixtures/integration/circular-references.yaml') - def test_circular_references_set_depth(self): - client = Client( - 'rk19fq93y3vw', - '821aa502a7ce820e46adb30fa6942889619aac4342a7021cfe15197c52a593cc', - content_type_cache=True, - max_include_resolution_depth=1 - ) - a = client.entry('6kdfS7uMs8owuEIoSaOcQk') - self.assertEqual(str(a), "") - self.assertEqual(str(a.b), "") - self.assertEqual(str(a.b.a), "") - - @vcr.use_cassette('fixtures/integration/circular-references.yaml') - def test_circular_references_with_reusable_entries(self): - client = Client('rk19fq93y3vw', '821aa502a7ce820e46adb30fa6942889619aac4342a7021cfe15197c52a593cc', content_type_cache=True, reuse_entries=True) - a = client.entry('6kdfS7uMs8owuEIoSaOcQk') - self.assertEqual(str(a), "") - self.assertEqual(str(a.b), "") - self.assertEqual(str(a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a), "") - self.assertEqual(str(a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b), "") - self.assertEqual(a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b._depth, 1) - 
self.assertEqual(a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a._depth, 0) - self.assertEqual(str(a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a), "") - - @vcr.use_cassette('fixtures/integration/errors-filtered.yaml') - def test_unresolvable_entries_dont_get_included(self): - client = Client( - '011npgaszg5o', - '42c9d93410a7319e9a735671fc1e415348f65e94a99fc768b70a7c649859d4fd' - ) - - entry = client.entry('1HR1QvURo4MoSqO0eqmUeO') - self.assertEqual(len(entry.modules), 2) - - @vcr.use_cassette('fixtures/fields/rich_text.yaml') - def test_rich_text_field(self): - client = Client( - 'jd7yc4wnatx3', - '6256b8ef7d66805ca41f2728271daf27e8fa6055873b802a813941a0fe696248', - gzip_encoded=False - ) - - entry = client.entry('4BupPSmi4M02m0U48AQCSM') - - expected_entry_occurrances = 2 - embedded_entry_index = 1 - for content in entry.body['content']: - if content['nodeType'] == 'embedded-entry-block': - self.assertTrue(isinstance(content['data']['target'], Entry)) - self.assertEqual(content['data']['target'].body, 'Embedded {0}'.format(embedded_entry_index)) - expected_entry_occurrances -= 1 - embedded_entry_index += 1 - self.assertEqual(expected_entry_occurrances, 0) - - @vcr.use_cassette('fixtures/fields/rich_text_lists_with_embeds.yaml') - def test_rich_text_field_with_embeds_in_lists(self): - client = Client( - 'jd7yc4wnatx3', - '6256b8ef7d66805ca41f2728271daf27e8fa6055873b802a813941a0fe696248', - gzip_encoded=False - ) - - entry = client.entry('6NGLswCREsGA28kGouScyY') - - # Hyperlink data is conserved - self.assertEqual(entry.body['content'][0], { - 'data': {}, - 'content': [ - {'marks': [], 'value': 'A link to ', 'nodeType': 'text', 'nodeClass': 'text'}, - { - 'data': {'uri': 'https://google.com'}, - 'content': [{'marks': [], 'value': 'google', 'nodeType': 'text', 'nodeClass': 'text'}], - 'nodeType': 'hyperlink', - 'nodeClass': 'inline' - }, - {'marks': [], 'value': '', 'nodeType': 'text', 'nodeClass': 'text'} - ], - 'nodeType': 'paragraph', - 'nodeClass': 'block' - }) - - # Unordered lists and ordered lists can contain embedded entries - self.assertEqual(entry.body['content'][3]['nodeType'], 'unordered-list') - self.assertEqual(str(entry.body['content'][3]['content'][2]['content'][0]['data']['target']), "") - - self.assertEqual(entry.body['content'][4]['nodeType'], 'ordered-list') - self.assertEqual(str(entry.body['content'][4]['content'][2]['content'][0]['data']['target']), "") - - @vcr.use_cassette('fixtures/integration/issue-41.yaml') - def test_rich_text_fields_should_not_get_hydrated_twice(self): - client = Client( - 'fds721b88p6b', - '45ba81cc69423fcd2e3f0a4779de29481bb5c11495bc7e14649a996cf984e98e', - gzip_encoded=False - ) - - entry = client.entry('1tBAu0wP9qAQEg6qCqMics') - - # Not failing is already a success - self.assertEqual(str(entry.children[0]), str(entry.children[1])) - self.assertEqual(str(entry.children[0].body), str(entry.children[1].body)) diff --git a/tests/content_type_cache_test.py b/tests/content_type_cache_test.py deleted file mode 100644 index 93b488e..0000000 --- a/tests/content_type_cache_test.py +++ /dev/null @@ -1,23 +0,0 @@ -import vcr -from unittest import TestCase -from contentful.content_type_cache import ContentTypeCache -from contentful import Client - - -class ContentTypeCacheTest(TestCase): - @vcr.use_cassette('fixtures/cache/cache.yaml') - def test_cache(self): - Client('o4h6g9w3pooi', 'b4c0n73n7fu1') - - cat_ct = ContentTypeCache.get('o4h6g9w3pooi', 'article') - self.assertEqual(str(cat_ct), "") - - 
@vcr.use_cassette('fixtures/cache/cache.yaml') - def test_cache_update(self): - ContentTypeCache.__CACHE__ = {} - client = Client('o4h6g9w3pooi', 'b4c0n73n7fu1', content_type_cache=False) - - ContentTypeCache.update_cache(client) - - cat_ct = ContentTypeCache.get('o4h6g9w3pooi', 'article') - self.assertEqual(str(cat_ct), "") diff --git a/tests/errors_test.py b/tests/errors_test.py deleted file mode 100644 index 172119a..0000000 --- a/tests/errors_test.py +++ /dev/null @@ -1,371 +0,0 @@ -import json -from unittest import TestCase -from contentful.errors import ( - HTTPError, - NotFoundError, - BadRequestError, - AccessDeniedError, - UnauthorizedError, - RateLimitExceededError, - ServerError, - ServiceUnavailableError, - get_error -) -from contentful.utils import retry_request, json_error_class -from contentful.client import Client - - -class MockResponse(object): - def __init__(self, status_code, json, headers=None, invalid_json=False): - self.status_code = status_code - self._json = json - self._invalid_json = invalid_json - self.headers = headers if headers is not None else {} - - def json(self): - if self._invalid_json: - raise json_error_class()('foo', 'foo', 0) - return json.loads(json.dumps(self._json)) - - @property - def text(self): - return self._json - - -http_attempts = 0 -def mock_http_call(url, query): - global http_attempts - if http_attempts < query.get('fail_until', 1): - http_attempts += 1 - raise RateLimitExceededError( - MockResponse(429, {'message': 'foo'}, headers={'x-contentful-ratelimit-reset': query.get('reset', 0.1)}) - ) - return 'succeed' - - -class ErrorsTest(TestCase): - def test_default_additional_info_is_empty(self): - response = MockResponse(512, "not json", invalid_json=True) - error = get_error(response) - - self.assertEqual(error._additional_error_info(), []) - - def test_default_error_message(self): - response = MockResponse(512, "not json", invalid_json=True) - error = get_error(response) - - expected_error = "\n".join([ - "HTTP status code: 512", - "Message: The following error was received: not json" - ]) - self.assertEqual(str(error), expected_error) - - def test_generic_details(self): - response = MockResponse(512, { - 'details': 'some text' - }) - error = get_error(response) - - expected_error = "\n".join([ - "HTTP status code: 512", - "Message: The following error was received: {'details': 'some text'}", - "Details: some text" - ]) - self.assertEqual(str(error), expected_error) - - def test_not_found_error(self): - response = MockResponse(404, { - 'message': 'The resource could not be found.', - 'details': { - 'type': 'Asset', - 'id': 'foobar' - }, - 'requestId': '$foobar123' - }) - - error = get_error(response) - - self.assertEqual(error.status_code, 404) - expected_error = "\n".join([ - "HTTP status code: 404", - "Message: The resource could not be found.", - "Details: The requested Asset could not be found. ID: foobar.", - "Request ID: $foobar123" - ]) - self.assertEqual(str(error), expected_error) - self.assertTrue(isinstance(error, NotFoundError)) - - def test_not_found_error_with_sys_on_details(self): - response = MockResponse(404, { - 'message': 'The resource could not be found.', - 'details': { - 'sys': { - 'type': 'Space', - 'id': 'foobar' - } - }, - 'requestId': '$foobar123' - }) - - error = get_error(response) - - self.assertEqual(error.status_code, 404) - expected_error = "\n".join([ - "HTTP status code: 404", - "Message: The resource could not be found.", - "Details: The requested Space could not be found. 
ID: foobar.", - "Request ID: $foobar123" - ]) - self.assertEqual(str(error), expected_error) - self.assertTrue(isinstance(error, NotFoundError)) - - def test_not_found_error_details_is_a_string(self): - response = MockResponse(404, { - 'message': 'The resource could not be found.', - 'details': 'The resource could not be found', - 'requestId': '$foobar123' - }) - - error = get_error(response) - - self.assertEqual(error.status_code, 404) - expected_error = "\n".join([ - "HTTP status code: 404", - "Message: The resource could not be found.", - "Details: The resource could not be found", - "Request ID: $foobar123" - ]) - self.assertEqual(str(error), expected_error) - self.assertTrue(isinstance(error, NotFoundError)) - - def test_bad_request_error(self): - response = MockResponse(400, { - 'message': 'The query you sent was invalid. Probably a filter or ordering specification is not applicable to the type of a field.', - 'details': { - 'errors': [ - { - 'details': 'The path "invalid_param" is not recognized' - } - ] - }, - 'requestId': '$foobar234' - }) - - error = get_error(response) - - self.assertEqual(error.status_code, 400) - expected_error = "\n".join([ - "HTTP status code: 400", - "Message: The query you sent was invalid. Probably a filter or ordering specification is not applicable to the type of a field.", - 'Details: The path "invalid_param" is not recognized', - "Request ID: $foobar234" - ]) - self.assertEqual(str(error), expected_error) - self.assertTrue(isinstance(error, BadRequestError)) - - def test_bad_request_error_details_is_string(self): - response = MockResponse(400, { - 'message': 'The query you sent was invalid. Probably a filter or ordering specification is not applicable to the type of a field.', - 'details': "some error", - 'requestId': '$foobar234' - }) - - error = get_error(response) - - self.assertEqual(error.status_code, 400) - expected_error = "\n".join([ - "HTTP status code: 400", - "Message: The query you sent was invalid. Probably a filter or ordering specification is not applicable to the type of a field.", - 'Details: some error', - "Request ID: $foobar234" - ]) - self.assertEqual(str(error), expected_error) - self.assertTrue(isinstance(error, BadRequestError)) - - def test_bad_request_error_errors_details_is_string(self): - response = MockResponse(400, { - 'message': 'The query you sent was invalid. Probably a filter or ordering specification is not applicable to the type of a field.', - 'details': { - 'errors': [ - 'some error' - ] - }, - 'requestId': '$foobar234' - }) - - error = get_error(response) - - self.assertEqual(error.status_code, 400) - expected_error = "\n".join([ - "HTTP status code: 400", - "Message: The query you sent was invalid. 
Probably a filter or ordering specification is not applicable to the type of a field.", - 'Details: some error', - "Request ID: $foobar234" - ]) - self.assertEqual(str(error), expected_error) - self.assertTrue(isinstance(error, BadRequestError)) - - def test_access_denied_error(self): - response = MockResponse(403, { - 'message': 'Access Denied', - 'details': { - 'reasons': [ - 'foo', - 'bar' - ] - } - }) - - error = get_error(response) - - self.assertEqual(error.status_code, 403) - expected_error = "\n".join([ - "HTTP status code: 403", - "Message: Access Denied", - "Details: ", - "\tReasons:", - "\t\tfoo", - "\t\tbar" - ]) - self.assertEqual(str(error), expected_error) - self.assertTrue(isinstance(error, AccessDeniedError)) - - def test_unauthorized_error(self): - response = MockResponse(401, { - 'message': 'The access token you sent could not be found or is invalid.', - 'requestId': '$foobar123' - }) - - error = get_error(response) - - self.assertEqual(error.status_code, 401) - expected_error = "\n".join([ - 'HTTP status code: 401', - 'Message: The access token you sent could not be found or is invalid.', - 'Request ID: $foobar123' - ]) - self.assertEqual(str(error), expected_error) - self.assertTrue(isinstance(error, UnauthorizedError)) - - def test_rate_limit_exceeded_error(self): - response = MockResponse(429, { - 'message': 'Rate Limit Exceeded' - }) - - error = get_error(response) - - self.assertEqual(error.status_code, 429) - expected_error = "\n".join([ - "HTTP status code: 429", - "Message: Rate Limit Exceeded" - ]) - self.assertEqual(str(error), expected_error) - self.assertTrue(isinstance(error, RateLimitExceededError)) - - def test_rate_limit_exceeded_error_with_time(self): - response = MockResponse(429, {}, headers={ - 'x-contentful-ratelimit-reset': 60 - }) - - error = get_error(response) - - self.assertEqual(error.status_code, 429) - expected_error = "\n".join([ - "HTTP status code: 429", - "Message: Rate limit exceeded. 
Too many requests.", - "Time until reset (seconds): 60" - ]) - self.assertEqual(str(error), expected_error) - self.assertTrue(isinstance(error, RateLimitExceededError)) - - def test_server_error(self): - response = MockResponse(500, { - 'message': 'Server Error' - }) - - error = get_error(response) - - self.assertEqual(error.status_code, 500) - expected_error = "\n".join([ - "HTTP status code: 500", - "Message: Server Error" - ]) - self.assertEqual(str(error), expected_error) - self.assertTrue(isinstance(error, ServerError)) - - def test_service_unavailable_error(self): - response = MockResponse(503, { - 'message': 'Service Unavailable' - }) - - error = get_error(response) - - self.assertEqual(error.status_code, 503) - expected_error = "\n".join([ - "HTTP status code: 503", - "Message: Service Unavailable" - ]) - self.assertEqual(str(error), expected_error) - self.assertTrue(isinstance(error, ServiceUnavailableError)) - - def test_other_error(self): - response = MockResponse(418, { - 'message': "I'm a Teapot" - }) - - error = get_error(response) - - self.assertEqual(error.status_code, 418) - expected_error = "\n".join([ - "HTTP status code: 418", - "Message: I'm a Teapot" - ]) - self.assertEqual(str(error), expected_error) - self.assertTrue(isinstance(error, HTTPError)) - - def test_rate_limit_retries(self): - global http_attempts - client = Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - - http_attempts = 0 - result = retry_request(client)(mock_http_call)('/foo', {}) - - self.assertEqual(http_attempts, 1) - self.assertEqual(result, 'succeed') - - def test_rate_limit_max_retries(self): - global http_attempts - client = Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - - http_attempts = 0 - self.assertRaises(RateLimitExceededError, retry_request(client)(mock_http_call), '/foo', {'fail_until': 2}) - - def test_rate_limit_max_wait(self): - global http_attempts - client = Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - - http_attempts = 0 - self.assertRaises(RateLimitExceededError, retry_request(client)(mock_http_call), '/foo', {'reset': 100}) - - def test_predefined_errors_default_message(self): - messages = { - 400: "The request was malformed or missing a required parameter.", - 401: "The authorization token was invalid.", - 403: "The specified token does not have access to the requested resource.", - 404: "The requested resource or endpoint could not be found.", - 429: "Rate limit exceeded. Too many requests.", - 500: "Internal server error.", - 502: "The requested space is hibernated.", - 503: "The request was malformed or missing a required parameter." 
- } - - for status_code, message in messages.items(): - response = MockResponse(status_code, "foo", invalid_json=True) - - error = get_error(response) - - expected_error = "\n".join([ - "HTTP status code: {0}".format(status_code), - "Message: {0}".format(message) - ]) - self.assertEqual(str(error), expected_error) diff --git a/tests/resource_test.py b/tests/resource_test.py deleted file mode 100644 index fe47d7a..0000000 --- a/tests/resource_test.py +++ /dev/null @@ -1,220 +0,0 @@ -import vcr -import pickle -from datetime import datetime -from unittest import TestCase -from contentful.resource import Resource, FieldsResource, Link -from contentful.client import Client - - -class ResourceTest(TestCase): - def test_resource(self): - resource = Resource({ - 'sys': { - 'space': { - 'sys': { - 'type': 'Link', - 'linkType': 'Space', - 'id': 'foo' - } - }, - 'contentType': { - 'sys': { - 'type': 'Link', - 'linkType': 'ContentType', - 'id': 'bar' - } - }, - 'type': 'Entry', - 'createdAt': '2016-06-06', - 'updatedAt': '2016-06-06', - 'deletedAt': '2016-06-06', - 'id': 'foobar', - 'version': 1 - } - }) - - self.assertEqual(str(resource.space), "") - self.assertEqual(str(resource.content_type), "") - self.assertEqual(resource.created_at, datetime(2016, 6, 6)) - self.assertEqual(resource.updated_at, datetime(2016, 6, 6)) - self.assertEqual(resource.deleted_at, datetime(2016, 6, 6)) - self.assertEqual(resource.id, 'foobar') - self.assertEqual(resource.version, 1) - - self.assertRaises(AttributeError, resource.__getattr__, 'foo') - - -class FieldsResourceTest(TestCase): - def test_fields_resource(self): - resource = FieldsResource({ - 'sys': { - 'space': { - 'sys': { - 'type': 'Link', - 'linkType': 'Space', - 'id': 'foo' - } - }, - 'contentType': { - 'sys': { - 'type': 'Link', - 'linkType': 'ContentType', - 'id': 'bar' - } - }, - 'type': 'Entry', - 'createdAt': '2016-06-06', - 'updatedAt': '2016-06-06', - 'deletedAt': '2016-06-06', - 'id': 'foobar', - 'version': 1, - 'locale': 'foo-locale' - }, - 'fields': { - 'foo': 'bar', - 'baz': 123, - 'qux': True - } - }) - - self.assertEqual(resource.foo, 'bar') - self.assertEqual(resource.baz, 123) - self.assertEqual(resource.qux, True) - self.assertEqual(resource.fields(), {'foo': 'bar', 'baz': 123, 'qux': True}) - self.assertEqual(resource.fields('foo-locale'), {'foo': 'bar', 'baz': 123, 'qux': True}) - self.assertEqual(resource.fields('bar-locale'), {}) - - self.assertRaises(AttributeError, resource.__getattr__, 'foobar') - - def test_pickleable_resource(self): - resource = FieldsResource({ - 'sys': { - 'space': { - 'sys': { - 'type': 'Link', - 'linkType': 'Space', - 'id': 'foo' - } - }, - 'contentType': { - 'sys': { - 'type': 'Link', - 'linkType': 'ContentType', - 'id': 'bar' - } - }, - 'type': 'Entry', - 'createdAt': '2016-06-06', - 'updatedAt': '2016-06-06', - 'deletedAt': '2016-06-06', - 'id': 'foobar', - 'version': 1, - 'locale': 'foo-locale' - }, - 'fields': { - 'foo': 'bar', - 'baz': 123, - 'qux': True - } - }) - - serialized_resource = pickle.dumps(resource) - deserialized_resource = pickle.loads(serialized_resource) - - self.assertEqual(resource.foo, deserialized_resource.foo) - self.assertEqual(resource.baz, deserialized_resource.baz) - self.assertEqual(resource.qux, deserialized_resource.qux) - self.assertEqual(resource.fields(), deserialized_resource.fields()) - self.assertEqual(resource.fields('foo-locale'), deserialized_resource.fields('foo-locale')) - self.assertEqual(resource.fields('bar-locale'), deserialized_resource.fields('bar-locale')) - - 
self.assertRaises(AttributeError, resource.__getattr__, 'foobar') - - def test_fields_resource_localized(self): - resource = FieldsResource({ - 'sys': { - 'space': { - 'sys': { - 'type': 'Link', - 'linkType': 'Space', - 'id': 'foo' - } - }, - 'contentType': { - 'sys': { - 'type': 'Link', - 'linkType': 'ContentType', - 'id': 'bar' - } - }, - 'type': 'Entry', - 'createdAt': '2016-06-06', - 'updatedAt': '2016-06-06', - 'deletedAt': '2016-06-06', - 'id': 'foobar', - 'version': 1, - 'locale': 'foo-locale' - }, - 'fields': { - 'foo': {'foo-locale': 'bar-foo', 'bar-locale': 'bar-bar'}, - 'baz': {'foo-locale': 123, 'bar-locale': 456}, - 'qux': {'foo-locale': True, 'bar-locale': False} - } - }, - localized=True) - - self.assertEqual(resource.foo, 'bar-foo') - self.assertEqual(resource.baz, 123) - self.assertEqual(resource.qux, True) - self.assertEqual(resource.fields(), {'foo': 'bar-foo', 'baz': 123, 'qux': True}) - self.assertEqual(resource.fields('foo-locale'), {'foo': 'bar-foo', 'baz': 123, 'qux': True}) - self.assertEqual(resource.fields('bar-locale'), {'foo': 'bar-bar', 'baz': 456, 'qux': False}) - self.assertEqual(resource.fields('baz-locale'), {}) - - self.assertRaises(AttributeError, resource.__getattr__, 'foobar') - - -class LinkTest(TestCase): - def test_link(self): - link = Link({ - 'sys': { - 'type': 'Link', - 'linkType': 'Space', - 'id': 'foo' - } - }) - - self.assertEqual(link.id, 'foo') - self.assertEqual(link.type, 'Link') - self.assertEqual(link.link_type, 'Space') - self.assertEqual(str(link), "") - - @vcr.use_cassette('fixtures/link/resolve_space.yaml') - def test_link_space_resource(self): - link = Link({ - 'sys': { - 'type': 'Link', - 'linkType': 'Space', - 'id': 'cfexampleapi' - } - }) - client = Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - - space = link.resolve(client) - - self.assertEqual(str(space), "") - - @vcr.use_cassette('fixtures/link/resolve_other.yaml') - def test_link_other_resource(self): - link = Link({ - 'sys': { - 'type': 'Link', - 'linkType': 'ContentType', - 'id': 'cat' - } - }) - client = Client('cfexampleapi', 'b4c0n73n7fu1', content_type_cache=False) - - cat_ct = link.resolve(client) - - self.assertEqual(str(cat_ct), "") diff --git a/tests/array_test.py b/tests/test_array.py similarity index 100% rename from tests/array_test.py rename to tests/test_array.py diff --git a/tests/asset_test.py b/tests/test_asset.py similarity index 100% rename from tests/asset_test.py rename to tests/test_asset.py diff --git a/tests/test_client.py b/tests/test_client.py new file mode 100644 index 0000000..49c765f --- /dev/null +++ b/tests/test_client.py @@ -0,0 +1,607 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +import requests_mock +import vcr +import re +from unittest import TestCase + +from requests_mock import ANY + +from contentful.client import Client +from contentful.content_type_cache import ContentTypeCache +from contentful.errors import EntryNotFoundError +from contentful.errors import HTTPError +from contentful.client.base import ConfigurationException +from contentful.entry import Entry + + +class ClientTest(TestCase): + def setUp(self): + ContentTypeCache.__CACHE__ = {} + + def test_client_repr(self): + self.assertEqual( + "", + str(Client("cfexampleapi", "b4c0n73n7fu1", content_type_cache=False)), + ) + + def test_client_validations(self): + with self.assertRaises(ConfigurationException): + Client(None, "foo") + with self.assertRaises(ConfigurationException): + Client("foo", None) + with 
self.assertRaises(ConfigurationException): + Client("foo", "bar", api_url=None) + with self.assertRaises(ConfigurationException): + Client("foo", "bar", default_locale=None) + with self.assertRaises(ConfigurationException): + Client("foo", "bar", api_version=None) + + def test_uses_timeouts(self): + c = Client("cfexampleapi", "b4c0n73n7fu1") + with requests_mock.mock() as m: + m.register_uri("GET", ANY, status_code=500) + self.assertRaises(HTTPError, c.entries) + self.assertEqual(m.call_count, 1) + self.assertEqual(m.request_history[0].timeout, 1) + + c = Client("cfexampleapi", "b4c0n73n7fu1", timeout_s=0.1231570235) + with requests_mock.mock() as m: + m.register_uri("GET", ANY, status_code=500) + self.assertRaises(HTTPError, c.entries) + self.assertEqual(m.call_count, 1) + self.assertEqual(m.request_history[0].timeout, c.timeout_s) + + def test_client_creates_a_content_type_cache(self): + with vcr.use_cassette("fixtures/client/content_type_cache.yaml"): + client = Client("cfexampleapi", "b4c0n73n7fu1") + client.initialize() + self.assertTrue(len(ContentTypeCache.__CACHE__) > 0) + + def test_client_can_avoid_caching_content_types(self): + Client("cfexampleapi", "b4c0n73n7fu1", content_type_cache=False) + + self.assertFalse(len(ContentTypeCache.__CACHE__) > 0) + + def test_client_get_space(self): + with vcr.use_cassette("fixtures/client/space.yaml"): + client = Client("cfexampleapi", "b4c0n73n7fu1", content_type_cache=False) + space = client.space() + + self.assertEqual( + str(space), "" + ) + + def test_client_get_content_type(self): + with vcr.use_cassette("fixtures/client/content_type.yaml"): + client = Client("cfexampleapi", "b4c0n73n7fu1", content_type_cache=False) + ct = client.content_type("cat") + + self.assertEqual(str(ct), "") + + def test_client_get_content_types(self): + with vcr.use_cassette("fixtures/client/content_types.yaml"): + client = Client("cfexampleapi", "b4c0n73n7fu1", content_type_cache=False) + cts = client.content_types() + + self.assertEqual( + str(cts[0]), "" + ) + + def test_client_entry(self): + with vcr.use_cassette("fixtures/client/entry.yaml"): + client = Client("cfexampleapi", "b4c0n73n7fu1", content_type_cache=False) + entry = client.entry("nyancat") + + self.assertEqual(str(entry), "") + self.assertEqual(str(entry.best_friend), "") + + def test_client_entry_not_found(self): + with vcr.use_cassette("fixtures/client/entry_not_found.yaml"): + client = Client("cfexampleapi", "b4c0n73n7fu1", content_type_cache=False) + self.assertRaises(EntryNotFoundError, client.entry, "foobar") + + def test_client_entries(self): + with vcr.use_cassette("fixtures/client/entries.yaml"): + client = Client("cfexampleapi", "b4c0n73n7fu1", content_type_cache=False) + entries = client.entries() + + self.assertEqual(str(entries[0]), "") + + def test_client_entries_select(self): + with vcr.use_cassette("fixtures/client/entries_select.yaml"): + client = Client("cfexampleapi", "b4c0n73n7fu1", content_type_cache=False) + entries = client.entries( + {"content_type": "cat", "sys.id": "nyancat", "select": ["fields.name"]} + ) + + self.assertEqual(str(entries[0]), "") + self.assertEqual(entries[0].fields(), {"name": "Nyan Cat"}) + + def test_client_entries_links_to_entry(self): + with vcr.use_cassette("fixtures/client/entries_links_to_entry.yaml"): + client = Client("cfexampleapi", "b4c0n73n7fu1", content_type_cache=False) + entries = client.entries({"links_to_entry": "nyancat"}) + self.assertEqual(len(entries), 1) + self.assertEqual(str(entries[0]), "") + + def 
test_entry_incoming_references(self): + with vcr.use_cassette("fixtures/client/entry_incoming_references.yaml"): + client = Client("cfexampleapi", "b4c0n73n7fu1", content_type_cache=False) + entry = client.entry("nyancat") + entries = entry.incoming_references(client) + self.assertEqual(len(entries), 1) + self.assertEqual(str(entries[0]), "") + + def test_entry_incoming_references_with_query(self): + with vcr.use_cassette( + "fixtures/client/entry_incoming_references_with_query.yaml" + ): + client = Client("cfexampleapi", "b4c0n73n7fu1", content_type_cache=False) + entry = client.entry("nyancat") + entries = entry.incoming_references( + client, {"content_type": "cat", "select": ["fields.name"]} + ) + self.assertEqual(len(entries), 1) + self.assertEqual(str(entries[0]), "") + self.assertEqual(entries[0].fields(), {"name": "Happy Cat"}) + + def test_client_entries_links_to_asset(self): + with vcr.use_cassette("fixtures/client/entries_links_to_asset.yaml"): + client = Client("cfexampleapi", "b4c0n73n7fu1", content_type_cache=False) + entries = client.entries({"links_to_asset": "nyancat"}) + self.assertEqual(len(entries), 1) + self.assertEqual(str(entries[0]), "") + + def test_asset_incoming_references(self): + with vcr.use_cassette("fixtures/client/asset_incoming_references.yaml"): + client = Client("cfexampleapi", "b4c0n73n7fu1", content_type_cache=False) + asset = client.asset("nyancat") + entries = asset.incoming_references(client) + self.assertEqual(len(entries), 1) + self.assertEqual(str(entries[0]), "") + + def test_client_asset(self): + with vcr.use_cassette("fixtures/client/asset.yaml"): + client = Client("cfexampleapi", "b4c0n73n7fu1", content_type_cache=False) + asset = client.asset("nyancat") + + self.assertEqual( + str(asset), + "", + ) + + def test_client_locales_on_environment(self): + with vcr.use_cassette("fixtures/client/locales_on_environment.yaml"): + client = Client( + "facgnwwgj5fe", + "", + environment="testing", + content_type_cache=False, + ) + locales = client.locales() + + self.assertEqual( + str(locales), "" + ) + self.assertEqual( + str(locales[0]), + "", + ) + + def test_client_assets(self): + with vcr.use_cassette("fixtures/client/assets.yaml"): + client = Client("cfexampleapi", "b4c0n73n7fu1", content_type_cache=False) + assets = client.assets() + + self.assertEqual( + str(assets[0]), + "", + ) + + def test_client_sync(self): + with vcr.use_cassette("fixtures/client/sync.yaml"): + client = Client("cfexampleapi", "b4c0n73n7fu1", content_type_cache=False) + sync = client.sync({"initial": True}) + + self.assertEqual( + str(sync), + "".format( + "w5ZGw6JFwqZmVcKsE8Kow4grw45QdybCnV_Cg8OASMKpwo1UY8K8bsKFwqJrw7DDhcKnM2RDOVbDt1E-wo7CnDjChMKKGsK1wrzCrBzCqMOpZAwOOcOvCcOAwqHDv0XCiMKaOcOxZA8BJUzDr8K-wo1lNx7DnHE" + ), + ) + self.assertEqual( + str(sync.items[0]), + "", + ) + + def test_client_sync_with_environments(self): + with vcr.use_cassette("fixtures/client/sync_environments.yaml"): + client = Client( + "a22o2qgm356c", + "bfbc63cf745a037125dbcc64f716a9a0e9d091df1a79e84920b890f87a6e7ab9", + environment="staging", + content_type_cache=False, + ) + sync = client.sync({"initial": True}) + + self.assertEqual(sync.items[0].environment.id, "staging") + + def test_client_creates_wrapped_arrays(self): + with vcr.use_cassette("fixtures/client/array_endpoints.yaml"): + client = Client("cfexampleapi", "b4c0n73n7fu1", content_type_cache=False) + self.assertEqual( + str(client.content_types()), + "", + ) + self.assertEqual( + str(client.entries()), + "", + ) + self.assertEqual( + 
str(client.assets()), "" + ) + + # X-Contentful-User-Agent Headers + + def test_client_default_contentful_user_agent_headers(self): + client = Client("cfexampleapi", "b4c0n73n7fu1", content_type_cache=False) + + from contentful import __version__ + import platform + + expected = [ + "sdk contentful.py/{0};".format(__version__), + "platform python/{0};".format(platform.python_version()), + ] + header = client._contentful_user_agent() + for e in expected: + self.assertTrue(e in header) + + self.assertTrue(re.search("os (Windows|macOS|Linux)(\/.*)?;", header)) + + self.assertTrue("integration" not in header) + self.assertTrue("app" not in header) + + def test_client_with_integration_name_only_headers(self): + client = Client( + "cfexampleapi", + "b4c0n73n7fu1", + content_type_cache=False, + integration_name="foobar", + ) + + header = client._contentful_user_agent() + self.assertTrue("integration foobar;" in header) + self.assertFalse("integration foobar/;" in header) + + def test_client_with_integration_headers(self): + client = Client( + "cfexampleapi", + "b4c0n73n7fu1", + content_type_cache=False, + integration_name="foobar", + integration_version="0.1.0", + ) + + header = client._contentful_user_agent() + self.assertTrue("integration foobar/0.1.0;" in header) + + def test_client_with_application_name_only_headers(self): + client = Client( + "cfexampleapi", + "b4c0n73n7fu1", + content_type_cache=False, + application_name="foobar", + ) + + header = client._contentful_user_agent() + self.assertTrue("app foobar;" in header) + self.assertFalse("app foobar/;" in header) + + def test_client_with_application_headers(self): + client = Client( + "cfexampleapi", + "b4c0n73n7fu1", + content_type_cache=False, + application_name="foobar", + application_version="0.1.0", + ) + + header = client._contentful_user_agent() + self.assertTrue("app foobar/0.1.0;" in header) + + def test_client_with_integration_version_only_does_not_include_integration_in_header( + self, + ): + client = Client( + "cfexampleapi", + "b4c0n73n7fu1", + content_type_cache=False, + integration_version="0.1.0", + ) + + header = client._contentful_user_agent() + self.assertFalse("integration /0.1.0" in header) + + def test_client_with_application_version_only_does_not_include_integration_in_header( + self, + ): + client = Client( + "cfexampleapi", + "b4c0n73n7fu1", + content_type_cache=False, + application_version="0.1.0", + ) + + header = client._contentful_user_agent() + self.assertFalse("app /0.1.0;" in header) + + def test_client_with_all_headers(self): + client = Client( + "cfexampleapi", + "b4c0n73n7fu1", + content_type_cache=False, + application_name="foobar_app", + application_version="1.1.0", + integration_name="foobar integ", + integration_version="0.1.0", + ) + + from contentful import __version__ + import platform + + expected = [ + "sdk contentful.py/{0};".format(__version__), + "platform python/{0};".format(platform.python_version()), + "app foobar_app/1.1.0;", + "integration foobar integ/0.1.0;", + ] + header = client._contentful_user_agent() + for e in expected: + self.assertTrue(e in header) + + self.assertTrue(re.search("os (Windows|macOS|Linux)(\/.*)?;", header)) + + def test_client_headers(self): + client = Client( + "cfexampleapi", + "b4c0n73n7fu1", + content_type_cache=False, + application_name="foobar_app", + application_version="1.1.0", + integration_name="foobar integ", + integration_version="0.1.0", + ) + + from contentful import __version__ + import platform + + expected = [ + "sdk 
contentful.py/{0};".format(__version__), + "platform python/{0};".format(platform.python_version()), + "app foobar_app/1.1.0;", + "integration foobar integ/0.1.0;", + ] + header = client._request_headers()["X-Contentful-User-Agent"] + for e in expected: + self.assertTrue(e in header) + + self.assertTrue(re.search("os (Windows|macOS|Linux)(\/.*)?;", header)) + + # Integration Tests + + def test_entries_dont_fail_with_unicode_characters(self): + with vcr.use_cassette("fixtures/integration/issue-4.yaml"): + client = Client( + "wltm0euukdog", + "bbe871957bb60f988af6cbeeccbb178c36cae09e36e8098357e27b51dd38d88e", + content_type_cache=True, + ) + entries = client.entries() + self.assertEqual(entries[0].name, "😅") + + def test_entries_dont_fail_with_arrays_as_json_root(self): + with vcr.use_cassette("fixtures/integration/json-arrays.yaml"): + client = Client( + "4int1zgmkwcf", + "d2ac2076019bd4a8357811cbdd5563bb7186d90d77e53c265a1bafd9f83439e8", + content_type_cache=True, + ) + entries = client.entries() + self.assertEqual(entries[0].json, [{"foo": "bar"}, {"baz": "qux"}]) + + def test_entries_with_none_values_on_all_fields(self): + with vcr.use_cassette("fixtures/integration/issue-11.yaml"): + client = Client( + "rtx5c7z0zbas", + "a6c8dc438d470c51d1094dad146a1f20fcdba41e21f4e263af6c3f70d8583634", + content_type_cache=True, + ) + entry = client.entries()[0] + self.assertEqual(entry.symbol, None) + self.assertEqual(entry.text, None) + self.assertEqual(entry.integer, None) + self.assertEqual(entry.number, None) + self.assertEqual(entry.date, None) + self.assertEqual(entry.location, None) + self.assertEqual(entry.asset, None) + self.assertEqual(entry.bool, None) + self.assertEqual(entry.json, None) + self.assertEqual(entry.link, None) + + def test_circular_references_default_depth(self): + with vcr.use_cassette("fixtures/integration/circular-references.yaml"): + client = Client( + "rk19fq93y3vw", + "821aa502a7ce820e46adb30fa6942889619aac4342a7021cfe15197c52a593cc", + content_type_cache=True, + ) + a = client.entry("6kdfS7uMs8owuEIoSaOcQk") + self.assertEqual(str(a), "") + self.assertEqual(str(a.b), "") + self.assertEqual( + str(a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a), + "", + ) + self.assertEqual( + str(a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b), + "", + ) + + def test_circular_references_set_depth(self): + with vcr.use_cassette("fixtures/integration/circular-references.yaml"): + client = Client( + "rk19fq93y3vw", + "821aa502a7ce820e46adb30fa6942889619aac4342a7021cfe15197c52a593cc", + content_type_cache=True, + max_include_resolution_depth=1, + ) + a = client.entry("6kdfS7uMs8owuEIoSaOcQk") + self.assertEqual(str(a), "") + self.assertEqual(str(a.b), "") + self.assertEqual(str(a.b.a), "") + + def test_circular_references_with_reusable_entries(self): + with vcr.use_cassette("fixtures/integration/circular-references.yaml"): + client = Client( + "rk19fq93y3vw", + "821aa502a7ce820e46adb30fa6942889619aac4342a7021cfe15197c52a593cc", + content_type_cache=True, + reuse_entries=True, + ) + a = client.entry("6kdfS7uMs8owuEIoSaOcQk") + self.assertEqual(str(a), "") + self.assertEqual(str(a.b), "") + self.assertEqual( + str(a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a), + "", + ) + self.assertEqual( + str(a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b), + "", + ) + self.assertEqual(a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b._depth, 1) + self.assertEqual(a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a._depth, 0) + self.assertEqual( + str( + a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a 
+ ), + "", + ) + + def test_unresolvable_entries_dont_get_included(self): + with vcr.use_cassette("fixtures/integration/errors-filtered.yaml"): + client = Client( + "011npgaszg5o", + "42c9d93410a7319e9a735671fc1e415348f65e94a99fc768b70a7c649859d4fd", + ) + + entry = client.entry("1HR1QvURo4MoSqO0eqmUeO") + self.assertEqual(len(entry.modules), 2) + + def test_rich_text_field(self): + with vcr.use_cassette("fixtures/fields/rich_text.yaml"): + client = Client( + "jd7yc4wnatx3", + "6256b8ef7d66805ca41f2728271daf27e8fa6055873b802a813941a0fe696248", + gzip_encoded=False, + ) + + entry = client.entry("4BupPSmi4M02m0U48AQCSM") + + expected_entry_occurrances = 2 + embedded_entry_index = 1 + for content in entry.body["content"]: + if content["nodeType"] == "embedded-entry-block": + self.assertTrue(isinstance(content["data"]["target"], Entry)) + self.assertEqual( + content["data"]["target"].body, + "Embedded {0}".format(embedded_entry_index), + ) + expected_entry_occurrances -= 1 + embedded_entry_index += 1 + self.assertEqual(expected_entry_occurrances, 0) + + def test_rich_text_field_with_embeds_in_lists(self): + with vcr.use_cassette("fixtures/fields/rich_text_lists_with_embeds.yaml"): + client = Client( + "jd7yc4wnatx3", + "6256b8ef7d66805ca41f2728271daf27e8fa6055873b802a813941a0fe696248", + gzip_encoded=False, + ) + + entry = client.entry("6NGLswCREsGA28kGouScyY") + + # Hyperlink data is conserved + self.assertEqual( + entry.body["content"][0], + { + "data": {}, + "content": [ + { + "marks": [], + "value": "A link to ", + "nodeType": "text", + "nodeClass": "text", + }, + { + "data": {"uri": "https://google.com"}, + "content": [ + { + "marks": [], + "value": "google", + "nodeType": "text", + "nodeClass": "text", + } + ], + "nodeType": "hyperlink", + "nodeClass": "inline", + }, + { + "marks": [], + "value": "", + "nodeType": "text", + "nodeClass": "text", + }, + ], + "nodeType": "paragraph", + "nodeClass": "block", + }, + ) + + # Unordered lists and ordered lists can contain embedded entries + self.assertEqual(entry.body["content"][3]["nodeType"], "unordered-list") + self.assertEqual( + str( + entry.body["content"][3]["content"][2]["content"][0]["data"][ + "target" + ] + ), + "", + ) + + self.assertEqual(entry.body["content"][4]["nodeType"], "ordered-list") + self.assertEqual( + str( + entry.body["content"][4]["content"][2]["content"][0]["data"][ + "target" + ] + ), + "", + ) + + def test_rich_text_fields_should_not_get_hydrated_twice(self): + with vcr.use_cassette("fixtures/integration/issue-41.yaml"): + client = Client( + "fds721b88p6b", + "45ba81cc69423fcd2e3f0a4779de29481bb5c11495bc7e14649a996cf984e98e", + gzip_encoded=False, + ) + + entry = client.entry("1tBAu0wP9qAQEg6qCqMics") + + # Not failing is already a success + self.assertEqual(str(entry.children[0]), str(entry.children[1])) + self.assertEqual(str(entry.children[0].body), str(entry.children[1].body)) diff --git a/tests/content_type_test.py b/tests/test_content_type.py similarity index 100% rename from tests/content_type_test.py rename to tests/test_content_type.py diff --git a/tests/test_content_type_cache.py b/tests/test_content_type_cache.py new file mode 100644 index 0000000..ade7173 --- /dev/null +++ b/tests/test_content_type_cache.py @@ -0,0 +1,25 @@ +import vcr +from unittest import TestCase +from contentful.content_type_cache import ContentTypeCache +from contentful import Client + + +class ContentTypeCacheTest(TestCase): + def test_cache(self): + with vcr.use_cassette("fixtures/cache/cache.yaml"): + client = 
Client("o4h6g9w3pooi", "b4c0n73n7fu1") + client.initialize() + cat_ct = ContentTypeCache.get("o4h6g9w3pooi", "article") + self.assertEqual(str(cat_ct), "") + + def test_cache_update(self): + with vcr.use_cassette("fixtures/cache/cache.yaml"): + ContentTypeCache.__CACHE__ = {} + client = Client("o4h6g9w3pooi", "b4c0n73n7fu1", content_type_cache=False) + content_types = client.content_types() + ContentTypeCache.update_cache( + space_id=client.space_id, content_types=content_types + ) + + cat_ct = ContentTypeCache.get("o4h6g9w3pooi", "article") + self.assertEqual(str(cat_ct), "") diff --git a/tests/content_type_field_test.py b/tests/test_content_type_field.py similarity index 100% rename from tests/content_type_field_test.py rename to tests/test_content_type_field.py diff --git a/tests/content_type_field_types_test.py b/tests/test_content_type_field_types.py similarity index 100% rename from tests/content_type_field_types_test.py rename to tests/test_content_type_field_types.py diff --git a/tests/deleted_asset_test.py b/tests/test_deleted_asset.py similarity index 100% rename from tests/deleted_asset_test.py rename to tests/test_deleted_asset.py diff --git a/tests/deleted_entry_test.py b/tests/test_deleted_entry.py similarity index 100% rename from tests/deleted_entry_test.py rename to tests/test_deleted_entry.py diff --git a/tests/entry_test.py b/tests/test_entry.py similarity index 100% rename from tests/entry_test.py rename to tests/test_entry.py diff --git a/tests/test_errors.py b/tests/test_errors.py new file mode 100644 index 0000000..a096ba0 --- /dev/null +++ b/tests/test_errors.py @@ -0,0 +1,387 @@ +import json +from unittest import TestCase +from contentful import errors, utils, client +from contentful.client.transport import retry + + +class MockResponse(object): + def __init__(self, status_code, json, headers=None, invalid_json=False): + self.status_code = status_code + self._json = json + self._invalid_json = invalid_json + self.headers = headers if headers is not None else {} + + def json(self): + if self._invalid_json: + raise json.JSONDecodeError("foo", "foo", 0) + return json.loads(json.dumps(self._json)) + + @property + def text(self): + return self._json + + +http_attempts = 0 + + +def mock_http_call(url, query): + global http_attempts + if http_attempts < query.get("fail_until", 1): + http_attempts += 1 + raise errors.RateLimitExceededError( + MockResponse( + 429, + {"message": "foo"}, + headers={"x-contentful-ratelimit-reset": query.get("reset", 0.1)}, + ) + ) + return "succeed" + + +class ErrorsTest(TestCase): + def test_default_additional_info_is_empty(self): + response = MockResponse(512, "not json", invalid_json=True) + error = errors.get_error(response) + + self.assertEqual(error._additional_error_info(), []) + + def test_default_error_message(self): + response = MockResponse(512, "not json", invalid_json=True) + error = errors.get_error(response) + + expected_error = "\n".join( + [ + "HTTP status code: 512", + "Message: The following error was received: not json", + ] + ) + self.assertEqual(str(error), expected_error) + + def test_generic_details(self): + response = MockResponse(512, {"details": "some text"}) + error = errors.get_error(response) + + expected_error = "\n".join( + [ + "HTTP status code: 512", + "Message: The following error was received: {'details': 'some text'}", + "Details: some text", + ] + ) + self.assertEqual(str(error), expected_error) + + def test_not_found_error(self): + response = MockResponse( + 404, + { + "message": "The resource could not 
be found.", + "details": {"type": "Asset", "id": "foobar"}, + "requestId": "$foobar123", + }, + ) + + error = errors.get_error(response) + + self.assertEqual(error.status_code, 404) + expected_error = "\n".join( + [ + "HTTP status code: 404", + "Message: The resource could not be found.", + "Details: The requested Asset could not be found. ID: foobar.", + "Request ID: $foobar123", + ] + ) + self.assertEqual(str(error), expected_error) + self.assertTrue(isinstance(error, errors.NotFoundError)) + + def test_not_found_error_with_sys_on_details(self): + response = MockResponse( + 404, + { + "message": "The resource could not be found.", + "details": {"sys": {"type": "Space", "id": "foobar"}}, + "requestId": "$foobar123", + }, + ) + + error = errors.get_error(response) + + self.assertEqual(error.status_code, 404) + expected_error = "\n".join( + [ + "HTTP status code: 404", + "Message: The resource could not be found.", + "Details: The requested Space could not be found. ID: foobar.", + "Request ID: $foobar123", + ] + ) + self.assertEqual(str(error), expected_error) + self.assertTrue(isinstance(error, errors.NotFoundError)) + + def test_not_found_error_details_is_a_string(self): + response = MockResponse( + 404, + { + "message": "The resource could not be found.", + "details": "The resource could not be found", + "requestId": "$foobar123", + }, + ) + + error = errors.get_error(response) + + self.assertEqual(error.status_code, 404) + expected_error = "\n".join( + [ + "HTTP status code: 404", + "Message: The resource could not be found.", + "Details: The resource could not be found", + "Request ID: $foobar123", + ] + ) + self.assertEqual(str(error), expected_error) + self.assertTrue(isinstance(error, errors.NotFoundError)) + + def test_bad_request_error(self): + response = MockResponse( + 400, + { + "message": "The query you sent was invalid. Probably a filter or ordering specification is not applicable to the type of a field.", + "details": { + "errors": [ + {"details": 'The path "invalid_param" is not recognized'} + ] + }, + "requestId": "$foobar234", + }, + ) + + error = errors.get_error(response) + + self.assertEqual(error.status_code, 400) + expected_error = "\n".join( + [ + "HTTP status code: 400", + "Message: The query you sent was invalid. Probably a filter or ordering specification is not applicable to the type of a field.", + 'Details: The path "invalid_param" is not recognized', + "Request ID: $foobar234", + ] + ) + self.assertEqual(str(error), expected_error) + self.assertTrue(isinstance(error, errors.BadRequestError)) + + def test_bad_request_error_details_is_string(self): + response = MockResponse( + 400, + { + "message": "The query you sent was invalid. Probably a filter or ordering specification is not applicable to the type of a field.", + "details": "some error", + "requestId": "$foobar234", + }, + ) + + error = errors.get_error(response) + + self.assertEqual(error.status_code, 400) + expected_error = "\n".join( + [ + "HTTP status code: 400", + "Message: The query you sent was invalid. Probably a filter or ordering specification is not applicable to the type of a field.", + "Details: some error", + "Request ID: $foobar234", + ] + ) + self.assertEqual(str(error), expected_error) + self.assertTrue(isinstance(error, errors.BadRequestError)) + + def test_bad_request_error_errors_details_is_string(self): + response = MockResponse( + 400, + { + "message": "The query you sent was invalid. 
Probably a filter or ordering specification is not applicable to the type of a field.", + "details": {"errors": ["some error"]}, + "requestId": "$foobar234", + }, + ) + + error = errors.get_error(response) + + self.assertEqual(error.status_code, 400) + expected_error = "\n".join( + [ + "HTTP status code: 400", + "Message: The query you sent was invalid. Probably a filter or ordering specification is not applicable to the type of a field.", + "Details: some error", + "Request ID: $foobar234", + ] + ) + self.assertEqual(str(error), expected_error) + self.assertTrue(isinstance(error, errors.BadRequestError)) + + def test_access_denied_error(self): + response = MockResponse( + 403, {"message": "Access Denied", "details": {"reasons": ["foo", "bar"]}} + ) + + error = errors.get_error(response) + + self.assertEqual(error.status_code, 403) + expected_error = "\n".join( + [ + "HTTP status code: 403", + "Message: Access Denied", + "Details: ", + "\tReasons:", + "\t\tfoo", + "\t\tbar", + ] + ) + self.assertEqual(str(error), expected_error) + self.assertTrue(isinstance(error, errors.AccessDeniedError)) + + def test_unauthorized_error(self): + response = MockResponse( + 401, + { + "message": "The access token you sent could not be found or is invalid.", + "requestId": "$foobar123", + }, + ) + + error = errors.get_error(response) + + self.assertEqual(error.status_code, 401) + expected_error = "\n".join( + [ + "HTTP status code: 401", + "Message: The access token you sent could not be found or is invalid.", + "Request ID: $foobar123", + ] + ) + self.assertEqual(str(error), expected_error) + self.assertTrue(isinstance(error, errors.UnauthorizedError)) + + def test_rate_limit_exceeded_error(self): + response = MockResponse(429, {"message": "Rate Limit Exceeded"}) + + error = errors.get_error(response) + + self.assertEqual(error.status_code, 429) + expected_error = "\n".join( + ["HTTP status code: 429", "Message: Rate Limit Exceeded"] + ) + self.assertEqual(str(error), expected_error) + self.assertTrue(isinstance(error, errors.RateLimitExceededError)) + + def test_rate_limit_exceeded_error_with_time(self): + response = MockResponse(429, {}, headers={"x-contentful-ratelimit-reset": 60}) + + error = errors.get_error(response) + + self.assertEqual(error.status_code, 429) + expected_error = "\n".join( + [ + "HTTP status code: 429", + "Message: Rate limit exceeded. 
Too many requests.", + "Time until reset (seconds): 60", + ] + ) + self.assertEqual(str(error), expected_error) + self.assertTrue(isinstance(error, errors.RateLimitExceededError)) + + def test_server_error(self): + response = MockResponse(500, {"message": "Server Error"}) + + error = errors.get_error(response) + + self.assertEqual(error.status_code, 500) + expected_error = "\n".join(["HTTP status code: 500", "Message: Server Error"]) + self.assertEqual(str(error), expected_error) + self.assertTrue(isinstance(error, errors.ServerError)) + + def test_service_unavailable_error(self): + response = MockResponse(503, {"message": "Service Unavailable"}) + + error = errors.get_error(response) + + self.assertEqual(error.status_code, 503) + expected_error = "\n".join( + ["HTTP status code: 503", "Message: Service Unavailable"] + ) + self.assertEqual(str(error), expected_error) + self.assertTrue(isinstance(error, errors.ServiceUnavailableError)) + + def test_other_error(self): + response = MockResponse(418, {"message": "I'm a Teapot"}) + + error = errors.get_error(response) + + self.assertEqual(error.status_code, 418) + expected_error = "\n".join(["HTTP status code: 418", "Message: I'm a Teapot"]) + self.assertEqual(str(error), expected_error) + self.assertTrue(isinstance(error, errors.HTTPError)) + + def test_rate_limit_retries(self): + global http_attempts + client_ = client.Client( + "cfexampleapi", "b4c0n73n7fu1", content_type_cache=False + ) + + http_attempts = 0 + result = retry.Retry()(mock_http_call)("/foo", {}) + + self.assertEqual(http_attempts, 1) + self.assertEqual(result, "succeed") + + def test_rate_limit_max_retries(self): + global http_attempts + client_ = client.Client( + "cfexampleapi", "b4c0n73n7fu1", content_type_cache=False + ) + + http_attempts = 0 + self.assertRaises( + errors.RateLimitExceededError, + retry.Retry()(mock_http_call), + "/foo", + {"fail_until": 2}, + ) + + def test_rate_limit_max_wait(self): + global http_attempts + client_ = client.Client( + "cfexampleapi", "b4c0n73n7fu1", content_type_cache=False + ) + + http_attempts = 0 + self.assertRaises( + errors.RateLimitExceededError, + retry.Retry()(mock_http_call), + "/foo", + {"reset": 100}, + ) + + def test_predefined_errors_default_message(self): + messages = { + 400: "The request was malformed or missing a required parameter.", + 401: "The authorization token was invalid.", + 403: "The specified token does not have access to the requested resource.", + 404: "The requested resource or endpoint could not be found.", + 429: "Rate limit exceeded. 
Too many requests.", + 500: "Internal server error.", + 502: "The requested space is hibernated.", + 503: "The request was malformed or missing a required parameter.", + } + + for status_code, message in messages.items(): + response = MockResponse(status_code, "foo", invalid_json=True) + + error = errors.get_error(response) + + expected_error = "\n".join( + [ + "HTTP status code: {0}".format(status_code), + "Message: {0}".format(message), + ] + ) + self.assertEqual(str(error), expected_error) diff --git a/tests/locale_test.py b/tests/test_locale.py similarity index 100% rename from tests/locale_test.py rename to tests/test_locale.py diff --git a/tests/test_resource.py b/tests/test_resource.py new file mode 100644 index 0000000..95305e3 --- /dev/null +++ b/tests/test_resource.py @@ -0,0 +1,186 @@ +import vcr +import pickle +from datetime import datetime +from unittest import TestCase +from contentful.resource import Resource, FieldsResource, Link +from contentful.client import Client + + +class ResourceTest(TestCase): + def test_resource(self): + resource = Resource( + { + "sys": { + "space": { + "sys": {"type": "Link", "linkType": "Space", "id": "foo"} + }, + "contentType": { + "sys": {"type": "Link", "linkType": "ContentType", "id": "bar"} + }, + "type": "Entry", + "createdAt": "2016-06-06", + "updatedAt": "2016-06-06", + "deletedAt": "2016-06-06", + "id": "foobar", + "version": 1, + } + } + ) + + self.assertEqual(str(resource.space), "") + self.assertEqual(str(resource.content_type), "") + self.assertEqual(resource.created_at, datetime(2016, 6, 6)) + self.assertEqual(resource.updated_at, datetime(2016, 6, 6)) + self.assertEqual(resource.deleted_at, datetime(2016, 6, 6)) + self.assertEqual(resource.id, "foobar") + self.assertEqual(resource.version, 1) + + self.assertRaises(AttributeError, resource.__getattr__, "foo") + + +class FieldsResourceTest(TestCase): + def test_fields_resource(self): + resource = FieldsResource( + { + "sys": { + "space": { + "sys": {"type": "Link", "linkType": "Space", "id": "foo"} + }, + "contentType": { + "sys": {"type": "Link", "linkType": "ContentType", "id": "bar"} + }, + "type": "Entry", + "createdAt": "2016-06-06", + "updatedAt": "2016-06-06", + "deletedAt": "2016-06-06", + "id": "foobar", + "version": 1, + "locale": "foo-locale", + }, + "fields": {"foo": "bar", "baz": 123, "qux": True}, + } + ) + + self.assertEqual(resource.foo, "bar") + self.assertEqual(resource.baz, 123) + self.assertEqual(resource.qux, True) + self.assertEqual(resource.fields(), {"foo": "bar", "baz": 123, "qux": True}) + self.assertEqual( + resource.fields("foo-locale"), {"foo": "bar", "baz": 123, "qux": True} + ) + self.assertEqual(resource.fields("bar-locale"), {}) + + self.assertRaises(AttributeError, resource.__getattr__, "foobar") + + def test_pickleable_resource(self): + resource = FieldsResource( + { + "sys": { + "space": { + "sys": {"type": "Link", "linkType": "Space", "id": "foo"} + }, + "contentType": { + "sys": {"type": "Link", "linkType": "ContentType", "id": "bar"} + }, + "type": "Entry", + "createdAt": "2016-06-06", + "updatedAt": "2016-06-06", + "deletedAt": "2016-06-06", + "id": "foobar", + "version": 1, + "locale": "foo-locale", + }, + "fields": {"foo": "bar", "baz": 123, "qux": True}, + } + ) + + serialized_resource = pickle.dumps(resource) + deserialized_resource = pickle.loads(serialized_resource) + + self.assertEqual(resource.foo, deserialized_resource.foo) + self.assertEqual(resource.baz, deserialized_resource.baz) + self.assertEqual(resource.qux, 
deserialized_resource.qux) + self.assertEqual(resource.fields(), deserialized_resource.fields()) + self.assertEqual( + resource.fields("foo-locale"), deserialized_resource.fields("foo-locale") + ) + self.assertEqual( + resource.fields("bar-locale"), deserialized_resource.fields("bar-locale") + ) + + self.assertRaises(AttributeError, resource.__getattr__, "foobar") + + def test_fields_resource_localized(self): + resource = FieldsResource( + { + "sys": { + "space": { + "sys": {"type": "Link", "linkType": "Space", "id": "foo"} + }, + "contentType": { + "sys": {"type": "Link", "linkType": "ContentType", "id": "bar"} + }, + "type": "Entry", + "createdAt": "2016-06-06", + "updatedAt": "2016-06-06", + "deletedAt": "2016-06-06", + "id": "foobar", + "version": 1, + "locale": "foo-locale", + }, + "fields": { + "foo": {"foo-locale": "bar-foo", "bar-locale": "bar-bar"}, + "baz": {"foo-locale": 123, "bar-locale": 456}, + "qux": {"foo-locale": True, "bar-locale": False}, + }, + }, + localized=True, + ) + + self.assertEqual(resource.foo, "bar-foo") + self.assertEqual(resource.baz, 123) + self.assertEqual(resource.qux, True) + self.assertEqual(resource.fields(), {"foo": "bar-foo", "baz": 123, "qux": True}) + self.assertEqual( + resource.fields("foo-locale"), {"foo": "bar-foo", "baz": 123, "qux": True} + ) + self.assertEqual( + resource.fields("bar-locale"), {"foo": "bar-bar", "baz": 456, "qux": False} + ) + self.assertEqual(resource.fields("baz-locale"), {}) + + self.assertRaises(AttributeError, resource.__getattr__, "foobar") + + +class LinkTest(TestCase): + def test_link(self): + link = Link({"sys": {"type": "Link", "linkType": "Space", "id": "foo"}}) + + self.assertEqual(link.id, "foo") + self.assertEqual(link.type, "Link") + self.assertEqual(link.link_type, "Space") + self.assertEqual(str(link), "") + + def test_link_space_resource(self): + with vcr.use_cassette("fixtures/link/resolve_space.yaml"): + link = Link( + {"sys": {"type": "Link", "linkType": "Space", "id": "cfexampleapi"}} + ) + client = Client("cfexampleapi", "b4c0n73n7fu1", content_type_cache=False) + + space = link.resolve(client) + + self.assertEqual( + str(space), "" + ) + + def test_link_other_resource(self): + with vcr.use_cassette("fixtures/link/resolve_other.yaml"): + link = Link( + {"sys": {"type": "Link", "linkType": "ContentType", "id": "cat"}} + ) + client = Client("cfexampleapi", "b4c0n73n7fu1", content_type_cache=False) + + cat_ct = link.resolve(client) + + self.assertEqual(str(cat_ct), "") diff --git a/tests/resource_builder_test.py b/tests/test_resource_builder.py similarity index 100% rename from tests/resource_builder_test.py rename to tests/test_resource_builder.py diff --git a/tests/space_test.py b/tests/test_space.py similarity index 100% rename from tests/space_test.py rename to tests/test_space.py diff --git a/tests/sync_page_test.py b/tests/test_sync_page.py similarity index 100% rename from tests/sync_page_test.py rename to tests/test_sync_page.py diff --git a/tests/utils_test.py b/tests/test_utils.py similarity index 100% rename from tests/utils_test.py rename to tests/test_utils.py From a91093d679e65f8df5014cf555c106146e627be2 Mon Sep 17 00:00:00 2001 From: Sean Stewart Date: Fri, 23 Feb 2024 13:02:29 -0500 Subject: [PATCH 05/15] Start getting tests working with modern toolchain --- tox.ini | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/tox.ini b/tox.ini index c4e7bfb..d75e9d8 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = 
{py27,py34,py35,py36,pypy,pypy3}-{flakes,requests27,requests26,requests25,requests24,requests23,requests22,requests1,httplib2,urllib317,urllib319,urllib3110} +envlist = {py37,py38,py39,py310,py311,py312,pypy3}-{flakes} [testenv:flakes] skipsdist = True @@ -17,16 +17,6 @@ deps = python-dateutil vcrpy requests-mock - requests1: requests==1.2.3 - requests27: requests==2.7.0 - requests26: requests==2.6.0 - requests25: requests==2.5.0 - requests24: requests==2.4.0 - requests23: requests==2.3.0 - requests22: requests==2.2.1 - urllib317: urllib3==1.7.1 - urllib319: urllib3==1.9.1 - urllib3110: urllib3==1.10.2 [flake8] ; Usual line length should be kept to 80, From 332bc6ebc6a87475c03357c17b38b11096f9697f Mon Sep 17 00:00:00 2001 From: Sean Stewart Date: Wed, 28 Feb 2024 12:23:02 -0500 Subject: [PATCH 06/15] Ignore pycharm project files. --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index 66fd3ba..3d9516e 100644 --- a/.gitignore +++ b/.gitignore @@ -55,3 +55,6 @@ _docs/_build/ # PyBuilder target/ + +# dev +.idea From 6ebb4c095a2a88041b9524ae599cd2bab4bb3623 Mon Sep 17 00:00:00 2001 From: Sean Stewart Date: Wed, 28 Feb 2024 12:23:10 -0500 Subject: [PATCH 07/15] All tests passing. --- Makefile | 2 +- contentful/client/base.py | 7 +- contentful/client/transport/abstract.py | 18 +-- contentful/client/transport/errors.py | 41 +++-- contentful/client/transport/retry.py | 39 ++--- tests/__init__.py | 21 --- tests/test_client.py | 19 ++- tests/test_errors.py | 190 ++++++++++++++++-------- 8 files changed, 202 insertions(+), 135 deletions(-) diff --git a/Makefile b/Makefile index 1695e15..62fbb11 100644 --- a/Makefile +++ b/Makefile @@ -54,7 +54,7 @@ test-all: tox coverage: - coverage run --source contentful setup.py test + coverage run --source contentful python -m unittest discover tests coverage report -m flake8 contentful diff --git a/contentful/client/base.py b/contentful/client/base.py index d25a18f..2d5b447 100644 --- a/contentful/client/base.py +++ b/contentful/client/base.py @@ -196,13 +196,13 @@ def transport(self) -> abstract.AbstractTransport: return self._transport def qualified_url(self) -> str: - scheme = "https" if self.https else "http" + scheme = "https://" if self.https else "http://" hostname = self.api_url if hostname.startswith("http"): scheme = "" path = f"/spaces/{self.space_id}/environments/{self.environment}/" - url = f"{scheme}://{hostname}{path}" + url = f"{scheme}{hostname}{path}" return url def _get_transport(self) -> abstract.AbstractTransport: @@ -326,7 +326,8 @@ def _format_response( max_depth=self.max_include_resolution_depth, reuse_entries=self.reuse_entries, ) - return builder.build() + resource = builder.build() + return resource def _has_proxy(self) -> bool: """ diff --git a/contentful/client/transport/abstract.py b/contentful/client/transport/abstract.py index 6b3400e..241a96b 100644 --- a/contentful/client/transport/abstract.py +++ b/contentful/client/transport/abstract.py @@ -1,6 +1,7 @@ from __future__ import annotations import abc +import http import types from typing import ( Generic, @@ -204,19 +205,16 @@ def parse_response( ) -> ResponseT | dict[str, Any]: """Parse the received response, raising an error if necessary.""" if status_code >= 400: - err_cls = errors.get_error_for_status_code(status_code) - try: - body = orjson.loads(content) - except orjson.JSONDecodeError: - body = {} - info = errors.ErrorResponseInfo( - status_code=status_code, + if reason is None: + reason = http.HTTPStatus(status_code).phrase + + 
err = errors.get_error_for_status_code( + status_code, + content=content, reason=reason, headers=headers, - content=content.decode(), - body=body, ) - raise err_cls(reason, response=info) + raise err if raw_mode: # Read the data from the fd before closing the connection. diff --git a/contentful/client/transport/errors.py b/contentful/client/transport/errors.py index 67f8f79..43a58e4 100644 --- a/contentful/client/transport/errors.py +++ b/contentful/client/transport/errors.py @@ -2,6 +2,8 @@ import dataclasses +import orjson + """ contentful.client.transport.errors ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -34,10 +36,28 @@ def get_error_for_status_code( - status_code, *, default: type[HTTPError] | None = None -) -> type[HTTPError]: + status_code: int, + *, + default: type[HTTPError] | None = None, + content: bytes = b"{}", + reason: str | None = None, + headers: dict[str, str] | None = None, +) -> HTTPError: default = default or PermanentHTTPError - return _HTTP_STATUS_TO_ERROR_MAP.get(status_code, default) + err_cls = _HTTP_STATUS_TO_ERROR_MAP.get(status_code, default) + headers = headers or {} + try: + body = orjson.loads(content) + except orjson.JSONDecodeError: + body = {} + info = ErrorResponseInfo( + status_code=status_code, + reason=reason, + headers=headers, + content=content.decode(), + body=body, + ) + return err_cls(reason, response=info) class HTTPError(Exception): @@ -59,12 +79,15 @@ def _handle_details(self, details: list[dict] | str) -> str: def _has_additional_error_info(self): return False - def _additional_error_into(self) -> list[str]: + def _additional_error_info(self) -> list[str]: return [] def _best_available_message(self) -> str: message = self.response.body.get("message") details = self.response.body.get("details") + if isinstance(details, dict) and "errors" in details: + details = details["errors"] + request_id = self.response.body.get("requestId") status_str = ( f"HTTP status code: {self.response.status_code}" @@ -73,14 +96,14 @@ def _best_available_message(self) -> str: ) message_str = f"Message: {message or self._default_error_message()}" details_str = f"Details: {self._handle_details(details)}" if details else None - request_id_str = f"RequestId: {request_id}" if request_id else None + request_id_str = f"Request ID: {request_id}" if request_id else None messages = ( status_str, message_str, details_str, request_id_str, - *self._additional_error_into(), + *self._additional_error_info(), ) error_message = "\n".join(s for s in messages if s is not None) return error_message @@ -110,7 +133,7 @@ def _handle_details(self, details: list[dict | str] | str) -> str: class UnauthorizedError(PermanentHTTPError): def _default_error_message(self) -> str: - return "The authorization token was invalid" + return "The authorization token was invalid." class AccessDeniedError(PermanentHTTPError): @@ -123,7 +146,7 @@ def _handle_details(self, details: dict) -> str: class NotFoundError(PermanentHTTPError): def _default_error_message(self) -> str: - return "The requested resource or endpoint could not be found" + return "The requested resource or endpoint could not be found." def _handle_details(self, details: dict | str) -> str: if isinstance(details, str): @@ -187,7 +210,7 @@ def _default_error_message(self) -> str: class BadGatewayError(TransientHTTPError): def _default_error_message(self) -> str: - return "The requested space is hibernated" + return "The requested space is hibernated." 
class ServiceUnavailableError(TransientHTTPError): diff --git a/contentful/client/transport/retry.py b/contentful/client/transport/retry.py index 158907a..0c64afe 100644 --- a/contentful/client/transport/retry.py +++ b/contentful/client/transport/retry.py @@ -76,24 +76,19 @@ def __call__( **kwargs, ): call = functools.partial(func, *args, **kwargs) - try: - return call() - except errors.TransientHTTPError as error: - tries = 1 - while tries < self.max_retries: + tries = 0 + while tries <= self.max_retries: + try: + return call() + except errors.TransientHTTPError as error: + tries += 1 reset_time = error.reset_time() - if reset_time > self.max_wait_seconds: + if reset_time > self.max_wait_seconds or tries > self.max_retries: raise self._report_error(error, tries=tries, reset_time=reset_time) real_reset_time = reset_time * random.uniform(1.0, 1.2) time.sleep(real_reset_time) - try: - return call() - except errors.TransientHTTPError: - tries += 1 - - raise class AsyncRetry(BaseRetry): @@ -108,21 +103,15 @@ async def __call__( **kwargs, ): call = functools.partial(func, *args, **kwargs) - try: - return await call() - except errors.TransientHTTPError as error: - tries = 1 - while tries < self.max_retries: + tries = 0 + while tries <= self.max_retries: + try: + return await call() + except errors.TransientHTTPError as error: + tries += 1 reset_time = error.reset_time() - if reset_time > self.max_wait_seconds: + if reset_time > self.max_wait_seconds or tries >= self.max_retries: raise - self._report_error(error, tries=tries, reset_time=reset_time) real_reset_time = reset_time * random.uniform(1.0, 1.2) time.sleep(real_reset_time) - try: - return await call() - except errors.TransientHTTPError: - tries += 1 - - raise diff --git a/tests/__init__.py b/tests/__init__.py index cd93735..e69de29 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,21 +0,0 @@ -import sys -import os -from .asset_test import * -from .client_test import * -from .content_type_cache_test import * -from .content_type_field_types_test import * -from .content_type_field_test import * -from .content_type_test import * -from .deleted_asset_test import * -from .deleted_entry_test import * -from .entry_test import * -from .errors_test import * -from .locale_test import * -from .resource_builder_test import * -from .resource_test import * -from .space_test import * -from .sync_page_test import * -from .utils_test import * - - -sys.path.insert(0, os.path.abspath('..')) diff --git a/tests/test_client.py b/tests/test_client.py index 49c765f..27764a8 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -39,17 +39,26 @@ def test_client_validations(self): Client("foo", "bar", api_version=None) def test_uses_timeouts(self): - c = Client("cfexampleapi", "b4c0n73n7fu1") + with requests_mock.mock() as m: + c = Client("cfexampleapi", "b4c0n73n7fu1", max_rate_limit_retries=0) m.register_uri("GET", ANY, status_code=500) - self.assertRaises(HTTPError, c.entries) + with self.assertRaises(HTTPError): + c.entries() self.assertEqual(m.call_count, 1) self.assertEqual(m.request_history[0].timeout, 1) - c = Client("cfexampleapi", "b4c0n73n7fu1", timeout_s=0.1231570235) with requests_mock.mock() as m: + c = Client( + "cfexampleapi", + "b4c0n73n7fu1", + timeout_s=0.1231570235, + max_rate_limit_retries=0, + ) m.register_uri("GET", ANY, status_code=500) - self.assertRaises(HTTPError, c.entries) + with self.assertRaises(HTTPError): + c.entries() + self.assertEqual(m.call_count, 1) self.assertEqual(m.request_history[0].timeout, c.timeout_s) 
@@ -508,6 +517,7 @@ def test_rich_text_field(self): "6256b8ef7d66805ca41f2728271daf27e8fa6055873b802a813941a0fe696248", gzip_encoded=False, ) + client.initialize() entry = client.entry("4BupPSmi4M02m0U48AQCSM") @@ -531,6 +541,7 @@ def test_rich_text_field_with_embeds_in_lists(self): "6256b8ef7d66805ca41f2728271daf27e8fa6055873b802a813941a0fe696248", gzip_encoded=False, ) + client.initialize() entry = client.entry("6NGLswCREsGA28kGouScyY") diff --git a/tests/test_errors.py b/tests/test_errors.py index a096ba0..0a47130 100644 --- a/tests/test_errors.py +++ b/tests/test_errors.py @@ -1,53 +1,67 @@ -import json +from __future__ import annotations + +import orjson from unittest import TestCase -from contentful import errors, utils, client -from contentful.client.transport import retry +from contentful import client +from contentful.client.transport import errors class MockResponse(object): - def __init__(self, status_code, json, headers=None, invalid_json=False): + def __init__( + self, + status_code: int, + json: dict | list | str, + headers: dict | None = None, + invalid_json: bool = False, + ): self.status_code = status_code - self._json = json + self._json = json.encode() if isinstance(json, str) else orjson.dumps(json) self._invalid_json = invalid_json self.headers = headers if headers is not None else {} def json(self): if self._invalid_json: - raise json.JSONDecodeError("foo", "foo", 0) - return json.loads(json.dumps(self._json)) + raise orjson.JSONDecodeError("foo", "foo", 0) + return orjson.loads(self._json) @property - def text(self): + def content(self) -> bytes: return self._json + @property + def text(self): + return self._json.decode() + http_attempts = 0 -def mock_http_call(url, query): +def mock_http_call(url: str, query: dict) -> str: global http_attempts if http_attempts < query.get("fail_until", 1): http_attempts += 1 - raise errors.RateLimitExceededError( - MockResponse( - 429, - {"message": "foo"}, - headers={"x-contentful-ratelimit-reset": query.get("reset", 0.1)}, - ) + error = errors.get_error_for_status_code( + status_code=429, + content=b'{"message": "foo"}', + headers={"x-contentful-ratelimit-reset": query.get("reset", 0.1)}, ) + raise error return "succeed" class ErrorsTest(TestCase): def test_default_additional_info_is_empty(self): - response = MockResponse(512, "not json", invalid_json=True) - error = errors.get_error(response) - + error = errors.get_error_for_status_code( + 512, + content=b"not json", + ) self.assertEqual(error._additional_error_info(), []) def test_default_error_message(self): - response = MockResponse(512, "not json", invalid_json=True) - error = errors.get_error(response) + error = errors.get_error_for_status_code( + 512, + content=b"not json", + ) expected_error = "\n".join( [ @@ -58,13 +72,15 @@ def test_default_error_message(self): self.assertEqual(str(error), expected_error) def test_generic_details(self): - response = MockResponse(512, {"details": "some text"}) - error = errors.get_error(response) + error = errors.get_error_for_status_code( + 512, + content=b'{"details":"some text"}', + ) expected_error = "\n".join( [ "HTTP status code: 512", - "Message: The following error was received: {'details': 'some text'}", + 'Message: The following error was received: {"details":"some text"}', "Details: some text", ] ) @@ -80,9 +96,13 @@ def test_not_found_error(self): }, ) - error = errors.get_error(response) + error = errors.get_error_for_status_code( + response.status_code, + content=response.content, + headers=response.headers, + ) - 
self.assertEqual(error.status_code, 404) + self.assertEqual(error.response.status_code, 404) expected_error = "\n".join( [ "HTTP status code: 404", @@ -104,9 +124,13 @@ def test_not_found_error_with_sys_on_details(self): }, ) - error = errors.get_error(response) + error = errors.get_error_for_status_code( + response.status_code, + content=response.content, + headers=response.headers, + ) - self.assertEqual(error.status_code, 404) + self.assertEqual(error.response.status_code, 404) expected_error = "\n".join( [ "HTTP status code: 404", @@ -128,9 +152,13 @@ def test_not_found_error_details_is_a_string(self): }, ) - error = errors.get_error(response) + error = errors.get_error_for_status_code( + response.status_code, + content=response.content, + headers=response.headers, + ) - self.assertEqual(error.status_code, 404) + self.assertEqual(error.response.status_code, 404) expected_error = "\n".join( [ "HTTP status code: 404", @@ -156,9 +184,13 @@ def test_bad_request_error(self): }, ) - error = errors.get_error(response) + error = errors.get_error_for_status_code( + response.status_code, + content=response.content, + headers=response.headers, + ) - self.assertEqual(error.status_code, 400) + self.assertEqual(error.response.status_code, 400) expected_error = "\n".join( [ "HTTP status code: 400", @@ -180,9 +212,13 @@ def test_bad_request_error_details_is_string(self): }, ) - error = errors.get_error(response) + error = errors.get_error_for_status_code( + response.status_code, + content=response.content, + headers=response.headers, + ) - self.assertEqual(error.status_code, 400) + self.assertEqual(error.response.status_code, 400) expected_error = "\n".join( [ "HTTP status code: 400", @@ -204,9 +240,13 @@ def test_bad_request_error_errors_details_is_string(self): }, ) - error = errors.get_error(response) + error = errors.get_error_for_status_code( + response.status_code, + content=response.content, + headers=response.headers, + ) - self.assertEqual(error.status_code, 400) + self.assertEqual(error.response.status_code, 400) expected_error = "\n".join( [ "HTTP status code: 400", @@ -223,9 +263,13 @@ def test_access_denied_error(self): 403, {"message": "Access Denied", "details": {"reasons": ["foo", "bar"]}} ) - error = errors.get_error(response) + error = errors.get_error_for_status_code( + response.status_code, + content=response.content, + headers=response.headers, + ) - self.assertEqual(error.status_code, 403) + self.assertEqual(error.response.status_code, 403) expected_error = "\n".join( [ "HTTP status code: 403", @@ -248,9 +292,13 @@ def test_unauthorized_error(self): }, ) - error = errors.get_error(response) + error = errors.get_error_for_status_code( + response.status_code, + content=response.content, + headers=response.headers, + ) - self.assertEqual(error.status_code, 401) + self.assertEqual(error.response.status_code, 401) expected_error = "\n".join( [ "HTTP status code: 401", @@ -264,9 +312,13 @@ def test_unauthorized_error(self): def test_rate_limit_exceeded_error(self): response = MockResponse(429, {"message": "Rate Limit Exceeded"}) - error = errors.get_error(response) + error = errors.get_error_for_status_code( + response.status_code, + content=response.content, + headers=response.headers, + ) - self.assertEqual(error.status_code, 429) + self.assertEqual(error.response.status_code, 429) expected_error = "\n".join( ["HTTP status code: 429", "Message: Rate Limit Exceeded"] ) @@ -276,9 +328,13 @@ def test_rate_limit_exceeded_error(self): def test_rate_limit_exceeded_error_with_time(self): 
response = MockResponse(429, {}, headers={"x-contentful-ratelimit-reset": 60}) - error = errors.get_error(response) + error = errors.get_error_for_status_code( + response.status_code, + content=response.content, + headers=response.headers, + ) - self.assertEqual(error.status_code, 429) + self.assertEqual(error.response.status_code, 429) expected_error = "\n".join( [ "HTTP status code: 429", @@ -292,9 +348,13 @@ def test_rate_limit_exceeded_error_with_time(self): def test_server_error(self): response = MockResponse(500, {"message": "Server Error"}) - error = errors.get_error(response) + error = errors.get_error_for_status_code( + response.status_code, + content=response.content, + headers=response.headers, + ) - self.assertEqual(error.status_code, 500) + self.assertEqual(error.response.status_code, 500) expected_error = "\n".join(["HTTP status code: 500", "Message: Server Error"]) self.assertEqual(str(error), expected_error) self.assertTrue(isinstance(error, errors.ServerError)) @@ -302,9 +362,13 @@ def test_server_error(self): def test_service_unavailable_error(self): response = MockResponse(503, {"message": "Service Unavailable"}) - error = errors.get_error(response) + error = errors.get_error_for_status_code( + response.status_code, + content=response.content, + headers=response.headers, + ) - self.assertEqual(error.status_code, 503) + self.assertEqual(error.response.status_code, 503) expected_error = "\n".join( ["HTTP status code: 503", "Message: Service Unavailable"] ) @@ -314,9 +378,13 @@ def test_service_unavailable_error(self): def test_other_error(self): response = MockResponse(418, {"message": "I'm a Teapot"}) - error = errors.get_error(response) + error = errors.get_error_for_status_code( + response.status_code, + content=response.content, + headers=response.headers, + ) - self.assertEqual(error.status_code, 418) + self.assertEqual(error.response.status_code, 418) expected_error = "\n".join(["HTTP status code: 418", "Message: I'm a Teapot"]) self.assertEqual(str(error), expected_error) self.assertTrue(isinstance(error, errors.HTTPError)) @@ -328,7 +396,7 @@ def test_rate_limit_retries(self): ) http_attempts = 0 - result = retry.Retry()(mock_http_call)("/foo", {}) + result = client_.transport.retry(mock_http_call, url="/foo", query={}) self.assertEqual(http_attempts, 1) self.assertEqual(result, "succeed") @@ -340,12 +408,8 @@ def test_rate_limit_max_retries(self): ) http_attempts = 0 - self.assertRaises( - errors.RateLimitExceededError, - retry.Retry()(mock_http_call), - "/foo", - {"fail_until": 2}, - ) + with self.assertRaises(errors.RateLimitExceededError): + client_.transport.retry(mock_http_call, url="/foo", query={"fail_until": 2}) def test_rate_limit_max_wait(self): global http_attempts @@ -354,12 +418,10 @@ def test_rate_limit_max_wait(self): ) http_attempts = 0 - self.assertRaises( + with self.assertRaises( errors.RateLimitExceededError, - retry.Retry()(mock_http_call), - "/foo", - {"reset": 100}, - ) + ): + client_.transport.retry(mock_http_call, url="/foo", query={"reset": 100}) def test_predefined_errors_default_message(self): messages = { @@ -376,7 +438,11 @@ def test_predefined_errors_default_message(self): for status_code, message in messages.items(): response = MockResponse(status_code, "foo", invalid_json=True) - error = errors.get_error(response) + error = errors.get_error_for_status_code( + response.status_code, + content=response.content, + headers=response.headers, + ) expected_error = "\n".join( [ From 365d5c93f2401d9d99c99b20883e24960e681e7b Mon Sep 17 
00:00:00 2001 From: Sean Stewart Date: Wed, 28 Feb 2024 12:27:43 -0500 Subject: [PATCH 08/15] Fix coverage command --- Makefile | 2 +- tox.ini | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 62fbb11..1ac047d 100644 --- a/Makefile +++ b/Makefile @@ -54,7 +54,7 @@ test-all: tox coverage: - coverage run --source contentful python -m unittest discover tests + coverage run -m unittest discover tests coverage report -m flake8 contentful diff --git a/tox.ini b/tox.ini index d75e9d8..aa98fd0 100644 --- a/tox.ini +++ b/tox.ini @@ -25,3 +25,4 @@ deps = ; due to sphinx rendering them, hence limit is set here ; to be extremely high. max_line_length = 180 +extend_ignore = E203,E701 From 2a982ee6337f16f03c028e646f5d1fa74f511b3e Mon Sep 17 00:00:00 2001 From: Sean Stewart Date: Wed, 28 Feb 2024 12:28:21 -0500 Subject: [PATCH 09/15] Fix tox command --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index aa98fd0..f9b87fe 100644 --- a/tox.ini +++ b/tox.ini @@ -11,7 +11,7 @@ deps = flake8 [testenv] commands = - python runtests.py + python -m unittest discover tests deps = PyYAML python-dateutil From 713062e17c1faac20a804874b79f106b9b1dc320 Mon Sep 17 00:00:00 2001 From: Sean Stewart Date: Wed, 28 Feb 2024 14:26:24 -0500 Subject: [PATCH 10/15] Tests for async client --- contentful/client/base.py | 3 + contentful/client/impl.py | 8 +- contentful/client/transport/aio.py | 14 +- requirements.txt | 1 + tests/test_async_client.py | 700 +++++++++++++++++++++++++++++ 5 files changed, 720 insertions(+), 6 deletions(-) create mode 100644 tests/test_async_client.py diff --git a/contentful/client/base.py b/contentful/client/base.py index 2d5b447..99bf609 100644 --- a/contentful/client/base.py +++ b/contentful/client/base.py @@ -152,6 +152,9 @@ def __init__( def initialize(self): raise NotImplementedError() + def teardown(self): + raise NotImplementedError() + def _get(self, url: str, query: QueryT | None = None): """ Wrapper for the HTTP Request, diff --git a/contentful/client/impl.py b/contentful/client/impl.py index a1d3a54..b4d8222 100644 --- a/contentful/client/impl.py +++ b/contentful/client/impl.py @@ -215,6 +215,9 @@ def initialize(self): if self.content_type_cache: self._cache_content_types() + def teardown(self): + self.transport.close() + def _cache_content_types(self): content_types = self.content_types() ContentTypeCache.update_cache( @@ -237,6 +240,9 @@ async def initialize(self): if self.content_type_cache: await self._cache_content_types() + async def teardown(self): + await self.transport.close() + async def _cache_content_types(self): content_types = await self.content_types() ContentTypeCache.update_cache( @@ -245,7 +251,7 @@ async def _cache_content_types(self): async def _get(self, url: str, query: QueryT | None = None): params = self._format_params(query) - response = await self.transport.get(url, params=params, raw_mode=self.raw_mode) + response = await self.transport.get(url, query=params, raw_mode=self.raw_mode) if self.raw_mode: return response return self._format_response(response=response, query=params) diff --git a/contentful/client/transport/aio.py b/contentful/client/transport/aio.py index 35e7af5..a9903fd 100644 --- a/contentful/client/transport/aio.py +++ b/contentful/client/transport/aio.py @@ -1,6 +1,8 @@ from __future__ import annotations import contextlib +import gzip +import urllib.parse from typing import AsyncIterator, Iterator, Any import aiohttp @@ -28,9 +30,9 @@ class AsyncTransport( async 
def initialize(self) -> aiohttp.ClientSession: if self._session is None: self._session = aiohttp.ClientSession( - base_url=self.base_url, timeout=aiohttp.ClientTimeout(total=self.timeout_s), headers=self.default_headers, + auto_decompress=True, ) return self._session @@ -53,7 +55,7 @@ async def get( **headers: str, ) -> dict[str, Any] | aiohttp.ClientResponse: response = await self.retry( - self._get, query=query, session=session, raw_mode=raw_mode, **headers + self._get, url, query=query, session=session, raw_mode=raw_mode, **headers ) return response @@ -66,12 +68,17 @@ async def _get( raw_mode: bool = False, **headers: str, ) -> dict[str, Any] | aiohttp.ClientResponse: + url = urllib.parse.urljoin(self.base_url, url) async with self.session(session=session) as sess: response: aiohttp.ClientResponse async with sess.get(url, params=query, headers=headers) as response: content = await response.read() status_code = response.status headers = response.headers + # For some reason aiohttp is failing to auto-decompress these. + if headers.get("Content-Encoding") == "gzip": + content = gzip.decompress(content) + reason = response.reason parsed = abstract.parse_response( status_code=status_code, @@ -106,6 +113,3 @@ def translate_async_transport_errors() -> Iterator[None]: # Malformed request, etc. except (aiohttp.ClientError, ValueError) as e: raise errors.PermanentHTTPError(e) from e - except Exception as e: - print(e) - raise diff --git a/requirements.txt b/requirements.txt index ae07a7c..69f87bc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,6 +9,7 @@ flake8>=7,<8 tox>=4,<5 virtualenv>=20,<21 requests-mock>=1.5,<2 +aioresponses >=0.7,<1 Sphinx>=7,<8 sphinxcontrib-websupport>=1,<2 diff --git a/tests/test_async_client.py b/tests/test_async_client.py new file mode 100644 index 0000000..27c9a73 --- /dev/null +++ b/tests/test_async_client.py @@ -0,0 +1,700 @@ +import aioresponses +import vcr +import re +from unittest import TestCase, IsolatedAsyncioTestCase, mock + +from contentful.client import AsyncClient +from contentful.content_type_cache import ContentTypeCache +from contentful.errors import EntryNotFoundError +from contentful.errors import HTTPError +from contentful.client.base import ConfigurationException +from contentful.entry import Entry + + +class AsyncClientTest(IsolatedAsyncioTestCase): + def setUp(self): + ContentTypeCache.__CACHE__ = {} + + async def asyncSetUp(self): + self.client = AsyncClient("cfexampleapi", "b4c0n73n7fu1") + self.client_no_cache = AsyncClient( + "cfexampleapi", "b4c0n73n7fu1", content_type_cache=False + ) + + async def asyncTearDown(self): + await self.client.teardown() + await self.client_no_cache.teardown() + + async def test_client_repr(self): + self.assertEqual( + "", + str(self.client), + ) + + async def test_client_validations(self): + with self.assertRaises(ConfigurationException): + AsyncClient(None, "foo") + with self.assertRaises(ConfigurationException): + AsyncClient("foo", None) + with self.assertRaises(ConfigurationException): + AsyncClient("foo", "bar", api_url=None) + with self.assertRaises(ConfigurationException): + AsyncClient("foo", "bar", default_locale=None) + with self.assertRaises(ConfigurationException): + AsyncClient("foo", "bar", api_version=None) + + async def test_uses_timeouts(self): + + with aioresponses.aioresponses() as m: + c = AsyncClient("cfexampleapi", "b4c0n73n7fu1", max_rate_limit_retries=0) + m.get(re.compile(".*"), status=500) + with self.assertRaises(HTTPError): + await c.entries() + 
self.assertEqual(len(m.requests), 1) + self.assertEqual(c.transport._session.timeout.total, 1) + await c.teardown() + + with aioresponses.aioresponses() as m: + c = AsyncClient( + "cfexampleapi", + "b4c0n73n7fu1", + timeout_s=0.1231570235, + max_rate_limit_retries=0, + ) + m.get(re.compile(".*"), status=500) + with self.assertRaises(HTTPError): + await c.entries() + + self.assertEqual(len(m.requests), 1) + self.assertEqual(c.transport._session.timeout.total, c.timeout_s) + await c.teardown() + + async def test_client_creates_a_content_type_cache(self): + with vcr.use_cassette( + "fixtures/client/content_type_cache.yaml", match_on=["method", "path"] + ): + client = AsyncClient("cfexampleapi", "b4c0n73n7fu1") + await client.initialize() + self.assertTrue(len(ContentTypeCache.__CACHE__) > 0) + await client.teardown() + + async def test_client_can_avoid_caching_content_types(self): + await self.client_no_cache.initialize() + self.assertFalse(len(ContentTypeCache.__CACHE__) > 0) + + async def test_client_get_space(self): + with vcr.use_cassette( + "fixtures/client/space.yaml", match_on=["method", "path", "query"] + ): + space = await self.client_no_cache.space() + + self.assertEqual( + str(space), "" + ) + + async def test_client_get_content_type(self): + with vcr.use_cassette( + "fixtures/client/content_type.yaml", match_on=["method", "path", "query"] + ): + ct = await self.client_no_cache.content_type("cat") + + self.assertEqual(str(ct), "") + + async def test_client_get_content_types(self): + with vcr.use_cassette( + "fixtures/client/content_types.yaml", match_on=["method", "path", "query"] + ): + cts = await self.client_no_cache.content_types() + + self.assertEqual( + str(cts[0]), "" + ) + + async def test_client_entry(self): + with vcr.use_cassette( + "fixtures/client/entry.yaml", match_on=["method", "path", "query"] + ): + + entry = await self.client_no_cache.entry("nyancat") + + self.assertEqual(str(entry), "") + self.assertEqual(str(entry.best_friend), "") + + async def test_client_entry_not_found(self): + with vcr.use_cassette( + "fixtures/client/entry_not_found.yaml", match_on=["method", "path", "query"] + ): + with self.assertRaises(EntryNotFoundError): + await self.client_no_cache.entry("foobar") + + async def test_client_entries(self): + with vcr.use_cassette( + "fixtures/client/entries.yaml", match_on=["method", "path", "query"] + ): + entries = await self.client_no_cache.entries() + + self.assertEqual(str(entries[0]), "") + + async def test_client_entries_select(self): + with vcr.use_cassette( + "fixtures/client/entries_select.yaml", match_on=["method", "path", "query"] + ): + + entries = await self.client_no_cache.entries( + {"content_type": "cat", "sys.id": "nyancat", "select": ["fields.name"]} + ) + + self.assertEqual(str(entries[0]), "") + self.assertEqual(entries[0].fields(), {"name": "Nyan Cat"}) + + async def test_client_entries_links_to_entry(self): + with vcr.use_cassette( + "fixtures/client/entries_links_to_entry.yaml", + match_on=["method", "path", "query"], + ): + entries = await self.client_no_cache.entries({"links_to_entry": "nyancat"}) + self.assertEqual(len(entries), 1) + self.assertEqual(str(entries[0]), "") + + async def test_entry_incoming_references(self): + with vcr.use_cassette( + "fixtures/client/entry_incoming_references.yaml", + match_on=["method", "path", "query"], + ): + entry = await self.client_no_cache.entry("nyancat") + entries = await entry.incoming_references(self.client_no_cache) + self.assertEqual(len(entries), 1) + 
self.assertEqual(str(entries[0]), "") + + async def test_entry_incoming_references_with_query(self): + with vcr.use_cassette( + "fixtures/client/entry_incoming_references_with_query.yaml", + match_on=["method", "path", "query"], + ): + entry = await self.client_no_cache.entry("nyancat") + entries = await entry.incoming_references( + self.client_no_cache, {"content_type": "cat", "select": ["fields.name"]} + ) + self.assertEqual(len(entries), 1) + self.assertEqual(str(entries[0]), "") + self.assertEqual(entries[0].fields(), {"name": "Happy Cat"}) + + async def test_client_entries_links_to_asset(self): + with vcr.use_cassette( + "fixtures/client/entries_links_to_asset.yaml", + match_on=["method", "path", "query"], + ): + entries = await self.client_no_cache.entries({"links_to_asset": "nyancat"}) + self.assertEqual(len(entries), 1) + self.assertEqual(str(entries[0]), "") + + async def test_asset_incoming_references(self): + with vcr.use_cassette( + "fixtures/client/asset_incoming_references.yaml", + match_on=["method", "path", "query"], + ): + asset = await self.client_no_cache.asset("nyancat") + entries = await asset.incoming_references(self.client_no_cache) + self.assertEqual(len(entries), 1) + self.assertEqual(str(entries[0]), "") + + async def test_client_asset(self): + with vcr.use_cassette( + "fixtures/client/asset.yaml", match_on=["method", "path", "query"] + ): + asset = await self.client_no_cache.asset("nyancat") + + self.assertEqual( + str(asset), + "", + ) + + async def test_client_locales_on_environment(self): + with vcr.use_cassette( + "fixtures/client/locales_on_environment.yaml", + match_on=["method", "path", "query"], + ): + client = AsyncClient( + "facgnwwgj5fe", + "", + environment="testing", + content_type_cache=False, + ) + locales = await client.locales() + await client.teardown() + + self.assertEqual( + str(locales), "" + ) + self.assertEqual( + str(locales[0]), + "", + ) + + async def test_client_assets(self): + with vcr.use_cassette( + "fixtures/client/assets.yaml", match_on=["method", "path", "query"] + ): + assets = await self.client_no_cache.assets() + + self.assertEqual( + str(assets[0]), + "", + ) + + async def test_client_sync(self): + with vcr.use_cassette( + "fixtures/client/sync.yaml", match_on=["method", "path", "query"] + ): + sync = await self.client_no_cache.sync({"initial": True}) + + self.assertEqual( + str(sync), + "".format( + "w5ZGw6JFwqZmVcKsE8Kow4grw45QdybCnV_Cg8OASMKpwo1UY8K8bsKFwqJrw7DDhcKnM2RDOVbDt1E-wo7CnDjChMKKGsK1wrzCrBzCqMOpZAwOOcOvCcOAwqHDv0XCiMKaOcOxZA8BJUzDr8K-wo1lNx7DnHE" + ), + ) + self.assertEqual( + str(sync.items[0]), + "", + ) + + async def test_client_sync_with_environments(self): + with vcr.use_cassette( + "fixtures/client/sync_environments.yaml", + match_on=["method", "path", "query"], + ): + client = AsyncClient( + "a22o2qgm356c", + "bfbc63cf745a037125dbcc64f716a9a0e9d091df1a79e84920b890f87a6e7ab9", + environment="staging", + content_type_cache=False, + ) + sync = await client.sync({"initial": True}) + await client.teardown() + + self.assertEqual(sync.items[0].environment.id, "staging") + + async def test_client_creates_wrapped_arrays(self): + with vcr.use_cassette( + "fixtures/client/array_endpoints.yaml", match_on=["method", "path", "query"] + ): + client = AsyncClient( + "cfexampleapi", "b4c0n73n7fu1", content_type_cache=False + ) + self.assertEqual( + str(await client.content_types()), + "", + ) + self.assertEqual( + str(await client.entries()), + "", + ) + self.assertEqual( + str(await client.assets()), + "", + ) + await 
client.teardown() + + # X-Contentful-User-Agent Headers + + def test_client_default_contentful_user_agent_headers(self): + client = AsyncClient("cfexampleapi", "b4c0n73n7fu1", content_type_cache=False) + + from contentful import __version__ + import platform + + expected = [ + "sdk contentful.py/{0};".format(__version__), + "platform python/{0};".format(platform.python_version()), + ] + header = client._contentful_user_agent() + for e in expected: + self.assertTrue(e in header) + + self.assertTrue(re.search("os (Windows|macOS|Linux)(\/.*)?;", header)) + + self.assertTrue("integration" not in header) + self.assertTrue("app" not in header) + + def test_client_with_integration_name_only_headers(self): + client = AsyncClient( + "cfexampleapi", + "b4c0n73n7fu1", + content_type_cache=False, + integration_name="foobar", + ) + + header = client._contentful_user_agent() + self.assertTrue("integration foobar;" in header) + self.assertFalse("integration foobar/;" in header) + + def test_client_with_integration_headers(self): + client = AsyncClient( + "cfexampleapi", + "b4c0n73n7fu1", + content_type_cache=False, + integration_name="foobar", + integration_version="0.1.0", + ) + + header = client._contentful_user_agent() + self.assertTrue("integration foobar/0.1.0;" in header) + + def test_client_with_application_name_only_headers(self): + client = AsyncClient( + "cfexampleapi", + "b4c0n73n7fu1", + content_type_cache=False, + application_name="foobar", + ) + + header = client._contentful_user_agent() + self.assertTrue("app foobar;" in header) + self.assertFalse("app foobar/;" in header) + + def test_client_with_application_headers(self): + client = AsyncClient( + "cfexampleapi", + "b4c0n73n7fu1", + content_type_cache=False, + application_name="foobar", + application_version="0.1.0", + ) + + header = client._contentful_user_agent() + self.assertTrue("app foobar/0.1.0;" in header) + + def test_client_with_integration_version_only_does_not_include_integration_in_header( + self, + ): + client = AsyncClient( + "cfexampleapi", + "b4c0n73n7fu1", + content_type_cache=False, + integration_version="0.1.0", + ) + + header = client._contentful_user_agent() + self.assertFalse("integration /0.1.0" in header) + + def test_client_with_application_version_only_does_not_include_integration_in_header( + self, + ): + client = AsyncClient( + "cfexampleapi", + "b4c0n73n7fu1", + content_type_cache=False, + application_version="0.1.0", + ) + + header = client._contentful_user_agent() + self.assertFalse("app /0.1.0;" in header) + + def test_client_with_all_headers(self): + client = AsyncClient( + "cfexampleapi", + "b4c0n73n7fu1", + content_type_cache=False, + application_name="foobar_app", + application_version="1.1.0", + integration_name="foobar integ", + integration_version="0.1.0", + ) + + from contentful import __version__ + import platform + + expected = [ + "sdk contentful.py/{0};".format(__version__), + "platform python/{0};".format(platform.python_version()), + "app foobar_app/1.1.0;", + "integration foobar integ/0.1.0;", + ] + header = client._contentful_user_agent() + for e in expected: + self.assertTrue(e in header) + + self.assertTrue(re.search("os (Windows|macOS|Linux)(\/.*)?;", header)) + + def test_client_headers(self): + client = AsyncClient( + "cfexampleapi", + "b4c0n73n7fu1", + content_type_cache=False, + application_name="foobar_app", + application_version="1.1.0", + integration_name="foobar integ", + integration_version="0.1.0", + ) + + from contentful import __version__ + import platform + + expected = [ + 
"sdk contentful.py/{0};".format(__version__), + "platform python/{0};".format(platform.python_version()), + "app foobar_app/1.1.0;", + "integration foobar integ/0.1.0;", + ] + header = client._request_headers()["X-Contentful-User-Agent"] + for e in expected: + self.assertTrue(e in header) + + self.assertTrue(re.search("os (Windows|macOS|Linux)(\/.*)?;", header)) + + # Integration Tests + + async def test_entries_dont_fail_with_unicode_characters(self): + with vcr.use_cassette( + "fixtures/integration/issue-4.yaml", match_on=["method", "path", "query"] + ): + client = AsyncClient( + "wltm0euukdog", + "bbe871957bb60f988af6cbeeccbb178c36cae09e36e8098357e27b51dd38d88e", + content_type_cache=True, + ) + entries = await client.entries() + self.assertEqual(entries[0].name, "😅") + await client.teardown() + + async def test_entries_dont_fail_with_arrays_as_json_root(self): + with vcr.use_cassette( + "fixtures/integration/json-arrays.yaml", + match_on=["method", "path", "query"], + ): + client = AsyncClient( + "4int1zgmkwcf", + "d2ac2076019bd4a8357811cbdd5563bb7186d90d77e53c265a1bafd9f83439e8", + content_type_cache=True, + ) + entries = await client.entries() + self.assertEqual(entries[0].json, [{"foo": "bar"}, {"baz": "qux"}]) + await client.teardown() + + async def test_entries_with_none_values_on_all_fields(self): + with vcr.use_cassette( + "fixtures/integration/issue-11.yaml", match_on=["method", "path", "query"] + ): + client = AsyncClient( + "rtx5c7z0zbas", + "a6c8dc438d470c51d1094dad146a1f20fcdba41e21f4e263af6c3f70d8583634", + content_type_cache=True, + ) + entry = (await client.entries())[0] + await client.teardown() + self.assertEqual(entry.symbol, None) + self.assertEqual(entry.text, None) + self.assertEqual(entry.integer, None) + self.assertEqual(entry.number, None) + self.assertEqual(entry.date, None) + self.assertEqual(entry.location, None) + self.assertEqual(entry.asset, None) + self.assertEqual(entry.bool, None) + self.assertEqual(entry.json, None) + self.assertEqual(entry.link, None) + + async def test_circular_references_default_depth(self): + with vcr.use_cassette( + "fixtures/integration/circular-references.yaml", + match_on=["method", "path", "query"], + ): + client = AsyncClient( + "rk19fq93y3vw", + "821aa502a7ce820e46adb30fa6942889619aac4342a7021cfe15197c52a593cc", + content_type_cache=True, + ) + a = await client.entry("6kdfS7uMs8owuEIoSaOcQk") + await client.teardown() + self.assertEqual(str(a), "") + self.assertEqual(str(a.b), "") + self.assertEqual( + str(a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a), + "", + ) + self.assertEqual( + str(a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b), + "", + ) + + async def test_circular_references_set_depth(self): + with vcr.use_cassette( + "fixtures/integration/circular-references.yaml", + match_on=["method", "path", "query"], + ): + client = AsyncClient( + "rk19fq93y3vw", + "821aa502a7ce820e46adb30fa6942889619aac4342a7021cfe15197c52a593cc", + content_type_cache=True, + max_include_resolution_depth=1, + ) + a = await client.entry("6kdfS7uMs8owuEIoSaOcQk") + await client.teardown() + self.assertEqual(str(a), "") + self.assertEqual(str(a.b), "") + self.assertEqual(str(a.b.a), "") + + async def test_circular_references_with_reusable_entries(self): + with vcr.use_cassette( + "fixtures/integration/circular-references.yaml", + match_on=["method", "path", "query"], + ): + client = AsyncClient( + "rk19fq93y3vw", + "821aa502a7ce820e46adb30fa6942889619aac4342a7021cfe15197c52a593cc", + content_type_cache=True, + reuse_entries=True, + ) + a = await 
client.entry("6kdfS7uMs8owuEIoSaOcQk") + await client.teardown() + self.assertEqual(str(a), "") + self.assertEqual(str(a.b), "") + self.assertEqual( + str(a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a), + "", + ) + self.assertEqual( + str(a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b), + "", + ) + self.assertEqual(a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b._depth, 1) + self.assertEqual(a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a._depth, 0) + self.assertEqual( + str( + a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a.b.a + ), + "", + ) + + async def test_unresolvable_entries_dont_get_included(self): + with vcr.use_cassette( + "fixtures/integration/errors-filtered.yaml", + match_on=["method", "path", "query"], + ): + client = AsyncClient( + "011npgaszg5o", + "42c9d93410a7319e9a735671fc1e415348f65e94a99fc768b70a7c649859d4fd", + ) + entry = await client.entry("1HR1QvURo4MoSqO0eqmUeO") + await client.teardown() + self.assertEqual(len(entry.modules), 2) + + async def test_rich_text_field(self): + with vcr.use_cassette( + "fixtures/fields/rich_text.yaml", match_on=["method", "path", "query"] + ): + client = AsyncClient( + "jd7yc4wnatx3", + "6256b8ef7d66805ca41f2728271daf27e8fa6055873b802a813941a0fe696248", + gzip_encoded=False, + ) + await client.initialize() + + entry = await client.entry("4BupPSmi4M02m0U48AQCSM") + + await client.teardown() + + expected_entry_occurrances = 2 + embedded_entry_index = 1 + for content in entry.body["content"]: + if content["nodeType"] == "embedded-entry-block": + self.assertTrue(isinstance(content["data"]["target"], Entry)) + self.assertEqual( + content["data"]["target"].body, + "Embedded {0}".format(embedded_entry_index), + ) + expected_entry_occurrances -= 1 + embedded_entry_index += 1 + self.assertEqual(expected_entry_occurrances, 0) + + async def test_rich_text_field_with_embeds_in_lists(self): + with vcr.use_cassette( + "fixtures/fields/rich_text_lists_with_embeds.yaml", + match_on=["method", "path", "query"], + ): + client = AsyncClient( + "jd7yc4wnatx3", + "6256b8ef7d66805ca41f2728271daf27e8fa6055873b802a813941a0fe696248", + gzip_encoded=False, + ) + await client.initialize() + + entry = await client.entry("6NGLswCREsGA28kGouScyY") + + await client.teardown() + + # Hyperlink data is conserved + self.assertEqual( + entry.body["content"][0], + { + "data": {}, + "content": [ + { + "marks": [], + "value": "A link to ", + "nodeType": "text", + "nodeClass": "text", + }, + { + "data": {"uri": "https://google.com"}, + "content": [ + { + "marks": [], + "value": "google", + "nodeType": "text", + "nodeClass": "text", + } + ], + "nodeType": "hyperlink", + "nodeClass": "inline", + }, + { + "marks": [], + "value": "", + "nodeType": "text", + "nodeClass": "text", + }, + ], + "nodeType": "paragraph", + "nodeClass": "block", + }, + ) + + # Unordered lists and ordered lists can contain embedded entries + self.assertEqual(entry.body["content"][3]["nodeType"], "unordered-list") + self.assertEqual( + str( + entry.body["content"][3]["content"][2]["content"][0]["data"][ + "target" + ] + ), + "", + ) + + self.assertEqual(entry.body["content"][4]["nodeType"], "ordered-list") + self.assertEqual( + str( + entry.body["content"][4]["content"][2]["content"][0]["data"][ + "target" + ] + ), + "", + ) + + async def test_rich_text_fields_should_not_get_hydrated_twice(self): + with vcr.use_cassette( + "fixtures/integration/issue-41.yaml", match_on=["method", "path", "query"] + ): + client = AsyncClient( + "fds721b88p6b", + 
"45ba81cc69423fcd2e3f0a4779de29481bb5c11495bc7e14649a996cf984e98e", + gzip_encoded=False, + ) + + entry = await client.entry("1tBAu0wP9qAQEg6qCqMics") + + await client.teardown() + + # Not failing is already a success + self.assertEqual(str(entry.children[0]), str(entry.children[1])) + self.assertEqual(str(entry.children[0].body), str(entry.children[1].body)) From 6d05e1946952a4cc0793d9886e5ff36fbde77cff Mon Sep 17 00:00:00 2001 From: Sean Stewart Date: Wed, 28 Feb 2024 14:34:36 -0500 Subject: [PATCH 11/15] Safer gzip detection --- contentful/client/transport/aio.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/contentful/client/transport/aio.py b/contentful/client/transport/aio.py index a9903fd..de47ee8 100644 --- a/contentful/client/transport/aio.py +++ b/contentful/client/transport/aio.py @@ -76,7 +76,10 @@ async def _get( status_code = response.status headers = response.headers # For some reason aiohttp is failing to auto-decompress these. - if headers.get("Content-Encoding") == "gzip": + if headers.get("Content-Encoding") == "gzip" and content.startswith( + # Start bytes for gzip. + b"\x1f\x8b" + ): content = gzip.decompress(content) reason = response.reason From 690bb800a1c959da8a563f862ca83bd3e4fb6a7c Mon Sep 17 00:00:00 2001 From: Sean Stewart Date: Wed, 28 Feb 2024 14:36:43 -0500 Subject: [PATCH 12/15] Safer gzip detection --- contentful/client/transport/aio.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/contentful/client/transport/aio.py b/contentful/client/transport/aio.py index de47ee8..cbbd0bf 100644 --- a/contentful/client/transport/aio.py +++ b/contentful/client/transport/aio.py @@ -76,11 +76,11 @@ async def _get( status_code = response.status headers = response.headers # For some reason aiohttp is failing to auto-decompress these. - if headers.get("Content-Encoding") == "gzip" and content.startswith( - # Start bytes for gzip. - b"\x1f\x8b" - ): - content = gzip.decompress(content) + if headers.get("Content-Encoding") == "gzip": + try: + content = gzip.decompress(content) + except gzip.BadGzipFile: + pass reason = response.reason parsed = abstract.parse_response( From 6c665b8d4944f9f3ad3320daec2faf811ac8b2a5 Mon Sep 17 00:00:00 2001 From: Sean Stewart Date: Wed, 28 Feb 2024 15:58:30 -0500 Subject: [PATCH 13/15] Safer orjson dependency, support pypy3 and get all tests passing in all supported python interpreters. 
--- contentful/client/transport/abstract.py | 13 ++++++++++--- contentful/client/transport/aio.py | 7 ------- contentful/client/transport/compat.py | 7 +++++++ contentful/client/transport/errors.py | 6 +++--- contentful/content_type_field_types.py | 14 ++++---------- requirements.txt | 2 +- tests/test_errors.py | 10 ++++++---- tox.ini | 7 ++----- 8 files changed, 33 insertions(+), 33 deletions(-) create mode 100644 contentful/client/transport/compat.py diff --git a/contentful/client/transport/abstract.py b/contentful/client/transport/abstract.py index 241a96b..3fff2a2 100644 --- a/contentful/client/transport/abstract.py +++ b/contentful/client/transport/abstract.py @@ -1,6 +1,7 @@ from __future__ import annotations import abc +import gzip import http import types from typing import ( @@ -15,9 +16,8 @@ ClassVar, ) -import orjson - from contentful.client.transport import errors, retry +from contentful.client.transport.compat import json """ contentful.client.transport.abstract @@ -204,6 +204,13 @@ def parse_response( raw_mode: bool, ) -> ResponseT | dict[str, Any]: """Parse the received response, raising an error if necessary.""" + # Seeing failures to decode gzipped responses in newer python... + # Just handle it by default here. + if headers.get("Content-Encoding") == "gzip": + try: + content = gzip.decompress(content) + except gzip.BadGzipFile: + pass if status_code >= 400: if reason is None: reason = http.HTTPStatus(status_code).phrase @@ -222,5 +229,5 @@ def parse_response( # Don't bother with .text/.json() since we know this is JSON. # Passing the raw bytes to orjson will be much more efficient. - body = orjson.loads(content) + body = json.loads(content) return body diff --git a/contentful/client/transport/aio.py b/contentful/client/transport/aio.py index cbbd0bf..533f053 100644 --- a/contentful/client/transport/aio.py +++ b/contentful/client/transport/aio.py @@ -75,13 +75,6 @@ async def _get( content = await response.read() status_code = response.status headers = response.headers - # For some reason aiohttp is failing to auto-decompress these. 
- if headers.get("Content-Encoding") == "gzip": - try: - content = gzip.decompress(content) - except gzip.BadGzipFile: - pass - reason = response.reason parsed = abstract.parse_response( status_code=status_code, diff --git a/contentful/client/transport/compat.py b/contentful/client/transport/compat.py new file mode 100644 index 0000000..8d9498a --- /dev/null +++ b/contentful/client/transport/compat.py @@ -0,0 +1,7 @@ +try: + import orjson as json +except (ImportError, ModuleNotFoundError): + try: + import simplejson as json + except (ImportError, ModuleNotFoundError): + import json diff --git a/contentful/client/transport/errors.py b/contentful/client/transport/errors.py index 43a58e4..71f5916 100644 --- a/contentful/client/transport/errors.py +++ b/contentful/client/transport/errors.py @@ -2,7 +2,7 @@ import dataclasses -import orjson +from contentful.client.transport.compat import json """ contentful.client.transport.errors @@ -47,8 +47,8 @@ def get_error_for_status_code( err_cls = _HTTP_STATUS_TO_ERROR_MAP.get(status_code, default) headers = headers or {} try: - body = orjson.loads(content) - except orjson.JSONDecodeError: + body = json.loads(content) + except json.JSONDecodeError: body = {} info = ErrorResponseInfo( status_code=status_code, diff --git a/contentful/content_type_field_types.py b/contentful/content_type_field_types.py index 52bbbbe..907ab6b 100644 --- a/contentful/content_type_field_types.py +++ b/contentful/content_type_field_types.py @@ -1,14 +1,8 @@ -# -*- coding: utf-8 -*- - -try: - import orjson as json -except ImportError: - import json - import dateutil.parser from collections import namedtuple -from .utils import resource_for_link, unresolvable -from .resource import FieldsResource, Link, Resource +from contentful.client.transport.compat import json +from contentful.utils import resource_for_link, unresolvable +from contentful.resource import FieldsResource, Link, Resource """ contentful.content_type_field_types @@ -21,7 +15,7 @@ """ -class BasicField(object): +class BasicField: """Base Coercion Class""" def __init__(self, items=None): diff --git a/requirements.txt b/requirements.txt index 69f87bc..9fb5f07 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ python-dateutil>=2.8,<3 requests>=2.20,<3.0 aiohttp>=3.9,<4.0 -orjson>=3.9,<4.0 +orjson>=3.9,<4.0 ; python_implementation != "PyPy" vcrpy>=6.0,<7.0 coverage>=7,<8 diff --git a/tests/test_errors.py b/tests/test_errors.py index 0a47130..e42ac9b 100644 --- a/tests/test_errors.py +++ b/tests/test_errors.py @@ -1,9 +1,9 @@ from __future__ import annotations -import orjson from unittest import TestCase from contentful import client from contentful.client.transport import errors +from contentful.client.transport.compat import json as mod_json class MockResponse(object): @@ -15,14 +15,16 @@ def __init__( invalid_json: bool = False, ): self.status_code = status_code - self._json = json.encode() if isinstance(json, str) else orjson.dumps(json) + self._json = json.encode() if isinstance(json, str) else mod_json.dumps(json) + if isinstance(self._json, str): + self._json = self._json.encode() self._invalid_json = invalid_json self.headers = headers if headers is not None else {} def json(self): if self._invalid_json: - raise orjson.JSONDecodeError("foo", "foo", 0) - return orjson.loads(self._json) + raise mod_json.JSONDecodeError("foo", "foo", 0) + return mod_json.loads(self._json) @property def content(self) -> bytes: diff --git a/tox.ini b/tox.ini index f9b87fe..83328d1 100644 --- a/tox.ini +++ b/tox.ini 
@@ -1,5 +1,5 @@ [tox] -envlist = {py37,py38,py39,py310,py311,py312,pypy3}-{flakes} +envlist = {py38,py39,py310,py311,py312,pypy3}-{flakes} [testenv:flakes] skipsdist = True @@ -13,10 +13,7 @@ deps = flake8 commands = python -m unittest discover tests deps = - PyYAML - python-dateutil - vcrpy - requests-mock + -r requirements.txt [flake8] ; Usual line length should be kept to 80, From c5ff277e22840d833c1b8dbdae88a0ea3fec3751 Mon Sep 17 00:00:00 2001 From: Sean Stewart Date: Thu, 29 Feb 2024 15:24:15 -0500 Subject: [PATCH 14/15] Migrate to pyproject.toml & poetry --- Makefile | 10 +- contentful/__init__.py | 47 +- poetry.lock | 1523 ++++++++++++++++++++++++++++++++++++++++ pyproject.toml | 61 ++ runtests.py | 4 - setup.py | 106 --- 6 files changed, 1624 insertions(+), 127 deletions(-) create mode 100644 poetry.lock create mode 100644 pyproject.toml delete mode 100644 runtests.py delete mode 100755 setup.py diff --git a/Makefile b/Makefile index 1ac047d..8466fd3 100644 --- a/Makefile +++ b/Makefile @@ -77,9 +77,13 @@ git-docs: docs git commit --amend -C HEAD release: clean git-docs - python setup.py publish + $(eval VERSION := $(shell poetry version -s)) + poetry publish --build --no-interaction + git tag -a $(VERSION) -m "version $(VERSION)" + git push --tags + git push + dist: clean - python setup.py sdist - python setup.py bdist_wheel + poetry build --no-interaction ls -l dist diff --git a/contentful/__init__.py b/contentful/__init__.py index 3f7cd5d..46d884e 100644 --- a/contentful/__init__.py +++ b/contentful/__init__.py @@ -1,16 +1,35 @@ -from .client import Client, AsyncClient # noqa: F401 -from .entry import Entry # noqa: F401 -from .asset import Asset # noqa: F401 -from .space import Space # noqa: F401 -from .locale import Locale # noqa: F401 -from .resource import Link # noqa: F401 -from .content_type import ContentType # noqa: F401 -from .deleted_asset import DeletedAsset # noqa: F401 -from .deleted_entry import DeletedEntry # noqa: F401 -from .content_type_cache import ContentTypeCache # noqa: F401 -from .content_type_field import ContentTypeField # noqa: F401 +# flake8: noqa +from importlib import metadata +from .client import Client, AsyncClient +from .entry import Entry +from .asset import Asset +from .space import Space +from .locale import Locale +from .resource import Link +from .content_type import ContentType +from .deleted_asset import DeletedAsset +from .deleted_entry import DeletedEntry +from .content_type_cache import ContentTypeCache +from .content_type_field import ContentTypeField -__version__ = "2.1.1" -__author__ = "Contentful GmbH" -__email__ = "bhushan.lodha@external.contentful.com" +__all__ = ( + "Client", + "AsyncClient", + "Entry", + "Asset", + "Space", + "Locale", + "Link", + "ContentType", + "DeletedAsset", + "DeletedEntry", + "ContentTypeCache", + "ContentTypeField", +) + +_metadata = metadata.metadata(__package__) + +__version__ = _metadata.get("version") +__author__ = _metadata.get("author") +__email__ = _metadata.get("author-email") diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..7afcd59 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1523 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
+ +[[package]] +name = "aiohttp" +version = "3.9.3" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:939677b61f9d72a4fa2a042a5eee2a99a24001a67c13da113b2e30396567db54"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f5cd333fcf7590a18334c90f8c9147c837a6ec8a178e88d90a9b96ea03194cc"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82e6aa28dd46374f72093eda8bcd142f7771ee1eb9d1e223ff0fa7177a96b4a5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f56455b0c2c7cc3b0c584815264461d07b177f903a04481dfc33e08a89f0c26b"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bca77a198bb6e69795ef2f09a5f4c12758487f83f33d63acde5f0d4919815768"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e083c285857b78ee21a96ba1eb1b5339733c3563f72980728ca2b08b53826ca5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab40e6251c3873d86ea9b30a1ac6d7478c09277b32e14745d0d3c6e76e3c7e29"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df822ee7feaaeffb99c1a9e5e608800bd8eda6e5f18f5cfb0dc7eeb2eaa6bbec"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:acef0899fea7492145d2bbaaaec7b345c87753168589cc7faf0afec9afe9b747"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cd73265a9e5ea618014802ab01babf1940cecb90c9762d8b9e7d2cc1e1969ec6"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a78ed8a53a1221393d9637c01870248a6f4ea5b214a59a92a36f18151739452c"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6b0e029353361f1746bac2e4cc19b32f972ec03f0f943b390c4ab3371840aabf"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7cf5c9458e1e90e3c390c2639f1017a0379a99a94fdfad3a1fd966a2874bba52"}, + {file = "aiohttp-3.9.3-cp310-cp310-win32.whl", hash = "sha256:3e59c23c52765951b69ec45ddbbc9403a8761ee6f57253250c6e1536cacc758b"}, + {file = "aiohttp-3.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:055ce4f74b82551678291473f66dc9fb9048a50d8324278751926ff0ae7715e5"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6b88f9386ff1ad91ace19d2a1c0225896e28815ee09fc6a8932fded8cda97c3d"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c46956ed82961e31557b6857a5ca153c67e5476972e5f7190015018760938da2"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07b837ef0d2f252f96009e9b8435ec1fef68ef8b1461933253d318748ec1acdc"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad46e6f620574b3b4801c68255492e0159d1712271cc99d8bdf35f2043ec266"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ed3e046ea7b14938112ccd53d91c1539af3e6679b222f9469981e3dac7ba1ce"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:039df344b45ae0b34ac885ab5b53940b174530d4dd8a14ed8b0e2155b9dddccb"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7943c414d3a8d9235f5f15c22ace69787c140c80b718dcd57caaade95f7cd93b"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84871a243359bb42c12728f04d181a389718710129b36b6aad0fc4655a7647d4"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5eafe2c065df5401ba06821b9a054d9cb2848867f3c59801b5d07a0be3a380ae"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9d3c9b50f19704552f23b4eaea1fc082fdd82c63429a6506446cbd8737823da3"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:f033d80bc6283092613882dfe40419c6a6a1527e04fc69350e87a9df02bbc283"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2c895a656dd7e061b2fd6bb77d971cc38f2afc277229ce7dd3552de8313a483e"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1f5a71d25cd8106eab05f8704cd9167b6e5187bcdf8f090a66c6d88b634802b4"}, + {file = "aiohttp-3.9.3-cp311-cp311-win32.whl", hash = "sha256:50fca156d718f8ced687a373f9e140c1bb765ca16e3d6f4fe116e3df7c05b2c5"}, + {file = "aiohttp-3.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:5fe9ce6c09668063b8447f85d43b8d1c4e5d3d7e92c63173e6180b2ac5d46dd8"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:38a19bc3b686ad55804ae931012f78f7a534cce165d089a2059f658f6c91fa60"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:770d015888c2a598b377bd2f663adfd947d78c0124cfe7b959e1ef39f5b13869"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee43080e75fc92bf36219926c8e6de497f9b247301bbf88c5c7593d931426679"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52df73f14ed99cee84865b95a3d9e044f226320a87af208f068ecc33e0c35b96"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc9b311743a78043b26ffaeeb9715dc360335e5517832f5a8e339f8a43581e4d"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b955ed993491f1a5da7f92e98d5dad3c1e14dc175f74517c4e610b1f2456fb11"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:504b6981675ace64c28bf4a05a508af5cde526e36492c98916127f5a02354d53"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6fe5571784af92b6bc2fda8d1925cccdf24642d49546d3144948a6a1ed58ca5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ba39e9c8627edc56544c8628cc180d88605df3892beeb2b94c9bc857774848ca"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e5e46b578c0e9db71d04c4b506a2121c0cb371dd89af17a0586ff6769d4c58c1"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:938a9653e1e0c592053f815f7028e41a3062e902095e5a7dc84617c87267ebd5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:c3452ea726c76e92f3b9fae4b34a151981a9ec0a4847a627c43d71a15ac32aa6"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ff30218887e62209942f91ac1be902cc80cddb86bf00fbc6783b7a43b2bea26f"}, + {file = "aiohttp-3.9.3-cp312-cp312-win32.whl", hash = "sha256:38f307b41e0bea3294a9a2a87833191e4bcf89bb0365e83a8be3a58b31fb7f38"}, + {file = "aiohttp-3.9.3-cp312-cp312-win_amd64.whl", hash = 
"sha256:b791a3143681a520c0a17e26ae7465f1b6f99461a28019d1a2f425236e6eedb5"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ed621426d961df79aa3b963ac7af0d40392956ffa9be022024cd16297b30c8c"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7f46acd6a194287b7e41e87957bfe2ad1ad88318d447caf5b090012f2c5bb528"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feeb18a801aacb098220e2c3eea59a512362eb408d4afd0c242044c33ad6d542"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f734e38fd8666f53da904c52a23ce517f1b07722118d750405af7e4123933511"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b40670ec7e2156d8e57f70aec34a7216407848dfe6c693ef131ddf6e76feb672"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdd215b7b7fd4a53994f238d0f46b7ba4ac4c0adb12452beee724ddd0743ae5d"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:017a21b0df49039c8f46ca0971b3a7fdc1f56741ab1240cb90ca408049766168"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e99abf0bba688259a496f966211c49a514e65afa9b3073a1fcee08856e04425b"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:648056db9a9fa565d3fa851880f99f45e3f9a771dd3ff3bb0c048ea83fb28194"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8aacb477dc26797ee089721536a292a664846489c49d3ef9725f992449eda5a8"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:522a11c934ea660ff8953eda090dcd2154d367dec1ae3c540aff9f8a5c109ab4"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5bce0dc147ca85caa5d33debc4f4d65e8e8b5c97c7f9f660f215fa74fc49a321"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b4af9f25b49a7be47c0972139e59ec0e8285c371049df1a63b6ca81fdd216a2"}, + {file = "aiohttp-3.9.3-cp38-cp38-win32.whl", hash = "sha256:298abd678033b8571995650ccee753d9458dfa0377be4dba91e4491da3f2be63"}, + {file = "aiohttp-3.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:69361bfdca5468c0488d7017b9b1e5ce769d40b46a9f4a2eed26b78619e9396c"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0fa43c32d1643f518491d9d3a730f85f5bbaedcbd7fbcae27435bb8b7a061b29"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:835a55b7ca49468aaaac0b217092dfdff370e6c215c9224c52f30daaa735c1c1"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06a9b2c8837d9a94fae16c6223acc14b4dfdff216ab9b7202e07a9a09541168f"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abf151955990d23f84205286938796c55ff11bbfb4ccfada8c9c83ae6b3c89a3"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59c26c95975f26e662ca78fdf543d4eeaef70e533a672b4113dd888bd2423caa"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f95511dd5d0e05fd9728bac4096319f80615aaef4acbecb35a990afebe953b0e"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:595f105710293e76b9dc09f52e0dd896bd064a79346234b521f6b968ffdd8e58"}, + {file = 
"aiohttp-3.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7c8b816c2b5af5c8a436df44ca08258fc1a13b449393a91484225fcb7545533"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f1088fa100bf46e7b398ffd9904f4808a0612e1d966b4aa43baa535d1b6341eb"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f59dfe57bb1ec82ac0698ebfcdb7bcd0e99c255bd637ff613760d5f33e7c81b3"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:361a1026c9dd4aba0109e4040e2aecf9884f5cfe1b1b1bd3d09419c205e2e53d"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:363afe77cfcbe3a36353d8ea133e904b108feea505aa4792dad6585a8192c55a"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e2c45c208c62e955e8256949eb225bd8b66a4c9b6865729a786f2aa79b72e9d"}, + {file = "aiohttp-3.9.3-cp39-cp39-win32.whl", hash = "sha256:f7217af2e14da0856e082e96ff637f14ae45c10a5714b63c77f26d8884cf1051"}, + {file = "aiohttp-3.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:27468897f628c627230dba07ec65dc8d0db566923c48f29e084ce382119802bc"}, + {file = "aiohttp-3.9.3.tar.gz", hash = "sha256:90842933e5d1ff760fae6caca4b2b3edba53ba8f4b71e95dacf2818a2aca06f7"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "brotlicffi"] + +[[package]] +name = "aioresponses" +version = "0.7.6" +description = "Mock out requests made by ClientSession from aiohttp package" +optional = false +python-versions = "*" +files = [ + {file = "aioresponses-0.7.6-py2.py3-none-any.whl", hash = "sha256:d2c26defbb9b440ea2685ec132e90700907fd10bcca3e85ec2f157219f0d26f7"}, + {file = "aioresponses-0.7.6.tar.gz", hash = "sha256:f795d9dbda2d61774840e7e32f5366f45752d1adc1b74c9362afd017296c7ee1"}, +] + +[package.dependencies] +aiohttp = ">=3.3.0,<4.0.0" + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "alabaster" +version = "0.7.13" +description = "A configurable sidebar-enabled Sphinx theme" +optional = false +python-versions = ">=3.6" +files = [ + {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, + {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, +] + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false 
+python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "babel" +version = "2.14.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, + {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, +] + +[package.dependencies] +pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "chardet" +version = "5.2.0" +description = "Universal encoding detector for Python 3" +optional = false +python-versions = ">=3.7" +files = [ + {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, + {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.4.3" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8580b827d4746d47294c0e0b92854c85a92c2227927433998f0d3320ae8a71b6"}, + {file = "coverage-7.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:718187eeb9849fc6cc23e0d9b092bc2348821c5e1a901c9f8975df0bc785bfd4"}, + {file = "coverage-7.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:767b35c3a246bcb55b8044fd3a43b8cd553dd1f9f2c1eeb87a302b1f8daa0524"}, + {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae7f19afe0cce50039e2c782bff379c7e347cba335429678450b8fe81c4ef96d"}, + {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba3a8aaed13770e970b3df46980cb068d1c24af1a1968b7818b69af8c4347efb"}, + {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ee866acc0861caebb4f2ab79f0b94dbfbdbfadc19f82e6e9c93930f74e11d7a0"}, + {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:506edb1dd49e13a2d4cac6a5173317b82a23c9d6e8df63efb4f0380de0fbccbc"}, + {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd6545d97c98a192c5ac995d21c894b581f1fd14cf389be90724d21808b657e2"}, + {file = "coverage-7.4.3-cp310-cp310-win32.whl", hash = "sha256:f6a09b360d67e589236a44f0c39218a8efba2593b6abdccc300a8862cffc2f94"}, + {file = "coverage-7.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:18d90523ce7553dd0b7e23cbb28865db23cddfd683a38fb224115f7826de78d0"}, + {file = "coverage-7.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbbe5e739d45a52f3200a771c6d2c7acf89eb2524890a4a3aa1a7fa0695d2a47"}, + {file = "coverage-7.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:489763b2d037b164846ebac0cbd368b8a4ca56385c4090807ff9fad817de4113"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451f433ad901b3bb00184d83fd83d135fb682d780b38af7944c9faeecb1e0bfe"}, + {file = 
"coverage-7.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcc66e222cf4c719fe7722a403888b1f5e1682d1679bd780e2b26c18bb648cdc"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ec74cfef2d985e145baae90d9b1b32f85e1741b04cd967aaf9cfa84c1334f3"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:abbbd8093c5229c72d4c2926afaee0e6e3140de69d5dcd918b2921f2f0c8baba"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:35eb581efdacf7b7422af677b92170da4ef34500467381e805944a3201df2079"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8249b1c7334be8f8c3abcaaa996e1e4927b0e5a23b65f5bf6cfe3180d8ca7840"}, + {file = "coverage-7.4.3-cp311-cp311-win32.whl", hash = "sha256:cf30900aa1ba595312ae41978b95e256e419d8a823af79ce670835409fc02ad3"}, + {file = "coverage-7.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:18c7320695c949de11a351742ee001849912fd57e62a706d83dfc1581897fa2e"}, + {file = "coverage-7.4.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b51bfc348925e92a9bd9b2e48dad13431b57011fd1038f08316e6bf1df107d10"}, + {file = "coverage-7.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d6cdecaedea1ea9e033d8adf6a0ab11107b49571bbb9737175444cea6eb72328"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b2eccb883368f9e972e216c7b4c7c06cabda925b5f06dde0650281cb7666a30"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c00cdc8fa4e50e1cc1f941a7f2e3e0f26cb2a1233c9696f26963ff58445bac7"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4a8dd3dcf4cbd3165737358e4d7dfbd9d59902ad11e3b15eebb6393b0446e"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:062b0a75d9261e2f9c6d071753f7eef0fc9caf3a2c82d36d76667ba7b6470003"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ebe7c9e67a2d15fa97b77ea6571ce5e1e1f6b0db71d1d5e96f8d2bf134303c1d"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c0a120238dd71c68484f02562f6d446d736adcc6ca0993712289b102705a9a3a"}, + {file = "coverage-7.4.3-cp312-cp312-win32.whl", hash = "sha256:37389611ba54fd6d278fde86eb2c013c8e50232e38f5c68235d09d0a3f8aa352"}, + {file = "coverage-7.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:d25b937a5d9ffa857d41be042b4238dd61db888533b53bc76dc082cb5a15e914"}, + {file = "coverage-7.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:28ca2098939eabab044ad68850aac8f8db6bf0b29bc7f2887d05889b17346454"}, + {file = "coverage-7.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:280459f0a03cecbe8800786cdc23067a8fc64c0bd51dc614008d9c36e1659d7e"}, + {file = "coverage-7.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c0cdedd3500e0511eac1517bf560149764b7d8e65cb800d8bf1c63ebf39edd2"}, + {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9babb9466fe1da12417a4aed923e90124a534736de6201794a3aea9d98484e"}, + {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:dec9de46a33cf2dd87a5254af095a409ea3bf952d85ad339751e7de6d962cde6"}, + {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:16bae383a9cc5abab9bb05c10a3e5a52e0a788325dc9ba8499e821885928968c"}, + {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2c854ce44e1ee31bda4e318af1dbcfc929026d12c5ed030095ad98197eeeaed0"}, + {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ce8c50520f57ec57aa21a63ea4f325c7b657386b3f02ccaedeccf9ebe27686e1"}, + {file = "coverage-7.4.3-cp38-cp38-win32.whl", hash = "sha256:708a3369dcf055c00ddeeaa2b20f0dd1ce664eeabde6623e516c5228b753654f"}, + {file = "coverage-7.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:1bf25fbca0c8d121a3e92a2a0555c7e5bc981aee5c3fdaf4bb7809f410f696b9"}, + {file = "coverage-7.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b253094dbe1b431d3a4ac2f053b6d7ede2664ac559705a704f621742e034f1f"}, + {file = "coverage-7.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77fbfc5720cceac9c200054b9fab50cb2a7d79660609200ab83f5db96162d20c"}, + {file = "coverage-7.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6679060424faa9c11808598504c3ab472de4531c571ab2befa32f4971835788e"}, + {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4af154d617c875b52651dd8dd17a31270c495082f3d55f6128e7629658d63765"}, + {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8640f1fde5e1b8e3439fe482cdc2b0bb6c329f4bb161927c28d2e8879c6029ee"}, + {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:69b9f6f66c0af29642e73a520b6fed25ff9fd69a25975ebe6acb297234eda501"}, + {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0842571634f39016a6c03e9d4aba502be652a6e4455fadb73cd3a3a49173e38f"}, + {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a78ed23b08e8ab524551f52953a8a05d61c3a760781762aac49f8de6eede8c45"}, + {file = "coverage-7.4.3-cp39-cp39-win32.whl", hash = "sha256:c0524de3ff096e15fcbfe8f056fdb4ea0bf497d584454f344d59fce069d3e6e9"}, + {file = "coverage-7.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:0209a6369ccce576b43bb227dc8322d8ef9e323d089c6f3f26a597b09cb4d2aa"}, + {file = "coverage-7.4.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:7cbde573904625509a3f37b6fecea974e363460b556a627c60dc2f47e2fffa51"}, + {file = "coverage-7.4.3.tar.gz", hash = "sha256:276f6077a5c61447a48d133ed13e759c09e62aff0dc84274a68dc18660104d52"}, +] + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "distlib" +version = "0.3.8" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + +[[package]] +name = "docutils" +version = "0.20.1" +description = "Docutils -- Python Documentation Utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, + {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, +] + +[[package]] +name = "filelock" +version = "3.13.1" +description = "A platform independent 
file lock." +optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "flake8" +version = "7.0.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, + {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.2.0,<3.3.0" + +[[package]] +name = "frozenlist" +version = "1.4.1" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = 
"frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +optional = false +python-versions = ">=2.7, 
!=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + +[[package]] +name = "importlib-metadata" +version = "7.0.1" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, + {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "multidict" +version = "6.0.5" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, 
+ {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = 
"multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", 
hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, +] + +[[package]] +name = "orjson" +version = "3.9.15" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.9.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d61f7ce4727a9fa7680cd6f3986b0e2c732639f46a5e0156e550e35258aa313a"}, + {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4feeb41882e8aa17634b589533baafdceb387e01e117b1ec65534ec724023d04"}, + {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fbbeb3c9b2edb5fd044b2a070f127a0ac456ffd079cb82746fc84af01ef021a4"}, + {file = 
"orjson-3.9.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b66bcc5670e8a6b78f0313bcb74774c8291f6f8aeef10fe70e910b8040f3ab75"}, + {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2973474811db7b35c30248d1129c64fd2bdf40d57d84beed2a9a379a6f57d0ab"}, + {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fe41b6f72f52d3da4db524c8653e46243c8c92df826ab5ffaece2dba9cccd58"}, + {file = "orjson-3.9.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4228aace81781cc9d05a3ec3a6d2673a1ad0d8725b4e915f1089803e9efd2b99"}, + {file = "orjson-3.9.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f7b65bfaf69493c73423ce9db66cfe9138b2f9ef62897486417a8fcb0a92bfe"}, + {file = "orjson-3.9.15-cp310-none-win32.whl", hash = "sha256:2d99e3c4c13a7b0fb3792cc04c2829c9db07838fb6973e578b85c1745e7d0ce7"}, + {file = "orjson-3.9.15-cp310-none-win_amd64.whl", hash = "sha256:b725da33e6e58e4a5d27958568484aa766e825e93aa20c26c91168be58e08cbb"}, + {file = "orjson-3.9.15-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c8e8fe01e435005d4421f183038fc70ca85d2c1e490f51fb972db92af6e047c2"}, + {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87f1097acb569dde17f246faa268759a71a2cb8c96dd392cd25c668b104cad2f"}, + {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff0f9913d82e1d1fadbd976424c316fbc4d9c525c81d047bbdd16bd27dd98cfc"}, + {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8055ec598605b0077e29652ccfe9372247474375e0e3f5775c91d9434e12d6b1"}, + {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6768a327ea1ba44c9114dba5fdda4a214bdb70129065cd0807eb5f010bfcbb5"}, + {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12365576039b1a5a47df01aadb353b68223da413e2e7f98c02403061aad34bde"}, + {file = "orjson-3.9.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:71c6b009d431b3839d7c14c3af86788b3cfac41e969e3e1c22f8a6ea13139404"}, + {file = "orjson-3.9.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e18668f1bd39e69b7fed19fa7cd1cd110a121ec25439328b5c89934e6d30d357"}, + {file = "orjson-3.9.15-cp311-none-win32.whl", hash = "sha256:62482873e0289cf7313461009bf62ac8b2e54bc6f00c6fabcde785709231a5d7"}, + {file = "orjson-3.9.15-cp311-none-win_amd64.whl", hash = "sha256:b3d336ed75d17c7b1af233a6561cf421dee41d9204aa3cfcc6c9c65cd5bb69a8"}, + {file = "orjson-3.9.15-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:82425dd5c7bd3adfe4e94c78e27e2fa02971750c2b7ffba648b0f5d5cc016a73"}, + {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c51378d4a8255b2e7c1e5cc430644f0939539deddfa77f6fac7b56a9784160a"}, + {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6ae4e06be04dc00618247c4ae3f7c3e561d5bc19ab6941427f6d3722a0875ef7"}, + {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcef128f970bb63ecf9a65f7beafd9b55e3aaf0efc271a4154050fc15cdb386e"}, + {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b72758f3ffc36ca566ba98a8e7f4f373b6c17c646ff8ad9b21ad10c29186f00d"}, + {file = 
"orjson-3.9.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c57bc7b946cf2efa67ac55766e41764b66d40cbd9489041e637c1304400494"}, + {file = "orjson-3.9.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:946c3a1ef25338e78107fba746f299f926db408d34553b4754e90a7de1d44068"}, + {file = "orjson-3.9.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2f256d03957075fcb5923410058982aea85455d035607486ccb847f095442bda"}, + {file = "orjson-3.9.15-cp312-none-win_amd64.whl", hash = "sha256:5bb399e1b49db120653a31463b4a7b27cf2fbfe60469546baf681d1b39f4edf2"}, + {file = "orjson-3.9.15-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b17f0f14a9c0ba55ff6279a922d1932e24b13fc218a3e968ecdbf791b3682b25"}, + {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f6cbd8e6e446fb7e4ed5bac4661a29e43f38aeecbf60c4b900b825a353276a1"}, + {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:76bc6356d07c1d9f4b782813094d0caf1703b729d876ab6a676f3aaa9a47e37c"}, + {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fdfa97090e2d6f73dced247a2f2d8004ac6449df6568f30e7fa1a045767c69a6"}, + {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7413070a3e927e4207d00bd65f42d1b780fb0d32d7b1d951f6dc6ade318e1b5a"}, + {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cf1596680ac1f01839dba32d496136bdd5d8ffb858c280fa82bbfeb173bdd40"}, + {file = "orjson-3.9.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:809d653c155e2cc4fd39ad69c08fdff7f4016c355ae4b88905219d3579e31eb7"}, + {file = "orjson-3.9.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:920fa5a0c5175ab14b9c78f6f820b75804fb4984423ee4c4f1e6d748f8b22bc1"}, + {file = "orjson-3.9.15-cp38-none-win32.whl", hash = "sha256:2b5c0f532905e60cf22a511120e3719b85d9c25d0e1c2a8abb20c4dede3b05a5"}, + {file = "orjson-3.9.15-cp38-none-win_amd64.whl", hash = "sha256:67384f588f7f8daf040114337d34a5188346e3fae6c38b6a19a2fe8c663a2f9b"}, + {file = "orjson-3.9.15-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6fc2fe4647927070df3d93f561d7e588a38865ea0040027662e3e541d592811e"}, + {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34cbcd216e7af5270f2ffa63a963346845eb71e174ea530867b7443892d77180"}, + {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f541587f5c558abd93cb0de491ce99a9ef8d1ae29dd6ab4dbb5a13281ae04cbd"}, + {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92255879280ef9c3c0bcb327c5a1b8ed694c290d61a6a532458264f887f052cb"}, + {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05a1f57fb601c426635fcae9ddbe90dfc1ed42245eb4c75e4960440cac667262"}, + {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ede0bde16cc6e9b96633df1631fbcd66491d1063667f260a4f2386a098393790"}, + {file = "orjson-3.9.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e88b97ef13910e5f87bcbc4dd7979a7de9ba8702b54d3204ac587e83639c0c2b"}, + {file = "orjson-3.9.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57d5d8cf9c27f7ef6bc56a5925c7fbc76b61288ab674eb352c26ac780caa5b10"}, + {file = "orjson-3.9.15-cp39-none-win32.whl", hash = 
"sha256:001f4eb0ecd8e9ebd295722d0cbedf0748680fb9998d3993abaed2f40587257a"}, + {file = "orjson-3.9.15-cp39-none-win_amd64.whl", hash = "sha256:ea0b183a5fe6b2b45f3b854b0d19c4e932d6f5934ae1f723b07cf9560edd4ec7"}, + {file = "orjson-3.9.15.tar.gz", hash = "sha256:95cae920959d772f30ab36d3b25f83bb0f3be671e986c72ce22f8fa700dae061"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pycodestyle" +version = "2.11.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, +] + +[[package]] +name = "pyflakes" +version = "3.2.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, +] + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pyproject-api" +version = "1.6.1" +description = "API to interact with the python pyproject.toml based projects" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyproject_api-1.6.1-py3-none-any.whl", hash = "sha256:4c0116d60476b0786c88692cf4e325a9814965e2469c5998b830bba16b183675"}, + {file = "pyproject_api-1.6.1.tar.gz", hash = "sha256:1817dc018adc0d1ff9ca1ed8c60e1623d5aaca40814b953af14a9cf9a5cae538"}, +] + +[package.dependencies] +packaging = ">=23.1" +tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +docs = ["furo (>=2023.8.19)", "sphinx (<7.2)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "setuptools (>=68.1.2)", "wheel (>=0.41.2)"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = 
"PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-mock" +version = "1.11.0" +description = "Mock out responses from the requests package" +optional = false +python-versions = "*" +files = [ + {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, + {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, +] + +[package.dependencies] +requests = ">=2.3,<3" +six = "*" + +[package.extras] +fixture = ["fixtures"] +test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "sphinx" +version = "7.1.2" +description = "Python documentation generator" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinx-7.1.2-py3-none-any.whl", hash = "sha256:d170a81825b2fcacb6dfd5a0d7f578a053e45d3f2b153fecc948c37344eb4cbe"}, + {file = "sphinx-7.1.2.tar.gz", hash = "sha256:780f4d32f1d7d1126576e0e5ecc19dc32ab76cd24e950228dcf7b1f6d3d9e22f"}, +] + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=2.9" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.18.1,<0.21" +imagesize = ">=1.3" +importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.13" +requests = ">=2.25.0" +snowballstemmer = ">=2.0" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-simplify", "isort", "mypy (>=0.990)", "ruff", "sphinx-lint", "types-requests"] +test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.4" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, + {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.1" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, + {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.5" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-websupport" +version = "1.2.4" +description = "Sphinx API for Web Apps" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-websupport-1.2.4.tar.gz", hash = "sha256:4edf0223a0685a7c485ae5a156b6f529ba1ee481a1417817935b20bde1956232"}, + {file = "sphinxcontrib_websupport-1.2.4-py2.py3-none-any.whl", hash = "sha256:6fc9287dfc823fe9aa432463edd6cea47fa9ebbf488d7f289b322ffcfca075c7"}, +] + +[package.dependencies] +sphinxcontrib-serializinghtml = "*" + +[package.extras] +lint = ["flake8"] +test = ["Sphinx", "pytest", "sqlalchemy", "whoosh"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tox" +version = "4.13.0" +description = "tox is a generic virtualenv management and test command line tool" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tox-4.13.0-py3-none-any.whl", hash = "sha256:1143c7e2489c68026a55d3d4ae84c02c449f073b28e62f80e3e440a3b72a4afa"}, + {file = "tox-4.13.0.tar.gz", hash = "sha256:dd789a554c16c4b532924ba393c92fc8991323c4b3d466712bfecc8c9b9f24f7"}, +] + +[package.dependencies] +cachetools = ">=5.3.2" +chardet = ">=5.2" +colorama = ">=0.4.6" +filelock = ">=3.13.1" +packaging = ">=23.2" +platformdirs = ">=4.1" +pluggy = ">=1.3" +pyproject-api = ">=1.6.1" +tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} +virtualenv = ">=20.25" + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-argparse-cli (>=1.11.1)", "sphinx-autodoc-typehints (>=1.25.2)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.11)"] +testing = ["build[virtualenv] (>=1.0.3)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=8.0.2)", "distlib (>=0.3.8)", "flaky (>=3.7)", "hatch-vcs (>=0.4)", "hatchling (>=1.21)", "psutil (>=5.9.7)", "pytest (>=7.4.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-xdist (>=3.5)", "re-assert (>=1.1)", "time-machine (>=2.13)", "wheel (>=0.42)"] + +[[package]] +name = "urllib3" +version = "1.26.18" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, + {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, +] + +[package.extras] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "vcrpy" +version = "6.0.1" +description = "Automatically mock your HTTP interactions to simplify and speed up testing" +optional = false +python-versions = ">=3.8" +files = [ + {file = "vcrpy-6.0.1.tar.gz", hash = "sha256:9e023fee7f892baa0bbda2f7da7c8ac51165c1c6e38ff8688683a12a4bde9278"}, +] + +[package.dependencies] +PyYAML = "*" +urllib3 = {version = "<2", markers = "platform_python_implementation == \"PyPy\" or python_version < \"3.10\""} +wrapt = "*" +yarl = "*" + +[package.extras] +tests = ["Werkzeug (==2.0.3)", "aiohttp", "boto3", "httplib2", "httpx", "pytest", "pytest-aiohttp", "pytest-asyncio", "pytest-cov", "pytest-httpbin", "requests (>=2.22.0)", "tornado", "urllib3"] + +[[package]] +name = "virtualenv" +version = "20.25.1" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.25.1-py3-none-any.whl", hash = "sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a"}, + {file = "virtualenv-20.25.1.tar.gz", hash = "sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = 
"wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = 
"wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", 
hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "yarl" +version = "1.9.4" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, + {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, + {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, + {file = 
"yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, + {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, + {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, + {file = "yarl-1.9.4-cp312-cp312-win32.whl", 
hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, + {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, + {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, + {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, + {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, + {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, + {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, + {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, + {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, + {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, + {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[[package]] +name = "zipp" +version = "3.17.0" +description = "Backport of pathlib-compatible object 
wrapper for zip files"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"},
+    {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
+
+[metadata]
+lock-version = "2.0"
+python-versions = ">3.8.1,<4.0"
+content-hash = "06598bbc0cd222b4e6b2e6a8a7950af466c05591ee62c0c3b5f7027b04e7c72b"
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..4b8fa59
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,61 @@
+[tool.poetry]
+name = "contentful"
+version = "2.1.1"
+description = "Contentful SDK for the Content Delivery API"
+authors = ["Contentful GmbH "]
+license = "MIT"
+readme = "README.rst"
+repository = "https://github.com/contentful/contentful.py"
+documentation = "https://contentful.github.io/contentful.py/"
+packages = [{ include = "contentful" }]
+include = ["CHANGELOG.md"]
+keywords = [
+    "contentful",
+    "delivery",
+    "cda",
+    "cms",
+    "content",
+]
+classifiers = [
+    'Development Status :: 5 - Production/Stable',
+    'Topic :: Software Development :: Libraries',
+    'Intended Audience :: Developers',
+    'License :: OSI Approved :: MIT License',
+    'Natural Language :: English',
+    'Programming Language :: Python :: 2',
+    'Programming Language :: Python :: 2.7',
+    'Programming Language :: Python :: 3',
+    'Programming Language :: Python :: 3.1',
+    'Programming Language :: Python :: 3.2',
+    'Programming Language :: Python :: 3.3',
+    'Programming Language :: Python :: 3.4',
+    'Programming Language :: Python :: 3.5',
+    'Programming Language :: Python :: 3.6',
+    'Programming Language :: Python :: 3.7',
+    'Programming Language :: Python :: 3.8',
+    'Programming Language :: Python :: 3.9',
+]
+
+[tool.poetry.dependencies]
+python = ">3.8.1,<4.0"
+python-dateutil = "^2.8"
+requests = "^2.20"
+aiohttp = "^3.9"
+orjson = { version = "^3.9", markers = "platform_python_implementation != 'PyPy'" }
+
+[tool.poetry.group.test.dependencies]
+vcrpy = "^6"
+coverage = "^7"
+flake8 = "^7"
+tox = "^4"
+requests-mock = "^1.5"
+aioresponses = "^0.7"
+
+[tool.poetry.group.docs.dependencies]
+Sphinx = "^7"
+sphinxcontrib-websupport = "^1.2"
+
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
diff --git a/runtests.py b/runtests.py
deleted file mode 100644
index d0d9c78..0000000
--- a/runtests.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from unittest import main
-from tests import *
-
-main()
diff --git a/setup.py b/setup.py
deleted file mode 100755
index 6380593..0000000
--- a/setup.py
+++ /dev/null
@@ -1,106 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-
-try:
-    from setuptools import setup
-except ImportError:
-    from distutils.core import setup
-
-import os
-import re
-import sys
-
-readme = 'Contentful Delivery API Python SDK.\nRead full docs at: https://contentful.github.io/contentful.py/'
-
-
-package = 'contentful'
-requirements = [
-    'requests>=2.20.0,<3.0',
-    'python-dateutil'
-]
-test_requirements = [
-    'vcrpy'
-]
-
-
-def get_version(package):
-    """
-    Return package version as listed in `__version__` in `init.py`.
-    """
-    init_py = open(os.path.join(package, '__init__.py')).read()
-    return re.search("^__version__ = ['\"]([^'\"]+)['\"]",
-                     init_py, re.MULTILINE).group(1)
-
-
-def get_author(package):
-    """
-    Return package author as listed in `__author__` in `init.py`.
-    """
-    init_py = open(os.path.join(package, '__init__.py')).read()
-    return re.search("^__author__ = ['\"]([^'\"]+)['\"]",
-                     init_py, re.MULTILINE).group(1)
-
-
-def get_email(package):
-    """
-    Return package email as listed in `__email__` in `init.py`.
-    """
-    init_py = open(os.path.join(package, '__init__.py')).read()
-    return re.search("^__email__ = ['\"]([^'\"]+)['\"]",
-                     init_py, re.MULTILINE).group(1)
-
-
-# python setup.py publish
-if sys.argv[-1] == 'publish':
-    os.system("python3 -m pip install --upgrade build")
-    os.system("python3 -m build")
-    os.system("python3 -m pip install --upgrade twine")
-    os.system("python3 -m twine upload dist/*")
-    args = {'version': get_version(package)}
-    print("Pushing tags to GitHub:")
-    os.system("git tag -a %(version)s -m 'version %(version)s'" % args)
-    os.system("git push --tags")
-    os.system("git push")
-    sys.exit()
-
-
-setup(
-    name='contentful',
-    version=get_version(package),
-    description='Contentful Delivery API Client',
-    long_description=readme,
-    author=get_author(package),
-    author_email=get_email(package),
-    url='https://github.com/contentful/contentful.py',
-    packages=[
-        'contentful',
-    ],
-    package_dir={'contentful': 'contentful'},
-    include_package_data=True,
-    install_requires=requirements,
-    license="MIT",
-    zip_safe=False,
-    keywords='contentful delivery cda cms content',
-    classifiers=[
-        'Development Status :: 5 - Production/Stable',
-        'Topic :: Software Development :: Libraries',
-        'Intended Audience :: Developers',
-        'License :: OSI Approved :: MIT License',
-        'Natural Language :: English',
-        'Programming Language :: Python :: 2',
-        'Programming Language :: Python :: 2.7',
-        'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.1',
-        'Programming Language :: Python :: 3.2',
-        'Programming Language :: Python :: 3.3',
-        'Programming Language :: Python :: 3.4',
-        'Programming Language :: Python :: 3.5',
-        'Programming Language :: Python :: 3.6',
-        'Programming Language :: Python :: 3.7',
-        'Programming Language :: Python :: 3.8',
-        'Programming Language :: Python :: 3.9',
-    ],
-    test_suite='tests',
-    tests_require=test_requirements
-)

From 371e3816b65096cbda3d9f7fc7b657faa70f3f18 Mon Sep 17 00:00:00 2001
From: Sean Stewart
Date: Thu, 29 Feb 2024 15:25:55 -0500
Subject: [PATCH 15/15] Fix python version specifier

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 4b8fa59..7e038ff 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -37,7 +37,7 @@ classifiers = [
 ]
 
 [tool.poetry.dependencies]
-python = ">3.8.1,<4.0"
+python = ">=3.8.1,<4.0"
 python-dateutil = "^2.8"
 requests = "^2.20"
 aiohttp = "^3.9"
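
The only functional change in this final commit is the lower bound of the Python constraint: ">3.8.1" excludes Python 3.8.1 itself, while ">=3.8.1" accepts it. A minimal sketch of the practical difference, using the third-party packaging library purely for illustration (it is not a dependency added or required by this patch):

    # Illustration only: assumes the third-party "packaging" library is
    # installed; this patch does not add it.
    from packaging.specifiers import SpecifierSet

    old = SpecifierSet(">3.8.1,<4.0")    # constraint before this commit
    new = SpecifierSet(">=3.8.1,<4.0")   # constraint after this commit

    print("3.8.1" in old)  # False: the exclusive bound rejects 3.8.1 itself
    print("3.8.1" in new)  # True: the inclusive bound accepts 3.8.1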