From 9dbd840c703ecad7338726a0abdfffb56de3e530 Mon Sep 17 00:00:00 2001 From: Marius Killinger <155577904+marius-baseten@users.noreply.github.com> Date: Thu, 12 Sep 2024 09:40:08 -0700 Subject: [PATCH 1/8] Make chains draft by default inc CLI (like truss) (#1137) --- truss/cli/cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/truss/cli/cli.py b/truss/cli/cli.py index 7e6a2f0cd..169deae58 100644 --- a/truss/cli/cli.py +++ b/truss/cli/cli.py @@ -470,7 +470,7 @@ def _create_chains_table(service) -> Tuple[rich.table.Table, List[str]]: @click.option( "--publish/--no-publish", type=bool, - default=True, + default=False, help="Create chainlets as published deployments.", ) @click.option( From 3e4ebf6b5932dada77b8a78b6904d85ff2f548ed Mon Sep 17 00:00:00 2001 From: Marius Killinger <155577904+marius-baseten@users.noreply.github.com> Date: Fri, 13 Sep 2024 10:05:32 -0700 Subject: [PATCH 2/8] Update deploy -> push and hide internal URL (#1142) --- truss-chains/examples/audio-transcription/README.md | 8 ++++---- truss-chains/examples/rag/README.md | 4 ++-- truss-chains/truss_chains/remote.py | 5 ++--- 3 files changed, 8 insertions(+), 9 deletions(-) diff --git a/truss-chains/examples/audio-transcription/README.md b/truss-chains/examples/audio-transcription/README.md index 060be2c79..af6184cc0 100644 --- a/truss-chains/examples/audio-transcription/README.md +++ b/truss-chains/examples/audio-transcription/README.md @@ -14,15 +14,15 @@ whisper transcription model (which has slower deployment times), they are in the current setup deployed separately: ```bash -truss chains deploy whisper_chainlet.py +truss chains push whisper_chainlet.py ``` -Insert the predict URL for the Whisper Chainlet (printed by above deploy +Insert the predict URL for the Whisper Chainlet (printed by above push command or can be found on the status page) as a value for -`WHISPER_PREDICT_URL` in `transcribe.py`. The deploy the transcribe chain. +`WHISPER_PREDICT_URL` in `transcribe.py`. Then push the transcribe chain. ```bash -truss chains deploy transcribe.py +truss chains push transcribe.py ``` An example local invocation of the chain is given in the main-section of diff --git a/truss-chains/examples/rag/README.md b/truss-chains/examples/rag/README.md index fb5dc8086..795d06474 100644 --- a/truss-chains/examples/rag/README.md +++ b/truss-chains/examples/rag/README.md @@ -47,7 +47,7 @@ python rag_chain.py Deploy the Chain to production: ```sh -truss chains deploy rag_chain.py +truss chains push rag_chain.py ``` Note that this command will print you with an example cURL command how to @@ -57,7 +57,7 @@ For example a chain invocation might look like this (you need to update the URL): ```sh -curl -X POST 'https://model-5wo86nn3.api.baseten.co/development/predict' \ +curl -X POST 'https://chain-.api.baseten.co/development/run_remote' \ -H "Authorization: Api-Key $BASETEN_API_KEY" \ -d '{"new_bio": "Sam just moved to Manhattan for his new job at a large bank.In college, he enjoyed building sets for student plays."}' ``` diff --git a/truss-chains/truss_chains/remote.py b/truss-chains/truss_chains/remote.py index 9032a84c5..383e041c4 100644 --- a/truss-chains/truss_chains/remote.py +++ b/truss-chains/truss_chains/remote.py @@ -132,9 +132,8 @@ def _push_service( else: raise NotImplementedError(options) - logging.info( - f"Pushed `{chainlet_descriptor.display_name}` @ {service.predict_url}." 
- ) + logging.info(f"Pushed `{chainlet_descriptor.display_name}`") + logging.debug(f"Internal model endpoint: `{service.predict_url}`") return service From 18d33958564c04267f0e3d23b589091a40baf688 Mon Sep 17 00:00:00 2001 From: Marius Killinger <155577904+marius-baseten@users.noreply.github.com> Date: Mon, 16 Sep 2024 16:11:46 -0700 Subject: [PATCH 3/8] Move truss server out of common (#1146) --- truss/templates/server/inference_server.py | 2 +- .../server/{common => }/truss_server.py | 0 .../templates/core/server/common/test_util.py | 19 ------------------- .../common => server}/test_truss_server.py | 2 +- 4 files changed, 2 insertions(+), 21 deletions(-) rename truss/templates/server/{common => }/truss_server.py (100%) delete mode 100644 truss/tests/templates/core/server/common/test_util.py rename truss/tests/templates/{core/server/common => server}/test_truss_server.py (96%) diff --git a/truss/templates/server/inference_server.py b/truss/templates/server/inference_server.py index 9c9162260..5b9ea2e44 100644 --- a/truss/templates/server/inference_server.py +++ b/truss/templates/server/inference_server.py @@ -2,8 +2,8 @@ from typing import Dict import yaml -from common.truss_server import TrussServer # noqa: E402 from shared.logging import setup_logging +from truss_server import TrussServer # noqa: E402 CONFIG_FILE = "config.yaml" diff --git a/truss/templates/server/common/truss_server.py b/truss/templates/server/truss_server.py similarity index 100% rename from truss/templates/server/common/truss_server.py rename to truss/templates/server/truss_server.py diff --git a/truss/tests/templates/core/server/common/test_util.py b/truss/tests/templates/core/server/common/test_util.py deleted file mode 100644 index 606d46bf1..000000000 --- a/truss/tests/templates/core/server/common/test_util.py +++ /dev/null @@ -1,19 +0,0 @@ -# This file doesn't test anything, but provides utilities for testing. 
-from unittest import mock - - -def model_supports_predict_proba(): - mock_not_predict_proba = mock.Mock(name="mock_not_predict_proba") - mock_not_predict_proba.predict_proba.return_value = False - - mock_check_proba = mock.Mock(name="mock_check_proba") - mock_check_proba.predict_proba.return_value = True - mock_check_proba._check_proba.return_value = True - - mock_not_check_proba = mock.Mock(name="mock_not_check_proba") - mock_not_check_proba.predict_proba.return_value = True - mock_not_check_proba._check_proba.side_effect = AttributeError - - assert not model_supports_predict_proba(mock_not_predict_proba) - assert model_supports_predict_proba(mock_check_proba) - assert not model_supports_predict_proba(mock_not_check_proba) diff --git a/truss/tests/templates/core/server/common/test_truss_server.py b/truss/tests/templates/server/test_truss_server.py similarity index 96% rename from truss/tests/templates/core/server/common/test_truss_server.py rename to truss/tests/templates/server/test_truss_server.py index 2f746f405..6e0c2d146 100644 --- a/truss/tests/templates/core/server/common/test_truss_server.py +++ b/truss/tests/templates/server/test_truss_server.py @@ -20,7 +20,7 @@ def start_truss_server(stdout_capture_file_path): app_path = truss_container_fs / "app" sys.path.append(str(app_path)) - from common.truss_server import TrussServer + from truss_server import TrussServer config = yaml.safe_load((app_path / "config.yaml").read_text()) server = TrussServer(http_port=port, config=config) From a121e69bdb4a5519a19a78aa2bc16c6672c82ec1 Mon Sep 17 00:00:00 2001 From: dsingal0 Date: Tue, 17 Sep 2024 09:53:03 -0700 Subject: [PATCH 4/8] add validation for trt-llm HF repo (#1147) * add validation for trt-llm HF repo * use constant, and update validationerror message --- truss/config/trt_llm.py | 12 ++++++++++++ truss/tests/test_config.py | 18 ++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/truss/config/trt_llm.py b/truss/config/trt_llm.py index b02f9e9ce..04b149809 100644 --- a/truss/config/trt_llm.py +++ b/truss/config/trt_llm.py @@ -3,6 +3,8 @@ from enum import Enum from typing import Optional +from huggingface_hub.errors import HFValidationError +from huggingface_hub.utils import validate_repo_id from pydantic import BaseModel, validator from rich.console import Console @@ -97,6 +99,8 @@ def __init__(self, **data): super().__init__(**data) self._validate_minimum_required_configuration() self._validate_kv_cache_flags() + if self.build.checkpoint_repository.source == CheckpointSource.HF: + self._validate_hf_repo_id() # In pydantic v2 this would be `@model_validator(mode="after")` and # the __init__ override can be removed. 
@@ -131,6 +135,14 @@ def _validate_kv_cache_flags(self):
             raise ValueError("Using fp8 context fmha requires paged context fmha")
         return self
 
+    def _validate_hf_repo_id(self):
+        try:
+            validate_repo_id(self.build.checkpoint_repository.repo)
+        except HFValidationError as e:
+            raise ValueError(
+                f"HuggingFace repository validation failed: {str(e)}"
+            ) from e
+
     @property
     def requires_build(self):
         if self.build is not None:
diff --git a/truss/tests/test_config.py b/truss/tests/test_config.py
index 69cf80f45..ffc4d6ae0 100644
--- a/truss/tests/test_config.py
+++ b/truss/tests/test_config.py
@@ -468,6 +468,24 @@ def test_plugin_paged_context_fmha_check(trtllm_config):
         TrussConfig.from_dict(trtllm_config)
 
 
+@pytest.mark.parametrize(
+    "repo",
+    [
+        "./llama-3.1-8b",
+        "../my-model-is-in-parent-directory",
+        "~/.huggingface/my--model--cache/model",
+        "foo.git",
+        "datasets/foo/bar",
+        ".repo_id", "other..repo..id",
+    ],
+)
+def test_invalid_hf_repo(trtllm_config, repo):
+    trtllm_config["trt_llm"]["build"]["checkpoint_repository"]["source"] = "HF"
+    trtllm_config["trt_llm"]["build"]["checkpoint_repository"]["repo"] = repo
+    with pytest.raises(ValueError):
+        TrussConfig.from_dict(trtllm_config)
+
+
 def test_plugin_paged_fp8_context_fmha_check(trtllm_config):
     trtllm_config["trt_llm"]["build"]["plugin_configuration"] = {
         "paged_kv_cache": False,

From b6f895909ac0cd5805ca130fde51e24b23261067 Mon Sep 17 00:00:00 2001
From: Marius Killinger <155577904+marius-baseten@users.noreply.github.com>
Date: Thu, 19 Sep 2024 14:28:38 -0700
Subject: [PATCH 5/8] TrussServer supports request/response (#1148)

---
 .github/workflows/pr.yml | 2 +-
 .../base_images/base_image.Dockerfile.jinja | 6 +-
 poetry.lock | 858 +++++++++---------
 pyproject.toml | 2 +-
 truss/__init__.py | 6 +
 truss/config/trt_llm.py | 6 +-
 truss/remote/baseten/service.py | 4 +
 truss/remote/remote_factory.py | 7 +-
 truss/templates/base.Dockerfile.jinja | 8 +-
 truss/templates/cache.Dockerfile.jinja | 6 +-
 truss/templates/control/control/server.py | 2 +-
 truss/templates/server.Dockerfile.jinja | 14 +-
 truss/templates/server/common/errors.py | 117 +--
 truss/templates/server/common/tracing.py | 2 +-
 truss/templates/server/inference_server.py | 29 -
 truss/templates/server/main.py | 11 +
 truss/templates/server/model_wrapper.py | 534 +++++++----
 truss/templates/server/truss_server.py | 153 ++--
 truss/templates/shared/serialization.py | 61 +-
 truss/templates/shared/util.py | 11 +-
 truss/test_data/server.Dockerfile | 14 +-
 .../model/model.py | 4 +-
 .../test_data/test_async_truss/model/model.py | 6 +-
 .../model/model.py | 11 -
 .../model/model.py | 14 +-
 .../packages/helpers_1.py | 5 +
 .../packages/helpers_2.py | 2 +
 .../test_truss_with_error/config.yaml | 4 +
 .../test_truss_with_error/model/__init__.py | 0
 .../test_truss_with_error/model/model.py | 8 +
 .../packages/helpers_1.py | 5 +
 .../packages/helpers_2.py | 2 +
 .../templates/control/control/test_server.py | 2 +-
 .../templates/server/test_model_wrapper.py | 54 +-
 .../templates/server/test_truss_server.py | 4 +-
 truss/tests/test_model_inference.py | 576 ++++++++----
 truss/util/data_structures.py | 7 +-
 37 files changed, 1511 insertions(+), 1046 deletions(-)
 delete mode 100644 truss/templates/server/inference_server.py
 create mode 100644 truss/templates/server/main.py
 create mode 100644 truss/test_data/test_streaming_truss_with_error/packages/helpers_1.py
 create mode 100644 truss/test_data/test_streaming_truss_with_error/packages/helpers_2.py
 create mode 100644 truss/test_data/test_truss_with_error/config.yaml
create mode 100644 truss/test_data/test_truss_with_error/model/__init__.py create mode 100644 truss/test_data/test_truss_with_error/model/model.py create mode 100644 truss/test_data/test_truss_with_error/packages/helpers_1.py create mode 100644 truss/test_data/test_truss_with_error/packages/helpers_2.py diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml index d928deb93..4c6036f2c 100644 --- a/.github/workflows/pr.yml +++ b/.github/workflows/pr.yml @@ -33,7 +33,7 @@ jobs: - uses: ./.github/actions/setup-python/ - run: poetry install - name: run tests - run: poetry run pytest -v --cov=truss -m 'not integration' --junitxml=report.xml + run: poetry run pytest --durations=0 -m 'not integration' --junitxml=report.xml - name: Publish Test Report # Not sure how to display this in the UI for non PRs. uses: mikepenz/action-junit-report@v4 if: always() diff --git a/docker/base_images/base_image.Dockerfile.jinja b/docker/base_images/base_image.Dockerfile.jinja index efdb76301..17792944b 100644 --- a/docker/base_images/base_image.Dockerfile.jinja +++ b/docker/base_images/base_image.Dockerfile.jinja @@ -2,7 +2,7 @@ FROM nvidia/cuda:12.2.2-base-ubuntu20.04 ENV CUDNN_VERSION=8.9.5.29 ENV CUDA=12.2 -ENV LD_LIBRARY_PATH /usr/local/cuda/extras/CUPTI/lib64:$LD_LIBRARY_PATH +ENV LD_LIBRARY_PATH=/usr/local/cuda/extras/CUPTI/lib64:$LD_LIBRARY_PATH RUN apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64/3bf863cc.pub && \ apt-get update && apt-get install -y --no-install-recommends \ @@ -21,7 +21,7 @@ RUN apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/ rm -rf /var/lib/apt/lists/* # Allow statements and log messages to immediately appear in the Knative logs -ENV PYTHONUNBUFFERED True +ENV PYTHONUNBUFFERED=True ENV DEBIAN_FRONTEND=noninteractive RUN apt update && \ @@ -49,7 +49,7 @@ FROM python:{{python_version}} RUN apt update && apt install -y # Allow statements and log messages to immediately appear in the Knative logs -ENV PYTHONUNBUFFERED True +ENV PYTHONUNBUFFERED=True {% endif %} diff --git a/poetry.lock b/poetry.lock index b46ac1bde..eacadddc8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -16,13 +16,13 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} [[package]] name = "anyio" -version = "4.4.0" +version = "4.5.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" files = [ - {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, - {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, + {file = "anyio-4.5.0-py3-none-any.whl", hash = "sha256:fdeb095b7cc5a5563175eedd926ec4ae55413bb4be5770c424af0ba46ccb4a78"}, + {file = "anyio-4.5.0.tar.gz", hash = "sha256:c5a275fe5ca0afd788001f58fca1e69e29ce706d746e317d660e21f70c530ef9"}, ] [package.dependencies] @@ -32,9 +32,9 @@ sniffio = ">=1.1" typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", 
"sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] [[package]] name = "appnope" @@ -168,17 +168,17 @@ files = [ [[package]] name = "boto3" -version = "1.35.5" +version = "1.35.22" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.35.5-py3-none-any.whl", hash = "sha256:2cef3aa476181395c260f4b6e6c5565e5a3022a874fb6b579d8e6b169f94e0b3"}, - {file = "boto3-1.35.5.tar.gz", hash = "sha256:5724ddeda8e18c7614c20a09c20159ed87ff7439755cf5e250a1a3feaf9afb7e"}, + {file = "boto3-1.35.22-py3-none-any.whl", hash = "sha256:2109b632b451c1d4347a93a9abe6dc866c03db4ff1f910597f4543f1965829de"}, + {file = "boto3-1.35.22.tar.gz", hash = "sha256:8f4f6e0860ca1b18cbb8d13f3a572a4c099577e741b10205b5604058af0e75b7"}, ] [package.dependencies] -botocore = ">=1.35.5,<1.36.0" +botocore = ">=1.35.22,<1.36.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -187,13 +187,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.5" +version = "1.35.22" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.5-py3-none-any.whl", hash = "sha256:8116b72c7ae845c195146e437e2afd9d17538a37b3f3548dcf67c12c86ba0742"}, - {file = "botocore-1.35.5.tar.gz", hash = "sha256:3a0086c7124cb3b0d9f98563d00ffd14a942c3f9e731d8d1ccf0d3a1ac7ed884"}, + {file = "botocore-1.35.22-py3-none-any.whl", hash = "sha256:d9bc656e7dde0b3e3f3080fc54bacff6a97fd7806b98acbcc21c7f9d4d0102b9"}, + {file = "botocore-1.35.22.tar.gz", hash = "sha256:18362b7ec748561d786aebf1dd5c9faf22c4732efbf89344314199f96d3bbb65"}, ] [package.dependencies] @@ -205,7 +205,7 @@ urllib3 = [ ] [package.extras] -crt = ["awscrt (==0.21.2)"] +crt = ["awscrt (==0.21.5)"] [[package]] name = "cachetools" @@ -220,89 +220,89 @@ files = [ [[package]] name = "certifi" -version = "2024.7.4" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] name = "cffi" -version = "1.17.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, - {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, - {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, - {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, - {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = 
"sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, - {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, - {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, - {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, - {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, - {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", 
hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, - {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, - {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, - {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, - {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, - {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, - {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = 
"cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -636,13 +636,13 @@ test = ["pytest (>=6)"] [[package]] name = "fastapi" -version = "0.112.2" +version = "0.115.0" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.112.2-py3-none-any.whl", hash = 
"sha256:db84b470bd0e2b1075942231e90e3577e12a903c4dc8696f0d206a7904a7af1c"}, - {file = "fastapi-0.112.2.tar.gz", hash = "sha256:3d4729c038414d5193840706907a41839d839523da6ed0c2811f1168cac1798c"}, + {file = "fastapi-0.115.0-py3-none-any.whl", hash = "sha256:17ea427674467486e997206a5ab25760f6b09e069f099b96f5b55a32fb6f1631"}, + {file = "fastapi-0.115.0.tar.gz", hash = "sha256:f93b4ca3529a8ebc6fc3fcf710e5efa8de3df9b41570958abf1d97d843138004"}, ] [package.dependencies] @@ -670,19 +670,19 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc [[package]] name = "filelock" -version = "3.15.4" +version = "3.16.1" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, - {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] -typing = ["typing-extensions (>=4.8)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "flask" @@ -709,13 +709,13 @@ dotenv = ["python-dotenv"] [[package]] name = "fsspec" -version = "2024.6.1" +version = "2024.9.0" description = "File-system specification" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2024.6.1-py3-none-any.whl", hash = "sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e"}, - {file = "fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49"}, + {file = "fsspec-2024.9.0-py3-none-any.whl", hash = "sha256:a0947d552d8a6efa72cc2c730b12c41d043509156966cca4fb157b0f2a0c574b"}, + {file = "fsspec-2024.9.0.tar.gz", hash = "sha256:4b0afb90c2f21832df142f292649035d80b421f60a9e1c027802e5a0da2b04e8"}, ] [package.extras] @@ -748,13 +748,13 @@ tqdm = ["tqdm"] [[package]] name = "google-api-core" -version = "2.19.1" +version = "2.20.0" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.19.1.tar.gz", hash = "sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd"}, - {file = "google_api_core-2.19.1-py3-none-any.whl", hash = "sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125"}, + {file = "google_api_core-2.20.0-py3-none-any.whl", hash = "sha256:ef0591ef03c30bb83f79b3d0575c3f31219001fc9c5cf37024d08310aeffed8a"}, + {file = "google_api_core-2.20.0.tar.gz", hash = "sha256:f74dff1889ba291a4b76c5079df0711810e2d9da81abfdc99957bc961c1eb28f"}, ] [package.dependencies] @@ -771,13 +771,13 @@ grpcio-gcp = ["grpcio-gcp 
(>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.34.0" +version = "2.35.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google_auth-2.34.0-py2.py3-none-any.whl", hash = "sha256:72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65"}, - {file = "google_auth-2.34.0.tar.gz", hash = "sha256:8eb87396435c19b20d32abd2f984e31c191a15284af72eb922f10e5bde9c04cc"}, + {file = "google_auth-2.35.0-py2.py3-none-any.whl", hash = "sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f"}, + {file = "google_auth-2.35.0.tar.gz", hash = "sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a"}, ] [package.dependencies] @@ -931,13 +931,13 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] name = "googleapis-common-protos" -version = "1.64.0" +version = "1.65.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis_common_protos-1.64.0-py2.py3-none-any.whl", hash = "sha256:d1bfc569f70ed2e96ccf06ead265c2cf42b5abfc817cda392e3835f3b67b5c59"}, - {file = "googleapis_common_protos-1.64.0.tar.gz", hash = "sha256:7d77ca6b7c0c38eb6b1bab3b4c9973acf57ce4f2a6d3a4136acba10bcbfb3025"}, + {file = "googleapis_common_protos-1.65.0-py2.py3-none-any.whl", hash = "sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63"}, + {file = "googleapis_common_protos-1.65.0.tar.gz", hash = "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0"}, ] [package.dependencies] @@ -948,61 +948,61 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] [[package]] name = "grpcio" -version = "1.66.0" +version = "1.66.1" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.8" files = [ - {file = "grpcio-1.66.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:ad7256f224437b2c29c2bef98ddd3130454c5b1ab1f0471fc11794cefd4dbd3d"}, - {file = "grpcio-1.66.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:5f4b3357e59dfba9140a51597287297bc638710d6a163f99ee14efc19967a821"}, - {file = "grpcio-1.66.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:e8d20308eeae15b3e182f47876f05acbdec1eebd9473a9814a44e46ec4a84c04"}, - {file = "grpcio-1.66.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1eb03524d0f55b965d6c86aa44e5db9e5eaa15f9ed3b164621e652e5b927f4b8"}, - {file = "grpcio-1.66.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37514b68a42e9cf24536345d3cf9e580ffd29117c158b4eeea34625200256067"}, - {file = "grpcio-1.66.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:516fdbc8e156db71a004bc431a6303bca24cfde186babe96dde7bd01e8f0cc70"}, - {file = "grpcio-1.66.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d0439a970d65327de21c299ea0e0c2ad0987cdaf18ba5066621dea5f427f922b"}, - {file = "grpcio-1.66.0-cp310-cp310-win32.whl", hash = "sha256:5f93fc84b72bbc7b84a42f3ca9dc055fa00d2303d9803be011ebf7a10a4eb833"}, - {file = "grpcio-1.66.0-cp310-cp310-win_amd64.whl", hash = "sha256:8fc5c710ddd51b5a0dc36ef1b6663430aa620e0ce029b87b150dafd313b978c3"}, - {file = "grpcio-1.66.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:dd614370e939f9fceeeb2915111a0795271b4c11dfb5fc0f58449bee40c726a5"}, - {file = "grpcio-1.66.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:245b08f9b3c645a6a623f3ed4fa43dcfcd6ad701eb9c32511c1bb7380e8c3d23"}, - {file = "grpcio-1.66.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = 
"sha256:aaf30c75cbaf30e561ca45f21eb1f729f0fab3f15c592c1074795ed43e3ff96f"}, - {file = "grpcio-1.66.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49234580a073ce7ac490112f6c67c874cbcb27804c4525978cdb21ba7f3f193c"}, - {file = "grpcio-1.66.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de9e20a0acb709dcfa15a622c91f584f12c9739a79c47999f73435d2b3cc8a3b"}, - {file = "grpcio-1.66.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc008c6afa1e7c8df99bd9154abc4f0470d26b7730ca2521122e99e771baa8c7"}, - {file = "grpcio-1.66.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:50cea8ce2552865b87e3dffbb85eb21e6b98d928621600c0feda2f02449cd837"}, - {file = "grpcio-1.66.0-cp311-cp311-win32.whl", hash = "sha256:508411df1f2b7cfa05d4d7dbf3d576fe4f949cd61c03f3a6f0378c84e3d7b963"}, - {file = "grpcio-1.66.0-cp311-cp311-win_amd64.whl", hash = "sha256:6d586a95c05c82a5354be48bb4537e1accaf2472d8eb7e9086d844cbff934482"}, - {file = "grpcio-1.66.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:5ea27f4ce8c0daccfdd2c7961e6ba404b6599f47c948415c4cca5728739107a3"}, - {file = "grpcio-1.66.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:296a45ea835e12a1cc35ab0c57e455346c272af7b0d178e29c67742167262b4c"}, - {file = "grpcio-1.66.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:e36fa838ac1d6c87198ca149cbfcc92e1af06bb8c8cd852622f8e58f33ea3324"}, - {file = "grpcio-1.66.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:684a4c07883cbd4ac864f0d08d927267404f5f0c76f31c85f9bbe05f2daae2f2"}, - {file = "grpcio-1.66.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3084e590e857ba7585ae91078e4c9b6ef55aaf1dc343ce26400ba59a146eada"}, - {file = "grpcio-1.66.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:526d4f6ca19f31b25606d5c470ecba55c0b22707b524e4de8987919e8920437d"}, - {file = "grpcio-1.66.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:423ae18637cd99ddcf2e5a6851c61828c49e9b9d022d0442d979b4f230109787"}, - {file = "grpcio-1.66.0-cp312-cp312-win32.whl", hash = "sha256:7bc9d823e05d63a87511fb456dcc48dc0fced86c282bf60229675e7ee7aac1a1"}, - {file = "grpcio-1.66.0-cp312-cp312-win_amd64.whl", hash = "sha256:230cdd696751e7eb1395718cd308234749daa217bb8d128f00357dc4df102558"}, - {file = "grpcio-1.66.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:0f3010bf46b2a01c9e40644cb9ed91b4b8435e5c500a275da5f9f62580e31e80"}, - {file = "grpcio-1.66.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ba18cfdc09312eb2eea6fa0ce5d2eec3cf345ea78f6528b2eaed6432105e0bd0"}, - {file = "grpcio-1.66.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:53d4c6706b49e358a2a33345dbe9b6b3bb047cecd7e8c07ba383bd09349bfef8"}, - {file = "grpcio-1.66.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:643d8d9632a688ae69661e924b862e23c83a3575b24e52917ec5bcc59543d212"}, - {file = "grpcio-1.66.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba60ae3b465b3e85080ae3bfbc36fd0305ae495ab16fcf8022fc7d7a23aac846"}, - {file = "grpcio-1.66.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9d5251578767fe44602688c851c2373b5513048ac84c21a0fe946590a8e7933d"}, - {file = "grpcio-1.66.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5e8140b39f10d7be2263afa2838112de29374c5c740eb0afd99146cb5bdbd990"}, - {file = "grpcio-1.66.0-cp38-cp38-win32.whl", hash = "sha256:5b15ef1b296c4e78f15f64fc65bf8081f8774480ffcac45642f69d9d753d9c6b"}, - {file = "grpcio-1.66.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:c072f90a1f0409f827ae86266984cba65e89c5831a0726b9fc7f4b5fb940b853"}, - {file = "grpcio-1.66.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:a639d3866bfb5a678b5c0b92cd7ab543033ed8988854290fd86145e71731fd4c"}, - {file = "grpcio-1.66.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6ed35bf7da3fb3b1949e32bdf47a8b5ffe0aed11722d948933bd068531cd4682"}, - {file = "grpcio-1.66.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:1c5466222470cb7fbc9cc898af1d48eefd297cb2e2f59af6d4a851c862fa90ac"}, - {file = "grpcio-1.66.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:921b8f7f25d5300d7c6837a1e0639ef145fbdbfb728e0a5db2dbccc9fc0fd891"}, - {file = "grpcio-1.66.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3f6feb0dc8456d025e566709f7dd02885add99bedaac50229013069242a1bfd"}, - {file = "grpcio-1.66.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748452dbd5a047475d5413bdef08b0b9ceb2c0c0e249d4ee905a5fb82c6328dc"}, - {file = "grpcio-1.66.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:832945e64176520520317b50d64ec7d79924429528d5747669b52d0bf2c7bd78"}, - {file = "grpcio-1.66.0-cp39-cp39-win32.whl", hash = "sha256:8096a922eb91bc97c839f675c3efa1257c6ef181ae1b25d3fb97f2cae4c57c01"}, - {file = "grpcio-1.66.0-cp39-cp39-win_amd64.whl", hash = "sha256:375b58892301a5fc6ca7d7ff689c9dc9d00895f5d560604ace9f4f0573013c63"}, - {file = "grpcio-1.66.0.tar.gz", hash = "sha256:c1ea4c528e7db6660718e4165fd1b5ac24b79a70c870a7bc0b7bdb9babab7c1e"}, -] - -[package.extras] -protobuf = ["grpcio-tools (>=1.66.0)"] + {file = "grpcio-1.66.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:4877ba180591acdf127afe21ec1c7ff8a5ecf0fe2600f0d3c50e8c4a1cbc6492"}, + {file = "grpcio-1.66.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3750c5a00bd644c75f4507f77a804d0189d97a107eb1481945a0cf3af3e7a5ac"}, + {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:a013c5fbb12bfb5f927444b477a26f1080755a931d5d362e6a9a720ca7dbae60"}, + {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1b24c23d51a1e8790b25514157d43f0a4dce1ac12b3f0b8e9f66a5e2c4c132f"}, + {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7ffb8ea674d68de4cac6f57d2498fef477cef582f1fa849e9f844863af50083"}, + {file = "grpcio-1.66.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:307b1d538140f19ccbd3aed7a93d8f71103c5d525f3c96f8616111614b14bf2a"}, + {file = "grpcio-1.66.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1c17ebcec157cfb8dd445890a03e20caf6209a5bd4ac5b040ae9dbc59eef091d"}, + {file = "grpcio-1.66.1-cp310-cp310-win32.whl", hash = "sha256:ef82d361ed5849d34cf09105d00b94b6728d289d6b9235513cb2fcc79f7c432c"}, + {file = "grpcio-1.66.1-cp310-cp310-win_amd64.whl", hash = "sha256:292a846b92cdcd40ecca46e694997dd6b9be6c4c01a94a0dfb3fcb75d20da858"}, + {file = "grpcio-1.66.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:c30aeceeaff11cd5ddbc348f37c58bcb96da8d5aa93fed78ab329de5f37a0d7a"}, + {file = "grpcio-1.66.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8a1e224ce6f740dbb6b24c58f885422deebd7eb724aff0671a847f8951857c26"}, + {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a66fe4dc35d2330c185cfbb42959f57ad36f257e0cc4557d11d9f0a3f14311df"}, + {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3ba04659e4fce609de2658fe4dbf7d6ed21987a94460f5f92df7579fd5d0e22"}, + {file = 
"grpcio-1.66.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4573608e23f7e091acfbe3e84ac2045680b69751d8d67685ffa193a4429fedb1"}, + {file = "grpcio-1.66.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7e06aa1f764ec8265b19d8f00140b8c4b6ca179a6dc67aa9413867c47e1fb04e"}, + {file = "grpcio-1.66.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3885f037eb11f1cacc41f207b705f38a44b69478086f40608959bf5ad85826dd"}, + {file = "grpcio-1.66.1-cp311-cp311-win32.whl", hash = "sha256:97ae7edd3f3f91480e48ede5d3e7d431ad6005bfdbd65c1b56913799ec79e791"}, + {file = "grpcio-1.66.1-cp311-cp311-win_amd64.whl", hash = "sha256:cfd349de4158d797db2bd82d2020554a121674e98fbe6b15328456b3bf2495bb"}, + {file = "grpcio-1.66.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:a92c4f58c01c77205df6ff999faa008540475c39b835277fb8883b11cada127a"}, + {file = "grpcio-1.66.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fdb14bad0835914f325349ed34a51940bc2ad965142eb3090081593c6e347be9"}, + {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f03a5884c56256e08fd9e262e11b5cfacf1af96e2ce78dc095d2c41ccae2c80d"}, + {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ca2559692d8e7e245d456877a85ee41525f3ed425aa97eb7a70fc9a79df91a0"}, + {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84ca1be089fb4446490dd1135828bd42a7c7f8421e74fa581611f7afdf7ab761"}, + {file = "grpcio-1.66.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:d639c939ad7c440c7b2819a28d559179a4508783f7e5b991166f8d7a34b52815"}, + {file = "grpcio-1.66.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b9feb4e5ec8dc2d15709f4d5fc367794d69277f5d680baf1910fc9915c633524"}, + {file = "grpcio-1.66.1-cp312-cp312-win32.whl", hash = "sha256:7101db1bd4cd9b880294dec41a93fcdce465bdbb602cd8dc5bd2d6362b618759"}, + {file = "grpcio-1.66.1-cp312-cp312-win_amd64.whl", hash = "sha256:b0aa03d240b5539648d996cc60438f128c7f46050989e35b25f5c18286c86734"}, + {file = "grpcio-1.66.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:ecfe735e7a59e5a98208447293ff8580e9db1e890e232b8b292dc8bd15afc0d2"}, + {file = "grpcio-1.66.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4825a3aa5648010842e1c9d35a082187746aa0cdbf1b7a2a930595a94fb10fce"}, + {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:f517fd7259fe823ef3bd21e508b653d5492e706e9f0ef82c16ce3347a8a5620c"}, + {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1fe60d0772831d96d263b53d83fb9a3d050a94b0e94b6d004a5ad111faa5b5b"}, + {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31a049daa428f928f21090403e5d18ea02670e3d5d172581670be006100db9ef"}, + {file = "grpcio-1.66.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f914386e52cbdeb5d2a7ce3bf1fdfacbe9d818dd81b6099a05b741aaf3848bb"}, + {file = "grpcio-1.66.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bff2096bdba686019fb32d2dde45b95981f0d1490e054400f70fc9a8af34b49d"}, + {file = "grpcio-1.66.1-cp38-cp38-win32.whl", hash = "sha256:aa8ba945c96e73de29d25331b26f3e416e0c0f621e984a3ebdb2d0d0b596a3b3"}, + {file = "grpcio-1.66.1-cp38-cp38-win_amd64.whl", hash = "sha256:161d5c535c2bdf61b95080e7f0f017a1dfcb812bf54093e71e5562b16225b4ce"}, + {file = "grpcio-1.66.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:d0cd7050397b3609ea51727b1811e663ffda8bda39c6a5bb69525ef12414b503"}, + {file = 
"grpcio-1.66.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0e6c9b42ded5d02b6b1fea3a25f036a2236eeb75d0579bfd43c0018c88bf0a3e"}, + {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:c9f80f9fad93a8cf71c7f161778ba47fd730d13a343a46258065c4deb4b550c0"}, + {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dd67ed9da78e5121efc5c510f0122a972216808d6de70953a740560c572eb44"}, + {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48b0d92d45ce3be2084b92fb5bae2f64c208fea8ceed7fccf6a7b524d3c4942e"}, + {file = "grpcio-1.66.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4d813316d1a752be6f5c4360c49f55b06d4fe212d7df03253dfdae90c8a402bb"}, + {file = "grpcio-1.66.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9c9bebc6627873ec27a70fc800f6083a13c70b23a5564788754b9ee52c5aef6c"}, + {file = "grpcio-1.66.1-cp39-cp39-win32.whl", hash = "sha256:30a1c2cf9390c894c90bbc70147f2372130ad189cffef161f0432d0157973f45"}, + {file = "grpcio-1.66.1-cp39-cp39-win_amd64.whl", hash = "sha256:17663598aadbedc3cacd7bbde432f541c8e07d2496564e22b214b22c7523dac8"}, + {file = "grpcio-1.66.1.tar.gz", hash = "sha256:35334f9c9745add3e357e3372756fd32d925bd52c41da97f4dfdafbde0bf0ee2"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.66.1)"] [[package]] name = "h11" @@ -1038,13 +1038,13 @@ trio = ["trio (>=0.22.0,<0.26.0)"] [[package]] name = "httpx" -version = "0.27.0" +version = "0.27.2" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, ] [package.dependencies] @@ -1062,16 +1062,17 @@ brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "huggingface-hub" -version = "0.24.6" +version = "0.25.0" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" optional = false python-versions = ">=3.8.0" files = [ - {file = "huggingface_hub-0.24.6-py3-none-any.whl", hash = "sha256:a990f3232aa985fe749bc9474060cbad75e8b2f115f6665a9fda5b9c97818970"}, - {file = "huggingface_hub-0.24.6.tar.gz", hash = "sha256:cc2579e761d070713eaa9c323e3debe39d5b464ae3a7261c39a9195b27bb8000"}, + {file = "huggingface_hub-0.25.0-py3-none-any.whl", hash = "sha256:e2f357b35d72d5012cfd127108c4e14abcd61ba4ebc90a5a374dc2456cb34e12"}, + {file = "huggingface_hub-0.25.0.tar.gz", hash = "sha256:fb5fbe6c12fcd99d187ec7db95db9110fb1a20505f23040a5449a717c1a0db4d"}, ] [package.dependencies] @@ -1099,13 +1100,13 @@ typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "t [[package]] name = "identify" -version = "2.6.0" +version = "2.6.1" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"}, - {file = 
"identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"}, + {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"}, + {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"}, ] [package.extras] @@ -1113,24 +1114,27 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.8" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" files = [ - {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, - {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "importlib-metadata" -version = "8.0.0" +version = "8.4.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, - {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, + {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, + {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, ] [package.dependencies] @@ -1143,13 +1147,13 @@ test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "p [[package]] name = "importlib-resources" -version = "6.4.4" +version = "6.4.5" description = "Read resources from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.4.4-py3-none-any.whl", hash = "sha256:dda242603d1c9cd836c3368b1174ed74cb4049ecd209e7a1a0104620c18c5c11"}, - {file = "importlib_resources-6.4.4.tar.gz", hash = "sha256:20600c8b7361938dc0bb2d5ec0297802e575df486f5a544fa414da65e13721f7"}, + {file = "importlib_resources-6.4.5-py3-none-any.whl", hash = "sha256:ac29d5f956f01d5e4bb63102a5a19957f1b9175e45649977264a1416783bb717"}, + {file = "importlib_resources-6.4.5.tar.gz", hash = "sha256:980862a1d16c9e147a59603677fa2aa5fd82b87f223b6cb870695bcfce830065"}, ] [package.dependencies] @@ -1375,13 +1379,13 @@ referencing = ">=0.31.0" [[package]] name = "jupyter-client" -version = "8.6.2" +version = "8.6.3" description = "Jupyter protocol implementation and client libraries" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_client-8.6.2-py3-none-any.whl", hash = "sha256:50cbc5c66fd1b8f65ecb66bc490ab73217993632809b6e505687de18e9dea39f"}, - {file = "jupyter_client-8.6.2.tar.gz", hash = "sha256:2bda14d55ee5ba58552a8c53ae43d215ad9868853489213f37da060ced54d8df"}, + {file = "jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f"}, + {file = "jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419"}, ] 
[package.dependencies] @@ -1624,68 +1628,75 @@ files = [ [[package]] name = "msgpack" -version = "1.0.8" +version = "1.1.0" description = "MessagePack serializer" optional = false python-versions = ">=3.8" files = [ - {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:505fe3d03856ac7d215dbe005414bc28505d26f0c128906037e66d98c4e95868"}, - {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b7842518a63a9f17107eb176320960ec095a8ee3b4420b5f688e24bf50c53c"}, - {file = "msgpack-1.0.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:376081f471a2ef24828b83a641a02c575d6103a3ad7fd7dade5486cad10ea659"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e390971d082dba073c05dbd56322427d3280b7cc8b53484c9377adfbae67dc2"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e073efcba9ea99db5acef3959efa45b52bc67b61b00823d2a1a6944bf45982"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82d92c773fbc6942a7a8b520d22c11cfc8fd83bba86116bfcf962c2f5c2ecdaa"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9ee32dcb8e531adae1f1ca568822e9b3a738369b3b686d1477cbc643c4a9c128"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e3aa7e51d738e0ec0afbed661261513b38b3014754c9459508399baf14ae0c9d"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69284049d07fce531c17404fcba2bb1df472bc2dcdac642ae71a2d079d950653"}, - {file = "msgpack-1.0.8-cp310-cp310-win32.whl", hash = "sha256:13577ec9e247f8741c84d06b9ece5f654920d8365a4b636ce0e44f15e07ec693"}, - {file = "msgpack-1.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:e532dbd6ddfe13946de050d7474e3f5fb6ec774fbb1a188aaf469b08cf04189a"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9517004e21664f2b5a5fd6333b0731b9cf0817403a941b393d89a2f1dc2bd836"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d16a786905034e7e34098634b184a7d81f91d4c3d246edc6bd7aefb2fd8ea6ad"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2872993e209f7ed04d963e4b4fbae72d034844ec66bc4ca403329db2074377b"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c330eace3dd100bdb54b5653b966de7f51c26ec4a7d4e87132d9b4f738220ba"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b5c044f3eff2a6534768ccfd50425939e7a8b5cf9a7261c385de1e20dcfc85"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1876b0b653a808fcd50123b953af170c535027bf1d053b59790eebb0aeb38950"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dfe1f0f0ed5785c187144c46a292b8c34c1295c01da12e10ccddfc16def4448a"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3528807cbbb7f315bb81959d5961855e7ba52aa60a3097151cb21956fbc7502b"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e2f879ab92ce502a1e65fce390eab619774dda6a6ff719718069ac94084098ce"}, - {file = "msgpack-1.0.8-cp311-cp311-win32.whl", hash = "sha256:26ee97a8261e6e35885c2ecd2fd4a6d38252246f94a2aec23665a4e66d066305"}, - {file = "msgpack-1.0.8-cp311-cp311-win_amd64.whl", hash = 
"sha256:eadb9f826c138e6cf3c49d6f8de88225a3c0ab181a9b4ba792e006e5292d150e"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:114be227f5213ef8b215c22dde19532f5da9652e56e8ce969bf0a26d7c419fee"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d661dc4785affa9d0edfdd1e59ec056a58b3dbb9f196fa43587f3ddac654ac7b"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d56fd9f1f1cdc8227d7b7918f55091349741904d9520c65f0139a9755952c9e8"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0726c282d188e204281ebd8de31724b7d749adebc086873a59efb8cf7ae27df3"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8db8e423192303ed77cff4dce3a4b88dbfaf43979d280181558af5e2c3c71afc"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99881222f4a8c2f641f25703963a5cefb076adffd959e0558dc9f803a52d6a58"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b5505774ea2a73a86ea176e8a9a4a7c8bf5d521050f0f6f8426afe798689243f"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ef254a06bcea461e65ff0373d8a0dd1ed3aa004af48839f002a0c994a6f72d04"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e1dd7839443592d00e96db831eddb4111a2a81a46b028f0facd60a09ebbdd543"}, - {file = "msgpack-1.0.8-cp312-cp312-win32.whl", hash = "sha256:64d0fcd436c5683fdd7c907eeae5e2cbb5eb872fafbc03a43609d7941840995c"}, - {file = "msgpack-1.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:74398a4cf19de42e1498368c36eed45d9528f5fd0155241e82c4082b7e16cffd"}, - {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ceea77719d45c839fd73abcb190b8390412a890df2f83fb8cf49b2a4b5c2f40"}, - {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ab0bbcd4d1f7b6991ee7c753655b481c50084294218de69365f8f1970d4c151"}, - {file = "msgpack-1.0.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1cce488457370ffd1f953846f82323cb6b2ad2190987cd4d70b2713e17268d24"}, - {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3923a1778f7e5ef31865893fdca12a8d7dc03a44b33e2a5f3295416314c09f5d"}, - {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22e47578b30a3e199ab067a4d43d790249b3c0587d9a771921f86250c8435db"}, - {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd739c9251d01e0279ce729e37b39d49a08c0420d3fee7f2a4968c0576678f77"}, - {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3420522057ebab1728b21ad473aa950026d07cb09da41103f8e597dfbfaeb13"}, - {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5845fdf5e5d5b78a49b826fcdc0eb2e2aa7191980e3d2cfd2a30303a74f212e2"}, - {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a0e76621f6e1f908ae52860bdcb58e1ca85231a9b0545e64509c931dd34275a"}, - {file = "msgpack-1.0.8-cp38-cp38-win32.whl", hash = "sha256:374a8e88ddab84b9ada695d255679fb99c53513c0a51778796fcf0944d6c789c"}, - {file = "msgpack-1.0.8-cp38-cp38-win_amd64.whl", hash = "sha256:f3709997b228685fe53e8c433e2df9f0cdb5f4542bd5114ed17ac3c0129b0480"}, - {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:f51bab98d52739c50c56658cc303f190785f9a2cd97b823357e7aeae54c8f68a"}, - {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:73ee792784d48aa338bba28063e19a27e8d989344f34aad14ea6e1b9bd83f596"}, - {file = "msgpack-1.0.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f9904e24646570539a8950400602d66d2b2c492b9010ea7e965025cb71d0c86d"}, - {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e75753aeda0ddc4c28dce4c32ba2f6ec30b1b02f6c0b14e547841ba5b24f753f"}, - {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5dbf059fb4b7c240c873c1245ee112505be27497e90f7c6591261c7d3c3a8228"}, - {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4916727e31c28be8beaf11cf117d6f6f188dcc36daae4e851fee88646f5b6b18"}, - {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7938111ed1358f536daf311be244f34df7bf3cdedb3ed883787aca97778b28d8"}, - {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:493c5c5e44b06d6c9268ce21b302c9ca055c1fd3484c25ba41d34476c76ee746"}, - {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, - {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, - {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, - {file = "msgpack-1.0.8-py3-none-any.whl", hash = "sha256:24f727df1e20b9876fa6e95f840a2a2651e34c0ad147676356f4bf5fbb0206ca"}, - {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, + {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd"}, + {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d"}, + {file = "msgpack-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:914571a2a5b4e7606997e169f64ce53a8b1e06f2cf2c3a7273aa106236d43dd5"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c921af52214dcbb75e6bdf6a661b23c3e6417f00c603dd2070bccb5c3ef499f5"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8ce0b22b890be5d252de90d0e0d119f363012027cf256185fc3d474c44b1b9e"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73322a6cc57fcee3c0c57c4463d828e9428275fb85a27aa2aa1a92fdc42afd7b"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e1f3c3d21f7cf67bcf2da8e494d30a75e4cf60041d98b3f79875afb5b96f3a3f"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64fc9068d701233effd61b19efb1485587560b66fe57b3e50d29c5d78e7fef68"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:42f754515e0f683f9c79210a5d1cad631ec3d06cea5172214d2176a42e67e19b"}, + {file = "msgpack-1.1.0-cp310-cp310-win32.whl", hash = "sha256:3df7e6b05571b3814361e8464f9304c42d2196808e0119f55d0d3e62cd5ea044"}, + {file = "msgpack-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:685ec345eefc757a7c8af44a3032734a739f8c45d1b0ac45efc5d8977aa4720f"}, + {file = 
"msgpack-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d364a55082fb2a7416f6c63ae383fbd903adb5a6cf78c5b96cc6316dc1cedc7"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79ec007767b9b56860e0372085f8504db5d06bd6a327a335449508bbee9648fa"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ad622bf7756d5a497d5b6836e7fc3752e2dd6f4c648e24b1803f6048596f701"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e59bca908d9ca0de3dc8684f21ebf9a690fe47b6be93236eb40b99af28b6ea6"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1da8f11a3dd397f0a32c76165cf0c4eb95b31013a94f6ecc0b280c05c91b59"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452aff037287acb1d70a804ffd022b21fa2bb7c46bee884dbc864cc9024128a0"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8da4bf6d54ceed70e8861f833f83ce0814a2b72102e890cbdfe4b34764cdd66e"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:41c991beebf175faf352fb940bf2af9ad1fb77fd25f38d9142053914947cdbf6"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a52a1f3a5af7ba1c9ace055b659189f6c669cf3657095b50f9602af3a3ba0fe5"}, + {file = "msgpack-1.1.0-cp311-cp311-win32.whl", hash = "sha256:58638690ebd0a06427c5fe1a227bb6b8b9fdc2bd07701bec13c2335c82131a88"}, + {file = "msgpack-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd2906780f25c8ed5d7b323379f6138524ba793428db5d0e9d226d3fa6aa1788"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d46cf9e3705ea9485687aa4001a76e44748b609d260af21c4ceea7f2212a501d"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5dbad74103df937e1325cc4bfeaf57713be0b4f15e1c2da43ccdd836393e2ea2"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58dfc47f8b102da61e8949708b3eafc3504509a5728f8b4ddef84bd9e16ad420"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676e5be1b472909b2ee6356ff425ebedf5142427842aa06b4dfd5117d1ca8a2"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17fb65dd0bec285907f68b15734a993ad3fc94332b5bb21b0435846228de1f39"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a51abd48c6d8ac89e0cfd4fe177c61481aca2d5e7ba42044fd218cfd8ea9899f"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2137773500afa5494a61b1208619e3871f75f27b03bcfca7b3a7023284140247"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:398b713459fea610861c8a7b62a6fec1882759f308ae0795b5413ff6a160cf3c"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06f5fd2f6bb2a7914922d935d3b8bb4a7fff3a9a91cfce6d06c13bc42bec975b"}, + {file = "msgpack-1.1.0-cp312-cp312-win32.whl", hash = "sha256:ad33e8400e4ec17ba782f7b9cf868977d867ed784a1f5f2ab46e7ba53b6e1e1b"}, + {file = "msgpack-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:115a7af8ee9e8cddc10f87636767857e7e3717b7a2e97379dc2054712693e90f"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:071603e2f0771c45ad9bc65719291c568d4edf120b44eb36324dcb02a13bfddf"}, + {file = 
"msgpack-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0f92a83b84e7c0749e3f12821949d79485971f087604178026085f60ce109330"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1964df7b81285d00a84da4e70cb1383f2e665e0f1f2a7027e683956d04b734"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59caf6a4ed0d164055ccff8fe31eddc0ebc07cf7326a2aaa0dbf7a4001cd823e"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0907e1a7119b337971a689153665764adc34e89175f9a34793307d9def08e6ca"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65553c9b6da8166e819a6aa90ad15288599b340f91d18f60b2061f402b9a4915"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7a946a8992941fea80ed4beae6bff74ffd7ee129a90b4dd5cf9c476a30e9708d"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4b51405e36e075193bc051315dbf29168d6141ae2500ba8cd80a522964e31434"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4c01941fd2ff87c2a934ee6055bda4ed353a7846b8d4f341c428109e9fcde8c"}, + {file = "msgpack-1.1.0-cp313-cp313-win32.whl", hash = "sha256:7c9a35ce2c2573bada929e0b7b3576de647b0defbd25f5139dcdaba0ae35a4cc"}, + {file = "msgpack-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:bce7d9e614a04d0883af0b3d4d501171fbfca038f12c77fa838d9f198147a23f"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c40ffa9a15d74e05ba1fe2681ea33b9caffd886675412612d93ab17b58ea2fec"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1ba6136e650898082d9d5a5217d5906d1e138024f836ff48691784bbe1adf96"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0856a2b7e8dcb874be44fea031d22e5b3a19121be92a1e098f46068a11b0870"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:471e27a5787a2e3f974ba023f9e265a8c7cfd373632247deb225617e3100a3c7"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:646afc8102935a388ffc3914b336d22d1c2d6209c773f3eb5dd4d6d3b6f8c1cb"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13599f8829cfbe0158f6456374e9eea9f44eee08076291771d8ae93eda56607f"}, + {file = "msgpack-1.1.0-cp38-cp38-win32.whl", hash = "sha256:8a84efb768fb968381e525eeeb3d92857e4985aacc39f3c47ffd00eb4509315b"}, + {file = "msgpack-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:879a7b7b0ad82481c52d3c7eb99bf6f0645dbdec5134a4bddbd16f3506947feb"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:53258eeb7a80fc46f62fd59c876957a2d0e15e6449a9e71842b6d24419d88ca1"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e7b853bbc44fb03fbdba34feb4bd414322180135e2cb5164f20ce1c9795ee48"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3e9b4936df53b970513eac1758f3882c88658a220b58dcc1e39606dccaaf01c"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46c34e99110762a76e3911fc923222472c9d681f1094096ac4102c18319e6468"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a706d1e74dd3dea05cb54580d9bd8b2880e9264856ce5068027eed09680aa74"}, + {file = 
"msgpack-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:534480ee5690ab3cbed89d4c8971a5c631b69a8c0883ecfea96c19118510c846"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8cf9e8c3a2153934a23ac160cc4cba0ec035f6867c8013cc6077a79823370346"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3180065ec2abbe13a4ad37688b61b99d7f9e012a535b930e0e683ad6bc30155b"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5a91481a3cc573ac8c0d9aace09345d989dc4a0202b7fcb312c88c26d4e71a8"}, + {file = "msgpack-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f80bc7d47f76089633763f952e67f8214cb7b3ee6bfa489b3cb6a84cfac114cd"}, + {file = "msgpack-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:4d1b7ff2d6146e16e8bd665ac726a89c74163ef8cd39fa8c1087d4e52d3a2325"}, + {file = "msgpack-1.1.0.tar.gz", hash = "sha256:dd432ccc2c72b914e4cb77afce64aab761c1137cc698be3984eee260bcb2896e"}, ] [[package]] @@ -1903,57 +1914,57 @@ files = [ [[package]] name = "opentelemetry-api" -version = "1.26.0" +version = "1.27.0" description = "OpenTelemetry Python API" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_api-1.26.0-py3-none-any.whl", hash = "sha256:7d7ea33adf2ceda2dd680b18b1677e4152000b37ca76e679da71ff103b943064"}, - {file = "opentelemetry_api-1.26.0.tar.gz", hash = "sha256:2bd639e4bed5b18486fef0b5a520aaffde5a18fc225e808a1ac4df363f43a1ce"}, + {file = "opentelemetry_api-1.27.0-py3-none-any.whl", hash = "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7"}, + {file = "opentelemetry_api-1.27.0.tar.gz", hash = "sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342"}, ] [package.dependencies] deprecated = ">=1.2.6" -importlib-metadata = ">=6.0,<=8.0.0" +importlib-metadata = ">=6.0,<=8.4.0" [[package]] name = "opentelemetry-exporter-otlp" -version = "1.26.0" +version = "1.27.0" description = "OpenTelemetry Collector Exporters" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_exporter_otlp-1.26.0-py3-none-any.whl", hash = "sha256:f839989f54bda85ee33c5dae033c44dcec9ccbb0dafc6a43d585df44da1d2036"}, - {file = "opentelemetry_exporter_otlp-1.26.0.tar.gz", hash = "sha256:cf0e093f080011951d9f97431a83869761e4d4ebe83a4195ee92d7806223299c"}, + {file = "opentelemetry_exporter_otlp-1.27.0-py3-none-any.whl", hash = "sha256:7688791cbdd951d71eb6445951d1cfbb7b6b2d7ee5948fac805d404802931145"}, + {file = "opentelemetry_exporter_otlp-1.27.0.tar.gz", hash = "sha256:4a599459e623868cc95d933c301199c2367e530f089750e115599fccd67cb2a1"}, ] [package.dependencies] -opentelemetry-exporter-otlp-proto-grpc = "1.26.0" -opentelemetry-exporter-otlp-proto-http = "1.26.0" +opentelemetry-exporter-otlp-proto-grpc = "1.27.0" +opentelemetry-exporter-otlp-proto-http = "1.27.0" [[package]] name = "opentelemetry-exporter-otlp-proto-common" -version = "1.26.0" +version = "1.27.0" description = "OpenTelemetry Protobuf encoding" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_exporter_otlp_proto_common-1.26.0-py3-none-any.whl", hash = "sha256:ee4d8f8891a1b9c372abf8d109409e5b81947cf66423fd998e56880057afbc71"}, - {file = "opentelemetry_exporter_otlp_proto_common-1.26.0.tar.gz", hash = "sha256:bdbe50e2e22a1c71acaa0c8ba6efaadd58882e5a5978737a44a4c4b10d304c92"}, + {file = "opentelemetry_exporter_otlp_proto_common-1.27.0-py3-none-any.whl", hash = "sha256:675db7fffcb60946f3a5c43e17d1168a3307a94a930ecf8d2ea1f286f3d4f79a"}, + 
{file = "opentelemetry_exporter_otlp_proto_common-1.27.0.tar.gz", hash = "sha256:159d27cf49f359e3798c4c3eb8da6ef4020e292571bd8c5604a2a573231dd5c8"}, ] [package.dependencies] -opentelemetry-proto = "1.26.0" +opentelemetry-proto = "1.27.0" [[package]] name = "opentelemetry-exporter-otlp-proto-grpc" -version = "1.26.0" +version = "1.27.0" description = "OpenTelemetry Collector Protobuf over gRPC Exporter" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_exporter_otlp_proto_grpc-1.26.0-py3-none-any.whl", hash = "sha256:e2be5eff72ebcb010675b818e8d7c2e7d61ec451755b8de67a140bc49b9b0280"}, - {file = "opentelemetry_exporter_otlp_proto_grpc-1.26.0.tar.gz", hash = "sha256:a65b67a9a6b06ba1ec406114568e21afe88c1cdb29c464f2507d529eb906d8ae"}, + {file = "opentelemetry_exporter_otlp_proto_grpc-1.27.0-py3-none-any.whl", hash = "sha256:56b5bbd5d61aab05e300d9d62a6b3c134827bbd28d0b12f2649c2da368006c9e"}, + {file = "opentelemetry_exporter_otlp_proto_grpc-1.27.0.tar.gz", hash = "sha256:af6f72f76bcf425dfb5ad11c1a6d6eca2863b91e63575f89bb7b4b55099d968f"}, ] [package.dependencies] @@ -1961,39 +1972,39 @@ deprecated = ">=1.2.6" googleapis-common-protos = ">=1.52,<2.0" grpcio = ">=1.0.0,<2.0.0" opentelemetry-api = ">=1.15,<2.0" -opentelemetry-exporter-otlp-proto-common = "1.26.0" -opentelemetry-proto = "1.26.0" -opentelemetry-sdk = ">=1.26.0,<1.27.0" +opentelemetry-exporter-otlp-proto-common = "1.27.0" +opentelemetry-proto = "1.27.0" +opentelemetry-sdk = ">=1.27.0,<1.28.0" [[package]] name = "opentelemetry-exporter-otlp-proto-http" -version = "1.26.0" +version = "1.27.0" description = "OpenTelemetry Collector Protobuf over HTTP Exporter" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_exporter_otlp_proto_http-1.26.0-py3-none-any.whl", hash = "sha256:ee72a87c48ec977421b02f16c52ea8d884122470e0be573905237b540f4ee562"}, - {file = "opentelemetry_exporter_otlp_proto_http-1.26.0.tar.gz", hash = "sha256:5801ebbcf7b527377883e6cbbdda35ee712dc55114fff1e93dfee210be56c908"}, + {file = "opentelemetry_exporter_otlp_proto_http-1.27.0-py3-none-any.whl", hash = "sha256:688027575c9da42e179a69fe17e2d1eba9b14d81de8d13553a21d3114f3b4d75"}, + {file = "opentelemetry_exporter_otlp_proto_http-1.27.0.tar.gz", hash = "sha256:2103479092d8eb18f61f3fbff084f67cc7f2d4a7d37e75304b8b56c1d09ebef5"}, ] [package.dependencies] deprecated = ">=1.2.6" googleapis-common-protos = ">=1.52,<2.0" opentelemetry-api = ">=1.15,<2.0" -opentelemetry-exporter-otlp-proto-common = "1.26.0" -opentelemetry-proto = "1.26.0" -opentelemetry-sdk = ">=1.26.0,<1.27.0" +opentelemetry-exporter-otlp-proto-common = "1.27.0" +opentelemetry-proto = "1.27.0" +opentelemetry-sdk = ">=1.27.0,<1.28.0" requests = ">=2.7,<3.0" [[package]] name = "opentelemetry-proto" -version = "1.26.0" +version = "1.27.0" description = "OpenTelemetry Python Proto" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_proto-1.26.0-py3-none-any.whl", hash = "sha256:6c4d7b4d4d9c88543bcf8c28ae3f8f0448a753dc291c18c5390444c90b76a725"}, - {file = "opentelemetry_proto-1.26.0.tar.gz", hash = "sha256:c5c18796c0cab3751fc3b98dee53855835e90c0422924b484432ac852d93dc1e"}, + {file = "opentelemetry_proto-1.27.0-py3-none-any.whl", hash = "sha256:b133873de5581a50063e1e4b29cdcf0c5e253a8c2d8dc1229add20a4c3830ace"}, + {file = "opentelemetry_proto-1.27.0.tar.gz", hash = "sha256:33c9345d91dafd8a74fc3d7576c5a38f18b7fdf8d02983ac67485386132aedd6"}, ] [package.dependencies] @@ -2001,34 +2012,34 @@ protobuf = ">=3.19,<5.0" [[package]] name = 
"opentelemetry-sdk" -version = "1.26.0" +version = "1.27.0" description = "OpenTelemetry Python SDK" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_sdk-1.26.0-py3-none-any.whl", hash = "sha256:feb5056a84a88670c041ea0ded9921fca559efec03905dddeb3885525e0af897"}, - {file = "opentelemetry_sdk-1.26.0.tar.gz", hash = "sha256:c90d2868f8805619535c05562d699e2f4fb1f00dbd55a86dcefca4da6fa02f85"}, + {file = "opentelemetry_sdk-1.27.0-py3-none-any.whl", hash = "sha256:365f5e32f920faf0fd9e14fdfd92c086e317eaa5f860edba9cdc17a380d9197d"}, + {file = "opentelemetry_sdk-1.27.0.tar.gz", hash = "sha256:d525017dea0ccce9ba4e0245100ec46ecdc043f2d7b8315d56b19aff0904fa6f"}, ] [package.dependencies] -opentelemetry-api = "1.26.0" -opentelemetry-semantic-conventions = "0.47b0" +opentelemetry-api = "1.27.0" +opentelemetry-semantic-conventions = "0.48b0" typing-extensions = ">=3.7.4" [[package]] name = "opentelemetry-semantic-conventions" -version = "0.47b0" +version = "0.48b0" description = "OpenTelemetry Semantic Conventions" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_semantic_conventions-0.47b0-py3-none-any.whl", hash = "sha256:4ff9d595b85a59c1c1413f02bba320ce7ea6bf9e2ead2b0913c4395c7bbc1063"}, - {file = "opentelemetry_semantic_conventions-0.47b0.tar.gz", hash = "sha256:a8d57999bbe3495ffd4d510de26a97dadc1dace53e0275001b2c1b2f67992a7e"}, + {file = "opentelemetry_semantic_conventions-0.48b0-py3-none-any.whl", hash = "sha256:a0de9f45c413a8669788a38569c7e0a11ce6ce97861a628cca785deecdc32a1f"}, + {file = "opentelemetry_semantic_conventions-0.48b0.tar.gz", hash = "sha256:12d74983783b6878162208be57c9effcb89dc88691c64992d70bb89dc00daa1a"}, ] [package.dependencies] deprecated = ">=1.2.6" -opentelemetry-api = "1.26.0" +opentelemetry-api = "1.27.0" [[package]] name = "packaging" @@ -2130,19 +2141,19 @@ files = [ [[package]] name = "platformdirs" -version = "4.2.2" +version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" @@ -2210,22 +2221,22 @@ testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" -version = "4.25.4" +version = "4.25.5" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.4-cp310-abi3-win32.whl", hash = "sha256:db9fd45183e1a67722cafa5c1da3e85c6492a5383f127c86c4c4aa4845867dc4"}, - {file = "protobuf-4.25.4-cp310-abi3-win_amd64.whl", hash = "sha256:ba3d8504116a921af46499471c63a85260c1a5fc23333154a427a310e015d26d"}, - {file = "protobuf-4.25.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:eecd41bfc0e4b1bd3fa7909ed93dd14dd5567b98c941d6c1ad08fdcab3d6884b"}, - {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:4c8a70fdcb995dcf6c8966cfa3a29101916f7225e9afe3ced4395359955d3835"}, - {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:3319e073562e2515c6ddc643eb92ce20809f5d8f10fead3332f71c63be6a7040"}, - {file = "protobuf-4.25.4-cp38-cp38-win32.whl", hash = "sha256:7e372cbbda66a63ebca18f8ffaa6948455dfecc4e9c1029312f6c2edcd86c4e1"}, - {file = "protobuf-4.25.4-cp38-cp38-win_amd64.whl", hash = "sha256:051e97ce9fa6067a4546e75cb14f90cf0232dcb3e3d508c448b8d0e4265b61c1"}, - {file = "protobuf-4.25.4-cp39-cp39-win32.whl", hash = "sha256:90bf6fd378494eb698805bbbe7afe6c5d12c8e17fca817a646cd6a1818c696ca"}, - {file = "protobuf-4.25.4-cp39-cp39-win_amd64.whl", hash = "sha256:ac79a48d6b99dfed2729ccccee547b34a1d3d63289c71cef056653a846a2240f"}, - {file = "protobuf-4.25.4-py3-none-any.whl", hash = "sha256:bfbebc1c8e4793cfd58589acfb8a1026be0003e852b9da7db5a4285bde996978"}, - {file = "protobuf-4.25.4.tar.gz", hash = "sha256:0dc4a62cc4052a036ee2204d26fe4d835c62827c855c8a03f29fe6da146b380d"}, + {file = "protobuf-4.25.5-cp310-abi3-win32.whl", hash = "sha256:5e61fd921603f58d2f5acb2806a929b4675f8874ff5f330b7d6f7e2e784bbcd8"}, + {file = "protobuf-4.25.5-cp310-abi3-win_amd64.whl", hash = "sha256:4be0571adcbe712b282a330c6e89eae24281344429ae95c6d85e79e84780f5ea"}, + {file = "protobuf-4.25.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b2fde3d805354df675ea4c7c6338c1aecd254dfc9925e88c6d31a2bcb97eb173"}, + {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:919ad92d9b0310070f8356c24b855c98df2b8bd207ebc1c0c6fcc9ab1e007f3d"}, + {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fe14e16c22be926d3abfcb500e60cab068baf10b542b8c858fa27e098123e331"}, + {file = 
"protobuf-4.25.5-cp38-cp38-win32.whl", hash = "sha256:98d8d8aa50de6a2747efd9cceba361c9034050ecce3e09136f90de37ddba66e1"}, + {file = "protobuf-4.25.5-cp38-cp38-win_amd64.whl", hash = "sha256:b0234dd5a03049e4ddd94b93400b67803c823cfc405689688f59b34e0742381a"}, + {file = "protobuf-4.25.5-cp39-cp39-win32.whl", hash = "sha256:abe32aad8561aa7cc94fc7ba4fdef646e576983edb94a73381b03c53728a626f"}, + {file = "protobuf-4.25.5-cp39-cp39-win_amd64.whl", hash = "sha256:7a183f592dc80aa7c8da7ad9e55091c4ffc9497b3054452d629bb85fa27c2a45"}, + {file = "protobuf-4.25.5-py3-none-any.whl", hash = "sha256:0aebecb809cae990f8129ada5ca273d9d670b76d9bfc9b1809f0a9c02b7dbf41"}, + {file = "protobuf-4.25.5.tar.gz", hash = "sha256:7f8249476b4a9473645db7f8ab42b02fe1488cbe5fb72fddd445e0665afd8584"}, ] [[package]] @@ -2270,24 +2281,24 @@ files = [ [[package]] name = "pyasn1" -version = "0.6.0" +version = "0.6.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = ">=3.8" files = [ - {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, - {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, + {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, + {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, ] [[package]] name = "pyasn1-modules" -version = "0.4.0" +version = "0.4.1" description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" files = [ - {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, - {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, + {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, + {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, ] [package.dependencies] @@ -2306,119 +2317,120 @@ files = [ [[package]] name = "pydantic" -version = "2.8.2" +version = "2.9.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, - {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, + {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, + {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, ] [package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.20.1" +annotated-types = ">=0.6.0" +pydantic-core = "2.23.4" typing-extensions = {version = ">=4.6.1", markers = "python_version < \"3.13\""} [package.extras] email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.20.1" +version = "2.23.4" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = 
"sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, - {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, - {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, - {file = 
"pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, - {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, - {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, - {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, - {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, - {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, - {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, - {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, - {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, - {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, - {file = 
"pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, - {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, + {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, + {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, + {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, + {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, + {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, + {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, + {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, + {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, + {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, + {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, + {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, + {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, + {file = 
"pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, + {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, ] [package.dependencies] @@ -2818,13 +2830,13 @@ fixture = ["fixtures"] [[package]] name = "rich" -version = "13.8.0" +version = "13.8.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.8.0-py3-none-any.whl", hash = "sha256:2e85306a063b9492dffc86278197a60cbece75bcb766022f3436f567cae11bdc"}, - {file = "rich-13.8.0.tar.gz", hash = "sha256:a5ac1f1cd448ade0d59cc3356f7db7a7ccda2c8cbae9c7a90c28ff463d3e91f4"}, + {file = "rich-13.8.1-py3-none-any.whl", hash = "sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06"}, + {file = "rich-13.8.1.tar.gz", hash = "sha256:8260cda28e3db6bf04d2d1ef4dbc03ba80a824c88b0e7668a0f23126a424844a"}, ] [package.dependencies] @@ -3026,19 +3038,23 @@ crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] [[package]] name = "setuptools" -version = "73.0.1" +version = "75.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-73.0.1-py3-none-any.whl", hash = "sha256:b208925fcb9f7af924ed2dc04708ea89791e24bde0d3020b27df0e116088b34e"}, - {file = "setuptools-73.0.1.tar.gz", hash = "sha256:d59a3e788ab7e012ab2c4baed1b376da6366883ee20d7a5fc426816e3d7b1193"}, + {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, + {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, ] [package.extras] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", 
"pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] [[package]] name = "shellingham" @@ -3097,13 +3113,13 @@ files = [ [[package]] name = "starlette" -version = "0.38.2" +version = "0.38.5" description = "The little ASGI library that shines." optional = false python-versions = ">=3.8" files = [ - {file = "starlette-0.38.2-py3-none-any.whl", hash = "sha256:4ec6a59df6bbafdab5f567754481657f7ed90dc9d69b0c9ff017907dd54faeff"}, - {file = "starlette-0.38.2.tar.gz", hash = "sha256:c7c0441065252160993a1a37cf2a73bb64d271b17303e0b0c1eb7191cfb12d75"}, + {file = "starlette-0.38.5-py3-none-any.whl", hash = "sha256:632f420a9d13e3ee2a6f18f437b0a9f1faecb0bc42e1942aa2ea0e379a4c4206"}, + {file = "starlette-0.38.5.tar.gz", hash = "sha256:04a92830a9b6eb1442c766199d62260c3d4dc9c4f9188360626b1e0273cb7077"}, ] [package.dependencies] @@ -3231,13 +3247,13 @@ typing-extensions = ">=3.7.4.3" [[package]] name = "types-pyyaml" -version = "6.0.12.20240808" +version = "6.0.12.20240917" description = "Typing stubs for PyYAML" optional = false python-versions = ">=3.8" files = [ - {file = "types-PyYAML-6.0.12.20240808.tar.gz", hash = "sha256:b8f76ddbd7f65440a8bda5526a9607e4c7a322dc2f8e1a8c405644f9a6f4b9af"}, - {file = "types_PyYAML-6.0.12.20240808-py3-none-any.whl", hash = "sha256:deda34c5c655265fc517b546c902aa6eed2ef8d3e921e4765fe606fe2afe8d35"}, + {file = "types-PyYAML-6.0.12.20240917.tar.gz", hash = "sha256:d1405a86f9576682234ef83bcb4e6fff7c9305c8b1fbad5e0bcd4f7dbdc9c587"}, + {file = "types_PyYAML-6.0.12.20240917-py3-none-any.whl", hash = "sha256:392b267f1c0fe6022952462bf5d6523f31e37f6cea49b14cee7ad634b6301570"}, ] [[package]] @@ -3256,13 +3272,13 @@ types-urllib3 = "*" [[package]] name = "types-requests" -version = "2.32.0.20240712" +version = "2.32.0.20240914" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" files = [ - {file = "types-requests-2.32.0.20240712.tar.gz", hash = "sha256:90c079ff05e549f6bf50e02e910210b98b8ff1ebdd18e19c873cd237737c1358"}, - {file = "types_requests-2.32.0.20240712-py3-none-any.whl", hash = "sha256:f754283e152c752e46e70942fa2a146b5bc70393522257bb85bd1ef7e019dcc3"}, + {file = "types-requests-2.32.0.20240914.tar.gz", hash = "sha256:2850e178db3919d9bf809e434eef65ba49d0e7e33ac92d588f4a5e295fffd405"}, + {file = "types_requests-2.32.0.20240914-py3-none-any.whl", hash = "sha256:59c2f673eb55f32a99b2894faf6020e1a9f4a402ad0f192bfee0b64469054310"}, ] [package.dependencies] @@ -3318,13 +3334,13 @@ typing-extensions = ">=3.7.4" [[package]] name = "urllib3" -version = "1.26.19" +version = "1.26.20" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"}, - {file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"}, + {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, + {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, ] [package.extras] @@ -3334,13 +3350,13 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "urllib3" -version = "2.2.2" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] @@ -3414,13 +3430,13 @@ test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)" [[package]] name = "virtualenv" -version = "20.26.3" +version = "20.26.5" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, - {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, + {file = "virtualenv-20.26.5-py3-none-any.whl", hash = "sha256:4f3ac17b81fba3ce3bd6f4ead2749a72da5929c01774948e243db9ba41df4ff6"}, + {file = "virtualenv-20.26.5.tar.gz", hash = "sha256:ce489cac131aa58f4b25e321d6d186171f78e6cb13fafbf32a840cee67733ff4"}, ] [package.dependencies] @@ -3600,13 +3616,13 @@ files = [ [[package]] name = "zipp" -version = "3.20.1" +version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.20.1-py3-none-any.whl", hash = "sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064"}, - {file = "zipp-3.20.1.tar.gz", hash = "sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b"}, + {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, + {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, ] [package.extras] diff --git a/pyproject.toml b/pyproject.toml index f39032347..51578eff5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "truss" -version = "0.9.35" +version = "0.9.36rc01" description = "A seamless bridge from model development to model delivery" license = "MIT" readme = "README.md" diff --git a/truss/__init__.py b/truss/__init__.py index 5f23ce604..e381fa15d 100644 --- a/truss/__init__.py +++ b/truss/__init__.py @@ -1,7 +1,13 @@ +import warnings from pathlib import Path +from pydantic import PydanticDeprecatedSince20 from single_source import get_version +# 
Suppress Pydantic V1 warnings, because we have to use it for backwards compat. +warnings.filterwarnings("ignore", category=PydanticDeprecatedSince20) + + __version__ = get_version(__name__, Path(__file__).parent.parent) diff --git a/truss/config/trt_llm.py b/truss/config/trt_llm.py index 04b149809..3bef2c1f6 100644 --- a/truss/config/trt_llm.py +++ b/truss/config/trt_llm.py @@ -1,13 +1,17 @@ import json import logging +import warnings from enum import Enum from typing import Optional from huggingface_hub.errors import HFValidationError from huggingface_hub.utils import validate_repo_id -from pydantic import BaseModel, validator +from pydantic import BaseModel, PydanticDeprecatedSince20, validator from rich.console import Console +# Suppress Pydantic V1 warnings, because we have to use it for backwards compat. +warnings.filterwarnings("ignore", category=PydanticDeprecatedSince20) + logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) diff --git a/truss/remote/baseten/service.py b/truss/remote/baseten/service.py index 2954ef9b0..7392ace6b 100644 --- a/truss/remote/baseten/service.py +++ b/truss/remote/baseten/service.py @@ -1,6 +1,7 @@ import enum import time import urllib.parse +import warnings from typing import ( Any, Dict, @@ -17,6 +18,9 @@ from truss.truss_handle import TrussHandle from truss.util.errors import RemoteNetworkError +# "classes created inside an enum will not become a member" -> intended here anyway. +warnings.filterwarnings("ignore", category=DeprecationWarning, message=".*enum.*") + DEFAULT_STREAM_ENCODING = "utf-8" diff --git a/truss/remote/remote_factory.py b/truss/remote/remote_factory.py index 35c2314e5..426080d24 100644 --- a/truss/remote/remote_factory.py +++ b/truss/remote/remote_factory.py @@ -1,11 +1,12 @@ import inspect try: + from configparser import DEFAULTSECT, ConfigParser # type: ignore +except ImportError: + # We need to do this for old python. from configparser import DEFAULTSECT from configparser import SafeConfigParser as ConfigParser -except ImportError: - # We need to do this for py312 and onwards. 
- from configparser import DEFAULTSECT, ConfigParser # type: ignore + from functools import partial from operator import is_not diff --git a/truss/templates/base.Dockerfile.jinja b/truss/templates/base.Dockerfile.jinja index d7a11b144..5b66a7a6a 100644 --- a/truss/templates/base.Dockerfile.jinja +++ b/truss/templates/base.Dockerfile.jinja @@ -1,7 +1,7 @@ ARG PYVERSION={{config.python_version}} -FROM {{base_image_name_and_tag}} as truss_server +FROM {{base_image_name_and_tag}} AS truss_server -ENV PYTHON_EXECUTABLE {{ config.base_image.python_executable_path or 'python3' }} +ENV PYTHON_EXECUTABLE={{ config.base_image.python_executable_path or 'python3' }} {% block fail_fast %} RUN grep -w 'ID=debian\|ID_LIKE=debian' /etc/os-release || { echo "ERROR: Supplied base image is not a debian image"; exit 1; } @@ -52,7 +52,7 @@ RUN pip install -r {{config_requirements_filename}} --no-cache-dir && rm -rf /ro -ENV APP_HOME /app +ENV APP_HOME=/app WORKDIR $APP_HOME @@ -68,7 +68,7 @@ COPY ./{{config.bundled_packages_dir}} /packages {% for env_var_name, env_var_value in config.environment_variables.items() %} -ENV {{ env_var_name }} {{ env_var_value }} +ENV {{ env_var_name }}={{ env_var_value }} {% endfor %} {% block run %} diff --git a/truss/templates/cache.Dockerfile.jinja b/truss/templates/cache.Dockerfile.jinja index 5ed3dd169..eec4da24d 100644 --- a/truss/templates/cache.Dockerfile.jinja +++ b/truss/templates/cache.Dockerfile.jinja @@ -1,14 +1,14 @@ -FROM python:3.11-slim as cache_warmer +FROM python:3.11-slim AS cache_warmer RUN mkdir -p /app/model_cache WORKDIR /app {% if hf_access_token %} -ENV HUGGING_FACE_HUB_TOKEN {{hf_access_token}} +ENV HUGGING_FACE_HUB_TOKEN={{hf_access_token}} {% endif %} RUN apt-get -y update; apt-get -y install curl; curl -s https://baseten-public.s3.us-west-2.amazonaws.com/bin/b10cp-5fe8dc7da-linux-amd64 -o /app/b10cp; chmod +x /app/b10cp -ENV B10CP_PATH_TRUSS /app/b10cp +ENV B10CP_PATH_TRUSS=/app/b10cp COPY ./cache_requirements.txt /app/cache_requirements.txt RUN pip install -r /app/cache_requirements.txt --no-cache-dir && rm -rf /root/.cache/pip COPY ./cache_warmer.py /cache_warmer.py diff --git a/truss/templates/control/control/server.py b/truss/templates/control/control/server.py index a55b99652..7577e1a7f 100644 --- a/truss/templates/control/control/server.py +++ b/truss/templates/control/control/server.py @@ -35,7 +35,7 @@ def run(self): "inference_server_home": self._inf_serv_home, "inference_server_process_args": [ self._python_executable_path, - f"{self._inf_serv_home}/inference_server.py", + f"{self._inf_serv_home}/main.py", ], "control_server_host": "0.0.0.0", "control_server_port": self._control_server_port, diff --git a/truss/templates/server.Dockerfile.jinja b/truss/templates/server.Dockerfile.jinja index cac0f64d5..f5b25143f 100644 --- a/truss/templates/server.Dockerfile.jinja +++ b/truss/templates/server.Dockerfile.jinja @@ -7,7 +7,7 @@ {% block base_image_patch %} # If user base image is supplied in config, apply build commands from truss base image {% if config.base_image %} -ENV PYTHONUNBUFFERED True +ENV PYTHONUNBUFFERED=True ENV DEBIAN_FRONTEND=noninteractive RUN apt update && \ @@ -90,14 +90,14 @@ COPY ./{{ config.model_module_dir }} /app/model {% block run %} {%- if config.live_reload %} -ENV HASH_TRUSS {{truss_hash}} -ENV CONTROL_SERVER_PORT 8080 -ENV INFERENCE_SERVER_PORT 8090 +ENV HASH_TRUSS={{truss_hash}} +ENV CONTROL_SERVER_PORT=8080 +ENV INFERENCE_SERVER_PORT=8090 ENV SERVER_START_CMD="/control/.env/bin/python3 /control/control/server.py" 
ENTRYPOINT ["/control/.env/bin/python3", "/control/control/server.py"] {%- else %} -ENV INFERENCE_SERVER_PORT 8080 -ENV SERVER_START_CMD="{{(config.base_image.python_executable_path or "python3") ~ " /app/inference_server.py"}}" -ENTRYPOINT ["{{config.base_image.python_executable_path or "python3"}}", "/app/inference_server.py"] +ENV INFERENCE_SERVER_PORT=8080 +ENV SERVER_START_CMD="{{(config.base_image.python_executable_path or "python3") ~ " /app/main.py"}}" +ENTRYPOINT ["{{config.base_image.python_executable_path or "python3"}}", "/app/main.py"] {%- endif %} {% endblock %} diff --git a/truss/templates/server/common/errors.py b/truss/templates/server/common/errors.py index fea19fe12..90392b617 100644 --- a/truss/templates/server/common/errors.py +++ b/truss/templates/server/common/errors.py @@ -1,21 +1,21 @@ -import asyncio +import contextlib import logging +import sys from http import HTTPStatus +from types import TracebackType from typing import ( - Callable, - Coroutine, + Generator, Mapping, - NoReturn, Optional, - TypeVar, + Tuple, + Type, Union, - overload, ) import fastapi +import starlette.responses from fastapi import HTTPException from fastapi.responses import JSONResponse -from typing_extensions import ParamSpec # See https://github.com/basetenlabs/baseten/blob/master/docs/Error-Propagation.md _TRUSS_SERVER_SERVICE_ID = 4 @@ -51,6 +51,10 @@ class UserCodeError(Exception): pass +class ModelDefinitionError(TypeError): + """When the user-defined truss model does not meet the contract.""" + + def _make_baseten_error_headers(error_code: int) -> Mapping[str, str]: return { "X-BASETEN-ERROR-SOURCE": f"{_TRUSS_SERVER_SERVICE_ID:02}", @@ -58,6 +62,10 @@ def _make_baseten_error_headers(error_code: int) -> Mapping[str, str]: } +def add_error_headers_to_user_response(response: starlette.responses.Response) -> None: + response.headers.update(_make_baseten_error_headers(_BASETEN_CLIENT_ERROR_CODE)) + + def _make_baseten_response( http_status: int, info: Union[str, Exception], @@ -71,9 +79,7 @@ def _make_baseten_response( ) -async def exception_handler( - request: fastapi.Request, exc: Exception -) -> fastapi.Response: +async def exception_handler(_: fastapi.Request, exc: Exception) -> fastapi.Response: if isinstance(exc, ModelMissingError): return _make_baseten_response( HTTPStatus.NOT_FOUND.value, exc, _BASETEN_DOWNSTREAM_ERROR_CODE @@ -88,6 +94,12 @@ async def exception_handler( exc, _BASETEN_CLIENT_ERROR_CODE, ) + if isinstance(exc, ModelDefinitionError): + return _make_baseten_response( + HTTPStatus.PRECONDITION_FAILED.value, + f"{type(exc).__name__}: {str(exc)}", + _BASETEN_DOWNSTREAM_ERROR_CODE, + ) if isinstance(exc, UserCodeError): return _make_baseten_response( HTTPStatus.INTERNAL_SERVER_ERROR.value, @@ -113,60 +125,49 @@ async def exception_handler( NotImplementedError, InputParsingError, UserCodeError, + ModelDefinitionError, fastapi.HTTPException, } -def _intercept_user_exception(exc: Exception, logger: logging.Logger) -> NoReturn: - # Note that logger.exception logs the stacktrace, such that the user can - # debug this error from the logs. - # TODO: consider removing the wrapper function from the stack trace. 
- if isinstance(exc, HTTPException): - logger.exception("Model raised HTTPException", stacklevel=2) - raise exc - else: - logger.exception("Internal Server Error", stacklevel=2) - raise UserCodeError(str(exc)) - - -_P = ParamSpec("_P") -_R = TypeVar("_R") -_R_async = TypeVar("_R_async", bound=Coroutine) # Return type for async functions - - -@overload -def intercept_exceptions( - func: Callable[_P, _R], logger: logging.Logger -) -> Callable[_P, _R]: ... +def filter_traceback( + model_file_name: str, +) -> Union[ + Tuple[Type[BaseException], BaseException, TracebackType], + Tuple[None, None, None], +]: + exc_type, exc_value, tb = sys.exc_info() + if tb is None: + return exc_type, exc_value, tb # type: ignore[return-value] + # Walk the traceback until we find the frame ending with 'model.py' + current_tb: Optional[TracebackType] = tb + while current_tb is not None: + filename = current_tb.tb_frame.f_code.co_filename + if filename.endswith(model_file_name): + # Return exception info with traceback starting from current_tb + return exc_type, exc_value, current_tb # type: ignore[return-value] + current_tb = current_tb.tb_next -@overload -def intercept_exceptions( - func: Callable[_P, _R_async], logger: logging.Logger -) -> Callable[_P, _R_async]: ... + # If `model_file_name` not found, return the original exception info + return exc_type, exc_value, tb # type: ignore[return-value] +@contextlib.contextmanager def intercept_exceptions( - func: Callable[_P, _R], logger: logging.Logger -) -> Callable[_P, _R]: - """Converts all exceptions to 500-`HTTPException` and logs them. - If exception is already `HTTPException`, re-raises exception as is. - """ - if asyncio.iscoroutinefunction(func): - - async def inner_async(*args: _P.args, **kwargs: _P.kwargs) -> _R: - try: - return await func(*args, **kwargs) - except Exception as e: - _intercept_user_exception(e, logger) - - return inner_async # type: ignore[return-value] - else: - - def inner_sync(*args: _P.args, **kwargs: _P.kwargs) -> _R: - try: - return func(*args, **kwargs) - except Exception as e: - _intercept_user_exception(e, logger) - - return inner_sync + logger: logging.Logger, model_file_name: str +) -> Generator[None, None, None]: + try: + yield + # Note that logger.error logs the stacktrace, such that the user can + # debug this error from the logs. + except HTTPException: + logger.error( + "Model raised HTTPException", exc_info=filter_traceback(model_file_name) + ) + raise + except Exception as exc: + logger.error( + "Internal Server Error", exc_info=filter_traceback(model_file_name) + ) + raise UserCodeError(str(exc)) diff --git a/truss/templates/server/common/tracing.py b/truss/templates/server/common/tracing.py index dd01601a1..8ac927c1d 100644 --- a/truss/templates/server/common/tracing.py +++ b/truss/templates/server/common/tracing.py @@ -54,7 +54,7 @@ def shutdown(self) -> None: _truss_tracer: Optional[trace.Tracer] = None -def get_truss_tracer(secrets: secrets_resolver.SecretsResolver, config) -> trace.Tracer: +def get_truss_tracer(secrets: secrets_resolver.Secrets, config) -> trace.Tracer: """Creates a cached tracer (i.e. runtime-singleton) to be used for truss internal tracing. 
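The decorator-based `intercept_exceptions` above becomes a context manager that logs with a traceback trimmed by `filter_traceback` to start at the user's model file and re-raises user errors as `UserCodeError`. A usage sketch under the assumption that it wraps a call into user code (logger, import path, and file name are placeholders):

import logging
from common import errors  # path as used by the server templates; assumed here

logger = logging.getLogger(__name__)

def run_user_predict(model, inputs):
    # HTTPExceptions raised by the model pass through unchanged; any other
    # exception is logged with a trimmed traceback and re-raised as UserCodeError.
    with errors.intercept_exceptions(logger, "model.py"):
        return model.predict(inputs)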
diff --git a/truss/templates/server/inference_server.py b/truss/templates/server/inference_server.py deleted file mode 100644 index 5b9ea2e44..000000000 --- a/truss/templates/server/inference_server.py +++ /dev/null @@ -1,29 +0,0 @@ -import os -from typing import Dict - -import yaml -from shared.logging import setup_logging -from truss_server import TrussServer # noqa: E402 - -CONFIG_FILE = "config.yaml" - -setup_logging() - - -class ConfiguredTrussServer: - _config: Dict - _port: int - - def __init__(self, config_path: str, port: int): - self._port = port - with open(config_path, encoding="utf-8") as config_file: - self._config = yaml.safe_load(config_file) - - def start(self): - server = TrussServer(http_port=self._port, config=self._config) - server.start() - - -if __name__ == "__main__": - env_port = int(os.environ.get("INFERENCE_SERVER_PORT", "8080")) - ConfiguredTrussServer(CONFIG_FILE, env_port).start() diff --git a/truss/templates/server/main.py b/truss/templates/server/main.py new file mode 100644 index 000000000..a2ca0981f --- /dev/null +++ b/truss/templates/server/main.py @@ -0,0 +1,11 @@ +import os + +from shared.logging import setup_logging +from truss_server import TrussServer + +CONFIG_FILE = "config.yaml" + +if __name__ == "__main__": + setup_logging() + http_port = int(os.environ.get("INFERENCE_SERVER_PORT", "8080")) + TrussServer(http_port, CONFIG_FILE).start() diff --git a/truss/templates/server/model_wrapper.py b/truss/templates/server/model_wrapper.py index 4b7bab46c..2586efc2c 100644 --- a/truss/templates/server/model_wrapper.py +++ b/truss/templates/server/model_wrapper.py @@ -1,27 +1,32 @@ import asyncio +import dataclasses +import enum import importlib +import importlib.util import inspect import logging import os +import pathlib import sys import time -from collections.abc import Generator from contextlib import asynccontextmanager from enum import Enum +from functools import cached_property from multiprocessing import Lock from pathlib import Path from threading import Thread from typing import ( Any, - AsyncGenerator, Callable, Dict, - Mapping, Optional, + Tuple, Union, ) import opentelemetry.sdk.trace as sdk_trace +import starlette.requests +import starlette.responses from anyio import Semaphore, to_thread from common import errors, tracing from common.patches import apply_patches @@ -29,9 +34,15 @@ from common.schema import TrussSchema from opentelemetry import trace from pydantic import BaseModel +from shared import serialization from shared.lazy_data_resolver import LazyDataResolver from shared.secrets_resolver import SecretsResolver +if sys.version_info >= (3, 9): + from typing import AsyncGenerator, Generator +else: + from typing_extensions import AsyncGenerator, Generator + MODEL_BASENAME = "model" NUM_LOAD_RETRIES = int(os.environ.get("NUM_LOAD_RETRIES_TRUSS", "1")) @@ -74,8 +85,175 @@ def defer() -> Callable[[], None]: release_and_end() +_ArgsType = Union[ + Tuple[Any], + Tuple[Any, starlette.requests.Request], + Tuple[starlette.requests.Request], +] + + +class _Sentinel: + def __repr__(self) -> str: + return "" + + +SENTINEL = _Sentinel() + + +def _is_request_type(obj: Any) -> bool: + return isinstance(obj, type) and issubclass(obj, starlette.requests.Request) + + +class ArgConfig(enum.Enum): + INPUTS_ONLY = enum.auto() + REQUEST_ONLY = enum.auto() + INPUTS_AND_REQUEST = enum.auto() + + @classmethod + def from_signature( + cls, + signature: inspect.Signature, + method_name: str, + ) -> "ArgConfig": + parameters = list(signature.parameters.values()) + + 
if len(parameters) == 1: + if _is_request_type(parameters[0].annotation): + return cls.REQUEST_ONLY + return cls.INPUTS_ONLY + + elif len(parameters) == 2: + # First arg can be whatever, except request. Second arg must be request. + param1, param2 = parameters + if param1.annotation: + if _is_request_type(param1.annotation): + raise errors.ModelDefinitionError( + f"`{method_name}` method with two arguments is not allowed to " + "have request as first argument, request must be second. " + f"Got: {signature}" + ) + if not (param2.annotation and _is_request_type(param2.annotation)): + raise errors.ModelDefinitionError( + f"`{method_name}` method with two arguments must have request as " + f"second argument (type annotated). Got: {signature} " + ) + return cls.INPUTS_AND_REQUEST + else: + raise errors.ModelDefinitionError( + f"`{method_name}` method cannot have more than two arguments. " + f"Got: {signature}" + ) + + @classmethod + def prepare_args( + cls, + descriptor: "MethodDescriptor", + inputs: Any, + request: starlette.requests.Request, + ) -> _ArgsType: + args: _ArgsType + if descriptor.arg_config == ArgConfig.INPUTS_ONLY: + args = (inputs,) + elif descriptor.arg_config == ArgConfig.REQUEST_ONLY: + args = (request,) + elif descriptor.arg_config == ArgConfig.INPUTS_AND_REQUEST: + args = (inputs, request) + else: + raise NotImplementedError(f"Arg config {descriptor.arg_config}.") + return args + + +@dataclasses.dataclass +class MethodDescriptor: + is_async: bool + is_generator: bool + arg_config: ArgConfig + + @classmethod + def from_method(cls, method, method_name: str) -> "MethodDescriptor": + return cls( + is_async=inspect.iscoroutinefunction(method) + or inspect.isasyncgenfunction(method), + is_generator=inspect.isgeneratorfunction(method) + or inspect.isasyncgenfunction(method), + arg_config=ArgConfig.from_signature(inspect.signature(method), method_name), + ) + + +@dataclasses.dataclass +class ModelDescriptor: + preprocess: Optional[MethodDescriptor] + predict: MethodDescriptor + postprocess: Optional[MethodDescriptor] + truss_schema: Optional[TrussSchema] + + @cached_property + def skip_input_parsing(self) -> bool: + return self.predict.arg_config == ArgConfig.REQUEST_ONLY and ( + not self.preprocess or self.preprocess.arg_config == ArgConfig.REQUEST_ONLY + ) + + @classmethod + def from_model(cls, model) -> "ModelDescriptor": + if hasattr(model, "preprocess"): + preprocess = MethodDescriptor.from_method( + model.preprocess, method_name="preprocess" + ) + else: + preprocess = None + + if hasattr(model, "predict"): + predict = MethodDescriptor.from_method( + model.predict, + method_name="predict", + ) + if preprocess and predict.arg_config == ArgConfig.REQUEST_ONLY: + raise errors.ModelDefinitionError( + "When using preprocessing, the predict method cannot only have the " + "request argument (because the result of preprocessing would be " + "discarded)." + ) + else: + raise errors.ModelDefinitionError( + "Truss model must have a `predict` method." + ) + + if hasattr(model, "postprocess"): + postprocess = MethodDescriptor.from_method(model.postprocess, "postprocess") + if postprocess and postprocess.arg_config == ArgConfig.REQUEST_ONLY: + raise errors.ModelDefinitionError( + "The postprocessing method cannot only have the request " + "argument (because the result of predict would be discarded)." 
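`ArgConfig.from_signature` above classifies the user-facing methods by their (bound) signature: a single non-request argument, only the request, or inputs plus the request, where the request must be the second, type-annotated parameter. Illustrative placeholder models for the three accepted shapes (not part of the patch):

import starlette.requests

class InputsOnly:
    def predict(self, inputs):  # classified as ArgConfig.INPUTS_ONLY
        return inputs

class RequestOnly:
    def predict(self, request: starlette.requests.Request):  # ArgConfig.REQUEST_ONLY
        return {"path": request.url.path}

class InputsAndRequest:
    # The request must come second and must be annotated, otherwise a
    # ModelDefinitionError is raised.
    def predict(self, inputs, request: starlette.requests.Request):  # ArgConfig.INPUTS_AND_REQUEST
        return inputs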
+ ) + else: + postprocess = None + + if preprocess: + parameters = inspect.signature(model.preprocess).parameters + else: + parameters = inspect.signature(model.predict).parameters + + if postprocess: + return_annotation = inspect.signature(model.postprocess).return_annotation + else: + return_annotation = inspect.signature(model.predict).return_annotation + + return cls( + preprocess=preprocess, + predict=predict, + postprocess=postprocess, + truss_schema=TrussSchema.from_signature(parameters, return_annotation), + ) + + class ModelWrapper: + _config: Dict _tracer: sdk_trace.Tracer + _maybe_model: Optional[Any] + _maybe_model_descriptor: Optional[ModelDescriptor] + _logger: logging.Logger + _status: "ModelWrapper.Status" + _predict_semaphore: Semaphore class Status(Enum): NOT_READY = 0 @@ -86,9 +264,10 @@ class Status(Enum): def __init__(self, config: Dict, tracer: sdk_trace.Tracer): self._config = config self._tracer = tracer + self._maybe_model = None + self._maybe_model_descriptor = None self._logger = logging.getLogger() self.name = MODEL_BASENAME - self.ready = False self._load_lock = Lock() self._status = ModelWrapper.Status.NOT_READY self._predict_semaphore = Semaphore( @@ -96,48 +275,63 @@ def __init__(self, config: Dict, tracer: sdk_trace.Tracer): "predict_concurrency", DEFAULT_PREDICT_CONCURRENCY ) ) - self.truss_schema: TrussSchema = None + + @property + def _model(self) -> Any: + if self._maybe_model: + return self._maybe_model + else: + raise errors.ModelNotReady(self.name) + + @property + def model_descriptor(self) -> ModelDescriptor: + if self._maybe_model_descriptor: + return self._maybe_model_descriptor + else: + raise errors.ModelNotReady(self.name) + + @property + def load_failed(self) -> bool: + return self._status == ModelWrapper.Status.FAILED + + @property + def ready(self) -> bool: + return self._status == ModelWrapper.Status.READY + + @property + def _model_file_name(self) -> str: + return self._config["model_class_filename"] + + def start_load_thread(self): + # Don't retry failed loads. 
+ if self._status == ModelWrapper.Status.NOT_READY: + thread = Thread(target=self.load) + thread.start() def load(self) -> bool: if self.ready: - return self.ready + return True # if we are already loading, block on aquiring the lock; # this worker will return 503 while the worker with the lock is loading with self._load_lock: self._status = ModelWrapper.Status.LOADING - self._logger.info("Executing model.load()...") - try: start_time = time.perf_counter() - self.try_load() - self.ready = True + self._load_impl() self._status = ModelWrapper.Status.READY self._logger.info( f"Completed model.load() execution in {_elapsed_ms(start_time)} ms" ) - - return self.ready + return True except Exception: self._logger.exception("Exception while loading model") self._status = ModelWrapper.Status.FAILED - return self.ready - - def start_load(self): - if self.should_load(): - thread = Thread(target=self.load) - thread.start() + return False - def load_failed(self) -> bool: - return self._status == ModelWrapper.Status.FAILED - - def should_load(self) -> bool: - # don't retry failed loads - return not self._status == ModelWrapper.Status.FAILED and not self.ready - - def try_load(self): + def _load_impl(self): data_dir = Path("data") data_dir.mkdir(exist_ok=True) @@ -165,11 +359,23 @@ def try_load(self): / self._config["model_class_filename"] ) if model_class_file_path.exists(): - model_module_path = Path(self._config["model_class_filename"]) - model_module_name = str(model_module_path.with_suffix("")) - module = importlib.import_module( - f"{self._config['model_module_dir']}.{model_module_name}" - ) + self._logger.info("Loading truss model from file.") + module_path = pathlib.Path(model_class_file_path).resolve() + module_name = module_path.stem # Use the file's name as the module name + if not os.path.isfile(module_path): + raise ImportError( + f"`{module_path}` is not a file. You must point to a python file where " + "the entrypoint chainlet is defined." + ) + import_error_msg = f"Could not import `{module_path}`. Check path." + spec = importlib.util.spec_from_file_location(module_name, module_path) + if not spec: + raise ImportError(import_error_msg) + if not spec.loader: + raise ImportError(import_error_msg) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + model_class = getattr(module, self._config["model_class_name"]) model_init_params = _prepare_init_args( model_class, @@ -182,16 +388,18 @@ def try_load(self): for ext_name, ext in extensions.items(): if _signature_accepts_keyword_arg(signature, ext_name): model_init_params[ext_name] = ext.model_args() - self._model = model_class(**model_init_params) + self._maybe_model = model_class(**model_init_params) + elif TRT_LLM_EXTENSION_NAME in extensions: + self._logger.info("Loading TRT LLM extension as model.") # trt_llm extension allows model.py to be absent. It supplies its # own model class in that case. 
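The model module is now imported from its file path via `importlib.util` rather than `importlib.import_module`, so loading no longer depends on the model directory being importable as a package. A self-contained sketch of that mechanism, with a placeholder path and class name:

import importlib.util
import pathlib

module_path = pathlib.Path("/app/model/model.py").resolve()  # placeholder path
spec = importlib.util.spec_from_file_location(module_path.stem, module_path)
if spec is None or spec.loader is None:
    raise ImportError(f"Could not import `{module_path}`. Check path.")
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
model_class = getattr(module, "Model")  # the real server reads the class name from config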
trt_llm_extension = extensions["trt_llm"] - self._model = trt_llm_extension.model_override() + self._maybe_model = trt_llm_extension.model_override() else: raise RuntimeError("No module class file found") - self.set_truss_schema() + self._maybe_model_descriptor = ModelDescriptor.from_model(self._model) if hasattr(self._model, "load"): retry( @@ -202,144 +410,92 @@ def try_load(self): gap_seconds=1.0, ) - def set_truss_schema(self): - parameters = ( - inspect.signature(self._model.preprocess).parameters - if hasattr(self._model, "preprocess") - else inspect.signature(self._model.predict).parameters - ) - - outputs_annotation = ( - inspect.signature(self._model.postprocess).return_annotation - if hasattr(self._model, "postprocess") - else inspect.signature(self._model.predict).return_annotation - ) - - self.truss_schema = TrussSchema.from_signature(parameters, outputs_annotation) - async def preprocess( self, - payload: Any, + inputs: serialization.InputType, + request: starlette.requests.Request, ) -> Any: - if not hasattr(self._model, "preprocess"): - return payload + descriptor = self.model_descriptor.preprocess + if not descriptor: + return inputs - if inspect.iscoroutinefunction(self._model.preprocess): - return await errors.intercept_exceptions( - self._model.preprocess, self._logger - )(payload) - else: - return await to_thread.run_sync( - errors.intercept_exceptions(self._model.preprocess, self._logger), - payload, - ) + args = ArgConfig.prepare_args(descriptor, inputs, request) + with errors.intercept_exceptions(self._logger, self._model_file_name): + if descriptor.is_async: + return await self._model.preprocess(*args) + else: + return await to_thread.run_sync(self._model.preprocess, *args) async def predict( self, - payload: Any, - ) -> Any: - # It's possible for the user's predict function to be a: - # 1. Generator function (function that returns a generator) - # 2. Async generator (function that returns async generator) - # In these cases, just return the generator or async generator, - # as we will be propagating these up. No need for await at this point. - # 3. Coroutine -- in this case, await the predict function as it is async - # 4. Normal function -- in this case, offload to a separate thread to prevent - # blocking the main event loop - if inspect.isasyncgenfunction( - self._model.predict - ) or inspect.isgeneratorfunction(self._model.predict): - return self._model.predict(payload) - - if inspect.iscoroutinefunction(self._model.predict): - return await errors.intercept_exceptions(self._model.predict, self._logger)( - payload - ) - - return await to_thread.run_sync( - errors.intercept_exceptions(self._model.predict, self._logger), payload - ) + inputs: Any, + request: starlette.requests.Request, + ) -> Union[serialization.OutputType, Any]: + # The result can be a serializable data structure, byte-generator, a request, + # or, if `postprocessing` is used, anything. In the last case postprocessing + # must convert the result to something serializable. + descriptor = self.model_descriptor.predict + args = ArgConfig.prepare_args(descriptor, inputs, request) + with errors.intercept_exceptions(self._logger, self._model_file_name): + if descriptor.is_generator: + # Even for async generators, don't await here. + return self._model.predict(*args) + if descriptor.is_async: + return await self._model.predict(*args) + # Offload sync functions to thread, to not block event loop. 
+ return await to_thread.run_sync(self._model.predict, *args) async def postprocess( self, - response: Any, - ) -> Any: - # Similar to the predict function, it is possible for postprocess - # to return either a generator or async generator, in which case - # just return the generator. - # - # It can also return a coroutine or just be a function, in which - # case either await, or offload to a thread respectively. - if not hasattr(self._model, "postprocess"): - return response - - if inspect.isasyncgenfunction( - self._model.postprocess - ) or inspect.isgeneratorfunction(self._model.postprocess): - return self._model.postprocess(response) - - if inspect.iscoroutinefunction(self._model.postprocess): - return await errors.intercept_exceptions( - self._model.postprocess, self._logger - )(response) - - return await to_thread.run_sync( - errors.intercept_exceptions(self._model.postprocess, self._logger), response - ) - - async def write_response_to_queue( - self, queue: asyncio.Queue, generator: AsyncGenerator, span: trace.Span - ): + result: Union[serialization.InputType, Any], + request: starlette.requests.Request, + ) -> serialization.OutputType: + # The postprocess function can handle outputs of `predict`, but not + # generators and responses - in that case predict must return directly + # and postprocess is skipped. + # The result type can be the same as for predict. + descriptor = self.model_descriptor.postprocess + if not descriptor: + return result + + args = ArgConfig.prepare_args(descriptor, result, request) + with errors.intercept_exceptions(self._logger, self._model_file_name): + if descriptor.is_async: + return await self._model.postprocess(*args) + # Offload sync functions to thread, to not block event loop. + return await to_thread.run_sync(self._model.postprocess, *args) + + async def _write_response_to_queue( + self, + queue: asyncio.Queue, + generator: AsyncGenerator[bytes, None], + span: trace.Span, + ) -> None: with tracing.section_as_event(span, "write_response_to_queue"): try: async for chunk in generator: - # TODO: consider checking `request.is_disconnected()` for - # client-side cancellations and freeing resources. - await queue.put(ResponseChunk(chunk)) + await queue.put(chunk) except Exception as e: self._logger.exception( - "Exception while reading stream response: " + str(e) + "Exception while generating streamed response: " + str(e), + exc_info=errors.filter_traceback(self._model_file_name), ) finally: - await queue.put(None) - - async def _streaming_post_process(self, response: Any, span: trace.Span) -> Any: - if hasattr(self._model, "postprocess"): - self._logger.warning( - "Predict returned a streaming response, while a postprocess is defined." - "Note that in this case, the postprocess will run within the predict lock." - ) - with tracing.section_as_event( - span, "postprocess" - ), tracing.detach_context(): - response = await self.postprocess(response) - - return response - - async def _gather_generator(self, response: Any, span: trace.Span) -> str: - # In the case of gathering, it might make more sense to apply the postprocess - # to the gathered result, but that would be inconsistent with streaming. - # In general, it might even be better to strictly forbid postprocessing - # for generators. 
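With the dispatch above, a model streams by returning a sync or async generator from `predict`; combining that with `postprocess` is rejected as a `ModelDefinitionError` further below. A placeholder streaming model for illustration only:

import time
from typing import Generator

class StreamingModel:
    def predict(self, inputs) -> Generator[bytes, None, None]:
        # Each yielded chunk is forwarded to the client as part of a streaming
        # response (or gathered into a single string if the client sends
        # "accept: application/json").
        for token in ("streamed", " ", "chunks"):
            time.sleep(0.01)  # stand-in for incremental generation
            yield token.encode()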
- response = await self._streaming_post_process(response, span) - return await _convert_streamed_response_to_string( - _force_async_generator(response) - ) + await queue.put(SENTINEL) async def _stream_with_background_task( self, - response: Any, + generator: Union[Generator[bytes, None, None], AsyncGenerator[bytes, None]], span: trace.Span, trace_ctx: trace.Context, release_and_end: Callable[[], None], - ): + ) -> AsyncGenerator[bytes, None]: # The streaming read timeout is the amount of time in between streamed chunk # before a timeout is triggered. streaming_read_timeout = self._config.get("runtime", {}).get( "streaming_read_timeout", STREAMING_RESPONSE_QUEUE_READ_TIMEOUT_SECS ) - response = await self._streaming_post_process(response, span) - async_generator = _force_async_generator(response) + async_generator = _force_async_generator(generator) # To ensure that a partial read from a client does not keep the semaphore # claimed, we write all the data from the stream to the queue as it is produced, # irrespective of how fast it is consumed. @@ -349,13 +505,13 @@ async def _stream_with_background_task( # `write_response_to_queue` keeps running the background until completion. gen_task = asyncio.create_task( - self.write_response_to_queue(response_queue, async_generator, span) + self._write_response_to_queue(response_queue, async_generator, span) ) # Defer the release of the semaphore until the write_response_to_queue task. gen_task.add_done_callback(lambda _: release_and_end()) # The gap between responses in a stream must be < streaming_read_timeout - async def _buffered_response_generator(): + async def _buffered_response_generator() -> AsyncGenerator[bytes, None]: # `span` is tied to the "producer" `gen_task` which might complete before # "consume" part here finishes, therefore a dedicated span is required. # Because all of this code is inside a `detach_context` block, we @@ -368,31 +524,25 @@ async def _buffered_response_generator(): response_queue.get(), timeout=streaming_read_timeout, ) - if chunk is None: + if chunk == SENTINEL: return - yield chunk.value + yield chunk return _buffered_response_generator() async def __call__( - self, body: Any, headers: Optional[Mapping[str, str]] = None - ) -> Union[Dict, Generator, AsyncGenerator, str]: - """Method to call predictor or explainer with the given input. - - Args: - body (Any): Request payload body. - headers (Dict): Request headers. - - Returns: - Dict: Response output from preprocess -> predictor -> postprocess - Generator: In case of streaming response - String: in case of non-streamed generator (the string is the JSON result). + self, + inputs: Optional[serialization.InputType], + request: starlette.requests.Request, + ) -> serialization.OutputType: + """ + Returns result from: preprocess -> predictor -> postprocess. """ with self._tracer.start_as_current_span("call-pre") as span_pre: with tracing.section_as_event( span_pre, "preprocess" ), tracing.detach_context(): - payload = await self.preprocess(body) + preprocess_result = await self.preprocess(inputs, request) span_predict = self._tracer.start_span("call-predict") async with deferred_semaphore_and_span( @@ -414,41 +564,64 @@ async def __call__( # exactly handle that case we would need to apply `detach_context` # around each `next`-invocation that consumes the generator, which is # prohibitive. 
- response = await self.predict(payload) + predict_result = await self.predict(preprocess_result, request) - if inspect.isgenerator(response) or inspect.isasyncgen(response): - if headers and headers.get("accept") == "application/json": + if inspect.isgenerator(predict_result) or inspect.isasyncgen( + predict_result + ): + if self.model_descriptor.postprocess: + with errors.intercept_exceptions( + self._logger, self._model_file_name + ): + raise errors.ModelDefinitionError( + "If the predict function returns a generator (streaming), " + "you cannot use postprocessing. Include all processing in " + "the predict method." + ) + + if request.headers.get("accept") == "application/json": # In the case of a streaming response, consume stream # if the http accept header is set, and json is requested. - return await self._gather_generator(response, span_predict) + return await _gather_generator(predict_result) else: return await self._stream_with_background_task( - response, + predict_result, span_predict, detached_ctx, release_and_end=get_defer_fn(), ) + if isinstance(predict_result, starlette.responses.Response): + if self.model_descriptor.postprocess: + with errors.intercept_exceptions(self._logger, self._model_file_name): + raise errors.ModelDefinitionError( + "If the predict function returns a response object, you cannot " + "use postprocessing." + ) + else: + return predict_result + with self._tracer.start_as_current_span("call-post") as span_post: with tracing.section_as_event( span_post, "postprocess" ), tracing.detach_context(): - processed_response = await self.postprocess(response) + postprocess_result = await self.postprocess(predict_result, request) - if isinstance(processed_response, BaseModel): + if isinstance(postprocess_result, BaseModel): # If we return a pydantic object, convert it back to a dict with tracing.section_as_event(span_post, "dump-pydantic"): - processed_response = processed_response.dict() - return processed_response + final_result = postprocess_result.dict() + else: + final_result = postprocess_result + return final_result -class ResponseChunk: - def __init__(self, value): - self.value = value - - -async def _convert_streamed_response_to_string(response: AsyncGenerator): - return "".join([str(chunk) async for chunk in response]) +async def _gather_generator( + predict_result: Union[AsyncGenerator[bytes, None], Generator[bytes, None, None]], +) -> str: + return "".join( + [str(chunk) async for chunk in _force_async_generator(predict_result)] + ) def _force_async_generator(gen: Union[Generator, AsyncGenerator]) -> AsyncGenerator: @@ -463,15 +636,14 @@ async def _convert_generator_to_async(): Runs each iteration of the generator in an offloaded thread, to ensure the main loop is not blocked, and yield to create an async generator. """ - FINAL_GENERATOR_VALUE = object() while True: # Note that this is the equivalent of running: # next(gen, FINAL_GENERATOR_VALUE) on a separate thread, # ensuring that if there is anything blocking in the generator, # it does not block the main loop. 
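The streaming path above decouples producing and consuming chunks with an `asyncio.Queue` terminated by a sentinel, so the predict semaphore can be released once the generator is fully drained even if the client reads slowly, and a gap between chunks longer than the read timeout fails the stream. A stripped-down illustration of that pattern (not the server code itself):

import asyncio

SENTINEL = object()

async def produce(gen, queue: asyncio.Queue) -> None:
    try:
        async for chunk in gen:
            await queue.put(chunk)
    finally:
        await queue.put(SENTINEL)  # always signal completion, even on errors

async def consume(queue: asyncio.Queue, timeout: float):
    # A gap between chunks longer than `timeout` raises asyncio.TimeoutError.
    while True:
        chunk = await asyncio.wait_for(queue.get(), timeout=timeout)
        if chunk is SENTINEL:
            return
        yield chunk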
- chunk = await to_thread.run_sync(next, gen, FINAL_GENERATOR_VALUE) - if chunk == FINAL_GENERATOR_VALUE: - break + chunk = await to_thread.run_sync(next, gen, SENTINEL) + if chunk == SENTINEL: + return yield chunk return _convert_generator_to_async() diff --git a/truss/templates/server/truss_server.py b/truss/templates/server/truss_server.py index b567ea500..776849f89 100644 --- a/truss/templates/server/truss_server.py +++ b/truss/templates/server/truss_server.py @@ -7,32 +7,35 @@ import socket import sys import time -from collections.abc import Generator +from http import HTTPStatus from pathlib import Path -from typing import AsyncGenerator, Dict, List, Optional, Union +from typing import Dict, List, Optional, Union import pydantic import uvicorn +import yaml from common import errors, tracing +from common.schema import TrussSchema from common.termination_handler_middleware import TerminationHandlerMiddleware from fastapi import Depends, FastAPI, HTTPException, Request from fastapi.responses import ORJSONResponse, StreamingResponse from fastapi.routing import APIRoute as FastAPIRoute from model_wrapper import ModelWrapper from opentelemetry import propagate as otel_propagate +from opentelemetry import trace from opentelemetry.sdk import trace as sdk_trace -from shared import util +from shared import serialization, util from shared.logging import setup_logging from shared.secrets_resolver import SecretsResolver -from shared.serialization import ( - DeepNumpyEncoder, - truss_msgpack_deserialize, - truss_msgpack_serialize, -) from starlette.middleware.base import BaseHTTPMiddleware from starlette.requests import ClientDisconnect from starlette.responses import Response +if sys.version_info >= (3, 9): + from typing import AsyncGenerator, Generator +else: + from typing_extensions import AsyncGenerator, Generator + # [IMPORTANT] A lot of things depend on this currently. # Please consider the following when increasing this: # 1. Self-termination on model load fail. 
@@ -92,7 +95,7 @@ def _safe_lookup_model(self, model_name: str) -> ModelWrapper: @staticmethod def check_healthy(model: ModelWrapper): - if model.load_failed(): + if model.load_failed: INFERENCE_SERVER_FAILED_FILE.touch() os.kill(os.getpid(), signal.SIGKILL) @@ -122,6 +125,45 @@ async def invocations( """ return await self.predict(self._model.name, request, body_raw) + async def _parse_body( + self, + request: Request, + body_raw: bytes, + truss_schema: Optional[TrussSchema], + span: trace.Span, + ) -> serialization.InputType: + if self.is_binary(request): + with tracing.section_as_event(span, "binary-deserialize"): + inputs = serialization.truss_msgpack_deserialize(body_raw) + if truss_schema: + try: + with tracing.section_as_event(span, "parse-pydantic"): + inputs = truss_schema.input_type.parse_obj(inputs) + except pydantic.ValidationError as e: + raise errors.InputParsingError( + f"Request Validation Error, {str(e)}" + ) from e + else: + if truss_schema: + if truss_schema: + try: + with tracing.section_as_event(span, "parse-pydantic"): + inputs = truss_schema.input_type.parse_raw(body_raw) + except pydantic.ValidationError as e: + raise errors.InputParsingError( + f"Request Validation Error, {str(e)}" + ) from e + else: + try: + with tracing.section_as_event(span, "json-deserialize"): + inputs = json.loads(body_raw) + except json.JSONDecodeError as e: + raise errors.InputParsingError( + f"Invalid JSON payload: {str(e)}" + ) from e + + return inputs + async def predict( self, model_name: str, request: Request, body_raw: bytes = Depends(parse_body) ) -> Response: @@ -137,75 +179,46 @@ async def predict( with self._tracer.start_as_current_span( "predict-endpoint", context=trace_ctx ) as span: - body: Dict - if self.is_binary(request): - with tracing.section_as_event(span, "binary-deserialize"): - body = truss_msgpack_deserialize(body_raw) - if model.truss_schema: - try: - with tracing.section_as_event(span, "parse-pydantic"): - body = model.truss_schema.input_type.parse_obj(body) - except pydantic.ValidationError as e: - raise errors.InputParsingError( - f"Request Validation Error, {str(e)}" - ) from e + inputs: Optional[serialization.InputType] + if model.model_descriptor.skip_input_parsing: + inputs = None else: - if model.truss_schema: - if model.truss_schema: - try: - with tracing.section_as_event(span, "parse-pydantic"): - body = model.truss_schema.input_type.parse_raw(body_raw) - except pydantic.ValidationError as e: - raise errors.InputParsingError( - f"Request Validation Error, {str(e)}" - ) from e - else: - try: - with tracing.section_as_event(span, "json-deserialize"): - body = json.loads(body_raw) - except json.JSONDecodeError as e: - raise errors.InputParsingError( - f"Invalid JSON payload: {str(e)}" - ) from e - - # Calls ModelWrapper.__call__, which runs validate, preprocess, predict, - # and postprocess. - with tracing.section_as_event(span, "model-call"): - response: Union[Dict, Generator] = await model( - body, - headers=util.transform_keys( - request.headers, lambda key: key.lower() - ), + inputs = await self._parse_body( + request, body_raw, model.model_descriptor.truss_schema, span ) + # Calls ModelWrapper which runs: preprocess, predict, postprocess. + with tracing.section_as_event(span, "model-call"): + result: Union[Dict, Generator] = await model(inputs, request) # In the case that the model returns a Generator object, return a # StreamingResponse instead. 
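`_parse_body` above consolidates the previously duplicated parsing branches: msgpack deserialization for binary requests, pydantic validation when a `TrussSchema` is available, and plain JSON otherwise, with failures surfaced as `InputParsingError`. A hypothetical JSON client call for illustration (URL and payload are placeholders, not defined by this patch):

import requests

resp = requests.post(
    "http://localhost:8080/v1/models/model:predict",  # placeholder URL
    json={"prompt": "hello"},
    headers={"accept": "application/json"},  # gathers streamed output into one response
)
print(resp.status_code, resp.json())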
- if isinstance(response, (AsyncGenerator, Generator)): + if isinstance(result, (AsyncGenerator, Generator)): # media_type in StreamingResponse sets the Content-Type header - return StreamingResponse( - response, media_type="application/octet-stream" - ) + return StreamingResponse(result, media_type="application/octet-stream") + elif isinstance(result, Response): + if result.status_code >= HTTPStatus.MULTIPLE_CHOICES.value: + errors.add_error_headers_to_user_response(result) + return result response_headers = {} if self.is_binary(request): with tracing.section_as_event(span, "binary-serialize"): response_headers["Content-Type"] = "application/octet-stream" return Response( - content=truss_msgpack_serialize(response), + content=serialization.truss_msgpack_serialize(result), headers=response_headers, ) else: with tracing.section_as_event(span, "json-serialize"): response_headers["Content-Type"] = "application/json" return Response( - content=json.dumps(response, cls=DeepNumpyEncoder), + content=json.dumps(result, cls=serialization.DeepNumpyEncoder), headers=response_headers, ) async def schema(self, model_name: str) -> Dict: model: ModelWrapper = self._safe_lookup_model(model_name) - - if model.truss_schema is None: + if model.model_descriptor.truss_schema is None: # If there is not a TrussSchema, we return a 404. if model.ready: raise HTTPException(status_code=404, detail="No schema found") @@ -215,7 +228,7 @@ async def schema(self, model_name: str) -> Dict: detail="Schema not available, please try again later.", ) else: - return model.truss_schema.serialize() + return model.model_descriptor.truss_schema.serialize() @staticmethod def is_binary(request: Request): @@ -226,27 +239,34 @@ def is_binary(request: Request): class TrussServer: - """This wrapper class manages creation and cleanup of uvicorn server processes running the FastAPI inference server app - - TrussServer runs as a main process managing UvicornCustomServer subprocesses that in turn may manage - their own worker processes. Notably, this main process is kept alive when running `servers_task()` - because of the child uvicorn server processes' main loop. + """This wrapper class manages creation and cleanup of uvicorn server processes + running the FastAPI inference server app. + TrussServer runs as a main process managing UvicornCustomServer subprocesses that + in turn may manage their own worker processes. Notably, this main process is kept + alive when running `servers_task()` because of the child uvicorn server processes' + main loop. """ def __init__( self, http_port: int, - config: Dict, + config_or_path: Union[str, Path, Dict], setup_json_logger: bool = True, ): + if isinstance(config_or_path, (str, Path)): + with open(config_or_path, encoding="utf-8") as config_file: + config = yaml.safe_load(config_file) + else: + config = config_or_path + secrets = SecretsResolver.get_secrets(config) tracer = tracing.get_truss_tracer(secrets, config) + self._setup_json_logger = setup_json_logger self.http_port = http_port self._config = config self._model = ModelWrapper(self._config, tracer) self._endpoints = BasetenEndpoints(self._model, tracer) - self._setup_json_logger = setup_json_logger def cleanup(self): if INFERENCE_SERVER_FAILED_FILE.exists(): @@ -254,14 +274,13 @@ def cleanup(self): def on_startup(self): """ - This method will be started inside the main process, so here is where we want to setup our logging and model + This method will be started inside the main process, so here is where + we want to setup our logging and model. 
""" self.cleanup() - if self._setup_json_logger: setup_logging() - - self._model.start_load() + self._model.start_load_thread() def create_application(self): app = FastAPI( diff --git a/truss/templates/shared/serialization.py b/truss/templates/shared/serialization.py index 7d7c63db0..7e99e4305 100644 --- a/truss/templates/shared/serialization.py +++ b/truss/templates/shared/serialization.py @@ -2,11 +2,54 @@ import uuid from datetime import date, datetime, time, timedelta from decimal import Decimal -from typing import Any, Callable, Dict, Optional, Union +from typing import ( + TYPE_CHECKING, + Any, + AsyncGenerator, + Callable, + Dict, + Generator, + List, + Optional, + Union, +) + +import pydantic +import starlette.responses + +if TYPE_CHECKING: + from numpy.typing import NDArray + + +JSONType = Union[str, int, float, bool, None, List["JSONType"], Dict[str, "JSONType"]] +MsgPackType = Union[ + str, + int, + float, + bool, + None, + date, + Decimal, + datetime, + time, + timedelta, + uuid.UUID, + "NDArray", + List["MsgPackType"], + Dict[str, "MsgPackType"], +] +InputType = Union[JSONType, MsgPackType, pydantic.BaseModel] +OutputType = Union[ + JSONType, + MsgPackType, + Generator[bytes, None, None], + AsyncGenerator[bytes, None], + starlette.responses.Response, +] # mostly cribbed from django.core.serializer.DjangoJSONEncoder -def truss_msgpack_encoder( +def _truss_msgpack_encoder( obj: Union[Decimal, date, time, timedelta, uuid.UUID, Dict], chain: Optional[Callable] = None, ) -> Dict: @@ -36,7 +79,7 @@ def truss_msgpack_encoder( return obj if chain is None else chain(obj) -def truss_msgpack_decoder(obj: Any, chain=None): +def _truss_msgpack_decoder(obj: Any, chain=None): try: if b"__dt_datetime_iso__" in obj: return datetime.fromisoformat(obj[b"data"]) @@ -58,7 +101,7 @@ def truss_msgpack_decoder(obj: Any, chain=None): # this json object is JSONType + np.array + datetime -def is_truss_serializable(obj) -> bool: +def is_truss_serializable(obj: Any) -> bool: import numpy as np # basic JSON types @@ -72,21 +115,21 @@ def is_truss_serializable(obj) -> bool: return False -def truss_msgpack_serialize(obj): +def truss_msgpack_serialize(obj: MsgPackType) -> bytes: import msgpack import msgpack_numpy as mp_np return msgpack.packb( - obj, default=lambda x: truss_msgpack_encoder(x, chain=mp_np.encode) + obj, default=lambda x: _truss_msgpack_encoder(x, chain=mp_np.encode) ) -def truss_msgpack_deserialize(obj): +def truss_msgpack_deserialize(data: bytes) -> MsgPackType: import msgpack import msgpack_numpy as mp_np return msgpack.unpackb( - obj, object_hook=lambda x: truss_msgpack_decoder(x, chain=mp_np.decode) + data, object_hook=lambda x: _truss_msgpack_decoder(x, chain=mp_np.decode) ) @@ -101,4 +144,4 @@ def default(self, obj): elif isinstance(obj, np.ndarray): return obj.tolist() else: - return super(DeepNumpyEncoder, self).default(obj) + return super().default(obj) diff --git a/truss/templates/shared/util.py b/truss/templates/shared/util.py index d1e3fe124..f4d7f45c0 100644 --- a/truss/templates/shared/util.py +++ b/truss/templates/shared/util.py @@ -3,7 +3,7 @@ import shutil import sys from pathlib import Path -from typing import Callable, Dict, List, TypeVar +from typing import List import psutil import requests @@ -80,15 +80,6 @@ def kill_child_processes(parent_pid: int): process.kill() -X = TypeVar("X") -Y = TypeVar("Y") -Z = TypeVar("Z") - - -def transform_keys(d: Dict[X, Z], fn: Callable[[X], Y]) -> Dict[Y, Z]: - return {fn(key): value for key, value in d.items()} - - def 
download_from_url_using_requests(URL: str, download_to: Path): # Streaming download to keep memory usage low resp = requests.get( diff --git a/truss/test_data/server.Dockerfile b/truss/test_data/server.Dockerfile index e81435154..2b12df967 100644 --- a/truss/test_data/server.Dockerfile +++ b/truss/test_data/server.Dockerfile @@ -1,7 +1,7 @@ ARG PYVERSION=py39 -FROM baseten/truss-server-base:3.9-v0.4.3 as truss_server +FROM baseten/truss-server-base:3.9-v0.4.3 AS truss_server -ENV PYTHON_EXECUTABLE /usr/local/bin/python3 +ENV PYTHON_EXECUTABLE=/usr/local/bin/python3 RUN grep -w 'ID=debian\|ID_LIKE=debian' /etc/os-release || { echo "ERROR: Supplied base image is not a debian image"; exit 1; } RUN $PYTHON_EXECUTABLE -c "import sys; sys.exit(0) if sys.version_info.major == 3 and sys.version_info.minor >=8 and sys.version_info.minor <=11 else sys.exit(1)" \ @@ -11,7 +11,7 @@ RUN pip install --upgrade pip --no-cache-dir \ && rm -rf /root/.cache/pip # If user base image is supplied in config, apply build commands from truss base image -ENV PYTHONUNBUFFERED True +ENV PYTHONUNBUFFERED=True ENV DEBIAN_FRONTEND=noninteractive RUN apt update && \ @@ -32,7 +32,7 @@ COPY ./requirements.txt requirements.txt RUN cat requirements.txt RUN pip install -r requirements.txt --no-cache-dir && rm -rf /root/.cache/pip -ENV APP_HOME /app +ENV APP_HOME=/app WORKDIR $APP_HOME # Copy data before code for better caching @@ -43,6 +43,6 @@ COPY ./model /app/model COPY ./packages /packages -ENV INFERENCE_SERVER_PORT 8080 -ENV SERVER_START_CMD="/usr/local/bin/python3 /app/inference_server.py" -ENTRYPOINT ["/usr/local/bin/python3", "/app/inference_server.py"] +ENV INFERENCE_SERVER_PORT=8080 +ENV SERVER_START_CMD="/usr/local/bin/python3 /app/main.py" +ENTRYPOINT ["/usr/local/bin/python3", "/app/main.py"] diff --git a/truss/test_data/server_conformance_test_truss/model/model.py b/truss/test_data/server_conformance_test_truss/model/model.py index a2e9aef4a..54a081a2a 100644 --- a/truss/test_data/server_conformance_test_truss/model/model.py +++ b/truss/test_data/server_conformance_test_truss/model/model.py @@ -10,11 +10,9 @@ def __init__(self, **kwargs) -> None: self._model = None def load(self): - # Load model here and assign to self._model. - print("Taking 20 seconds to load") + print("Starting loading over 20 seconds.") time.sleep(20) def predict(self, model_input: Any) -> Dict[str, List]: - # Invoke model on model_input and calculate predictions here. 
print("Taking 20 seconds to predict") time.sleep(20) diff --git a/truss/test_data/test_async_truss/model/model.py b/truss/test_data/test_async_truss/model/model.py index d0a49955d..900b996c7 100644 --- a/truss/test_data/test_async_truss/model/model.py +++ b/truss/test_data/test_async_truss/model/model.py @@ -15,8 +15,8 @@ def load(self): async def preprocess(self, model_input: Dict): return {"preprocess_value": "value", **model_input} - async def postprocess(self, response: Dict): - return {"postprocess_value": "value", **response} - async def predict(self, model_input: Any) -> Dict[str, List]: return model_input + + async def postprocess(self, response: Dict): + return {"postprocess_value": "value", **response} diff --git a/truss/test_data/test_streaming_async_generator_truss/model/model.py b/truss/test_data/test_streaming_async_generator_truss/model/model.py index 92a53f8a2..d120d2c87 100644 --- a/truss/test_data/test_streaming_async_generator_truss/model/model.py +++ b/truss/test_data/test_streaming_async_generator_truss/model/model.py @@ -2,17 +2,6 @@ class Model: - def __init__(self, **kwargs) -> None: - self._data_dir = kwargs["data_dir"] - self._config = kwargs["config"] - self._secrets = kwargs["secrets"] - self._model = None - - def load(self): - # Load model here and assign to self._model. - pass - async def predict(self, model_input: Any) -> Dict[str, List]: - # Invoke model on model_input and calculate predictions here. for i in range(5): yield str(i) diff --git a/truss/test_data/test_streaming_truss_with_error/model/model.py b/truss/test_data/test_streaming_truss_with_error/model/model.py index 131d7755e..39824c9e5 100644 --- a/truss/test_data/test_streaming_truss_with_error/model/model.py +++ b/truss/test_data/test_streaming_truss_with_error/model/model.py @@ -1,23 +1,15 @@ from typing import Any, Dict, List +import helpers_1 -class Model: - def __init__(self, **kwargs) -> None: - self._data_dir = kwargs["data_dir"] - self._config = kwargs["config"] - self._secrets = kwargs["secrets"] - self._model = None - - def load(self): - # Load model here and assign to self._model. 
- pass +class Model: def predict(self, model_input: Any) -> Dict[str, List]: def inner(): for i in range(5): # Raise error partway through if throw_error is set if i == 3 and model_input.get("throw_error"): - raise Exception("error") + helpers_1.foo(123) yield str(i) return inner() diff --git a/truss/test_data/test_streaming_truss_with_error/packages/helpers_1.py b/truss/test_data/test_streaming_truss_with_error/packages/helpers_1.py new file mode 100644 index 000000000..4ac324834 --- /dev/null +++ b/truss/test_data/test_streaming_truss_with_error/packages/helpers_1.py @@ -0,0 +1,5 @@ +import helpers_2 + + +def foo(x): + return helpers_2.bar(x) diff --git a/truss/test_data/test_streaming_truss_with_error/packages/helpers_2.py b/truss/test_data/test_streaming_truss_with_error/packages/helpers_2.py new file mode 100644 index 000000000..5752f798c --- /dev/null +++ b/truss/test_data/test_streaming_truss_with_error/packages/helpers_2.py @@ -0,0 +1,2 @@ +def bar(x): + raise Exception("Crashed in `bar`.") diff --git a/truss/test_data/test_truss_with_error/config.yaml b/truss/test_data/test_truss_with_error/config.yaml new file mode 100644 index 000000000..bbea0314e --- /dev/null +++ b/truss/test_data/test_truss_with_error/config.yaml @@ -0,0 +1,4 @@ +model_name: Test +python_version: py39 +environment_variables: + OTEL_TRACING_NDJSON_FILE: "/tmp/otel_traces.ndjson" diff --git a/truss/test_data/test_truss_with_error/model/__init__.py b/truss/test_data/test_truss_with_error/model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/truss/test_data/test_truss_with_error/model/model.py b/truss/test_data/test_truss_with_error/model/model.py new file mode 100644 index 000000000..acaffbb1c --- /dev/null +++ b/truss/test_data/test_truss_with_error/model/model.py @@ -0,0 +1,8 @@ +from typing import Any + +import helpers_1 + + +class Model: + def predict(self, model_input: Any) -> Any: + return helpers_1.foo(123) diff --git a/truss/test_data/test_truss_with_error/packages/helpers_1.py b/truss/test_data/test_truss_with_error/packages/helpers_1.py new file mode 100644 index 000000000..4ac324834 --- /dev/null +++ b/truss/test_data/test_truss_with_error/packages/helpers_1.py @@ -0,0 +1,5 @@ +import helpers_2 + + +def foo(x): + return helpers_2.bar(x) diff --git a/truss/test_data/test_truss_with_error/packages/helpers_2.py b/truss/test_data/test_truss_with_error/packages/helpers_2.py new file mode 100644 index 000000000..5752f798c --- /dev/null +++ b/truss/test_data/test_truss_with_error/packages/helpers_2.py @@ -0,0 +1,2 @@ +def bar(x): + raise Exception("Crashed in `bar`.") diff --git a/truss/tests/templates/control/control/test_server.py b/truss/tests/templates/control/control/test_server.py index 560391dd5..b9bf0fc6d 100644 --- a/truss/tests/templates/control/control/test_server.py +++ b/truss/tests/templates/control/control/test_server.py @@ -45,7 +45,7 @@ def app(truss_container_fs, truss_original_hash): control_app = create_app( { "inference_server_home": inf_serv_home, - "inference_server_process_args": ["python", "inference_server.py"], + "inference_server_process_args": ["python", "main.py"], "control_server_host": "*", "control_server_port": 8081, "inference_server_port": 8082, diff --git a/truss/tests/templates/server/test_model_wrapper.py b/truss/tests/templates/server/test_model_wrapper.py index e8c1ad829..b652f416d 100644 --- a/truss/tests/templates/server/test_model_wrapper.py +++ b/truss/tests/templates/server/test_model_wrapper.py @@ -5,11 +5,17 @@ from contextlib import 
contextmanager from pathlib import Path from typing import Any -from unittest.mock import Mock, patch +from unittest.mock import MagicMock, Mock, patch import opentelemetry.sdk.trace as sdk_trace import pytest import yaml +from starlette.requests import Request + + +@pytest.fixture +def anyio_backend(): + return "asyncio" @pytest.fixture @@ -19,10 +25,12 @@ def app_path(truss_container_fs: Path, helpers: Any): class Model: def __init__(self): self.load_count = 0 + def load(self): self.load_count += 1 if self.load_count <= 2: raise RuntimeError('Simulated error') + def predict(self, request): return request """ @@ -33,31 +41,25 @@ def predict(self, request): yield truss_container_app_path -# TODO: Make this test work -@pytest.mark.skip( - reason="Succeeds when tests in this file are run alone, but fails with the whole suit" -) -def test_model_wrapper_load_error_once(app_path): +@pytest.mark.anyio +async def test_model_wrapper_load_error_once(app_path): if "model_wrapper" in sys.modules: model_wrapper_module = sys.modules["model_wrapper"] importlib.reload(model_wrapper_module) else: model_wrapper_module = importlib.import_module("model_wrapper") - model_wraper_class = getattr(model_wrapper_module, "ModelWrapper") + model_wrapper_class = getattr(model_wrapper_module, "ModelWrapper") config = yaml.safe_load((app_path / "config.yaml").read_text()) - model_wrapper = model_wraper_class(config) + os.chdir(app_path) + model_wrapper = model_wrapper_class(config, sdk_trace.NoOpTracer()) model_wrapper.load() # Allow load thread to execute time.sleep(1) - output = model_wrapper.predict({}) + output = await model_wrapper.predict({}, MagicMock(spec=Request)) assert output == {} - assert model_wrapper._model.load_count == 3 + assert model_wrapper._model.load_count == 2 -# TODO: Make this test work -@pytest.mark.skip( - reason="Succeeds when tests in this file are run alone, but fails with the whole suit" -) def test_model_wrapper_load_error_more_than_allowed(app_path, helpers): with helpers.env_var("NUM_LOAD_RETRIES_TRUSS", "0"): if "model_wrapper" in sys.modules: @@ -65,15 +67,17 @@ def test_model_wrapper_load_error_more_than_allowed(app_path, helpers): importlib.reload(model_wrapper_module) else: model_wrapper_module = importlib.import_module("model_wrapper") - model_wraper_class = getattr(model_wrapper_module, "ModelWrapper") + model_wrapper_class = getattr(model_wrapper_module, "ModelWrapper") config = yaml.safe_load((app_path / "config.yaml").read_text()) - model_wrapper = model_wraper_class(config) + os.chdir(app_path) + model_wrapper = model_wrapper_class(config, sdk_trace.NoOpTracer()) model_wrapper.load() # Allow load thread to execute time.sleep(1) - assert model_wrapper.load_failed() + assert model_wrapper.load_failed +@pytest.mark.anyio @pytest.mark.integration async def test_model_wrapper_streaming_timeout(app_path): if "model_wrapper" in sys.modules: @@ -81,17 +85,17 @@ async def test_model_wrapper_streaming_timeout(app_path): importlib.reload(model_wrapper_module) else: model_wrapper_module = importlib.import_module("model_wrapper") - model_wraper_class = getattr(model_wrapper_module, "ModelWrapper") + model_wrapper_class = getattr(model_wrapper_module, "ModelWrapper") # Create an instance of ModelWrapper with streaming_read_timeout set to 5 seconds config = yaml.safe_load((app_path / "config.yaml").read_text()) config["runtime"]["streaming_read_timeout"] = 5 - model_wrapper = model_wraper_class(config) + model_wrapper = model_wrapper_class(config, sdk_trace.NoOpTracer()) 
model_wrapper.load() assert model_wrapper._config.get("runtime").get("streaming_read_timeout") == 5 -@pytest.mark.asyncio +@pytest.mark.anyio async def test_trt_llm_truss_init_extension(trt_llm_truss_container_fs, helpers): app_path = trt_llm_truss_container_fs / "app" packages_path = trt_llm_truss_container_fs / "packages" @@ -115,7 +119,7 @@ async def test_trt_llm_truss_init_extension(trt_llm_truss_container_fs, helpers) ), "Expected extension_name was not called" -@pytest.mark.asyncio +@pytest.mark.anyio async def test_trt_llm_truss_predict(trt_llm_truss_container_fs, helpers): app_path = trt_llm_truss_container_fs / "app" packages_path = trt_llm_truss_container_fs / "packages" @@ -143,14 +147,14 @@ async def mock_predict(return_value): ): model_wrapper = model_wrapper_class(config, sdk_trace.NoOpTracer()) model_wrapper.load() - resp = await model_wrapper.predict({}) + resp = await model_wrapper.predict({}, MagicMock(spec=Request)) mock_extension.load.assert_called() mock_extension.model_args.assert_called() assert mock_predict_called assert resp == expected_predict_response -@pytest.mark.asyncio +@pytest.mark.anyio async def test_trt_llm_truss_missing_model_py(trt_llm_truss_container_fs, helpers): app_path = trt_llm_truss_container_fs / "app" (app_path / "model" / "model.py").unlink() @@ -171,7 +175,7 @@ async def mock_predict(return_value): mock_predict_called = True return expected_predict_response - mock_engine = Mock(predict=mock_predict) + mock_engine = Mock(predict=mock_predict, spec=["predict"]) mock_extension = Mock() mock_extension.load = Mock() mock_extension.model_override = Mock(return_value=mock_engine) @@ -180,7 +184,7 @@ async def mock_predict(return_value): ): model_wrapper = model_wrapper_class(config, sdk_trace.NoOpTracer()) model_wrapper.load() - resp = await model_wrapper.predict({}) + resp = await model_wrapper.predict({}, MagicMock(spec=Request)) mock_extension.load.assert_called() mock_extension.model_override.assert_called() assert mock_predict_called diff --git a/truss/tests/templates/server/test_truss_server.py b/truss/tests/templates/server/test_truss_server.py index 6e0c2d146..1fd7f25ff 100644 --- a/truss/tests/templates/server/test_truss_server.py +++ b/truss/tests/templates/server/test_truss_server.py @@ -8,7 +8,6 @@ from pathlib import Path import pytest -import yaml @pytest.mark.integration @@ -22,8 +21,7 @@ def start_truss_server(stdout_capture_file_path): from truss_server import TrussServer - config = yaml.safe_load((app_path / "config.yaml").read_text()) - server = TrussServer(http_port=port, config=config) + server = TrussServer(http_port=port, config_or_path=app_path / "config.yaml") server.start() stdout_capture_file = tempfile.NamedTemporaryFile() diff --git a/truss/tests/test_model_inference.py b/truss/tests/test_model_inference.py index e64c67ca2..9b267772d 100644 --- a/truss/tests/test_model_inference.py +++ b/truss/tests/test_model_inference.py @@ -1,4 +1,5 @@ import concurrent +import contextlib import dataclasses import inspect import json @@ -10,8 +11,9 @@ from concurrent.futures import ThreadPoolExecutor from pathlib import Path from threading import Thread -from typing import Mapping +from typing import Iterator, Mapping +import httpx import opentelemetry.trace.propagation.tracecontext as tracecontext import pytest import requests @@ -27,6 +29,7 @@ logger = logging.getLogger(__name__) DEFAULT_LOG_ERROR = "Internal Server Error" +PREDICT_URL = "http://localhost:8090/v1/models/model:predict" def _log_contains_error(line: dict, error: str, 
message: str): @@ -38,9 +41,10 @@ def _log_contains_error(line: dict, error: str, message: str): def assert_logs_contain_error(logs: str, error: str, message=DEFAULT_LOG_ERROR): - loglines = logs.splitlines() - assert any( - _log_contains_error(json.loads(line), error, message) for line in loglines + loglines = [json.loads(line) for line in logs.splitlines()] + assert any(_log_contains_error(line, error, message) for line in loglines), ( + f"Did not find expected error in logs.\nExpected error: {error}\n" + f"Expected message: {message}\nActual logs:\n{loglines}" ) @@ -64,6 +68,14 @@ def join(self, timeout=None): return self.ret +@contextlib.contextmanager +def temp_truss(model_src: str, config_src: str) -> Iterator[TrussHandle]: + with ensure_kill_all(), tempfile.TemporaryDirectory(dir=".") as tmp_work_dir: + truss_dir = Path(tmp_work_dir, "truss") + create_truss(truss_dir, config_src, textwrap.dedent(model_src)) + yield TrussHandle(truss_dir) + + @pytest.mark.parametrize( "python_version, expected_python_version", [ @@ -141,22 +153,16 @@ def test_concurrency_truss(): # Tests that concurrency limits work correctly with ensure_kill_all(): truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" - truss_dir = truss_root / "test_data" / "test_concurrency_truss" - tr = TrussHandle(truss_dir) - _ = tr.docker_run(local_port=8090, detach=True, wait_for_server_ready=True) - truss_server_addr = "http://localhost:8090" - full_url = f"{truss_server_addr}/v1/models/model:predict" - # Each request takes 2 seconds, for this thread, we allow # a concurrency of 2. This means the first two requests will # succeed within the 2 seconds, and the third will fail, since # it cannot start until the first two have completed. def make_request(): - requests.post(full_url, json={}, timeout=3) + requests.post(PREDICT_URL, json={}, timeout=3) successful_thread_1 = PropagatingThread(target=make_request) successful_thread_2 = PropagatingThread(target=make_request) @@ -178,17 +184,12 @@ def make_request(): def test_requirements_file_truss(): with ensure_kill_all(): truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" - truss_dir = truss_root / "test_data" / "test_requirements_file_truss" - tr = TrussHandle(truss_dir) - _ = tr.docker_run(local_port=8090, detach=True, wait_for_server_ready=True) - truss_server_addr = "http://localhost:8090" - full_url = f"{truss_server_addr}/v1/models/model:predict" # The prediction imports torch which is specified in a requirements.txt and returns if GPU is available. 
- response = requests.post(full_url, json={}) + response = requests.post(PREDICT_URL, json={}) assert response.status_code == 200 assert response.json() is False @@ -198,16 +199,11 @@ def test_requirements_file_truss(): def test_requirements_pydantic(pydantic_major_version): with ensure_kill_all(): truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" - truss_dir = truss_root / "test_data" / f"test_pyantic_v{pydantic_major_version}" - tr = TrussHandle(truss_dir) - _ = tr.docker_run(local_port=8090, detach=True, wait_for_server_ready=True) - truss_server_addr = "http://localhost:8090" - full_url = f"{truss_server_addr}/v1/models/model:predict" - response = requests.post(full_url, json={}) + response = requests.post(PREDICT_URL, json={}) assert response.status_code == 200 assert response.json() == '{\n "foo": "bla",\n "bar": 123\n}' @@ -216,16 +212,11 @@ def test_requirements_pydantic(pydantic_major_version): def test_async_truss(): with ensure_kill_all(): truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" - truss_dir = truss_root / "test_data" / "test_async_truss" - tr = TrussHandle(truss_dir) - _ = tr.docker_run(local_port=8090, detach=True, wait_for_server_ready=True) - truss_server_addr = "http://localhost:8090" - full_url = f"{truss_server_addr}/v1/models/model:predict" - response = requests.post(full_url, json={}) + response = requests.post(PREDICT_URL, json={}) assert response.json() == { "preprocess_value": "value", "postprocess_value": "value", @@ -236,23 +227,18 @@ def test_async_truss(): def test_async_streaming(): with ensure_kill_all(): truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" - truss_dir = truss_root / "test_data" / "test_streaming_async_generator_truss" - tr = TrussHandle(truss_dir) - _ = tr.docker_run(local_port=8090, detach=True, wait_for_server_ready=True) - truss_server_addr = "http://localhost:8090" - full_url = f"{truss_server_addr}/v1/models/model:predict" - response = requests.post(full_url, json={}, stream=True) + response = requests.post(PREDICT_URL, json={}, stream=True) assert response.headers.get("transfer-encoding") == "chunked" assert [ byte_string.decode() for byte_string in list(response.iter_content()) ] == ["0", "1", "2", "3", "4"] predict_non_stream_response = requests.post( - full_url, + PREDICT_URL, json={}, stream=True, headers={"accept": "application/json"}, @@ -265,25 +251,21 @@ def test_async_streaming(): def test_async_streaming_timeout(): with ensure_kill_all(): truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" - truss_dir = truss_root / "test_data" / "test_streaming_read_timeout" - tr = TrussHandle(truss_dir) - container = tr.docker_run( local_port=8090, detach=True, wait_for_server_ready=True ) - truss_server_addr = "http://localhost:8090" - predict_url = f"{truss_server_addr}/v1/models/model:predict" # ChunkedEncodingError is raised when the chunk does not get processed due to streaming read timeout with pytest.raises(requests.exceptions.ChunkedEncodingError): - response = requests.post(predict_url, json={}, stream=True) + response = requests.post(PREDICT_URL, json={}, stream=True) for chunk in response.iter_content(): pass # Check to ensure the Timeout error is in the container logs + # TODO: maybe intercept this error better? 
assert_logs_contain_error( container.logs(), error="raise exceptions.TimeoutError()", @@ -292,20 +274,17 @@ def test_async_streaming_timeout(): @pytest.mark.integration -def test_streaming_with_error(): +def test_streaming_with_error_and_stacktrace(): with ensure_kill_all(): truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" - truss_dir = truss_root / "test_data" / "test_streaming_truss_with_error" - tr = TrussHandle(truss_dir) - - _ = tr.docker_run(local_port=8090, detach=True, wait_for_server_ready=True) - truss_server_addr = "http://localhost:8090" - predict_url = f"{truss_server_addr}/v1/models/model:predict" + container = tr.docker_run( + local_port=8090, detach=True, wait_for_server_ready=True + ) predict_error_response = requests.post( - predict_url, json={"throw_error": True}, stream=True, timeout=2 + PREDICT_URL, json={"throw_error": True}, stream=True, timeout=2 ) # In error cases, the response will return whatever the stream returned, @@ -318,7 +297,7 @@ def test_streaming_with_error(): # Test that we are able to continue to make requests successfully predict_non_error_response = requests.post( - predict_url, json={"throw_error": False}, stream=True, timeout=2 + PREDICT_URL, json={"throw_error": False}, stream=True, timeout=2 ) assert [ @@ -331,6 +310,21 @@ def test_streaming_with_error(): "3", "4", ] + expected_stack_trace = ( + "Traceback (most recent call last):\n" + ' File "/app/model/model.py", line 12, in inner\n' + " helpers_1.foo(123)\n" + ' File "/packages/helpers_1.py", line 5, in foo\n' + " return helpers_2.bar(x)\n" + ' File "/packages/helpers_2.py", line 2, in bar\n' + ' raise Exception("Crashed in `bar`.")\n' + "Exception: Crashed in `bar`." + ) + assert_logs_contain_error( + container.logs(), + error=expected_stack_trace, + message="Exception while generating streamed response: Crashed in `bar`.", + ) @pytest.mark.integration @@ -339,18 +333,14 @@ def test_streaming_truss(): truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" truss_dir = truss_root / "test_data" / "test_streaming_truss" tr = TrussHandle(truss_dir) - _ = tr.docker_run(local_port=8090, detach=True, wait_for_server_ready=True) - truss_server_addr = "http://localhost:8090" - predict_url = f"{truss_server_addr}/v1/models/model:predict" - # A request for which response is not completely read - predict_response = requests.post(predict_url, json={}, stream=True) + predict_response = requests.post(PREDICT_URL, json={}, stream=True) # We just read the first part and leave it hanging here next(predict_response.iter_content()) - predict_response = requests.post(predict_url, json={}, stream=True) + predict_response = requests.post(PREDICT_URL, json={}, stream=True) assert predict_response.headers.get("transfer-encoding") == "chunked" assert [ @@ -366,7 +356,7 @@ def test_streaming_truss(): # When accept is set to application/json, the response is not streamed. predict_non_stream_response = requests.post( - predict_url, + PREDICT_URL, json={}, stream=True, headers={"accept": "application/json"}, @@ -385,7 +375,7 @@ def make_request(delay: int): # For streamed responses, requests does not start receiving content from server until # `iter_content` is called, so we must call this in order to get an actual timeout. 
time.sleep(delay) - list(requests.post(predict_url, json={}, stream=True).iter_content()) + list(requests.post(PREDICT_URL, json={}, stream=True).iter_content()) with ThreadPoolExecutor() as e: # We use concurrent.futures.wait instead of the timeout property @@ -418,61 +408,39 @@ def predict(self, request): * Secret 'secret' is defined in the 'secrets' section of the Truss config file * The model was pushed with the --trusted flag""" - with ensure_kill_all(), tempfile.TemporaryDirectory(dir=".") as tmp_work_dir: - truss_dir = Path(tmp_work_dir, "truss") - - create_truss(truss_dir, config, textwrap.dedent(inspect.getsource(Model))) - - tr = TrussHandle(truss_dir) + with ensure_kill_all(), temp_truss(inspect.getsource(Model), config) as tr: LocalConfigHandler.set_secret("secret", "secret_value") _ = tr.docker_run(local_port=8090, detach=True, wait_for_server_ready=True) - truss_server_addr = "http://localhost:8090" - full_url = f"{truss_server_addr}/v1/models/model:predict" - response = requests.post(full_url, json={}) + response = requests.post(PREDICT_URL, json={}) assert response.json() == "secret_value" - with ensure_kill_all(), tempfile.TemporaryDirectory(dir=".") as tmp_work_dir: - # Case where the secret is not specified in the config - truss_dir = Path(tmp_work_dir, "truss") - - create_truss( - truss_dir, config_with_no_secret, textwrap.dedent(inspect.getsource(Model)) - ) - tr = TrussHandle(truss_dir) + # Case where the secret is not specified in the config + with ensure_kill_all(), temp_truss( + inspect.getsource(Model), config_with_no_secret + ) as tr: LocalConfigHandler.set_secret("secret", "secret_value") container = tr.docker_run( local_port=8090, detach=True, wait_for_server_ready=True ) - truss_server_addr = "http://localhost:8090" - full_url = f"{truss_server_addr}/v1/models/model:predict" - - response = requests.post(full_url, json={}) + response = requests.post(PREDICT_URL, json={}) assert "error" in response.json() - assert_logs_contain_error(container.logs(), missing_secret_error_message) assert "Internal Server Error" in response.json()["error"] assert response.headers["x-baseten-error-source"] == "04" assert response.headers["x-baseten-error-code"] == "600" - with ensure_kill_all(), tempfile.TemporaryDirectory(dir=".") as tmp_work_dir: - # Case where the secret is not mounted - truss_dir = Path(tmp_work_dir, "truss") - - create_truss(truss_dir, config, textwrap.dedent(inspect.getsource(Model))) - tr = TrussHandle(truss_dir) + # Case where the secret is not mounted + with ensure_kill_all(), temp_truss(inspect.getsource(Model), config) as tr: LocalConfigHandler.remove_secret("secret") container = tr.docker_run( local_port=8090, detach=True, wait_for_server_ready=True ) - truss_server_addr = "http://localhost:8090" - full_url = f"{truss_server_addr}/v1/models/model:predict" - response = requests.post(full_url, json={}) + response = requests.post(PREDICT_URL, json={}) assert response.status_code == 500 - assert_logs_contain_error(container.logs(), missing_secret_error_message) assert "Internal Server Error" in response.json()["error"] assert response.headers["x-baseten-error-source"] == "04" @@ -481,6 +449,7 @@ def predict(self, request): @pytest.mark.integration def test_postprocess_with_streaming_predict(): + # TODO: revisit the decision to forbid this. If so remove below comment. """ Test a Truss that has streaming response from both predict and postprocess. 
In this case, the postprocess step continues to happen within the predict lock, @@ -502,25 +471,26 @@ def predict(self, request): """ config = "model_name: error-truss" - with ensure_kill_all(), tempfile.TemporaryDirectory(dir=".") as tmp_work_dir: - truss_dir = Path(tmp_work_dir, "truss") - - create_truss(truss_dir, config, textwrap.dedent(model)) + with ensure_kill_all(), temp_truss(model, config) as tr: + container = tr.docker_run( + local_port=8090, detach=True, wait_for_server_ready=True + ) - tr = TrussHandle(truss_dir) - _ = tr.docker_run(local_port=8090, detach=True, wait_for_server_ready=True) - truss_server_addr = "http://localhost:8090" - full_url = f"{truss_server_addr}/v1/models/model:predict" - response = requests.post(full_url, json={}, stream=True) - # Note that the postprocess function is applied to the - # streamed response. - assert response.content == b"0 modified1 modified" + response = requests.post(PREDICT_URL, json={}, stream=True) + logging.info(response.content) + assert_logs_contain_error( + container.logs(), + "ModelDefinitionError: If the predict function returns a generator (streaming), you cannot use postprocessing.", + ) + assert "Internal Server Error" in response.json()["error"] + assert response.headers["x-baseten-error-source"] == "04" + assert response.headers["x-baseten-error-code"] == "600" @pytest.mark.integration def test_streaming_postprocess(): """ - Tests a Truss where predict returns non-streaming, but postprocess is streamd, and + Tests a Truss where predict returns non-streaming, but postprocess is streamed, and ensures that the postprocess step does not happen within the predict lock. To do this, we sleep for two seconds during the postprocess streaming process, and fire off two requests with a total timeout of 3 seconds, ensuring that if they were serialized @@ -539,22 +509,15 @@ def predict(self, request): return ["0", "1"] """ - config = "model_name: error-truss" - with ensure_kill_all(), tempfile.TemporaryDirectory(dir=".") as tmp_work_dir: - truss_dir = Path(tmp_work_dir, "truss") - - create_truss(truss_dir, config, textwrap.dedent(model)) - - tr = TrussHandle(truss_dir) + config = "model_name: streaming-truss" + with ensure_kill_all(), temp_truss(model, config) as tr: _ = tr.docker_run(local_port=8090, detach=True, wait_for_server_ready=True) - truss_server_addr = "http://localhost:8090" - full_url = f"{truss_server_addr}/v1/models/model:predict" def make_request(delay: int): # For streamed responses, requests does not start receiving content from server until # `iter_content` is called, so we must call this in order to get an actual timeout. 
time.sleep(delay) - response = requests.post(full_url, json={}, stream=True) + response = requests.post(PREDICT_URL, json={}, stream=True) assert response.status_code == 200 assert response.content == b"0 modified1 modified" @@ -602,20 +565,13 @@ def predict(self, request): """ - config = "model_name: error-truss" - with ensure_kill_all(), tempfile.TemporaryDirectory(dir=".") as tmp_work_dir: - truss_dir = Path(tmp_work_dir, "truss") - - create_truss(truss_dir, config, textwrap.dedent(model)) - - tr = TrussHandle(truss_dir) + config = "model_name: postprocess-truss" + with ensure_kill_all(), temp_truss(model, config) as tr: _ = tr.docker_run(local_port=8090, detach=True, wait_for_server_ready=True) - truss_server_addr = "http://localhost:8090" - full_url = f"{truss_server_addr}/v1/models/model:predict" def make_request(delay: int): time.sleep(delay) - response = requests.post(full_url, json={}) + response = requests.post(PREDICT_URL, json={}) assert response.status_code == 200 assert response.json() == ["0 modified", "1 modified"] @@ -647,19 +603,12 @@ def predict(self, request): config = "model_name: error-truss" - with ensure_kill_all(), tempfile.TemporaryDirectory(dir=".") as tmp_work_dir: - truss_dir = Path(tmp_work_dir, "truss") - - create_truss(truss_dir, config, textwrap.dedent(model)) - - tr = TrussHandle(truss_dir) + with ensure_kill_all(), temp_truss(model, config) as tr: container = tr.docker_run( local_port=8090, detach=True, wait_for_server_ready=True ) - truss_server_addr = "http://localhost:8090" - full_url = f"{truss_server_addr}/v1/models/model:predict" - response = requests.post(full_url, json={}) + response = requests.post(PREDICT_URL, json={}) assert response.status_code == 500 assert "error" in response.json() @@ -678,19 +627,12 @@ def predict(self, request): return {"a": "b"} """ - with ensure_kill_all(), tempfile.TemporaryDirectory(dir=".") as tmp_work_dir: - truss_dir = Path(tmp_work_dir, "truss") - - create_truss(truss_dir, config, textwrap.dedent(model_preprocess_error)) - - tr = TrussHandle(truss_dir) + with ensure_kill_all(), temp_truss(model_preprocess_error, config) as tr: container = tr.docker_run( local_port=8090, detach=True, wait_for_server_ready=True ) - truss_server_addr = "http://localhost:8090" - full_url = f"{truss_server_addr}/v1/models/model:predict" - response = requests.post(full_url, json={}) + response = requests.post(PREDICT_URL, json={}) assert response.status_code == 500 assert "error" in response.json() @@ -708,19 +650,12 @@ def postprocess(self, response): raise ValueError("error") """ - with ensure_kill_all(), tempfile.TemporaryDirectory(dir=".") as tmp_work_dir: - truss_dir = Path(tmp_work_dir, "truss") - - create_truss(truss_dir, config, textwrap.dedent(model_postprocess_error)) - - tr = TrussHandle(truss_dir) + with ensure_kill_all(), temp_truss(model_postprocess_error, config) as tr: container = tr.docker_run( local_port=8090, detach=True, wait_for_server_ready=True ) - truss_server_addr = "http://localhost:8090" - full_url = f"{truss_server_addr}/v1/models/model:predict" - response = requests.post(full_url, json={}) + response = requests.post(PREDICT_URL, json={}) assert response.status_code == 500 assert "error" in response.json() assert_logs_contain_error(container.logs(), "ValueError: error") @@ -734,19 +669,12 @@ async def predict(self, request): raise ValueError("error") """ - with ensure_kill_all(), tempfile.TemporaryDirectory(dir=".") as tmp_work_dir: - truss_dir = Path(tmp_work_dir, "truss") - - create_truss(truss_dir, config, 
textwrap.dedent(model_async)) - - tr = TrussHandle(truss_dir) + with ensure_kill_all(), temp_truss(model_async, config) as tr: container = tr.docker_run( local_port=8090, detach=True, wait_for_server_ready=True ) - truss_server_addr = "http://localhost:8090" - full_url = f"{truss_server_addr}/v1/models/model:predict" - response = requests.post(full_url, json={}) + response = requests.post(PREDICT_URL, json={}) assert response.status_code == 500 assert "error" in response.json() @@ -770,19 +698,12 @@ def predict(self, request): config = "model_name: error-truss" - with ensure_kill_all(), tempfile.TemporaryDirectory(dir=".") as tmp_work_dir: - truss_dir = Path(tmp_work_dir, "truss") - - create_truss(truss_dir, config, textwrap.dedent(model)) - - tr = TrussHandle(truss_dir) + with ensure_kill_all(), temp_truss(model, config) as tr: container = tr.docker_run( local_port=8090, detach=True, wait_for_server_ready=True ) - truss_server_addr = "http://localhost:8090" - full_url = f"{truss_server_addr}/v1/models/model:predict" - response = requests.post(full_url, json={}) + response = requests.post(PREDICT_URL, json={}) assert response.status_code == 500 assert "error" in response.json() assert response.headers["x-baseten-error-source"] == "04" @@ -799,6 +720,41 @@ def predict(self, request): assert response.headers["x-baseten-error-code"] == "600" +@pytest.mark.integration +def test_truss_with_error_stacktrace(): + with ensure_kill_all(): + truss_root = Path(__file__).parent.parent.parent.resolve() / "truss" + truss_dir = truss_root / "test_data" / "test_truss_with_error" + tr = TrussHandle(truss_dir) + container = tr.docker_run( + local_port=8090, detach=True, wait_for_server_ready=True + ) + + response = requests.post(PREDICT_URL, json={}) + assert response.status_code == 500 + assert "error" in response.json() + + assert "Internal Server Error" in response.json()["error"] + assert response.headers["x-baseten-error-source"] == "04" + assert response.headers["x-baseten-error-code"] == "600" + + expected_stack_trace = ( + "Traceback (most recent call last):\n" + ' File "/app/model/model.py", line 8, in predict\n' + " return helpers_1.foo(123)\n" + ' File "/packages/helpers_1.py", line 5, in foo\n' + " return helpers_2.bar(x)\n" + ' File "/packages/helpers_2.py", line 2, in bar\n' + ' raise Exception("Crashed in `bar`.")\n' + "Exception: Crashed in `bar`." + ) + assert_logs_contain_error( + container.logs(), + error=expected_stack_trace, + message="Internal Server Error", + ) + + @pytest.mark.integration def test_slow_truss(): with ensure_kill_all(): @@ -914,28 +870,24 @@ def enable_gpu_fn(conf): local_port=8090, detach=True, wait_for_server_ready=True ) - truss_server_addr = "http://localhost:8090" - predict_url = f"{truss_server_addr}/v1/models/model:predict" - print(predict_url) - # A request for which response is not completely read headers_0 = _make_otel_headers() predict_response = requests.post( - predict_url, json={}, stream=True, headers=headers_0 + PREDICT_URL, json={}, stream=True, headers=headers_0 ) # We just read the first part and leave it hanging here next(predict_response.iter_content()) headers_1 = _make_otel_headers() predict_response = requests.post( - predict_url, json={}, stream=True, headers=headers_1 + PREDICT_URL, json={}, stream=True, headers=headers_1 ) assert predict_response.headers.get("transfer-encoding") == "chunked" # When accept is set to application/json, the response is not streamed. 
headers_2 = _make_otel_headers() predict_non_stream_response = requests.post( - predict_url, + PREDICT_URL, json={}, stream=True, headers={**headers_2, "accept": "application/json"}, @@ -974,3 +926,275 @@ def enable_gpu_fn(conf): # But make sure traces have parents at all. assert len(user_parents) > 3 assert len(truss_parents) > 3 + + +# Returning Response Objects ########################################################### + + +@pytest.mark.integration +def test_truss_with_response(): + """Test that user-code can set a custom status code.""" + model = """ + from fastapi.responses import Response + + class Model: + def predict(self, inputs): + return Response(status_code=inputs["code"]) + """ + from fastapi import status + + config = "model_name: custom-status-code-truss" + + with ensure_kill_all(), temp_truss(model, config) as tr: + _ = tr.docker_run(local_port=8090, detach=True, wait_for_server_ready=True) + + response = requests.post(PREDICT_URL, json={"code": status.HTTP_204_NO_CONTENT}) + assert response.status_code == 204 + assert "x-baseten-error-source" not in response.headers + assert "x-baseten-error-code" not in response.headers + + response = requests.post( + PREDICT_URL, json={"code": status.HTTP_500_INTERNAL_SERVER_ERROR} + ) + assert response.status_code == 500 + assert response.headers["x-baseten-error-source"] == "04" + assert response.headers["x-baseten-error-code"] == "700" + + +@pytest.mark.integration +def test_truss_with_streaming_response(): + # TODO: one issue with this is that (unlike our "builtin" streaming), this keeps + # the semaphore claimed potentially longer if the client drops. + + model = """from starlette.responses import StreamingResponse +class Model: + def predict(self, model_input): + def text_generator(): + for i in range(3): + yield f"data: {i}\\n\\n" + return StreamingResponse(text_generator(), media_type="text/event-stream") + """ + + config = "model_name: sse-truss" + + with ensure_kill_all(), temp_truss(model, config) as tr: + _ = tr.docker_run(local_port=8090, detach=True, wait_for_server_ready=True) + + # A request for which response is not completely read. + predict_response = requests.post(PREDICT_URL, json={}, stream=True) + assert ( + predict_response.headers["Content-Type"] + == "text/event-stream; charset=utf-8" + ) + + lines = predict_response.text.strip().split("\n") + assert lines == [ + "data: 0", + "", + "data: 1", + "", + "data: 2", + ] + + +# Using Request in Model ############################################################### + + +@pytest.mark.integration +def test_truss_with_request(): + model = """ + import fastapi + class Model: + async def preprocess(self, request: fastapi.Request): + return await request.json() + + async def predict(self, inputs, request: fastapi.Request): + inputs["request_size"] = len(await request.body()) + return inputs + + def postprocess(self, inputs): + return {**inputs, "postprocess": "was here"} + """ + with ensure_kill_all(), temp_truss(model, "") as tr: + _ = tr.docker_run(local_port=8090, detach=True, wait_for_server_ready=True) + + response = requests.post(PREDICT_URL, json={"test": 123}) + assert response.status_code == 200 + assert response.json() == { + "test": 123, + "request_size": 13, + "postprocess": "was here", + } + + +@pytest.mark.integration +def test_truss_with_requests_and_invalid_signatures(): + model = """ + class Model: + def predict(self, inputs, invalid_arg): ... 
+ """ + with ensure_kill_all(), temp_truss(model, "") as tr: + container = tr.docker_run( + local_port=8090, detach=True, wait_for_server_ready=False + ) + time.sleep(1.0) # Wait for logs. + assert_logs_contain_error( + container.logs(), + "`predict` method with two arguments must have request as second argument", + "Exception while loading model", + ) + + model = """ + import fastapi + + class Model: + def predict(self, request: fastapi.Request, invalid_arg): ... + """ + with ensure_kill_all(), temp_truss(model, "") as tr: + container = tr.docker_run( + local_port=8090, detach=True, wait_for_server_ready=False + ) + time.sleep(1.0) # Wait for logs. + assert_logs_contain_error( + container.logs(), + "`predict` method with two arguments is not allowed to have request as " + "first argument", + "Exception while loading model", + ) + + model = """ + import fastapi + + class Model: + def predict(self, inputs, request: fastapi.Request, something): ... + """ + with ensure_kill_all(), temp_truss(model, "") as tr: + container = tr.docker_run( + local_port=8090, detach=True, wait_for_server_ready=False + ) + time.sleep(1.0) # Wait for logs. + assert_logs_contain_error( + container.logs(), + "`predict` method cannot have more than two arguments", + "Exception while loading model", + ) + + +@pytest.mark.integration +def test_truss_with_requests_and_invalid_argument_combinations(): + model = """ + import fastapi + class Model: + async def preprocess(self, inputs): ... + + def predict(self, request: fastapi.Request): ... + """ + with ensure_kill_all(), temp_truss(model, "") as tr: + container = tr.docker_run( + local_port=8090, detach=True, wait_for_server_ready=False + ) + time.sleep(1.0) # Wait for logs. + assert_logs_contain_error( + container.logs(), + "When using preprocessing, the predict method cannot only have the request argument", + "Exception while loading model", + ) + + model = """ + import fastapi + class Model: + def preprocess(self, inputs): ... + + async def predict(self, inputs, request: fastapi.Request): ... + + def postprocess(self, request: fastapi.Request): ... + """ + with ensure_kill_all(), temp_truss(model, "") as tr: + container = tr.docker_run( + local_port=8090, detach=True, wait_for_server_ready=False + ) + time.sleep(1.0) # Wait for logs. + assert_logs_contain_error( + container.logs(), + "The postprocessing method cannot only have the request argument", + "Exception while loading model", + ) + + model = """ + import fastapi + class Model: + def preprocess(self, inputs): ... + """ + with ensure_kill_all(), temp_truss(model, "") as tr: + container = tr.docker_run( + local_port=8090, detach=True, wait_for_server_ready=False + ) + time.sleep(1.0) # Wait for logs. 
+ assert_logs_contain_error( + container.logs(), + "Truss model must have a `predict` method.", + "Exception while loading model", + ) + + +@pytest.mark.integration +def test_truss_forbid_postprocessing_with_response(): + model = """ + import fastapi, json + class Model: + def predict(self, inputs): + return fastapi.Response(content=json.dumps(inputs), status_code=200) + + def postprocess(self, inputs): + return inputs + """ + with ensure_kill_all(), temp_truss(model, "") as tr: + container = tr.docker_run( + local_port=8090, detach=True, wait_for_server_ready=True + ) + + response = requests.post(PREDICT_URL, json={}) + assert response.status_code == 500 + assert response.headers["x-baseten-error-source"] == "04" + assert response.headers["x-baseten-error-code"] == "600" + assert_logs_contain_error( + container.logs(), + "If the predict function returns a response object, you cannot " + "use postprocessing.", + ) + + +@pytest.mark.integration +def test_async_streaming_with_cancellation(): + model = """ + import fastapi, asyncio, logging + + class Model: + async def predict(self, inputs, request: fastapi.Request): + await asyncio.sleep(1) + if await request.is_disconnected(): + logging.warning("Cancelled (before gen).") + return + + for i in range(5): + await asyncio.sleep(1.0) + logging.warning(i) + yield str(i) + if await request.is_disconnected(): + logging.warning("Cancelled (during gen).") + return + """ + with ensure_kill_all(), temp_truss(model, "") as tr: + container = tr.docker_run( + local_port=8090, detach=True, wait_for_server_ready=True + ) + # For hard cancellation we need to use httpx, requests' timeouts don't work. + with pytest.raises(httpx.ReadTimeout): + with httpx.Client( + timeout=httpx.Timeout(1.0, connect=1.0, read=1.0) + ) as client: + response = client.post(PREDICT_URL, json={}, timeout=1.0) + response.raise_for_status() + + time.sleep(2) # Wait a bit to get all logs. + assert "Cancelled (during gen)." 
in container.logs() diff --git a/truss/util/data_structures.py b/truss/util/data_structures.py index 8d754d6df..0834dbfe6 100644 --- a/truss/util/data_structures.py +++ b/truss/util/data_structures.py @@ -1,8 +1,7 @@ -from typing import Callable, Dict, Optional, TypeVar +from typing import Callable, Optional, TypeVar X = TypeVar("X") Y = TypeVar("Y") -Z = TypeVar("Z") def transform_optional(x: Optional[X], fn: Callable[[X], Optional[Y]]) -> Optional[Y]: @@ -10,7 +9,3 @@ def transform_optional(x: Optional[X], fn: Callable[[X], Optional[Y]]) -> Option return None return fn(x) - - -def transform_keys(d: Dict[X, Z], fn: Callable[[X], Y]) -> Dict[Y, Z]: - return {fn(key): value for key, value in d.items()} From 039c75076964e9c90ab7bf9125e509f4c227bc00 Mon Sep 17 00:00:00 2001 From: Marius Killinger <155577904+marius-baseten@users.noreply.github.com> Date: Fri, 20 Sep 2024 11:53:49 -0700 Subject: [PATCH 6/8] Escape docker env vars (#1149) --- .devcontainer/Dockerfile | 2 +- .devcontainer/gpu/Dockerfile | 2 +- docker/base_images/base_image.Dockerfile.jinja | 12 ++++++------ pyproject.toml | 2 +- .../contexts/image_builder/serving_image_builder.py | 4 ++-- truss/templates/base.Dockerfile.jinja | 6 +++--- truss/templates/cache.Dockerfile.jinja | 4 ++-- truss/templates/server.Dockerfile.jinja | 12 ++++++------ truss/test_data/server.Dockerfile | 10 +++++----- 9 files changed, 27 insertions(+), 27 deletions(-) diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 5861d2f6c..54f02392a 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -21,7 +21,7 @@ RUN apt-get update && /bin/bash /tmp/library-scripts/common-debian.sh "${INSTALL USER vscode RUN curl -sSL https://install.python-poetry.org | python -ENV PATH=/home/vscode/.poetry/bin:$PATH +ENV PATH="/home/vscode/.poetry/bin:$PATH" USER root diff --git a/.devcontainer/gpu/Dockerfile b/.devcontainer/gpu/Dockerfile index 627be7028..3d2350cf5 100644 --- a/.devcontainer/gpu/Dockerfile +++ b/.devcontainer/gpu/Dockerfile @@ -21,7 +21,7 @@ RUN apt-get update && /bin/bash /tmp/library-scripts/common-debian.sh "${INSTALL USER vscode RUN curl -sSL https://install.python-poetry.org | python -ENV PATH=/home/vscode/.poetry/bin:$PATH +ENV PATH="/home/vscode/.poetry/bin:$PATH" USER root diff --git a/docker/base_images/base_image.Dockerfile.jinja b/docker/base_images/base_image.Dockerfile.jinja index 17792944b..7e29a2ff6 100644 --- a/docker/base_images/base_image.Dockerfile.jinja +++ b/docker/base_images/base_image.Dockerfile.jinja @@ -1,8 +1,8 @@ {% if use_gpu %} FROM nvidia/cuda:12.2.2-base-ubuntu20.04 -ENV CUDNN_VERSION=8.9.5.29 -ENV CUDA=12.2 -ENV LD_LIBRARY_PATH=/usr/local/cuda/extras/CUPTI/lib64:$LD_LIBRARY_PATH +ENV CUDNN_VERSION="8.9.5.29" +ENV CUDA="12.2" +ENV LD_LIBRARY_PATH="/usr/local/cuda/extras/CUPTI/lib64:$LD_LIBRARY_PATH" RUN apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64/3bf863cc.pub && \ apt-get update && apt-get install -y --no-install-recommends \ @@ -21,8 +21,8 @@ RUN apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/ rm -rf /var/lib/apt/lists/* # Allow statements and log messages to immediately appear in the Knative logs -ENV PYTHONUNBUFFERED=True -ENV DEBIAN_FRONTEND=noninteractive +ENV PYTHONUNBUFFERED="True" +ENV DEBIAN_FRONTEND="noninteractive" RUN apt update && \ apt install -y bash \ @@ -49,7 +49,7 @@ FROM python:{{python_version}} RUN apt update && apt install -y # Allow statements and log messages to immediately appear in the 
Knative logs -ENV PYTHONUNBUFFERED=True +ENV PYTHONUNBUFFERED="True" {% endif %} diff --git a/pyproject.toml b/pyproject.toml index 51578eff5..42d1ebb54 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "truss" -version = "0.9.36rc01" +version = "0.9.36rc02" description = "A seamless bridge from model development to model delivery" license = "MIT" readme = "README.md" diff --git a/truss/contexts/image_builder/serving_image_builder.py b/truss/contexts/image_builder/serving_image_builder.py index 60c3b7344..58b29ff31 100644 --- a/truss/contexts/image_builder/serving_image_builder.py +++ b/truss/contexts/image_builder/serving_image_builder.py @@ -12,6 +12,7 @@ from google.cloud import storage from huggingface_hub import get_hf_file_metadata, hf_hub_url, list_repo_files from huggingface_hub.utils import filter_repo_objects +from truss import constants from truss.config.trt_llm import TrussTRTLLMModel from truss.constants import ( AUDIO_MODEL_TRTLLM_REQUIREMENTS, @@ -70,7 +71,6 @@ GCS_CREDENTIALS = "service_account.json" S3_CREDENTIALS = "s3_credentials.json" -HF_ACCESS_TOKEN_SECRET_NAME = "hf_access_token" HF_ACCESS_TOKEN_FILE_NAME = "hf-access-token" CLOUD_BUCKET_CACHE = Path("/app/model_cache/") @@ -526,7 +526,7 @@ def _render_dockerfile( build_dir / USER_SUPPLIED_REQUIREMENTS_TXT_FILENAME ) - hf_access_token = config.secrets.get(HF_ACCESS_TOKEN_SECRET_NAME) + hf_access_token = config.secrets.get(constants.HF_ACCESS_TOKEN_KEY) dockerfile_contents = dockerfile_template.render( should_install_server_requirements=should_install_server_requirements, base_image_name_and_tag=base_image_name_and_tag, diff --git a/truss/templates/base.Dockerfile.jinja b/truss/templates/base.Dockerfile.jinja index 5b66a7a6a..de52ca1e2 100644 --- a/truss/templates/base.Dockerfile.jinja +++ b/truss/templates/base.Dockerfile.jinja @@ -1,7 +1,7 @@ ARG PYVERSION={{config.python_version}} FROM {{base_image_name_and_tag}} AS truss_server -ENV PYTHON_EXECUTABLE={{ config.base_image.python_executable_path or 'python3' }} +ENV PYTHON_EXECUTABLE="{{ config.base_image.python_executable_path or 'python3' }}" {% block fail_fast %} RUN grep -w 'ID=debian\|ID_LIKE=debian' /etc/os-release || { echo "ERROR: Supplied base image is not a debian image"; exit 1; } @@ -52,7 +52,7 @@ RUN pip install -r {{config_requirements_filename}} --no-cache-dir && rm -rf /ro -ENV APP_HOME=/app +ENV APP_HOME="/app" WORKDIR $APP_HOME @@ -68,7 +68,7 @@ COPY ./{{config.bundled_packages_dir}} /packages {% for env_var_name, env_var_value in config.environment_variables.items() %} -ENV {{ env_var_name }}={{ env_var_value }} +ENV {{ env_var_name }}="{{ env_var_value }}" {% endfor %} {% block run %} diff --git a/truss/templates/cache.Dockerfile.jinja b/truss/templates/cache.Dockerfile.jinja index eec4da24d..8640a2c6f 100644 --- a/truss/templates/cache.Dockerfile.jinja +++ b/truss/templates/cache.Dockerfile.jinja @@ -4,11 +4,11 @@ RUN mkdir -p /app/model_cache WORKDIR /app {% if hf_access_token %} -ENV HUGGING_FACE_HUB_TOKEN={{hf_access_token}} +ENV HUGGING_FACE_HUB_TOKEN="{{hf_access_token}}" {% endif %} RUN apt-get -y update; apt-get -y install curl; curl -s https://baseten-public.s3.us-west-2.amazonaws.com/bin/b10cp-5fe8dc7da-linux-amd64 -o /app/b10cp; chmod +x /app/b10cp -ENV B10CP_PATH_TRUSS=/app/b10cp +ENV B10CP_PATH_TRUSS="/app/b10cp" COPY ./cache_requirements.txt /app/cache_requirements.txt RUN pip install -r /app/cache_requirements.txt --no-cache-dir && rm -rf /root/.cache/pip COPY ./cache_warmer.py /cache_warmer.py 
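The hunks above and below switch the templates' `ENV` instructions from unquoted to quoted values. A rough, hypothetical illustration of the rendering step is sketched here (assuming `jinja2` is installed; the variable names are placeholders, not the real template context):

```python
# Why the templates now emit quoted ENV values: in Docker's `ENV key=value` form,
# an unquoted value containing spaces is rejected or mis-parsed, so values such as
# a full server start command must be wrapped in quotes when rendered.
from jinja2 import Template

env_line = Template('ENV {{ name }}="{{ value }}"')

print(env_line.render(
    name="SERVER_START_CMD",
    value="/usr/local/bin/python3 /app/main.py",
))
# -> ENV SERVER_START_CMD="/usr/local/bin/python3 /app/main.py"
```

Note that plain quoting handles spaces and most shell-special characters; values that themselves contain double quotes would still need additional escaping.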
diff --git a/truss/templates/server.Dockerfile.jinja b/truss/templates/server.Dockerfile.jinja index f5b25143f..ca3961d1a 100644 --- a/truss/templates/server.Dockerfile.jinja +++ b/truss/templates/server.Dockerfile.jinja @@ -7,8 +7,8 @@ {% block base_image_patch %} # If user base image is supplied in config, apply build commands from truss base image {% if config.base_image %} -ENV PYTHONUNBUFFERED=True -ENV DEBIAN_FRONTEND=noninteractive +ENV PYTHONUNBUFFERED="True" +ENV DEBIAN_FRONTEND="noninteractive" RUN apt update && \ apt install -y bash \ @@ -90,13 +90,13 @@ COPY ./{{ config.model_module_dir }} /app/model {% block run %} {%- if config.live_reload %} -ENV HASH_TRUSS={{truss_hash}} -ENV CONTROL_SERVER_PORT=8080 -ENV INFERENCE_SERVER_PORT=8090 +ENV HASH_TRUSS="{{truss_hash}}" +ENV CONTROL_SERVER_PORT="8080" +ENV INFERENCE_SERVER_PORT="8090" ENV SERVER_START_CMD="/control/.env/bin/python3 /control/control/server.py" ENTRYPOINT ["/control/.env/bin/python3", "/control/control/server.py"] {%- else %} -ENV INFERENCE_SERVER_PORT=8080 +ENV INFERENCE_SERVER_PORT="8080" ENV SERVER_START_CMD="{{(config.base_image.python_executable_path or "python3") ~ " /app/main.py"}}" ENTRYPOINT ["{{config.base_image.python_executable_path or "python3"}}", "/app/main.py"] {%- endif %} diff --git a/truss/test_data/server.Dockerfile b/truss/test_data/server.Dockerfile index 2b12df967..c0a80a8e4 100644 --- a/truss/test_data/server.Dockerfile +++ b/truss/test_data/server.Dockerfile @@ -1,7 +1,7 @@ ARG PYVERSION=py39 FROM baseten/truss-server-base:3.9-v0.4.3 AS truss_server -ENV PYTHON_EXECUTABLE=/usr/local/bin/python3 +ENV PYTHON_EXECUTABLE="/usr/local/bin/python3" RUN grep -w 'ID=debian\|ID_LIKE=debian' /etc/os-release || { echo "ERROR: Supplied base image is not a debian image"; exit 1; } RUN $PYTHON_EXECUTABLE -c "import sys; sys.exit(0) if sys.version_info.major == 3 and sys.version_info.minor >=8 and sys.version_info.minor <=11 else sys.exit(1)" \ @@ -11,8 +11,8 @@ RUN pip install --upgrade pip --no-cache-dir \ && rm -rf /root/.cache/pip # If user base image is supplied in config, apply build commands from truss base image -ENV PYTHONUNBUFFERED=True -ENV DEBIAN_FRONTEND=noninteractive +ENV PYTHONUNBUFFERED="True" +ENV DEBIAN_FRONTEND="noninteractive" RUN apt update && \ apt install -y bash \ @@ -32,7 +32,7 @@ COPY ./requirements.txt requirements.txt RUN cat requirements.txt RUN pip install -r requirements.txt --no-cache-dir && rm -rf /root/.cache/pip -ENV APP_HOME=/app +ENV APP_HOME="/app" WORKDIR $APP_HOME # Copy data before code for better caching @@ -43,6 +43,6 @@ COPY ./model /app/model COPY ./packages /packages -ENV INFERENCE_SERVER_PORT=8080 +ENV INFERENCE_SERVER_PORT="8080" ENV SERVER_START_CMD="/usr/local/bin/python3 /app/main.py" ENTRYPOINT ["/usr/local/bin/python3", "/app/main.py"] From ce8c4c938ea8a5545cf2856c538d6f45d5200c39 Mon Sep 17 00:00:00 2001 From: Bryce Dubayah Date: Sun, 22 Sep 2024 14:29:03 -0700 Subject: [PATCH 7/8] Briton bug fixes (#1145) * Add special tokens from tokenizer decoder config to briton rust tokenizer * Fix bug in structured outputs generation for FP8 and TP2 * Fix concurrency bug in briton server --- pyproject.toml | 2 +- truss/constants.py | 2 +- truss/templates/trtllm-briton/src/engine.py | 77 ++++++++++++++++++--- 3 files changed, 68 insertions(+), 13 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 42d1ebb54..bb107084e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "truss" -version = "0.9.36rc02" +version = 
"0.9.36rc3" description = "A seamless bridge from model development to model delivery" license = "MIT" readme = "README.md" diff --git a/truss/constants.py b/truss/constants.py index e3c64e7f6..b6db858f3 100644 --- a/truss/constants.py +++ b/truss/constants.py @@ -103,7 +103,7 @@ REGISTRY_BUILD_SECRET_PREFIX = "DOCKER_REGISTRY_" -TRTLLM_BASE_IMAGE = "baseten/briton-server:5fa9436e_v0.0.9" +TRTLLM_BASE_IMAGE = "baseten/briton-server:5fa9436e_v0.0.11" TRTLLM_PYTHON_EXECUTABLE = "/usr/bin/python3" BASE_TRTLLM_REQUIREMENTS = [ "grpcio==1.62.3", diff --git a/truss/templates/trtllm-briton/src/engine.py b/truss/templates/trtllm-briton/src/engine.py index 01b8dfde1..baab259e1 100644 --- a/truss/templates/trtllm-briton/src/engine.py +++ b/truss/templates/trtllm-briton/src/engine.py @@ -12,7 +12,7 @@ import time from itertools import count from pathlib import Path -from typing import Any, Dict, Optional +from typing import Any, Dict, List, Optional import briton_pb2 import briton_pb2_grpc @@ -20,7 +20,7 @@ from fastapi import HTTPException from outlines.models.transformers import TransformerTokenizer from outlines.processors.structured import JSONLogitsProcessor -from transformers import AutoTokenizer +from transformers import AutoTokenizer, PreTrainedTokenizerFast from truss.config.trt_llm import TrussTRTLLMBuildConfiguration from truss.constants import OPENAI_COMPATIBLE_TAG @@ -135,14 +135,34 @@ def load(self): Path(FSM_CACHE_DIR), self._tokenizer, self._max_fsm_workers ) - # Start engine + # We only support Llama and mistral with Briton, for which this should + # apply. + assert isinstance(self._tokenizer, PreTrainedTokenizerFast) + + # These are tokens outside of tokenizer.json. We need to pass these to + # Briton, to pass to rust tokenizer. + added_token_decoders = self._tokenizer.added_tokens_decoder + added_tokens = [token for token in added_token_decoders.values()] + + self._saved_tokenizer_dir = str(self._data_dir / "saved_tokenizer") + self._tokenizer.save_pretrained(self._saved_tokenizer_dir) + + # Pass tokenizer file to Briton for the rust tokenizer. + tokenizer_file = Path(self._saved_tokenizer_dir) / "tokenizer.json" config_str = f""" - engine_path: "{self._data_dir.resolve()}" - hf_tokenizer: "{self._tokenizer_repository}" - kv_cache_free_gpu_mem_fraction: {self._kv_cache_free_gpu_mem_fraction} - enable_kv_cache_reuse: {"true" if self._enable_kv_cache_reuse else "false"} - fsm_cache_dir: "{FSM_CACHE_DIR}" +engine_path: "{self._data_dir.resolve()}" +hf_tokenizer: "{tokenizer_file.resolve()}" +kv_cache_free_gpu_mem_fraction: {self._kv_cache_free_gpu_mem_fraction} +enable_kv_cache_reuse: {"true" if self._enable_kv_cache_reuse else "false"} +fsm_cache_dir: "{FSM_CACHE_DIR}" """ + + # Pass added tokens to Briton for the rust tokenizer. 
+ if len(added_tokens) > 0: + config_str += "\n" + "\n".join( + _serialize_added_tokens_to_config(added_tokens) + ) + config_pbtxt_path = (self._data_dir / "briton_config.pbtxt").resolve() config_pbtxt_path.write_text(config_str) briton_env = os.environ.copy() @@ -281,6 +301,7 @@ async def predict(self, model_input): "items": { "anyOf": list(tool_schemas.values()), }, + "minItems": 1, } prompt = model_input.get("prompt", None) @@ -384,17 +405,47 @@ async def generate_after_first_chunk(): else: return await build_response() except grpc.RpcError as ex: - if ex.code() == grpc.StatusCode.INVALID_ARGUMENT: + if ( + ex.code() == grpc.StatusCode.INVALID_ARGUMENT + or ex.code() == grpc.StatusCode.UNIMPLEMENTED + ): raise HTTPException(status_code=400, detail=ex.details()) - # If the error is another GRPC exception like NotImplemented, we should return a 500 + # If the error is another type of gRPC error, we should return a 500 else: raise HTTPException( - status_code=500, detail=f"An error has occurred: {ex}" + status_code=500, detail=f"An error has occurred: {ex.details()}" ) except Exception as ex: raise HTTPException(status_code=500, detail=f"An error has occurred: {ex}") +def _serialize_added_tokens_to_config(added_tokens: list) -> List[str]: + """Serialize to pbtxt format.""" + lines = ["added_tokens {"] + for added_token in added_tokens: + token_lines = _serialize_added_token_to_config(added_token) + lines.extend([f" {line}" for line in token_lines]) + lines.append("}") + return lines + + +def _serialize_added_token_to_config(added_token) -> List[str]: + """Serialize to pbtxt format.""" + fields = [ + f'content: "{added_token.content}"', + f"single_word: {added_token.single_word}", + f"lstrip: {added_token.lstrip}", + f"rstrip: {added_token.rstrip}", + f"normalized: {added_token.normalized}", + f"special: {added_token.special}", + ] + return [ + "tokens {", + *[f" {field}" for field in fields], + "}", + ] + + def create_tool_schema(tool_json: Dict[str, Any]) -> Dict[str, Any]: return { "type": "object", @@ -424,6 +475,10 @@ def worker(vocab_size: int, end_id: int, schema: Dict[str, Any], output_path: Pa ) if not output_path.exists(): try: + # Open the file with flags to protect against concurrent writes. + # O_CREAT: Create the file if it does not exist. + # O_EXCL: Ensure that this call creates the file exclusively. If the file already exists, the call will fail. + # O_WRONLY: Open the file for write-only access. fd = os.open(output_path, os.O_CREAT | os.O_EXCL | os.O_WRONLY) with os.fdopen(fd, "wb") as f: fcntl.flock(f, fcntl.LOCK_EX) From b25a74a2ef7c58dd0b5cb6a75fbaa0e50ea2ddcb Mon Sep 17 00:00:00 2001 From: basetenbot <96544894+basetenbot@users.noreply.github.com> Date: Sun, 22 Sep 2024 21:30:36 +0000 Subject: [PATCH 8/8] Bump version to 0.9.36 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index bb107084e..d1b5b47c9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "truss" -version = "0.9.36rc3" +version = "0.9.36" description = "A seamless bridge from model development to model delivery" license = "MIT" readme = "README.md"
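
Editorial aside on patch 7/8 ("Fix concurrency bug in briton server"): the FSM cache worker guards against concurrent writes by creating the cache file with `O_CREAT | O_EXCL`, so exactly one worker creates and fills it. A self-contained sketch of that pattern follows; `write_once` is a hypothetical helper for illustration, not part of the truss codebase:

```python
# Sketch of the exclusive-create pattern used by the FSM cache worker in patch 7/8.
import errno
import os
from pathlib import Path


def write_once(output_path: Path, payload: bytes) -> bool:
    """Write `payload` to `output_path` exactly once across concurrent callers.

    Returns True if this call created the file, False if it already existed.
    """
    try:
        # O_CREAT | O_EXCL makes creation atomic: the call fails with EEXIST
        # if the file already exists, so two workers can never both write it.
        fd = os.open(output_path, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
    except OSError as exc:
        if exc.errno == errno.EEXIST:
            return False  # another worker won the race; reuse its file
        raise
    with os.fdopen(fd, "wb") as handle:
        handle.write(payload)
    return True
```
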