From ccf8af3aaa388f94145669970e7e85e0442fdeb0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 10 Sep 2024 22:55:03 +0000 Subject: [PATCH 01/18] Bump pytest from 8.3.2 to 8.3.3 Bumps [pytest](https://github.com/pytest-dev/pytest) from 8.3.2 to 8.3.3. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/8.3.2...8.3.3) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 483a2032..e8cbde16 100644 --- a/poetry.lock +++ b/poetry.lock @@ -397,13 +397,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pytest" -version = "8.3.2" +version = "8.3.3" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, - {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, ] [package.dependencies] From eff2afd82723c47e20a98d571e1c77af6a923d3e Mon Sep 17 00:00:00 2001 From: Patrick Devine Date: Thu, 12 Sep 2024 16:49:38 -0700 Subject: [PATCH 02/18] update docs --- README.md | 10 ++++++++-- ollama/_client.py | 6 ++++++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 14dfddae..8f21acfa 100644 --- a/README.md +++ b/README.md @@ 
-101,10 +101,16 @@ ollama.pull('llama3.1') ollama.push('user/llama3.1') ``` -### Embeddings +### Embed ```python -ollama.embeddings(model='llama3.1', prompt='The sky is blue because of rayleigh scattering') +ollama.embed(model='llama3.1', input='The sky is blue because of rayleigh scattering') +``` + +### Embed (Batch Embedding) + +```python +ollama.embed(model='llama3.1', input=['The sky is blue because of rayleigh scattering', 'Grass is green because of chlorophyll']) ``` ### Ps diff --git a/ollama/_client.py b/ollama/_client.py index ec9acb90..e3d9fed0 100644 --- a/ollama/_client.py +++ b/ollama/_client.py @@ -278,6 +278,9 @@ def embeddings( options: Optional[Options] = None, keep_alive: Optional[Union[float, str]] = None, ) -> Mapping[str, Sequence[float]]: + """ + Deprecated in favor of `embed`. + """ return self._request( 'POST', '/api/embeddings', @@ -698,6 +701,9 @@ async def embeddings( options: Optional[Options] = None, keep_alive: Optional[Union[float, str]] = None, ) -> Mapping[str, Sequence[float]]: + """ + Deprecated in favor of `embed`. 
+ """ response = await self._request( 'POST', '/api/embeddings', From a26537c188de90ff0909908d6c43dc4c1e060122 Mon Sep 17 00:00:00 2001 From: royjhan Date: Wed, 17 Jul 2024 10:52:08 -0700 Subject: [PATCH 03/18] update docs --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 8f21acfa..e03ea00a 100644 --- a/README.md +++ b/README.md @@ -107,7 +107,7 @@ ollama.push('user/llama3.1') ollama.embed(model='llama3.1', input='The sky is blue because of rayleigh scattering') ``` -### Embed (Batch Embedding) +### Embed (batch) ```python ollama.embed(model='llama3.1', input=['The sky is blue because of rayleigh scattering', 'Grass is green because of chlorophyll']) From 7fda5c9dae1ac8733b9cd18def235c4629d77595 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 13 Sep 2024 22:26:57 +0000 Subject: [PATCH 04/18] Bump ruff from 0.6.3 to 0.6.5 Bumps [ruff](https://github.com/astral-sh/ruff) from 0.6.3 to 0.6.5. - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/0.6.3...0.6.5) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/poetry.lock b/poetry.lock index 483a2032..a730c06a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -469,29 +469,29 @@ Werkzeug = ">=2.0.0" [[package]] name = "ruff" -version = "0.6.3" +version = "0.6.5" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.6.3-py3-none-linux_armv6l.whl", hash = "sha256:97f58fda4e309382ad30ede7f30e2791d70dd29ea17f41970119f55bdb7a45c3"}, - {file = "ruff-0.6.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:3b061e49b5cf3a297b4d1c27ac5587954ccb4ff601160d3d6b2f70b1622194dc"}, - {file = "ruff-0.6.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:34e2824a13bb8c668c71c1760a6ac7d795ccbd8d38ff4a0d8471fdb15de910b1"}, - {file = "ruff-0.6.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bddfbb8d63c460f4b4128b6a506e7052bad4d6f3ff607ebbb41b0aa19c2770d1"}, - {file = "ruff-0.6.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ced3eeb44df75353e08ab3b6a9e113b5f3f996bea48d4f7c027bc528ba87b672"}, - {file = "ruff-0.6.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47021dff5445d549be954eb275156dfd7c37222acc1e8014311badcb9b4ec8c1"}, - {file = "ruff-0.6.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:7d7bd20dc07cebd68cc8bc7b3f5ada6d637f42d947c85264f94b0d1cd9d87384"}, - {file = "ruff-0.6.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:500f166d03fc6d0e61c8e40a3ff853fa8a43d938f5d14c183c612df1b0d6c58a"}, - {file = "ruff-0.6.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:42844ff678f9b976366b262fa2d1d1a3fe76f6e145bd92c84e27d172e3c34500"}, - {file = "ruff-0.6.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70452a10eb2d66549de8e75f89ae82462159855e983ddff91bc0bce6511d0470"}, - {file = "ruff-0.6.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:65a533235ed55f767d1fc62193a21cbf9e3329cf26d427b800fdeacfb77d296f"}, - {file = "ruff-0.6.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d2e2c23cef30dc3cbe9cc5d04f2899e7f5e478c40d2e0a633513ad081f7361b5"}, - {file = "ruff-0.6.3-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:d8a136aa7d228975a6aee3dd8bea9b28e2b43e9444aa678fb62aeb1956ff2351"}, - {file = "ruff-0.6.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:f92fe93bc72e262b7b3f2bba9879897e2d58a989b4714ba6a5a7273e842ad2f8"}, - {file = "ruff-0.6.3-py3-none-win32.whl", hash = "sha256:7a62d3b5b0d7f9143d94893f8ba43aa5a5c51a0ffc4a401aa97a81ed76930521"}, - {file = "ruff-0.6.3-py3-none-win_amd64.whl", hash = "sha256:746af39356fee2b89aada06c7376e1aa274a23493d7016059c3a72e3b296befb"}, - {file = "ruff-0.6.3-py3-none-win_arm64.whl", hash = "sha256:14a9528a8b70ccc7a847637c29e56fd1f9183a9db743bbc5b8e0c4ad60592a82"}, - {file = "ruff-0.6.3.tar.gz", hash = "sha256:183b99e9edd1ef63be34a3b51fee0a9f4ab95add123dbf89a71f7b1f0c991983"}, + {file = "ruff-0.6.5-py3-none-linux_armv6l.whl", hash = "sha256:7e4e308f16e07c95fc7753fc1aaac690a323b2bb9f4ec5e844a97bb7fbebd748"}, + {file = "ruff-0.6.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:932cd69eefe4daf8c7d92bd6689f7e8182571cb934ea720af218929da7bd7d69"}, + {file = "ruff-0.6.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3a8d42d11fff8d3143ff4da41742a98f8f233bf8890e9fe23077826818f8d680"}, + {file = "ruff-0.6.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a50af6e828ee692fb10ff2dfe53f05caecf077f4210fae9677e06a808275754f"}, + {file = "ruff-0.6.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:794ada3400a0d0b89e3015f1a7e01f4c97320ac665b7bc3ade24b50b54cb2972"}, + {file = "ruff-0.6.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:381413ec47f71ce1d1c614f7779d88886f406f1fd53d289c77e4e533dc6ea200"}, + {file = "ruff-0.6.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:52e75a82bbc9b42e63c08d22ad0ac525117e72aee9729a069d7c4f235fc4d276"}, + {file = "ruff-0.6.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09c72a833fd3551135ceddcba5ebdb68ff89225d30758027280968c9acdc7810"}, + {file = 
"ruff-0.6.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:800c50371bdcb99b3c1551d5691e14d16d6f07063a518770254227f7f6e8c178"}, + {file = "ruff-0.6.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e25ddd9cd63ba1f3bd51c1f09903904a6adf8429df34f17d728a8fa11174253"}, + {file = "ruff-0.6.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7291e64d7129f24d1b0c947ec3ec4c0076e958d1475c61202497c6aced35dd19"}, + {file = "ruff-0.6.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9ad7dfbd138d09d9a7e6931e6a7e797651ce29becd688be8a0d4d5f8177b4b0c"}, + {file = "ruff-0.6.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:005256d977021790cc52aa23d78f06bb5090dc0bfbd42de46d49c201533982ae"}, + {file = "ruff-0.6.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:482c1e6bfeb615eafc5899127b805d28e387bd87db38b2c0c41d271f5e58d8cc"}, + {file = "ruff-0.6.5-py3-none-win32.whl", hash = "sha256:cf4d3fa53644137f6a4a27a2b397381d16454a1566ae5335855c187fbf67e4f5"}, + {file = "ruff-0.6.5-py3-none-win_amd64.whl", hash = "sha256:3e42a57b58e3612051a636bc1ac4e6b838679530235520e8f095f7c44f706ff9"}, + {file = "ruff-0.6.5-py3-none-win_arm64.whl", hash = "sha256:51935067740773afdf97493ba9b8231279e9beef0f2a8079188c4776c25688e0"}, + {file = "ruff-0.6.5.tar.gz", hash = "sha256:4d32d87fab433c0cf285c3683dd4dae63be05fd7a1d65b3f5bf7cdd05a6b96fb"}, ] [[package]] From 61c8d0d4409616ea538865b788dd5feeb7e74c84 Mon Sep 17 00:00:00 2001 From: Michael Yang Date: Wed, 18 Sep 2024 12:35:27 -0700 Subject: [PATCH 05/18] add basic delete/copy tests (#275) --- tests/test_client.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/tests/test_client.py b/tests/test_client.py index 0b062f5d..efc8d4fa 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -520,6 +520,20 @@ def test_client_create_blob_exists(httpserver: HTTPServer): assert response == 'sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855' +def 
test_client_delete(httpserver: HTTPServer): + httpserver.expect_ordered_request(PrefixPattern('/api/delete'), method='DELETE').respond_with_response(Response(status=200)) + client = Client(httpserver.url_for('/api/delete')) + response = client.delete('dummy') + assert response == {'status': 'success'} + + +def test_client_copy(httpserver: HTTPServer): + httpserver.expect_ordered_request(PrefixPattern('/api/copy'), method='POST').respond_with_response(Response(status=200)) + client = Client(httpserver.url_for('/api/copy')) + response = client.copy('dum', 'dummer') + assert response == {'status': 'success'} + + @pytest.mark.asyncio async def test_async_client_chat(httpserver: HTTPServer): httpserver.expect_ordered_request( @@ -992,3 +1006,19 @@ async def test_async_client_create_blob_exists(httpserver: HTTPServer): with tempfile.NamedTemporaryFile() as blob: response = await client._create_blob(blob.name) assert response == 'sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855' + + +@pytest.mark.asyncio +async def test_async_client_delete(httpserver: HTTPServer): + httpserver.expect_ordered_request(PrefixPattern('/api/delete'), method='DELETE').respond_with_response(Response(status=200)) + client = AsyncClient(httpserver.url_for('/api/delete')) + response = await client.delete('dummy') + assert response == {'status': 'success'} + + +@pytest.mark.asyncio +async def test_async_client_copy(httpserver: HTTPServer): + httpserver.expect_ordered_request(PrefixPattern('/api/copy'), method='POST').respond_with_response(Response(status=200)) + client = AsyncClient(httpserver.url_for('/api/copy')) + response = await client.copy('dum', 'dummer') + assert response == {'status': 'success'} From f3e72b6c4f5e9b4bcb3e11b1f98b001ed1d66887 Mon Sep 17 00:00:00 2001 From: Michael Yang Date: Wed, 18 Sep 2024 16:34:54 -0700 Subject: [PATCH 06/18] update pyproject.toml (#284) --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/pyproject.toml b/pyproject.toml index 3adf10f3..4e58aa93 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,8 +5,8 @@ description = "The official Python client for Ollama." authors = ["Ollama "] license = "MIT" readme = "README.md" -homepage = "https://ollama.ai" -repository = "https://github.com/jmorganca/ollama-python" +homepage = "https://ollama.com" +repository = "https://github.com/ollama/ollama-python" [tool.poetry.dependencies] python = "^3.8" From a7571423d36fc66be7939429a9dd9d204ee6630b Mon Sep 17 00:00:00 2001 From: Parth Sareen Date: Thu, 7 Nov 2024 14:46:41 -0800 Subject: [PATCH 07/18] Update requirements.txt and poetry.lock (#315) * Update requirements.txt and poetry.lock --- poetry.lock | 244 ++++++++++++++++++++++++++--------------------- requirements.txt | 42 ++++---- 2 files changed, 155 insertions(+), 131 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0cd1de13..27c55310 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. 
[[package]] name = "anyio" -version = "4.3.0" +version = "4.5.2" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" files = [ - {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, - {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, + {file = "anyio-4.5.2-py3-none-any.whl", hash = "sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f"}, + {file = "anyio-4.5.2.tar.gz", hash = "sha256:23009af4ed04ce05991845451e11ef02fc7c5ed29179ac9a420e5ad0ac7ddc5b"}, ] [package.dependencies] @@ -18,19 +18,19 @@ sniffio = ">=1.1" typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] @@ -46,63 +46,83 @@ files = [ [[package]] name = "coverage" -version = "7.4.4" +version = "7.6.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, - {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, - {file = 
"coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, - {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, - {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, - {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, - {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, - {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, - {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, - 
{file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, - {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, - {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, - {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, - {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, - {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, - {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, - {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, - {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, - {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, - {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, - {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, - {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, - {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = 
"coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + 
{file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = 
"coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = 
"coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + 
{file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, ] [package.dependencies] @@ -113,13 +133,13 @@ toml = ["tomli"] [[package]] name = "exceptiongroup" -version = "1.2.0" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -138,13 +158,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.4" +version = "1.0.6" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.4-py3-none-any.whl", hash = "sha256:ac418c1db41bade2ad53ae2f3834a3a0f5ae76b56cf5aa497d2d033384fc7d73"}, - {file = "httpcore-1.0.4.tar.gz", hash = "sha256:cb2839ccfcba0d2d3c1131d3c3e26dfc327326fbe7a5dc0dbfe9f6c9151bb022"}, + {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, + {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, ] [package.dependencies] @@ -155,17 +175,17 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.25.0)"] +trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httpx" -version = "0.27.0" +version = "0.27.2" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, ] [package.dependencies] @@ -180,18 +200,22 @@ brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "idna" -version = "3.6" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = 
"idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -274,13 +298,13 @@ files = [ [[package]] name = "packaging" -version = "24.0" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] @@ -469,29 +493,29 @@ Werkzeug = ">=2.0.0" [[package]] name = "ruff" -version = "0.6.5" +version = "0.6.9" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.6.5-py3-none-linux_armv6l.whl", hash = "sha256:7e4e308f16e07c95fc7753fc1aaac690a323b2bb9f4ec5e844a97bb7fbebd748"}, - {file = "ruff-0.6.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:932cd69eefe4daf8c7d92bd6689f7e8182571cb934ea720af218929da7bd7d69"}, - {file = "ruff-0.6.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3a8d42d11fff8d3143ff4da41742a98f8f233bf8890e9fe23077826818f8d680"}, - {file = "ruff-0.6.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a50af6e828ee692fb10ff2dfe53f05caecf077f4210fae9677e06a808275754f"}, - {file = "ruff-0.6.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:794ada3400a0d0b89e3015f1a7e01f4c97320ac665b7bc3ade24b50b54cb2972"}, - {file = "ruff-0.6.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:381413ec47f71ce1d1c614f7779d88886f406f1fd53d289c77e4e533dc6ea200"}, - {file = "ruff-0.6.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:52e75a82bbc9b42e63c08d22ad0ac525117e72aee9729a069d7c4f235fc4d276"}, - {file = "ruff-0.6.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09c72a833fd3551135ceddcba5ebdb68ff89225d30758027280968c9acdc7810"}, - {file = "ruff-0.6.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:800c50371bdcb99b3c1551d5691e14d16d6f07063a518770254227f7f6e8c178"}, - {file = "ruff-0.6.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e25ddd9cd63ba1f3bd51c1f09903904a6adf8429df34f17d728a8fa11174253"}, - {file = "ruff-0.6.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7291e64d7129f24d1b0c947ec3ec4c0076e958d1475c61202497c6aced35dd19"}, - {file = "ruff-0.6.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9ad7dfbd138d09d9a7e6931e6a7e797651ce29becd688be8a0d4d5f8177b4b0c"}, - {file = "ruff-0.6.5-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:005256d977021790cc52aa23d78f06bb5090dc0bfbd42de46d49c201533982ae"}, - {file = "ruff-0.6.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:482c1e6bfeb615eafc5899127b805d28e387bd87db38b2c0c41d271f5e58d8cc"}, - {file = "ruff-0.6.5-py3-none-win32.whl", hash = "sha256:cf4d3fa53644137f6a4a27a2b397381d16454a1566ae5335855c187fbf67e4f5"}, - {file = "ruff-0.6.5-py3-none-win_amd64.whl", hash = "sha256:3e42a57b58e3612051a636bc1ac4e6b838679530235520e8f095f7c44f706ff9"}, - {file = "ruff-0.6.5-py3-none-win_arm64.whl", hash = "sha256:51935067740773afdf97493ba9b8231279e9beef0f2a8079188c4776c25688e0"}, - {file = "ruff-0.6.5.tar.gz", hash = "sha256:4d32d87fab433c0cf285c3683dd4dae63be05fd7a1d65b3f5bf7cdd05a6b96fb"}, + {file = "ruff-0.6.9-py3-none-linux_armv6l.whl", hash = "sha256:064df58d84ccc0ac0fcd63bc3090b251d90e2a372558c0f057c3f75ed73e1ccd"}, + {file = "ruff-0.6.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:140d4b5c9f5fc7a7b074908a78ab8d384dd7f6510402267bc76c37195c02a7ec"}, + {file = "ruff-0.6.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:53fd8ca5e82bdee8da7f506d7b03a261f24cd43d090ea9db9a1dc59d9313914c"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645d7d8761f915e48a00d4ecc3686969761df69fb561dd914a773c1a8266e14e"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eae02b700763e3847595b9d2891488989cac00214da7f845f4bcf2989007d577"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d5ccc9e58112441de8ad4b29dcb7a86dc25c5f770e3c06a9d57e0e5eba48829"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:417b81aa1c9b60b2f8edc463c58363075412866ae4e2b9ab0f690dc1e87ac1b5"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c866b631f5fbce896a74a6e4383407ba7507b815ccc52bcedabb6810fdb3ef7"}, + {file = 
"ruff-0.6.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b118afbb3202f5911486ad52da86d1d52305b59e7ef2031cea3425142b97d6f"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a67267654edc23c97335586774790cde402fb6bbdb3c2314f1fc087dee320bfa"}, + {file = "ruff-0.6.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3ef0cc774b00fec123f635ce5c547dac263f6ee9fb9cc83437c5904183b55ceb"}, + {file = "ruff-0.6.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:12edd2af0c60fa61ff31cefb90aef4288ac4d372b4962c2864aeea3a1a2460c0"}, + {file = "ruff-0.6.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:55bb01caeaf3a60b2b2bba07308a02fca6ab56233302406ed5245180a05c5625"}, + {file = "ruff-0.6.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:925d26471fa24b0ce5a6cdfab1bb526fb4159952385f386bdcc643813d472039"}, + {file = "ruff-0.6.9-py3-none-win32.whl", hash = "sha256:eb61ec9bdb2506cffd492e05ac40e5bc6284873aceb605503d8494180d6fc84d"}, + {file = "ruff-0.6.9-py3-none-win_amd64.whl", hash = "sha256:785d31851c1ae91f45b3d8fe23b8ae4b5170089021fbb42402d811135f0b7117"}, + {file = "ruff-0.6.9-py3-none-win_arm64.whl", hash = "sha256:a9641e31476d601f83cd602608739a0840e348bda93fec9f1ee816f8b6798b93"}, + {file = "ruff-0.6.9.tar.gz", hash = "sha256:b076ef717a8e5bc819514ee1d602bbdca5b4420ae13a9cf61a0c0a4f53a2baa2"}, ] [[package]] @@ -507,35 +531,35 @@ files = [ [[package]] name = "tomli" -version = "2.0.1" +version = "2.0.2" description = "A lil' TOML parser" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, + {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, + {file = 
"tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, ] [[package]] name = "typing-extensions" -version = "4.10.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, - {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] name = "werkzeug" -version = "3.0.1" +version = "3.0.6" description = "The comprehensive WSGI web application library." optional = false python-versions = ">=3.8" files = [ - {file = "werkzeug-3.0.1-py3-none-any.whl", hash = "sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10"}, - {file = "werkzeug-3.0.1.tar.gz", hash = "sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc"}, + {file = "werkzeug-3.0.6-py3-none-any.whl", hash = "sha256:1bc0c2310d2fbb07b1dd1105eba2f7af72f322e1e455f2f93c993bee8c8a5f17"}, + {file = "werkzeug-3.0.6.tar.gz", hash = "sha256:a8dd59d4de28ca70471a34cba79bed5f7ef2e036a76b3ab0835474246eb41f8d"}, ] [package.dependencies] diff --git a/requirements.txt b/requirements.txt index f1dde1f1..992f59a8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,27 +1,27 @@ -anyio==4.3.0 ; python_version >= "3.8" and python_version < "4.0" \ - --hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8 \ - --hash=sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6 -certifi==2024.2.2 ; python_version >= "3.8" and python_version < "4.0" \ - 
--hash=sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f \ - --hash=sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1 -exceptiongroup==1.2.0 ; python_version >= "3.8" and python_version < "3.11" \ - --hash=sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14 \ - --hash=sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68 +anyio==4.5.2 ; python_version >= "3.8" and python_version < "4.0" \ + --hash=sha256:23009af4ed04ce05991845451e11ef02fc7c5ed29179ac9a420e5ad0ac7ddc5b \ + --hash=sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f +certifi==2024.8.30 ; python_version >= "3.8" and python_version < "4.0" \ + --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ + --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 +exceptiongroup==1.2.2 ; python_version >= "3.8" and python_version < "3.11" \ + --hash=sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b \ + --hash=sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc h11==0.14.0 ; python_version >= "3.8" and python_version < "4.0" \ --hash=sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d \ --hash=sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761 -httpcore==1.0.4 ; python_version >= "3.8" and python_version < "4.0" \ - --hash=sha256:ac418c1db41bade2ad53ae2f3834a3a0f5ae76b56cf5aa497d2d033384fc7d73 \ - --hash=sha256:cb2839ccfcba0d2d3c1131d3c3e26dfc327326fbe7a5dc0dbfe9f6c9151bb022 -httpx==0.27.0 ; python_version >= "3.8" and python_version < "4.0" \ - --hash=sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5 \ - --hash=sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5 -idna==3.6 ; python_version >= "3.8" and python_version < "4.0" \ - --hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \ - 
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f +httpcore==1.0.6 ; python_version >= "3.8" and python_version < "4.0" \ + --hash=sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f \ + --hash=sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f +httpx==0.27.2 ; python_version >= "3.8" and python_version < "4.0" \ + --hash=sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0 \ + --hash=sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2 +idna==3.10 ; python_version >= "3.8" and python_version < "4.0" \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 sniffio==1.3.1 ; python_version >= "3.8" and python_version < "4.0" \ --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \ --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc -typing-extensions==4.10.0 ; python_version >= "3.8" and python_version < "3.11" \ - --hash=sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475 \ - --hash=sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb +typing-extensions==4.12.2 ; python_version >= "3.8" and python_version < "3.11" \ + --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ + --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 From 0bbc2460076a75cebedf5de1315e7c9a70cfbb28 Mon Sep 17 00:00:00 2001 From: Michael Yang Date: Wed, 4 Sep 2024 10:40:35 -0700 Subject: [PATCH 08/18] pydantic types --- ollama/__init__.py | 34 +- ollama/_client.py | 879 +++++++++++++++++++++++++------------------ ollama/_types.py | 422 ++++++++++++++++----- poetry.lock | 162 +++++++- pyproject.toml | 1 + requirements.txt | 101 ++++- tests/test_client.py | 244 +++++------- 7 files changed, 1207 insertions(+), 636 deletions(-) diff --git 
a/ollama/__init__.py b/ollama/__init__.py index c452f710..23d736a8 100644 --- a/ollama/__init__.py +++ b/ollama/__init__.py @@ -1,10 +1,17 @@ from ollama._client import Client, AsyncClient from ollama._types import ( + Options, + Message, + Tool, GenerateResponse, ChatResponse, + EmbedResponse, + EmbeddingsResponse, + StatusResponse, ProgressResponse, - Message, - Options, + ListResponse, + ShowResponse, + ProcessResponse, RequestError, ResponseError, ) @@ -12,25 +19,20 @@ __all__ = [ 'Client', 'AsyncClient', + 'Options', + 'Message', + 'Tool', 'GenerateResponse', 'ChatResponse', + 'EmbedResponse', + 'EmbeddingsResponse', + 'StatusResponse', 'ProgressResponse', - 'Message', - 'Options', + 'ListResponse', + 'ShowResponse', + 'ProcessResponse', 'RequestError', 'ResponseError', - 'generate', - 'chat', - 'embed', - 'embeddings', - 'pull', - 'push', - 'create', - 'delete', - 'list', - 'copy', - 'show', - 'ps', ] _client = Client() diff --git a/ollama/_client.py b/ollama/_client.py index e3d9fed0..c1f5f95d 100644 --- a/ollama/_client.py +++ b/ollama/_client.py @@ -1,18 +1,24 @@ -import ipaddress import os import io import json -import httpx -import binascii import platform +import ipaddress import urllib.parse from os import PathLike from pathlib import Path -from copy import deepcopy from hashlib import sha256 -from base64 import b64encode, b64decode -from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal, overload +from typing import ( + Any, + Literal, + Mapping, + Optional, + Sequence, + Type, + TypeVar, + Union, + overload, +) import sys @@ -28,7 +34,38 @@ except metadata.PackageNotFoundError: __version__ = '0.0.0' -from ollama._types import Message, Options, RequestError, ResponseError, Tool +import httpx + +from ollama._types import ( + ChatRequest, + ChatResponse, + CreateRequest, + CopyRequest, + DeleteRequest, + EmbedRequest, + EmbedResponse, + EmbeddingsRequest, + EmbeddingsResponse, + GenerateRequest, + GenerateResponse, + Image, + 
ListResponse, + Message, + Options, + ProcessResponse, + ProgressResponse, + PullRequest, + PushRequest, + RequestError, + ResponseError, + ShowRequest, + ShowResponse, + StatusResponse, + Tool, +) + + +T = TypeVar('T') class BaseClient: @@ -38,6 +75,7 @@ def __init__( host: Optional[str] = None, follow_redirects: bool = True, timeout: Any = None, + headers: Optional[Mapping[str, str]] = None, **kwargs, ) -> None: """ @@ -48,16 +86,15 @@ def __init__( `kwargs` are passed to the httpx client. """ - headers = kwargs.pop('headers', {}) - headers['Content-Type'] = 'application/json' - headers['Accept'] = 'application/json' - headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}' - self._client = client( base_url=_parse_host(host or os.getenv('OLLAMA_HOST')), follow_redirects=follow_redirects, timeout=timeout, - headers=headers, + headers={ + 'Content-Type': 'application/json', + 'Accept': 'application/json', + 'User-Agent': f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}', + }.update(headers or {}), **kwargs, ) @@ -66,37 +103,67 @@ class Client(BaseClient): def __init__(self, host: Optional[str] = None, **kwargs) -> None: super().__init__(httpx.Client, host, **kwargs) - def _request(self, method: str, url: str, **kwargs) -> httpx.Response: - response = self._client.request(method, url, **kwargs) - + def _request_raw(self, *args, **kwargs): + r = self._client.request(*args, **kwargs) try: - response.raise_for_status() + r.raise_for_status() except httpx.HTTPStatusError as e: raise ResponseError(e.response.text, e.response.status_code) from None + return r - return response + @overload + def _request( + self, + cls: Type[T], + *args, + stream: Literal[False] = False, + **kwargs, + ) -> T: ... 
- def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]: - with self._client.stream(method, url, **kwargs) as r: - try: - r.raise_for_status() - except httpx.HTTPStatusError as e: - e.response.read() - raise ResponseError(e.response.text, e.response.status_code) from None + @overload + def _request( + self, + cls: Type[T], + *args, + stream: Literal[True] = True, + **kwargs, + ) -> Iterator[T]: ... - for line in r.iter_lines(): - partial = json.loads(line) - if e := partial.get('error'): - raise ResponseError(e) - yield partial + @overload + def _request( + self, + cls: Type[T], + *args, + stream: bool = False, + **kwargs, + ) -> Union[T, Iterator[T]]: ... - def _request_stream( + def _request( self, + cls: Type[T], *args, stream: bool = False, **kwargs, - ) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]: - return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json() + ) -> Union[T, Iterator[T]]: + if stream: + + def inner(): + with self._client.stream(*args, **kwargs) as r: + try: + r.raise_for_status() + except httpx.HTTPStatusError as e: + e.response.read() + raise ResponseError(e.response.text, e.response.status_code) from None + + for line in r.iter_lines(): + part = json.loads(line) + if err := part.get('error'): + raise ResponseError(err) + yield cls(**part) + + return inner() + + return cls(**self._request_raw(*args, **kwargs).json()) @overload def generate( @@ -104,16 +171,17 @@ def generate( model: str = '', prompt: str = '', suffix: str = '', + *, system: str = '', template: str = '', context: Optional[Sequence[int]] = None, stream: Literal[False] = False, raw: bool = False, - format: Literal['', 'json'] = '', - images: Optional[Sequence[AnyStr]] = None, - options: Optional[Options] = None, + format: Optional[Literal['', 'json']] = None, + images: Optional[Sequence[Union[str, bytes]]] = None, + options: Optional[Union[Mapping[str, Any], Options]] = None, keep_alive: Optional[Union[float, 
str]] = None, - ) -> Mapping[str, Any]: ... + ) -> GenerateResponse: ... @overload def generate( @@ -121,32 +189,34 @@ def generate( model: str = '', prompt: str = '', suffix: str = '', + *, system: str = '', template: str = '', context: Optional[Sequence[int]] = None, stream: Literal[True] = True, raw: bool = False, - format: Literal['', 'json'] = '', - images: Optional[Sequence[AnyStr]] = None, - options: Optional[Options] = None, + format: Optional[Literal['', 'json']] = None, + images: Optional[Sequence[Union[str, bytes]]] = None, + options: Optional[Union[Mapping[str, Any], Options]] = None, keep_alive: Optional[Union[float, str]] = None, - ) -> Iterator[Mapping[str, Any]]: ... + ) -> Iterator[GenerateResponse]: ... def generate( self, model: str = '', - prompt: str = '', - suffix: str = '', - system: str = '', - template: str = '', + prompt: Optional[str] = None, + suffix: Optional[str] = None, + *, + system: Optional[str] = None, + template: Optional[str] = None, context: Optional[Sequence[int]] = None, stream: bool = False, - raw: bool = False, - format: Literal['', 'json'] = '', - images: Optional[Sequence[AnyStr]] = None, - options: Optional[Options] = None, + raw: Optional[bool] = None, + format: Optional[Literal['', 'json']] = None, + images: Optional[Sequence[Union[str, bytes]]] = None, + options: Optional[Union[Mapping[str, Any], Options]] = None, keep_alive: Optional[Union[float, str]] = None, - ) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]: + ) -> Union[GenerateResponse, Iterator[GenerateResponse]]: """ Create a response using the requested model. @@ -157,26 +227,24 @@ def generate( Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator. 
""" - if not model: - raise RequestError('must provide a model') - - return self._request_stream( + return self._request( + GenerateResponse, 'POST', '/api/generate', - json={ - 'model': model, - 'prompt': prompt, - 'suffix': suffix, - 'system': system, - 'template': template, - 'context': context or [], - 'stream': stream, - 'raw': raw, - 'images': [_encode_image(image) for image in images or []], - 'format': format, - 'options': options or {}, - 'keep_alive': keep_alive, - }, + json=GenerateRequest( + model=model, + prompt=prompt, + suffix=suffix, + system=system, + template=template, + context=context, + stream=stream, + raw=raw, + format=format, + images=[Image(value=image) for image in images] if images else None, + options=options, + keep_alive=keep_alive, + ).model_dump(exclude_none=True), stream=stream, ) @@ -184,36 +252,39 @@ def generate( def chat( self, model: str = '', - messages: Optional[Sequence[Message]] = None, - tools: Optional[Sequence[Tool]] = None, + messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None, + *, + tools: Optional[Sequence[Union[Mapping[str, Any], Tool]]] = None, stream: Literal[False] = False, - format: Literal['', 'json'] = '', - options: Optional[Options] = None, + format: Optional[Literal['', 'json']] = None, + options: Optional[Union[Mapping[str, Any], Options]] = None, keep_alive: Optional[Union[float, str]] = None, - ) -> Mapping[str, Any]: ... + ) -> ChatResponse: ... 
@overload def chat( self, model: str = '', - messages: Optional[Sequence[Message]] = None, - tools: Optional[Sequence[Tool]] = None, + messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None, + *, + tools: Optional[Sequence[Union[Mapping[str, Any], Tool]]] = None, stream: Literal[True] = True, - format: Literal['', 'json'] = '', - options: Optional[Options] = None, + format: Optional[Literal['', 'json']] = None, + options: Optional[Union[Mapping[str, Any], Options]] = None, keep_alive: Optional[Union[float, str]] = None, - ) -> Iterator[Mapping[str, Any]]: ... + ) -> Iterator[ChatResponse]: ... def chat( self, model: str = '', - messages: Optional[Sequence[Message]] = None, - tools: Optional[Sequence[Tool]] = None, + messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None, + *, + tools: Optional[Sequence[Union[Mapping[str, Any], Tool]]] = None, stream: bool = False, - format: Literal['', 'json'] = '', - options: Optional[Options] = None, + format: Optional[Literal['', 'json']] = None, + options: Optional[Union[Mapping[str, Any], Options]] = None, keep_alive: Optional[Union[float, str]] = None, - ) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]: + ) -> Union[ChatResponse, Iterator[ChatResponse]]: """ Create a chat response using the requested model. @@ -224,109 +295,104 @@ def chat( Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator. 
""" - if not model: - raise RequestError('must provide a model') - - messages = deepcopy(messages) - - for message in messages or []: - if images := message.get('images'): - message['images'] = [_encode_image(image) for image in images] - - return self._request_stream( + return self._request( + ChatResponse, 'POST', '/api/chat', - json={ - 'model': model, - 'messages': messages, - 'tools': tools or [], - 'stream': stream, - 'format': format, - 'options': options or {}, - 'keep_alive': keep_alive, - }, + json=ChatRequest( + model=model, + messages=[message for message in _copy_messages(messages)], + tools=[tool for tool in _copy_tools(tools)], + stream=stream, + format=format, + options=options, + keep_alive=keep_alive, + ).model_dump(exclude_none=True), stream=stream, ) def embed( self, model: str = '', - input: Union[str, Sequence[AnyStr]] = '', - truncate: bool = True, - options: Optional[Options] = None, + input: Union[str, Sequence[str]] = '', + truncate: Optional[bool] = None, + options: Optional[Union[Mapping[str, Any], Options]] = None, keep_alive: Optional[Union[float, str]] = None, - ) -> Mapping[str, Any]: - if not model: - raise RequestError('must provide a model') - + ) -> EmbedResponse: return self._request( + EmbedResponse, 'POST', '/api/embed', - json={ - 'model': model, - 'input': input, - 'truncate': truncate, - 'options': options or {}, - 'keep_alive': keep_alive, - }, - ).json() + json=EmbedRequest( + model=model, + input=input, + truncate=truncate, + options=options, + keep_alive=keep_alive, + ).model_dump(exclude_none=True), + ) def embeddings( self, model: str = '', - prompt: str = '', - options: Optional[Options] = None, + prompt: Optional[str] = None, + options: Optional[Union[Mapping[str, Any], Options]] = None, keep_alive: Optional[Union[float, str]] = None, - ) -> Mapping[str, Sequence[float]]: + ) -> EmbeddingsResponse: """ Deprecated in favor of `embed`. 
""" return self._request( + EmbeddingsResponse, 'POST', '/api/embeddings', - json={ - 'model': model, - 'prompt': prompt, - 'options': options or {}, - 'keep_alive': keep_alive, - }, - ).json() + json=EmbeddingsRequest( + model=model, + prompt=prompt, + options=options, + keep_alive=keep_alive, + ).model_dump(exclude_none=True), + ) @overload def pull( self, model: str, + *, insecure: bool = False, stream: Literal[False] = False, - ) -> Mapping[str, Any]: ... + ) -> ProgressResponse: ... @overload def pull( self, model: str, + *, insecure: bool = False, stream: Literal[True] = True, - ) -> Iterator[Mapping[str, Any]]: ... + ) -> Iterator[ProgressResponse]: ... def pull( self, model: str, + *, insecure: bool = False, stream: bool = False, - ) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]: + ) -> Union[ProgressResponse, Iterator[ProgressResponse]]: """ Raises `ResponseError` if the request could not be fulfilled. Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator. """ - return self._request_stream( + return self._request( + ProgressResponse, 'POST', '/api/pull', - json={ - 'name': model, - 'insecure': insecure, - 'stream': stream, - }, + json=PullRequest( + model=model, + insecure=insecure, + stream=stream, + ).model_dump(exclude_none=True), stream=stream, ) @@ -334,37 +400,41 @@ def pull( def push( self, model: str, + *, insecure: bool = False, stream: Literal[False] = False, - ) -> Mapping[str, Any]: ... + ) -> ProgressResponse: ... @overload def push( self, model: str, + *, insecure: bool = False, stream: Literal[True] = True, - ) -> Iterator[Mapping[str, Any]]: ... + ) -> Iterator[ProgressResponse]: ... def push( self, model: str, + *, insecure: bool = False, stream: bool = False, - ) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]: + ) -> Union[ProgressResponse, Iterator[ProgressResponse]]: """ Raises `ResponseError` if the request could not be fulfilled. 
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator. """ - return self._request_stream( + return self._request( + ProgressResponse, 'POST', '/api/push', - json={ - 'name': model, - 'insecure': insecure, - 'stream': stream, - }, + json=PushRequest( + model=model, + insecure=insecure, + stream=stream, + ).model_dump(exclude_none=True), stream=stream, ) @@ -374,9 +444,10 @@ def create( model: str, path: Optional[Union[str, PathLike]] = None, modelfile: Optional[str] = None, + *, quantize: Optional[str] = None, stream: Literal[False] = False, - ) -> Mapping[str, Any]: ... + ) -> ProgressResponse: ... @overload def create( @@ -384,18 +455,20 @@ def create( model: str, path: Optional[Union[str, PathLike]] = None, modelfile: Optional[str] = None, + *, quantize: Optional[str] = None, stream: Literal[True] = True, - ) -> Iterator[Mapping[str, Any]]: ... + ) -> Iterator[ProgressResponse]: ... def create( self, model: str, path: Optional[Union[str, PathLike]] = None, modelfile: Optional[str] = None, + *, quantize: Optional[str] = None, stream: bool = False, - ) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]: + ) -> Union[ProgressResponse, Iterator[ProgressResponse]]: """ Raises `ResponseError` if the request could not be fulfilled. 
@@ -408,15 +481,16 @@ def create( else: raise RequestError('must provide either path or modelfile') - return self._request_stream( + return self._request( + ProgressResponse, 'POST', '/api/create', - json={ - 'name': model, - 'modelfile': modelfile, - 'stream': stream, - 'quantize': quantize, - }, + json=CreateRequest( + model=model, + modelfile=modelfile, + stream=stream, + quantize=quantize, + ).model_dump(exclude_none=True), stream=stream, ) @@ -450,76 +524,131 @@ def _create_blob(self, path: Union[str, Path]) -> str: digest = f'sha256:{sha256sum.hexdigest()}' try: - self._request('HEAD', f'/api/blobs/{digest}') + self._request_raw('HEAD', f'/api/blobs/{digest}') except ResponseError as e: if e.status_code != 404: raise with open(path, 'rb') as r: - self._request('POST', f'/api/blobs/{digest}', content=r) + self._request_raw('POST', f'/api/blobs/{digest}', content=r) return digest - def delete(self, model: str) -> Mapping[str, Any]: - response = self._request('DELETE', '/api/delete', json={'name': model}) - return {'status': 'success' if response.status_code == 200 else 'error'} + def list(self) -> ListResponse: + return self._request( + ListResponse, + 'GET', + '/api/tags', + ) - def list(self) -> Mapping[str, Any]: - return self._request('GET', '/api/tags').json() + def delete(self, model: str) -> StatusResponse: + r = self._request_raw( + 'DELETE', + '/api/delete', + json=DeleteRequest( + model=model, + ).model_dump(exclude_none=True), + ) + return StatusResponse( + status='success' if r.status_code == 200 else 'error', + ) - def copy(self, source: str, destination: str) -> Mapping[str, Any]: - response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination}) - return {'status': 'success' if response.status_code == 200 else 'error'} + def copy(self, source: str, destination: str) -> StatusResponse: + r = self._request_raw( + 'POST', + '/api/copy', + json=CopyRequest( + source=source, + destination=destination, + 
).model_dump(exclude_none=True), + ) + return StatusResponse( + status='success' if r.status_code == 200 else 'error', + ) - def show(self, model: str) -> Mapping[str, Any]: - return self._request('POST', '/api/show', json={'name': model}).json() + def show(self, model: str) -> ShowResponse: + return self._request( + ShowResponse, + 'POST', + '/api/show', + json=ShowRequest( + model=model, + ).model_dump(exclude_none=True), + ) - def ps(self) -> Mapping[str, Any]: - return self._request('GET', '/api/ps').json() + def ps(self) -> ProcessResponse: + return self._request( + ProcessResponse, + 'GET', + '/api/ps', + ) class AsyncClient(BaseClient): def __init__(self, host: Optional[str] = None, **kwargs) -> None: super().__init__(httpx.AsyncClient, host, **kwargs) - async def _request(self, method: str, url: str, **kwargs) -> httpx.Response: - response = await self._client.request(method, url, **kwargs) - + async def _request_raw(self, *args, **kwargs): + r = await self._client.request(*args, **kwargs) try: - response.raise_for_status() + r.raise_for_status() except httpx.HTTPStatusError as e: raise ResponseError(e.response.text, e.response.status_code) from None + return r - return response - - async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]: - async def inner(): - async with self._client.stream(method, url, **kwargs) as r: - try: - r.raise_for_status() - except httpx.HTTPStatusError as e: - await e.response.aread() - raise ResponseError(e.response.text, e.response.status_code) from None + @overload + async def _request( + self, + cls: Type[T], + *args, + stream: Literal[False] = False, + **kwargs, + ) -> T: ... - async for line in r.aiter_lines(): - partial = json.loads(line) - if e := partial.get('error'): - raise ResponseError(e) - yield partial + @overload + async def _request( + self, + cls: Type[T], + *args, + stream: Literal[True] = True, + **kwargs, + ) -> AsyncIterator[T]: ... 
- return inner() + @overload + async def _request( + self, + cls: Type[T], + *args, + stream: bool = False, + **kwargs, + ) -> Union[T, AsyncIterator[T]]: ... - async def _request_stream( + async def _request( self, + cls: Type[T], *args, stream: bool = False, **kwargs, - ) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]: + ) -> Union[T, AsyncIterator[T]]: if stream: - return await self._stream(*args, **kwargs) - response = await self._request(*args, **kwargs) - return response.json() + async def inner(): + async with self._client.stream(*args, **kwargs) as r: + try: + r.raise_for_status() + except httpx.HTTPStatusError as e: + await e.response.aread() + raise ResponseError(e.response.text, e.response.status_code) from None + + async for line in r.aiter_lines(): + part = json.loads(line) + if err := part.get('error'): + raise ResponseError(err) + yield cls(**part) + + return inner() + + return cls(**(await self._request_raw(*args, **kwargs)).json()) @overload async def generate( @@ -527,16 +656,17 @@ async def generate( model: str = '', prompt: str = '', suffix: str = '', + *, system: str = '', template: str = '', context: Optional[Sequence[int]] = None, stream: Literal[False] = False, raw: bool = False, - format: Literal['', 'json'] = '', - images: Optional[Sequence[AnyStr]] = None, - options: Optional[Options] = None, + format: Optional[Literal['', 'json']] = None, + images: Optional[Sequence[Union[str, bytes]]] = None, + options: Optional[Union[Mapping[str, Any], Options]] = None, keep_alive: Optional[Union[float, str]] = None, - ) -> Mapping[str, Any]: ... + ) -> GenerateResponse: ... 
@overload async def generate( @@ -544,32 +674,34 @@ async def generate( model: str = '', prompt: str = '', suffix: str = '', + *, system: str = '', template: str = '', context: Optional[Sequence[int]] = None, stream: Literal[True] = True, raw: bool = False, - format: Literal['', 'json'] = '', - images: Optional[Sequence[AnyStr]] = None, - options: Optional[Options] = None, + format: Optional[Literal['', 'json']] = None, + images: Optional[Sequence[Union[str, bytes]]] = None, + options: Optional[Union[Mapping[str, Any], Options]] = None, keep_alive: Optional[Union[float, str]] = None, - ) -> AsyncIterator[Mapping[str, Any]]: ... + ) -> AsyncIterator[GenerateResponse]: ... async def generate( self, model: str = '', - prompt: str = '', - suffix: str = '', - system: str = '', - template: str = '', + prompt: Optional[str] = None, + suffix: Optional[str] = None, + *, + system: Optional[str] = None, + template: Optional[str] = None, context: Optional[Sequence[int]] = None, stream: bool = False, - raw: bool = False, - format: Literal['', 'json'] = '', - images: Optional[Sequence[AnyStr]] = None, - options: Optional[Options] = None, + raw: Optional[bool] = None, + format: Optional[Literal['', 'json']] = None, + images: Optional[Sequence[Union[str, bytes]]] = None, + options: Optional[Union[Mapping[str, Any], Options]] = None, keep_alive: Optional[Union[float, str]] = None, - ) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]: + ) -> Union[GenerateResponse, AsyncIterator[GenerateResponse]]: """ Create a response using the requested model. @@ -579,26 +711,24 @@ async def generate( Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator. 
""" - if not model: - raise RequestError('must provide a model') - - return await self._request_stream( + return await self._request( + GenerateResponse, 'POST', '/api/generate', - json={ - 'model': model, - 'prompt': prompt, - 'suffix': suffix, - 'system': system, - 'template': template, - 'context': context or [], - 'stream': stream, - 'raw': raw, - 'images': [_encode_image(image) for image in images or []], - 'format': format, - 'options': options or {}, - 'keep_alive': keep_alive, - }, + json=GenerateRequest( + model=model, + prompt=prompt, + suffix=suffix, + system=system, + template=template, + context=context, + stream=stream, + raw=raw, + format=format, + images=[Image(value=image) for image in images] if images else None, + options=options, + keep_alive=keep_alive, + ).model_dump(exclude_none=True), stream=stream, ) @@ -606,36 +736,39 @@ async def generate( async def chat( self, model: str = '', - messages: Optional[Sequence[Message]] = None, - tools: Optional[Sequence[Tool]] = None, + messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None, + *, + tools: Optional[Sequence[Union[Mapping[str, Any], Tool]]] = None, stream: Literal[False] = False, - format: Literal['', 'json'] = '', - options: Optional[Options] = None, + format: Optional[Literal['', 'json']] = None, + options: Optional[Union[Mapping[str, Any], Options]] = None, keep_alive: Optional[Union[float, str]] = None, - ) -> Mapping[str, Any]: ... + ) -> ChatResponse: ... 
@overload async def chat( self, model: str = '', - messages: Optional[Sequence[Message]] = None, - tools: Optional[Sequence[Tool]] = None, + messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None, + *, + tools: Optional[Sequence[Union[Mapping[str, Any], Tool]]] = None, stream: Literal[True] = True, - format: Literal['', 'json'] = '', - options: Optional[Options] = None, + format: Optional[Literal['', 'json']] = None, + options: Optional[Union[Mapping[str, Any], Options]] = None, keep_alive: Optional[Union[float, str]] = None, - ) -> AsyncIterator[Mapping[str, Any]]: ... + ) -> AsyncIterator[ChatResponse]: ... async def chat( self, model: str = '', - messages: Optional[Sequence[Message]] = None, - tools: Optional[Sequence[Tool]] = None, + messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None, + *, + tools: Optional[Sequence[Union[Mapping[str, Any], Tool]]] = None, stream: bool = False, - format: Literal['', 'json'] = '', - options: Optional[Options] = None, + format: Optional[Literal['', 'json']] = None, + options: Optional[Union[Mapping[str, Any], Options]] = None, keep_alive: Optional[Union[float, str]] = None, - ) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]: + ) -> Union[ChatResponse, AsyncIterator[ChatResponse]]: """ Create a chat response using the requested model. @@ -645,113 +778,105 @@ async def chat( Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator. 
""" - if not model: - raise RequestError('must provide a model') - - messages = deepcopy(messages) - - for message in messages or []: - if images := message.get('images'): - message['images'] = [_encode_image(image) for image in images] - return await self._request_stream( + return await self._request( + ChatResponse, 'POST', '/api/chat', - json={ - 'model': model, - 'messages': messages, - 'tools': tools or [], - 'stream': stream, - 'format': format, - 'options': options or {}, - 'keep_alive': keep_alive, - }, + json=ChatRequest( + model=model, + messages=[message for message in _copy_messages(messages)], + tools=[tool for tool in _copy_tools(tools)], + stream=stream, + format=format, + options=options, + keep_alive=keep_alive, + ).model_dump(exclude_none=True), stream=stream, ) async def embed( self, model: str = '', - input: Union[str, Sequence[AnyStr]] = '', - truncate: bool = True, - options: Optional[Options] = None, + input: Union[str, Sequence[str]] = '', + truncate: Optional[bool] = None, + options: Optional[Union[Mapping[str, Any], Options]] = None, keep_alive: Optional[Union[float, str]] = None, - ) -> Mapping[str, Any]: - if not model: - raise RequestError('must provide a model') - - response = await self._request( + ) -> EmbedResponse: + return await self._request( + EmbedResponse, 'POST', '/api/embed', - json={ - 'model': model, - 'input': input, - 'truncate': truncate, - 'options': options or {}, - 'keep_alive': keep_alive, - }, + json=EmbedRequest( + model=model, + input=input, + truncate=truncate, + options=options, + keep_alive=keep_alive, + ).model_dump(exclude_none=True), ) - return response.json() - async def embeddings( self, model: str = '', - prompt: str = '', - options: Optional[Options] = None, + prompt: Optional[str] = None, + options: Optional[Union[Mapping[str, Any], Options]] = None, keep_alive: Optional[Union[float, str]] = None, - ) -> Mapping[str, Sequence[float]]: + ) -> EmbeddingsResponse: """ Deprecated in favor of `embed`. 
""" - response = await self._request( + return await self._request( + EmbeddingsResponse, 'POST', '/api/embeddings', - json={ - 'model': model, - 'prompt': prompt, - 'options': options or {}, - 'keep_alive': keep_alive, - }, + json=EmbeddingsRequest( + model=model, + prompt=prompt, + options=options, + keep_alive=keep_alive, + ).model_dump(exclude_none=True), ) - return response.json() - @overload async def pull( self, model: str, + *, insecure: bool = False, stream: Literal[False] = False, - ) -> Mapping[str, Any]: ... + ) -> ProgressResponse: ... @overload async def pull( self, model: str, + *, insecure: bool = False, stream: Literal[True] = True, - ) -> AsyncIterator[Mapping[str, Any]]: ... + ) -> AsyncIterator[ProgressResponse]: ... async def pull( self, model: str, + *, insecure: bool = False, stream: bool = False, - ) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]: + ) -> Union[ProgressResponse, AsyncIterator[ProgressResponse]]: """ Raises `ResponseError` if the request could not be fulfilled. Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator. """ - return await self._request_stream( + return await self._request( + ProgressResponse, 'POST', '/api/pull', - json={ - 'name': model, - 'insecure': insecure, - 'stream': stream, - }, + json=PullRequest( + model=model, + insecure=insecure, + stream=stream, + ).model_dump(exclude_none=True), stream=stream, ) @@ -759,37 +884,41 @@ async def pull( async def push( self, model: str, + *, insecure: bool = False, stream: Literal[False] = False, - ) -> Mapping[str, Any]: ... + ) -> ProgressResponse: ... @overload async def push( self, model: str, + *, insecure: bool = False, stream: Literal[True] = True, - ) -> AsyncIterator[Mapping[str, Any]]: ... + ) -> AsyncIterator[ProgressResponse]: ... 
async def push( self, model: str, + *, insecure: bool = False, stream: bool = False, - ) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]: + ) -> Union[ProgressResponse, AsyncIterator[ProgressResponse]]: """ Raises `ResponseError` if the request could not be fulfilled. Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator. """ - return await self._request_stream( + return await self._request( + ProgressResponse, 'POST', '/api/push', - json={ - 'name': model, - 'insecure': insecure, - 'stream': stream, - }, + json=PushRequest( + model=model, + insecure=insecure, + stream=stream, + ).model_dump(exclude_none=True), stream=stream, ) @@ -799,9 +928,10 @@ async def create( model: str, path: Optional[Union[str, PathLike]] = None, modelfile: Optional[str] = None, + *, quantize: Optional[str] = None, stream: Literal[False] = False, - ) -> Mapping[str, Any]: ... + ) -> ProgressResponse: ... @overload async def create( @@ -809,18 +939,20 @@ async def create( model: str, path: Optional[Union[str, PathLike]] = None, modelfile: Optional[str] = None, + *, quantize: Optional[str] = None, stream: Literal[True] = True, - ) -> AsyncIterator[Mapping[str, Any]]: ... + ) -> AsyncIterator[ProgressResponse]: ... async def create( self, model: str, path: Optional[Union[str, PathLike]] = None, modelfile: Optional[str] = None, + *, quantize: Optional[str] = None, stream: bool = False, - ) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]: + ) -> Union[ProgressResponse, AsyncIterator[ProgressResponse]]: """ Raises `ResponseError` if the request could not be fulfilled. 
@@ -833,15 +965,16 @@ async def create( else: raise RequestError('must provide either path or modelfile') - return await self._request_stream( + return await self._request( + ProgressResponse, 'POST', '/api/create', - json={ - 'name': model, - 'modelfile': modelfile, - 'stream': stream, - 'quantize': quantize, - }, + json=CreateRequest( + model=model, + modelfile=modelfile, + stream=stream, + quantize=quantize, + ).model_dump(exclude_none=True), stream=stream, ) @@ -875,7 +1008,7 @@ async def _create_blob(self, path: Union[str, Path]) -> str: digest = f'sha256:{sha256sum.hexdigest()}' try: - await self._request('HEAD', f'/api/blobs/{digest}') + await self._request_raw('HEAD', f'/api/blobs/{digest}') except ResponseError as e: if e.status_code != 404: raise @@ -888,60 +1021,70 @@ async def upload_bytes(): break yield chunk - await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes()) + await self._request_raw('POST', f'/api/blobs/{digest}', content=upload_bytes()) return digest - async def delete(self, model: str) -> Mapping[str, Any]: - response = await self._request('DELETE', '/api/delete', json={'name': model}) - return {'status': 'success' if response.status_code == 200 else 'error'} - - async def list(self) -> Mapping[str, Any]: - response = await self._request('GET', '/api/tags') - return response.json() - - async def copy(self, source: str, destination: str) -> Mapping[str, Any]: - response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination}) - return {'status': 'success' if response.status_code == 200 else 'error'} + async def list(self) -> ListResponse: + return await self._request( + ListResponse, + 'GET', + '/api/tags', + ) - async def show(self, model: str) -> Mapping[str, Any]: - response = await self._request('POST', '/api/show', json={'name': model}) - return response.json() + async def delete(self, model: str) -> StatusResponse: + r = await self._request_raw( + 'DELETE', + '/api/delete', + 
json=DeleteRequest( + model=model, + ).model_dump(exclude_none=True), + ) + return StatusResponse( + status='success' if r.status_code == 200 else 'error', + ) - async def ps(self) -> Mapping[str, Any]: - response = await self._request('GET', '/api/ps') - return response.json() + async def copy(self, source: str, destination: str) -> StatusResponse: + r = await self._request_raw( + 'POST', + '/api/copy', + json=CopyRequest( + source=source, + destination=destination, + ).model_dump(exclude_none=True), + ) + return StatusResponse( + status='success' if r.status_code == 200 else 'error', + ) + async def show(self, model: str) -> ShowResponse: + return await self._request( + ShowResponse, + 'POST', + '/api/show', + json=ShowRequest( + model=model, + ).model_dump(exclude_none=True), + ) -def _encode_image(image) -> str: - """ - >>> _encode_image(b'ollama') - 'b2xsYW1h' - >>> _encode_image(io.BytesIO(b'ollama')) - 'b2xsYW1h' - >>> _encode_image('LICENSE') - 'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQ
T1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo=' - >>> _encode_image(Path('LICENSE')) - 'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo=' - >>> _encode_image('YWJj') - 'YWJj' - >>> _encode_image(b'YWJj') - 'YWJj' - """ + async def ps(self) -> ProcessResponse: + return await self._request( + 
ProcessResponse, + 'GET', + '/api/ps', + ) - if p := _as_path(image): - return b64encode(p.read_bytes()).decode('utf-8') - try: - b64decode(image, validate=True) - return image if isinstance(image, str) else image.decode('utf-8') - except (binascii.Error, TypeError): - ... +def _copy_messages(messages: Optional[Sequence[Union[Mapping[str, Any], Message]]]) -> Iterator[Message]: + for message in messages or []: + yield Message.model_validate( + {k: [Image(value=image) for image in v] if k == 'images' else v for k, v in dict(message).items() if v}, + ) - if b := _as_bytesio(image): - return b64encode(b.read()).decode('utf-8') - raise RequestError('image must be bytes, path-like object, or file-like object') +def _copy_tools(tools: Optional[Sequence[Union[Mapping[str, Any], Tool]]]) -> Iterator[Tool]: + for tool in tools or []: + yield Tool.model_validate(tool) def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]: @@ -954,14 +1097,6 @@ def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]: return None -def _as_bytesio(s: Any) -> Union[io.BytesIO, None]: - if isinstance(s, io.BytesIO): - return s - elif isinstance(s, bytes): - return io.BytesIO(s) - return None - - def _parse_host(host: Optional[str]) -> str: """ >>> _parse_host(None) @@ -1039,9 +1174,9 @@ def _parse_host(host: Optional[str]) -> str: host = split.hostname or '127.0.0.1' port = split.port or port - # Fix missing square brackets for IPv6 from urlsplit try: if isinstance(ipaddress.ip_address(host), ipaddress.IPv6Address): + # Fix missing square brackets for IPv6 from urlsplit host = f'[{host}]' except ValueError: ... 
diff --git a/ollama/_types.py b/ollama/_types.py index 7bdcbe49..b223d9cc 100644 --- a/ollama/_types.py +++ b/ollama/_types.py @@ -1,43 +1,162 @@ import json -from typing import Any, TypedDict, Sequence, Literal, Mapping +from base64 import b64encode +from pathlib import Path +from datetime import datetime +from typing import ( + Any, + Literal, + Mapping, + Optional, + Sequence, + Union, +) +from typing_extensions import Annotated + +from pydantic import ( + BaseModel, + ByteSize, + Field, + FilePath, + Base64Str, + model_serializer, +) +from pydantic.json_schema import JsonSchemaValue + + +class SubscriptableBaseModel(BaseModel): + def __getitem__(self, key: str) -> Any: + return getattr(self, key) + + def __setitem__(self, key: str, value: Any) -> None: + setattr(self, key, value) + + def __contains__(self, key: str) -> bool: + return hasattr(self, key) + + def get(self, key: str, default: Any = None) -> Any: + return getattr(self, key, default) + + +class Options(SubscriptableBaseModel): + # load time options + numa: Optional[bool] = None + num_ctx: Optional[int] = None + num_batch: Optional[int] = None + num_gpu: Optional[int] = None + main_gpu: Optional[int] = None + low_vram: Optional[bool] = None + f16_kv: Optional[bool] = None + logits_all: Optional[bool] = None + vocab_only: Optional[bool] = None + use_mmap: Optional[bool] = None + use_mlock: Optional[bool] = None + embedding_only: Optional[bool] = None + num_thread: Optional[int] = None + + # runtime options + num_keep: Optional[int] = None + seed: Optional[int] = None + num_predict: Optional[int] = None + top_k: Optional[int] = None + top_p: Optional[float] = None + tfs_z: Optional[float] = None + typical_p: Optional[float] = None + repeat_last_n: Optional[int] = None + temperature: Optional[float] = None + repeat_penalty: Optional[float] = None + presence_penalty: Optional[float] = None + frequency_penalty: Optional[float] = None + mirostat: Optional[int] = None + mirostat_tau: Optional[float] = None + 
mirostat_eta: Optional[float] = None + penalize_newline: Optional[bool] = None + stop: Optional[Sequence[str]] = None + + +class BaseRequest(SubscriptableBaseModel): + model: Annotated[str, Field(min_length=1)] + 'Model to use for the request.' + + +class BaseStreamableRequest(BaseRequest): + stream: Optional[bool] = None + 'Stream response.' + + +class BaseGenerateRequest(BaseStreamableRequest): + options: Optional[Union[Mapping[str, Any], Options]] = None + 'Options to use for the request.' + + format: Optional[Literal['', 'json']] = None + 'Format of the response.' + + keep_alive: Optional[Union[float, str]] = None + 'Keep model alive for the specified duration.' + + +class Image(BaseModel): + value: Union[FilePath, Base64Str, bytes] -import sys + @model_serializer + def serialize_model(self): + if isinstance(self.value, Path): + return b64encode(self.value.read_bytes()).decode() + elif isinstance(self.value, bytes): + return b64encode(self.value).decode() + return self.value -if sys.version_info < (3, 11): - from typing_extensions import NotRequired -else: - from typing import NotRequired +class GenerateRequest(BaseGenerateRequest): + prompt: Optional[str] = None + 'Prompt to generate response from.' -class BaseGenerateResponse(TypedDict): - model: str + suffix: Optional[str] = None + 'Suffix to append to the response.' + + system: Optional[str] = None + 'System prompt to prepend to the prompt.' + + template: Optional[str] = None + 'Template to use for the response.' + + context: Optional[Sequence[int]] = None + 'Tokenized history to use for the response.' + + raw: Optional[bool] = None + + images: Optional[Sequence[Image]] = None + 'Image data for multimodal models.' + + +class BaseGenerateResponse(SubscriptableBaseModel): + model: Optional[str] = None 'Model used to generate response.' - created_at: str + created_at: Optional[str] = None 'Time when the request was created.' 
- done: bool + done: Optional[bool] = None 'True if response is complete, otherwise False. Useful for streaming to detect the final response.' - done_reason: str + done_reason: Optional[str] = None 'Reason for completion. Only present when done is True.' - total_duration: int + total_duration: Optional[int] = None 'Total duration in nanoseconds.' - load_duration: int + load_duration: Optional[int] = None 'Load duration in nanoseconds.' - prompt_eval_count: int + prompt_eval_count: Optional[int] = None 'Number of tokens evaluated in the prompt.' - prompt_eval_duration: int + prompt_eval_duration: Optional[int] = None 'Duration of evaluating the prompt in nanoseconds.' - eval_count: int + eval_count: Optional[int] = None 'Number of tokens evaluated in inference.' - eval_duration: int + eval_duration: Optional[int] = None 'Duration of evaluating inference in nanoseconds.' @@ -49,43 +168,22 @@ class GenerateResponse(BaseGenerateResponse): response: str 'Response content. When streaming, this contains a fragment of the response.' - context: Sequence[int] + context: Optional[Sequence[int]] = None 'Tokenized history up to the point of the response.' -class ToolCallFunction(TypedDict): - """ - Tool call function. - """ - - name: str - 'Name of the function.' - - arguments: NotRequired[Mapping[str, Any]] - 'Arguments of the function.' - - -class ToolCall(TypedDict): - """ - Model tool calls. - """ - - function: ToolCallFunction - 'Function to be called.' - - -class Message(TypedDict): +class Message(SubscriptableBaseModel): """ Chat message. """ role: Literal['user', 'assistant', 'system', 'tool'] - "Assumed role of the message. Response messages always has role 'assistant' or 'tool'." + "Assumed role of the message. Response messages has role 'assistant' or 'tool'." - content: NotRequired[str] + content: Optional[str] = None 'Content of the message. Response messages contains message fragments when streaming.' 
- images: NotRequired[Sequence[Any]] + images: Optional[Sequence[Image]] = None """ Optional list of image data for multimodal models. @@ -97,33 +195,54 @@ class Message(TypedDict): Valid image formats depend on the model. See the model card for more information. """ - tool_calls: NotRequired[Sequence[ToolCall]] + class ToolCall(SubscriptableBaseModel): + """ + Model tool calls. + """ + + class Function(SubscriptableBaseModel): + """ + Tool call function. + """ + + name: str + 'Name of the function.' + + arguments: Mapping[str, Any] + 'Arguments of the function.' + + function: Function + 'Function to be called.' + + tool_calls: Optional[Sequence[ToolCall]] = None """ Tools calls to be made by the model. """ -class Property(TypedDict): - type: str - description: str - enum: NotRequired[Sequence[str]] # `enum` is optional and can be a list of strings +class Tool(SubscriptableBaseModel): + type: Literal['function'] = 'function' + class Function(SubscriptableBaseModel): + name: str + description: str -class Parameters(TypedDict): - type: str - required: Sequence[str] - properties: Mapping[str, Property] + class Parameters(SubscriptableBaseModel): + type: str + required: Optional[Sequence[str]] = None + properties: Optional[JsonSchemaValue] = None + parameters: Parameters -class ToolFunction(TypedDict): - name: str - description: str - parameters: Parameters + function: Function -class Tool(TypedDict): - type: str - function: ToolFunction +class ChatRequest(BaseGenerateRequest): + messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None + 'Messages to chat with.' + + tools: Optional[Sequence[Tool]] = None + 'Tools to use for the chat.' class ChatResponse(BaseGenerateResponse): @@ -135,47 +254,156 @@ class ChatResponse(BaseGenerateResponse): 'Response message.' -class ProgressResponse(TypedDict): - status: str - completed: int - total: int - digest: str +class EmbedRequest(BaseRequest): + input: Union[str, Sequence[str]] + 'Input text to embed.' 
+ truncate: Optional[bool] = None + 'Truncate the input to the maximum token length.' -class Options(TypedDict, total=False): - # load time options - numa: bool - num_ctx: int - num_batch: int - num_gpu: int - main_gpu: int - low_vram: bool - f16_kv: bool - logits_all: bool - vocab_only: bool - use_mmap: bool - use_mlock: bool - embedding_only: bool - num_thread: int + options: Optional[Union[Mapping[str, Any], Options]] = None + 'Options to use for the request.' - # runtime options - num_keep: int - seed: int - num_predict: int - top_k: int - top_p: float - tfs_z: float - typical_p: float - repeat_last_n: int - temperature: float - repeat_penalty: float - presence_penalty: float - frequency_penalty: float - mirostat: int - mirostat_tau: float - mirostat_eta: float - penalize_newline: bool - stop: Sequence[str] + keep_alive: Optional[Union[float, str]] = None + + +class EmbedResponse(BaseGenerateResponse): + """ + Response returned by embed requests. + """ + + embeddings: Sequence[Sequence[float]] + 'Embeddings of the inputs.' + + +class EmbeddingsRequest(BaseRequest): + prompt: Optional[str] = None + 'Prompt to generate embeddings from.' + + options: Optional[Union[Mapping[str, Any], Options]] = None + 'Options to use for the request.' + + keep_alive: Optional[Union[float, str]] = None + + +class EmbeddingsResponse(SubscriptableBaseModel): + """ + Response returned by embeddings requests. + """ + + embedding: Sequence[float] + 'Embedding of the prompt.' + + +class PullRequest(BaseStreamableRequest): + """ + Request to pull the model. + """ + + insecure: Optional[bool] = None + 'Allow insecure (HTTP) connections.' + + +class PushRequest(BaseStreamableRequest): + """ + Request to pull the model. + """ + + insecure: Optional[bool] = None + 'Allow insecure (HTTP) connections.' + + +class CreateRequest(BaseStreamableRequest): + """ + Request to create a new model. 
+ """ + + modelfile: Optional[str] = None + + quantize: Optional[str] = None + + +class ModelDetails(SubscriptableBaseModel): + parent_model: Optional[str] = None + format: Optional[str] = None + family: Optional[str] = None + families: Optional[Sequence[str]] = None + parameter_size: Optional[str] = None + quantization_level: Optional[str] = None + + +class ListResponse(SubscriptableBaseModel): + class Model(BaseModel): + modified_at: Optional[datetime] = None + digest: Optional[str] = None + size: Optional[ByteSize] = None + details: Optional[ModelDetails] = None + + models: Sequence[Model] + 'List of models.' + + +class DeleteRequest(BaseRequest): + """ + Request to delete a model. + """ + + +class CopyRequest(BaseModel): + """ + Request to copy a model. + """ + + source: str + 'Source model to copy.' + + destination: str + 'Destination model to copy to.' + + +class StatusResponse(SubscriptableBaseModel): + status: Optional[str] = None + + +class ProgressResponse(StatusResponse): + completed: Optional[int] = None + total: Optional[int] = None + digest: Optional[str] = None + + +class ShowRequest(BaseRequest): + """ + Request to show model information. 
+ """ + + +class ShowResponse(SubscriptableBaseModel): + modified_at: Optional[datetime] = None + + template: Optional[str] = None + + modelfile: Optional[str] = None + + license: Optional[str] = None + + details: Optional[ModelDetails] = None + + modelinfo: Optional[Mapping[str, Any]] = Field(alias='model_info') + + parameters: Optional[str] = None + + +class ProcessResponse(SubscriptableBaseModel): + class Model(BaseModel): + model: Optional[str] = None + name: Optional[str] = None + digest: Optional[str] = None + expires_at: Optional[datetime] = None + size: Optional[ByteSize] = None + size_vram: Optional[ByteSize] = None + details: Optional[ModelDetails] = None + + models: Sequence[Model] class RequestError(Exception): diff --git a/poetry.lock b/poetry.lock index 27c55310..83ebc984 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,19 @@ # This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} + [[package]] name = "anyio" version = "4.5.2" @@ -419,6 +433,130 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "pydantic" +version = "2.9.0" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.9.0-py3-none-any.whl", hash = "sha256:f66a7073abd93214a20c5f7b32d56843137a7a2e70d02111f3be287035c45370"}, + {file = "pydantic-2.9.0.tar.gz", hash = 
"sha256:c7a8a9fdf7d100afa49647eae340e2d23efa382466a8d177efcd1381e9be5598"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.23.2" +typing-extensions = [ + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, +] +tzdata = {version = "*", markers = "python_version >= \"3.9\""} + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.23.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.23.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7d0324a35ab436c9d768753cbc3c47a865a2cbc0757066cb864747baa61f6ece"}, + {file = "pydantic_core-2.23.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:276ae78153a94b664e700ac362587c73b84399bd1145e135287513442e7dfbc7"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:964c7aa318da542cdcc60d4a648377ffe1a2ef0eb1e996026c7f74507b720a78"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1cf842265a3a820ebc6388b963ead065f5ce8f2068ac4e1c713ef77a67b71f7c"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae90b9e50fe1bd115b24785e962b51130340408156d34d67b5f8f3fa6540938e"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ae65fdfb8a841556b52935dfd4c3f79132dc5253b12c0061b96415208f4d622"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c8aa40f6ca803f95b1c1c5aeaee6237b9e879e4dfb46ad713229a63651a95fb"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c53100c8ee5a1e102766abde2158077d8c374bee0639201f11d3032e3555dfbc"}, + {file = 
"pydantic_core-2.23.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d6b9dd6aa03c812017411734e496c44fef29b43dba1e3dd1fa7361bbacfc1354"}, + {file = "pydantic_core-2.23.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b18cf68255a476b927910c6873d9ed00da692bb293c5b10b282bd48a0afe3ae2"}, + {file = "pydantic_core-2.23.2-cp310-none-win32.whl", hash = "sha256:e460475719721d59cd54a350c1f71c797c763212c836bf48585478c5514d2854"}, + {file = "pydantic_core-2.23.2-cp310-none-win_amd64.whl", hash = "sha256:5f3cf3721eaf8741cffaf092487f1ca80831202ce91672776b02b875580e174a"}, + {file = "pydantic_core-2.23.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:7ce8e26b86a91e305858e018afc7a6e932f17428b1eaa60154bd1f7ee888b5f8"}, + {file = "pydantic_core-2.23.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e9b24cca4037a561422bf5dc52b38d390fb61f7bfff64053ce1b72f6938e6b2"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:753294d42fb072aa1775bfe1a2ba1012427376718fa4c72de52005a3d2a22178"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:257d6a410a0d8aeb50b4283dea39bb79b14303e0fab0f2b9d617701331ed1515"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8319e0bd6a7b45ad76166cc3d5d6a36c97d0c82a196f478c3ee5346566eebfd"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a05c0240f6c711eb381ac392de987ee974fa9336071fb697768dfdb151345ce"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d5b0ff3218858859910295df6953d7bafac3a48d5cd18f4e3ed9999efd2245f"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:96ef39add33ff58cd4c112cbac076726b96b98bb8f1e7f7595288dcfb2f10b57"}, + {file = 
"pydantic_core-2.23.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0102e49ac7d2df3379ef8d658d3bc59d3d769b0bdb17da189b75efa861fc07b4"}, + {file = "pydantic_core-2.23.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a6612c2a844043e4d10a8324c54cdff0042c558eef30bd705770793d70b224aa"}, + {file = "pydantic_core-2.23.2-cp311-none-win32.whl", hash = "sha256:caffda619099cfd4f63d48462f6aadbecee3ad9603b4b88b60cb821c1b258576"}, + {file = "pydantic_core-2.23.2-cp311-none-win_amd64.whl", hash = "sha256:6f80fba4af0cb1d2344869d56430e304a51396b70d46b91a55ed4959993c0589"}, + {file = "pydantic_core-2.23.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:4c83c64d05ffbbe12d4e8498ab72bdb05bcc1026340a4a597dc647a13c1605ec"}, + {file = "pydantic_core-2.23.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6294907eaaccf71c076abdd1c7954e272efa39bb043161b4b8aa1cd76a16ce43"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a801c5e1e13272e0909c520708122496647d1279d252c9e6e07dac216accc41"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cc0c316fba3ce72ac3ab7902a888b9dc4979162d320823679da270c2d9ad0cad"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b06c5d4e8701ac2ba99a2ef835e4e1b187d41095a9c619c5b185c9068ed2a49"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82764c0bd697159fe9947ad59b6db6d7329e88505c8f98990eb07e84cc0a5d81"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b1a195efd347ede8bcf723e932300292eb13a9d2a3c1f84eb8f37cbbc905b7f"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7efb12e5071ad8d5b547487bdad489fbd4a5a35a0fc36a1941517a6ad7f23e0"}, + {file = 
"pydantic_core-2.23.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5dd0ec5f514ed40e49bf961d49cf1bc2c72e9b50f29a163b2cc9030c6742aa73"}, + {file = "pydantic_core-2.23.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:820f6ee5c06bc868335e3b6e42d7ef41f50dfb3ea32fbd523ab679d10d8741c0"}, + {file = "pydantic_core-2.23.2-cp312-none-win32.whl", hash = "sha256:3713dc093d5048bfaedbba7a8dbc53e74c44a140d45ede020dc347dda18daf3f"}, + {file = "pydantic_core-2.23.2-cp312-none-win_amd64.whl", hash = "sha256:e1895e949f8849bc2757c0dbac28422a04be031204df46a56ab34bcf98507342"}, + {file = "pydantic_core-2.23.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:da43cbe593e3c87d07108d0ebd73771dc414488f1f91ed2e204b0370b94b37ac"}, + {file = "pydantic_core-2.23.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:64d094ea1aa97c6ded4748d40886076a931a8bf6f61b6e43e4a1041769c39dd2"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:084414ffe9a85a52940b49631321d636dadf3576c30259607b75516d131fecd0"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:043ef8469f72609c4c3a5e06a07a1f713d53df4d53112c6d49207c0bd3c3bd9b"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3649bd3ae6a8ebea7dc381afb7f3c6db237fc7cebd05c8ac36ca8a4187b03b30"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6db09153d8438425e98cdc9a289c5fade04a5d2128faff8f227c459da21b9703"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5668b3173bb0b2e65020b60d83f5910a7224027232c9f5dc05a71a1deac9f960"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1c7b81beaf7c7ebde978377dc53679c6cba0e946426fc7ade54251dfe24a7604"}, + {file = 
"pydantic_core-2.23.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:ae579143826c6f05a361d9546446c432a165ecf1c0b720bbfd81152645cb897d"}, + {file = "pydantic_core-2.23.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:19f1352fe4b248cae22a89268720fc74e83f008057a652894f08fa931e77dced"}, + {file = "pydantic_core-2.23.2-cp313-none-win32.whl", hash = "sha256:e1a79ad49f346aa1a2921f31e8dbbab4d64484823e813a002679eaa46cba39e1"}, + {file = "pydantic_core-2.23.2-cp313-none-win_amd64.whl", hash = "sha256:582871902e1902b3c8e9b2c347f32a792a07094110c1bca6c2ea89b90150caac"}, + {file = "pydantic_core-2.23.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:743e5811b0c377eb830150d675b0847a74a44d4ad5ab8845923d5b3a756d8100"}, + {file = "pydantic_core-2.23.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6650a7bbe17a2717167e3e23c186849bae5cef35d38949549f1c116031b2b3aa"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56e6a12ec8d7679f41b3750ffa426d22b44ef97be226a9bab00a03365f217b2b"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:810ca06cca91de9107718dc83d9ac4d2e86efd6c02cba49a190abcaf33fb0472"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:785e7f517ebb9890813d31cb5d328fa5eda825bb205065cde760b3150e4de1f7"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ef71ec876fcc4d3bbf2ae81961959e8d62f8d74a83d116668409c224012e3af"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d50ac34835c6a4a0d456b5db559b82047403c4317b3bc73b3455fefdbdc54b0a"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16b25a4a120a2bb7dab51b81e3d9f3cde4f9a4456566c403ed29ac81bf49744f"}, + {file = "pydantic_core-2.23.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:41ae8537ad371ec018e3c5da0eb3f3e40ee1011eb9be1da7f965357c4623c501"}, + {file = "pydantic_core-2.23.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07049ec9306ec64e955b2e7c40c8d77dd78ea89adb97a2013d0b6e055c5ee4c5"}, + {file = "pydantic_core-2.23.2-cp38-none-win32.whl", hash = "sha256:086c5db95157dc84c63ff9d96ebb8856f47ce113c86b61065a066f8efbe80acf"}, + {file = "pydantic_core-2.23.2-cp38-none-win_amd64.whl", hash = "sha256:67b6655311b00581914aba481729971b88bb8bc7996206590700a3ac85e457b8"}, + {file = "pydantic_core-2.23.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:358331e21a897151e54d58e08d0219acf98ebb14c567267a87e971f3d2a3be59"}, + {file = "pydantic_core-2.23.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c4d9f15ffe68bcd3898b0ad7233af01b15c57d91cd1667f8d868e0eacbfe3f87"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0123655fedacf035ab10c23450163c2f65a4174f2bb034b188240a6cf06bb123"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e6e3ccebdbd6e53474b0bb7ab8b88e83c0cfe91484b25e058e581348ee5a01a5"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc535cb898ef88333cf317777ecdfe0faac1c2a3187ef7eb061b6f7ecf7e6bae"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aab9e522efff3993a9e98ab14263d4e20211e62da088298089a03056980a3e69"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05b366fb8fe3d8683b11ac35fa08947d7b92be78ec64e3277d03bd7f9b7cda79"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7568f682c06f10f30ef643a1e8eec4afeecdafde5c4af1b574c6df079e96f96c"}, + {file = "pydantic_core-2.23.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cdd02a08205dc90238669f082747612cb3c82bd2c717adc60f9b9ecadb540f80"}, + 
{file = "pydantic_core-2.23.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a2ab4f410f4b886de53b6bddf5dd6f337915a29dd9f22f20f3099659536b2f6"}, + {file = "pydantic_core-2.23.2-cp39-none-win32.whl", hash = "sha256:0448b81c3dfcde439551bb04a9f41d7627f676b12701865c8a2574bcea034437"}, + {file = "pydantic_core-2.23.2-cp39-none-win_amd64.whl", hash = "sha256:4cebb9794f67266d65e7e4cbe5dcf063e29fc7b81c79dc9475bd476d9534150e"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e758d271ed0286d146cf7c04c539a5169a888dd0b57026be621547e756af55bc"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f477d26183e94eaafc60b983ab25af2a809a1b48ce4debb57b343f671b7a90b6"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da3131ef2b940b99106f29dfbc30d9505643f766704e14c5d5e504e6a480c35e"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329a721253c7e4cbd7aad4a377745fbcc0607f9d72a3cc2102dd40519be75ed2"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7706e15cdbf42f8fab1e6425247dfa98f4a6f8c63746c995d6a2017f78e619ae"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e64ffaf8f6e17ca15eb48344d86a7a741454526f3a3fa56bc493ad9d7ec63936"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dd59638025160056687d598b054b64a79183f8065eae0d3f5ca523cde9943940"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:12625e69b1199e94b0ae1c9a95d000484ce9f0182f9965a26572f054b1537e44"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5d813fd871b3d5c3005157622ee102e8908ad6011ec915a18bd8fde673c4360e"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:1eb37f7d6a8001c0f86dc8ff2ee8d08291a536d76e49e78cda8587bb54d8b329"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ce7eaf9a98680b4312b7cebcdd9352531c43db00fca586115845df388f3c465"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f087879f1ffde024dd2788a30d55acd67959dcf6c431e9d3682d1c491a0eb474"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ce883906810b4c3bd90e0ada1f9e808d9ecf1c5f0b60c6b8831d6100bcc7dd6"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a8031074a397a5925d06b590121f8339d34a5a74cfe6970f8a1124eb8b83f4ac"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23af245b8f2f4ee9e2c99cb3f93d0e22fb5c16df3f2f643f5a8da5caff12a653"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c57e493a0faea1e4c38f860d6862ba6832723396c884fbf938ff5e9b224200e2"}, + {file = "pydantic_core-2.23.2.tar.gz", hash = "sha256:95d6bf449a1ac81de562d65d180af5d8c19672793c81877a2eda8fde5d08f2fd"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + [[package]] name = "pytest" version = "8.3.3" @@ -551,6 +689,28 @@ files = [ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "tzdata" +version = "2024.1" 
+description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + [[package]] name = "werkzeug" version = "3.0.6" @@ -571,4 +731,4 @@ watchdog = ["watchdog (>=2.3)"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "e36516c932ab9dd7497acc0c3d55ab2c963004595efe97c2bc80854687c32c1e" +content-hash = "e664c86cc330480eb86239842f55f12b0fba4df5c2fc776d094f37f58320e637" diff --git a/pyproject.toml b/pyproject.toml index 4e58aa93..ff151d31 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,6 +11,7 @@ repository = "https://github.com/ollama/ollama-python" [tool.poetry.dependencies] python = "^3.8" httpx = "^0.27.0" +pydantic = "^2.9.0" [tool.poetry.group.dev.dependencies] pytest = ">=7.4.3,<9.0.0" diff --git a/requirements.txt b/requirements.txt index 992f59a8..5b549609 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,6 @@ +annotated-types==0.7.0 ; python_version >= "3.8" and python_version < "4.0" \ + --hash=sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 \ + --hash=sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89 anyio==4.5.2 ; python_version >= "3.8" and python_version < "4.0" \ --hash=sha256:23009af4ed04ce05991845451e11ef02fc7c5ed29179ac9a420e5ad0ac7ddc5b \ --hash=sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f @@ -19,9 +22,105 @@ httpx==0.27.2 ; python_version >= "3.8" and python_version < "4.0" \ idna==3.10 ; python_version >= "3.8" and python_version < "4.0" \ --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 +pydantic-core==2.23.2 ; python_version >= "3.8" and 
python_version < "4.0" \ + --hash=sha256:0102e49ac7d2df3379ef8d658d3bc59d3d769b0bdb17da189b75efa861fc07b4 \ + --hash=sha256:0123655fedacf035ab10c23450163c2f65a4174f2bb034b188240a6cf06bb123 \ + --hash=sha256:043ef8469f72609c4c3a5e06a07a1f713d53df4d53112c6d49207c0bd3c3bd9b \ + --hash=sha256:0448b81c3dfcde439551bb04a9f41d7627f676b12701865c8a2574bcea034437 \ + --hash=sha256:05b366fb8fe3d8683b11ac35fa08947d7b92be78ec64e3277d03bd7f9b7cda79 \ + --hash=sha256:07049ec9306ec64e955b2e7c40c8d77dd78ea89adb97a2013d0b6e055c5ee4c5 \ + --hash=sha256:084414ffe9a85a52940b49631321d636dadf3576c30259607b75516d131fecd0 \ + --hash=sha256:086c5db95157dc84c63ff9d96ebb8856f47ce113c86b61065a066f8efbe80acf \ + --hash=sha256:12625e69b1199e94b0ae1c9a95d000484ce9f0182f9965a26572f054b1537e44 \ + --hash=sha256:16b25a4a120a2bb7dab51b81e3d9f3cde4f9a4456566c403ed29ac81bf49744f \ + --hash=sha256:19f1352fe4b248cae22a89268720fc74e83f008057a652894f08fa931e77dced \ + --hash=sha256:1a2ab4f410f4b886de53b6bddf5dd6f337915a29dd9f22f20f3099659536b2f6 \ + --hash=sha256:1c7b81beaf7c7ebde978377dc53679c6cba0e946426fc7ade54251dfe24a7604 \ + --hash=sha256:1cf842265a3a820ebc6388b963ead065f5ce8f2068ac4e1c713ef77a67b71f7c \ + --hash=sha256:1eb37f7d6a8001c0f86dc8ff2ee8d08291a536d76e49e78cda8587bb54d8b329 \ + --hash=sha256:23af245b8f2f4ee9e2c99cb3f93d0e22fb5c16df3f2f643f5a8da5caff12a653 \ + --hash=sha256:257d6a410a0d8aeb50b4283dea39bb79b14303e0fab0f2b9d617701331ed1515 \ + --hash=sha256:276ae78153a94b664e700ac362587c73b84399bd1145e135287513442e7dfbc7 \ + --hash=sha256:2b1a195efd347ede8bcf723e932300292eb13a9d2a3c1f84eb8f37cbbc905b7f \ + --hash=sha256:329a721253c7e4cbd7aad4a377745fbcc0607f9d72a3cc2102dd40519be75ed2 \ + --hash=sha256:358331e21a897151e54d58e08d0219acf98ebb14c567267a87e971f3d2a3be59 \ + --hash=sha256:3649bd3ae6a8ebea7dc381afb7f3c6db237fc7cebd05c8ac36ca8a4187b03b30 \ + --hash=sha256:3713dc093d5048bfaedbba7a8dbc53e74c44a140d45ede020dc347dda18daf3f \ + 
--hash=sha256:3ef71ec876fcc4d3bbf2ae81961959e8d62f8d74a83d116668409c224012e3af \ + --hash=sha256:41ae8537ad371ec018e3c5da0eb3f3e40ee1011eb9be1da7f965357c4623c501 \ + --hash=sha256:4a801c5e1e13272e0909c520708122496647d1279d252c9e6e07dac216accc41 \ + --hash=sha256:4c83c64d05ffbbe12d4e8498ab72bdb05bcc1026340a4a597dc647a13c1605ec \ + --hash=sha256:4cebb9794f67266d65e7e4cbe5dcf063e29fc7b81c79dc9475bd476d9534150e \ + --hash=sha256:5668b3173bb0b2e65020b60d83f5910a7224027232c9f5dc05a71a1deac9f960 \ + --hash=sha256:56e6a12ec8d7679f41b3750ffa426d22b44ef97be226a9bab00a03365f217b2b \ + --hash=sha256:582871902e1902b3c8e9b2c347f32a792a07094110c1bca6c2ea89b90150caac \ + --hash=sha256:5c8aa40f6ca803f95b1c1c5aeaee6237b9e879e4dfb46ad713229a63651a95fb \ + --hash=sha256:5d813fd871b3d5c3005157622ee102e8908ad6011ec915a18bd8fde673c4360e \ + --hash=sha256:5dd0ec5f514ed40e49bf961d49cf1bc2c72e9b50f29a163b2cc9030c6742aa73 \ + --hash=sha256:5f3cf3721eaf8741cffaf092487f1ca80831202ce91672776b02b875580e174a \ + --hash=sha256:6294907eaaccf71c076abdd1c7954e272efa39bb043161b4b8aa1cd76a16ce43 \ + --hash=sha256:64d094ea1aa97c6ded4748d40886076a931a8bf6f61b6e43e4a1041769c39dd2 \ + --hash=sha256:6650a7bbe17a2717167e3e23c186849bae5cef35d38949549f1c116031b2b3aa \ + --hash=sha256:67b6655311b00581914aba481729971b88bb8bc7996206590700a3ac85e457b8 \ + --hash=sha256:6b06c5d4e8701ac2ba99a2ef835e4e1b187d41095a9c619c5b185c9068ed2a49 \ + --hash=sha256:6ce883906810b4c3bd90e0ada1f9e808d9ecf1c5f0b60c6b8831d6100bcc7dd6 \ + --hash=sha256:6db09153d8438425e98cdc9a289c5fade04a5d2128faff8f227c459da21b9703 \ + --hash=sha256:6f80fba4af0cb1d2344869d56430e304a51396b70d46b91a55ed4959993c0589 \ + --hash=sha256:743e5811b0c377eb830150d675b0847a74a44d4ad5ab8845923d5b3a756d8100 \ + --hash=sha256:753294d42fb072aa1775bfe1a2ba1012427376718fa4c72de52005a3d2a22178 \ + --hash=sha256:7568f682c06f10f30ef643a1e8eec4afeecdafde5c4af1b574c6df079e96f96c \ + --hash=sha256:7706e15cdbf42f8fab1e6425247dfa98f4a6f8c63746c995d6a2017f78e619ae \ + 
--hash=sha256:785e7f517ebb9890813d31cb5d328fa5eda825bb205065cde760b3150e4de1f7 \ + --hash=sha256:7a05c0240f6c711eb381ac392de987ee974fa9336071fb697768dfdb151345ce \ + --hash=sha256:7ce7eaf9a98680b4312b7cebcdd9352531c43db00fca586115845df388f3c465 \ + --hash=sha256:7ce8e26b86a91e305858e018afc7a6e932f17428b1eaa60154bd1f7ee888b5f8 \ + --hash=sha256:7d0324a35ab436c9d768753cbc3c47a865a2cbc0757066cb864747baa61f6ece \ + --hash=sha256:7e9b24cca4037a561422bf5dc52b38d390fb61f7bfff64053ce1b72f6938e6b2 \ + --hash=sha256:810ca06cca91de9107718dc83d9ac4d2e86efd6c02cba49a190abcaf33fb0472 \ + --hash=sha256:820f6ee5c06bc868335e3b6e42d7ef41f50dfb3ea32fbd523ab679d10d8741c0 \ + --hash=sha256:82764c0bd697159fe9947ad59b6db6d7329e88505c8f98990eb07e84cc0a5d81 \ + --hash=sha256:8ae65fdfb8a841556b52935dfd4c3f79132dc5253b12c0061b96415208f4d622 \ + --hash=sha256:8d5b0ff3218858859910295df6953d7bafac3a48d5cd18f4e3ed9999efd2245f \ + --hash=sha256:95d6bf449a1ac81de562d65d180af5d8c19672793c81877a2eda8fde5d08f2fd \ + --hash=sha256:964c7aa318da542cdcc60d4a648377ffe1a2ef0eb1e996026c7f74507b720a78 \ + --hash=sha256:96ef39add33ff58cd4c112cbac076726b96b98bb8f1e7f7595288dcfb2f10b57 \ + --hash=sha256:a6612c2a844043e4d10a8324c54cdff0042c558eef30bd705770793d70b224aa \ + --hash=sha256:a8031074a397a5925d06b590121f8339d34a5a74cfe6970f8a1124eb8b83f4ac \ + --hash=sha256:aab9e522efff3993a9e98ab14263d4e20211e62da088298089a03056980a3e69 \ + --hash=sha256:ae579143826c6f05a361d9546446c432a165ecf1c0b720bbfd81152645cb897d \ + --hash=sha256:ae90b9e50fe1bd115b24785e962b51130340408156d34d67b5f8f3fa6540938e \ + --hash=sha256:b18cf68255a476b927910c6873d9ed00da692bb293c5b10b282bd48a0afe3ae2 \ + --hash=sha256:b7efb12e5071ad8d5b547487bdad489fbd4a5a35a0fc36a1941517a6ad7f23e0 \ + --hash=sha256:c4d9f15ffe68bcd3898b0ad7233af01b15c57d91cd1667f8d868e0eacbfe3f87 \ + --hash=sha256:c53100c8ee5a1e102766abde2158077d8c374bee0639201f11d3032e3555dfbc \ + --hash=sha256:c57e493a0faea1e4c38f860d6862ba6832723396c884fbf938ff5e9b224200e2 \ + 
--hash=sha256:c8319e0bd6a7b45ad76166cc3d5d6a36c97d0c82a196f478c3ee5346566eebfd \ + --hash=sha256:caffda619099cfd4f63d48462f6aadbecee3ad9603b4b88b60cb821c1b258576 \ + --hash=sha256:cc0c316fba3ce72ac3ab7902a888b9dc4979162d320823679da270c2d9ad0cad \ + --hash=sha256:cdd02a08205dc90238669f082747612cb3c82bd2c717adc60f9b9ecadb540f80 \ + --hash=sha256:d50ac34835c6a4a0d456b5db559b82047403c4317b3bc73b3455fefdbdc54b0a \ + --hash=sha256:d6b9dd6aa03c812017411734e496c44fef29b43dba1e3dd1fa7361bbacfc1354 \ + --hash=sha256:da3131ef2b940b99106f29dfbc30d9505643f766704e14c5d5e504e6a480c35e \ + --hash=sha256:da43cbe593e3c87d07108d0ebd73771dc414488f1f91ed2e204b0370b94b37ac \ + --hash=sha256:dd59638025160056687d598b054b64a79183f8065eae0d3f5ca523cde9943940 \ + --hash=sha256:e1895e949f8849bc2757c0dbac28422a04be031204df46a56ab34bcf98507342 \ + --hash=sha256:e1a79ad49f346aa1a2921f31e8dbbab4d64484823e813a002679eaa46cba39e1 \ + --hash=sha256:e460475719721d59cd54a350c1f71c797c763212c836bf48585478c5514d2854 \ + --hash=sha256:e64ffaf8f6e17ca15eb48344d86a7a741454526f3a3fa56bc493ad9d7ec63936 \ + --hash=sha256:e6e3ccebdbd6e53474b0bb7ab8b88e83c0cfe91484b25e058e581348ee5a01a5 \ + --hash=sha256:e758d271ed0286d146cf7c04c539a5169a888dd0b57026be621547e756af55bc \ + --hash=sha256:f087879f1ffde024dd2788a30d55acd67959dcf6c431e9d3682d1c491a0eb474 \ + --hash=sha256:f477d26183e94eaafc60b983ab25af2a809a1b48ce4debb57b343f671b7a90b6 \ + --hash=sha256:fc535cb898ef88333cf317777ecdfe0faac1c2a3187ef7eb061b6f7ecf7e6bae +pydantic==2.9.0 ; python_version >= "3.8" and python_version < "4.0" \ + --hash=sha256:c7a8a9fdf7d100afa49647eae340e2d23efa382466a8d177efcd1381e9be5598 \ + --hash=sha256:f66a7073abd93214a20c5f7b32d56843137a7a2e70d02111f3be287035c45370 sniffio==1.3.1 ; python_version >= "3.8" and python_version < "4.0" \ --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \ --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc -typing-extensions==4.12.2 ; python_version 
>= "3.8" and python_version < "3.11" \ +typing-extensions==4.12.2 ; python_version >= "3.8" and python_version < "4.0" \ --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 +tzdata==2024.1 ; python_version >= "3.9" and python_version < "4.0" \ + --hash=sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd \ + --hash=sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252 diff --git a/tests/test_client.py b/tests/test_client.py index efc8d4fa..3bb451c9 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -28,9 +28,6 @@ def test_client_chat(httpserver: HTTPServer): 'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}], 'tools': [], 'stream': False, - 'format': '', - 'options': {}, - 'keep_alive': None, }, ).respond_with_json( { @@ -76,9 +73,6 @@ def generate(): 'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}], 'tools': [], 'stream': True, - 'format': '', - 'options': {}, - 'keep_alive': None, }, ).respond_with_handler(stream_handler) @@ -106,9 +100,6 @@ def test_client_chat_images(httpserver: HTTPServer): ], 'tools': [], 'stream': False, - 'format': '', - 'options': {}, - 'keep_alive': None, }, ).respond_with_json( { @@ -137,16 +128,7 @@ def test_client_generate(httpserver: HTTPServer): json={ 'model': 'dummy', 'prompt': 'Why is the sky blue?', - 'suffix': '', - 'system': '', - 'template': '', - 'context': [], 'stream': False, - 'raw': False, - 'images': [], - 'format': '', - 'options': {}, - 'keep_alive': None, }, ).respond_with_json( { @@ -183,16 +165,7 @@ def generate(): json={ 'model': 'dummy', 'prompt': 'Why is the sky blue?', - 'suffix': '', - 'system': '', - 'template': '', - 'context': [], 'stream': True, - 'raw': False, - 'images': [], - 'format': '', - 'options': {}, - 'keep_alive': None, }, ).respond_with_handler(stream_handler) @@ -212,16 +185,8 @@ def 
test_client_generate_images(httpserver: HTTPServer): json={ 'model': 'dummy', 'prompt': 'Why is the sky blue?', - 'suffix': '', - 'system': '', - 'template': '', - 'context': [], 'stream': False, - 'raw': False, 'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'], - 'format': '', - 'options': {}, - 'keep_alive': None, }, ).respond_with_json( { @@ -244,15 +209,11 @@ def test_client_pull(httpserver: HTTPServer): '/api/pull', method='POST', json={ - 'name': 'dummy', + 'model': 'dummy', 'insecure': False, 'stream': False, }, - ).respond_with_json( - { - 'status': 'success', - } - ) + ).respond_with_json({'status': 'success'}) client = Client(httpserver.url_for('/')) response = client.pull('dummy') @@ -274,7 +235,7 @@ def generate(): '/api/pull', method='POST', json={ - 'name': 'dummy', + 'model': 'dummy', 'insecure': False, 'stream': True, }, @@ -293,15 +254,15 @@ def test_client_push(httpserver: HTTPServer): '/api/push', method='POST', json={ - 'name': 'dummy', + 'model': 'dummy', 'insecure': False, 'stream': False, }, - ).respond_with_json({}) + ).respond_with_json({'status': 'success'}) client = Client(httpserver.url_for('/')) response = client.push('dummy') - assert isinstance(response, dict) + assert response['status'] == 'success' def test_client_push_stream(httpserver: HTTPServer): @@ -317,7 +278,7 @@ def generate(): '/api/push', method='POST', json={ - 'name': 'dummy', + 'model': 'dummy', 'insecure': False, 'stream': True, }, @@ -337,12 +298,11 @@ def test_client_create_path(httpserver: HTTPServer): '/api/create', method='POST', json={ - 'name': 'dummy', + 'model': 'dummy', 'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n', 'stream': False, - 'quantize': None, }, - ).respond_with_json({}) + ).respond_with_json({'status': 'success'}) client = Client(httpserver.url_for('/')) @@ -352,7 +312,7 @@ def test_client_create_path(httpserver: HTTPServer): modelfile.flush() 
response = client.create('dummy', path=modelfile.name) - assert isinstance(response, dict) + assert response['status'] == 'success' def test_client_create_path_relative(httpserver: HTTPServer): @@ -361,12 +321,11 @@ def test_client_create_path_relative(httpserver: HTTPServer): '/api/create', method='POST', json={ - 'name': 'dummy', + 'model': 'dummy', 'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n', 'stream': False, - 'quantize': None, }, - ).respond_with_json({}) + ).respond_with_json({'status': 'success'}) client = Client(httpserver.url_for('/')) @@ -376,7 +335,7 @@ def test_client_create_path_relative(httpserver: HTTPServer): modelfile.flush() response = client.create('dummy', path=modelfile.name) - assert isinstance(response, dict) + assert response['status'] == 'success' @pytest.fixture @@ -394,12 +353,11 @@ def test_client_create_path_user_home(httpserver: HTTPServer, userhomedir): '/api/create', method='POST', json={ - 'name': 'dummy', + 'model': 'dummy', 'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n', 'stream': False, - 'quantize': None, }, - ).respond_with_json({}) + ).respond_with_json({'status': 'success'}) client = Client(httpserver.url_for('/')) @@ -409,7 +367,7 @@ def test_client_create_path_user_home(httpserver: HTTPServer, userhomedir): modelfile.flush() response = client.create('dummy', path=modelfile.name) - assert isinstance(response, dict) + assert response['status'] == 'success' def test_client_create_modelfile(httpserver: HTTPServer): @@ -418,18 +376,17 @@ def test_client_create_modelfile(httpserver: HTTPServer): '/api/create', method='POST', json={ - 'name': 'dummy', + 'model': 'dummy', 'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n', 'stream': False, - 'quantize': None, }, - ).respond_with_json({}) + ).respond_with_json({'status': 'success'}) client = Client(httpserver.url_for('/')) with 
tempfile.NamedTemporaryFile() as blob: response = client.create('dummy', modelfile=f'FROM {blob.name}') - assert isinstance(response, dict) + assert response['status'] == 'success' def test_client_create_modelfile_roundtrip(httpserver: HTTPServer): @@ -438,7 +395,7 @@ def test_client_create_modelfile_roundtrip(httpserver: HTTPServer): '/api/create', method='POST', json={ - 'name': 'dummy', + 'model': 'dummy', 'modelfile': '''FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 TEMPLATE """[INST] <>{{.System}}<> {{.Prompt}} [/INST]""" @@ -452,9 +409,8 @@ def test_client_create_modelfile_roundtrip(httpserver: HTTPServer): PARAMETER stop <> PARAMETER stop <>''', 'stream': False, - 'quantize': None, }, - ).respond_with_json({}) + ).respond_with_json({'status': 'success'}) client = Client(httpserver.url_for('/')) @@ -478,7 +434,7 @@ def test_client_create_modelfile_roundtrip(httpserver: HTTPServer): ] ), ) - assert isinstance(response, dict) + assert response['status'] == 'success' def test_client_create_from_library(httpserver: HTTPServer): @@ -486,17 +442,16 @@ def test_client_create_from_library(httpserver: HTTPServer): '/api/create', method='POST', json={ - 'name': 'dummy', + 'model': 'dummy', 'modelfile': 'FROM llama2', 'stream': False, - 'quantize': None, }, - ).respond_with_json({}) + ).respond_with_json({'status': 'success'}) client = Client(httpserver.url_for('/')) response = client.create('dummy', modelfile='FROM llama2') - assert isinstance(response, dict) + assert response['status'] == 'success' def test_client_create_blob(httpserver: HTTPServer): @@ -524,14 +479,14 @@ def test_client_delete(httpserver: HTTPServer): httpserver.expect_ordered_request(PrefixPattern('/api/delete'), method='DELETE').respond_with_response(Response(status=200)) client = Client(httpserver.url_for('/api/delete')) response = client.delete('dummy') - assert response == {'status': 'success'} + assert response['status'] == 'success' def 
test_client_copy(httpserver: HTTPServer): httpserver.expect_ordered_request(PrefixPattern('/api/copy'), method='POST').respond_with_response(Response(status=200)) client = Client(httpserver.url_for('/api/copy')) response = client.copy('dum', 'dummer') - assert response == {'status': 'success'} + assert response['status'] == 'success' @pytest.mark.asyncio @@ -544,15 +499,22 @@ async def test_async_client_chat(httpserver: HTTPServer): 'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}], 'tools': [], 'stream': False, - 'format': '', - 'options': {}, - 'keep_alive': None, }, - ).respond_with_json({}) + ).respond_with_json( + { + 'model': 'dummy', + 'message': { + 'role': 'assistant', + 'content': "I don't know.", + }, + } + ) client = AsyncClient(httpserver.url_for('/')) response = await client.chat('dummy', messages=[{'role': 'user', 'content': 'Why is the sky blue?'}]) - assert isinstance(response, dict) + assert response['model'] == 'dummy' + assert response['message']['role'] == 'assistant' + assert response['message']['content'] == "I don't know." 
@pytest.mark.asyncio @@ -583,9 +545,6 @@ def generate(): 'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}], 'tools': [], 'stream': True, - 'format': '', - 'options': {}, - 'keep_alive': None, }, ).respond_with_handler(stream_handler) @@ -614,18 +573,25 @@ async def test_async_client_chat_images(httpserver: HTTPServer): ], 'tools': [], 'stream': False, - 'format': '', - 'options': {}, - 'keep_alive': None, }, - ).respond_with_json({}) + ).respond_with_json( + { + 'model': 'dummy', + 'message': { + 'role': 'assistant', + 'content': "I don't know.", + }, + } + ) client = AsyncClient(httpserver.url_for('/')) with io.BytesIO() as b: Image.new('RGB', (1, 1)).save(b, 'PNG') response = await client.chat('dummy', messages=[{'role': 'user', 'content': 'Why is the sky blue?', 'images': [b.getvalue()]}]) - assert isinstance(response, dict) + assert response['model'] == 'dummy' + assert response['message']['role'] == 'assistant' + assert response['message']['content'] == "I don't know." @pytest.mark.asyncio @@ -636,22 +602,19 @@ async def test_async_client_generate(httpserver: HTTPServer): json={ 'model': 'dummy', 'prompt': 'Why is the sky blue?', - 'suffix': '', - 'system': '', - 'template': '', - 'context': [], 'stream': False, - 'raw': False, - 'images': [], - 'format': '', - 'options': {}, - 'keep_alive': None, }, - ).respond_with_json({}) + ).respond_with_json( + { + 'model': 'dummy', + 'response': 'Because it is.', + } + ) client = AsyncClient(httpserver.url_for('/')) response = await client.generate('dummy', 'Why is the sky blue?') - assert isinstance(response, dict) + assert response['model'] == 'dummy' + assert response['response'] == 'Because it is.' 
@pytest.mark.asyncio @@ -677,16 +640,7 @@ def generate(): json={ 'model': 'dummy', 'prompt': 'Why is the sky blue?', - 'suffix': '', - 'system': '', - 'template': '', - 'context': [], 'stream': True, - 'raw': False, - 'images': [], - 'format': '', - 'options': {}, - 'keep_alive': None, }, ).respond_with_handler(stream_handler) @@ -707,25 +661,23 @@ async def test_async_client_generate_images(httpserver: HTTPServer): json={ 'model': 'dummy', 'prompt': 'Why is the sky blue?', - 'suffix': '', - 'system': '', - 'template': '', - 'context': [], 'stream': False, - 'raw': False, 'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'], - 'format': '', - 'options': {}, - 'keep_alive': None, }, - ).respond_with_json({}) + ).respond_with_json( + { + 'model': 'dummy', + 'response': 'Because it is.', + } + ) client = AsyncClient(httpserver.url_for('/')) with tempfile.NamedTemporaryFile() as temp: Image.new('RGB', (1, 1)).save(temp, 'PNG') response = await client.generate('dummy', 'Why is the sky blue?', images=[temp.name]) - assert isinstance(response, dict) + assert response['model'] == 'dummy' + assert response['response'] == 'Because it is.' 
@pytest.mark.asyncio @@ -734,15 +686,15 @@ async def test_async_client_pull(httpserver: HTTPServer): '/api/pull', method='POST', json={ - 'name': 'dummy', + 'model': 'dummy', 'insecure': False, 'stream': False, }, - ).respond_with_json({}) + ).respond_with_json({'status': 'success'}) client = AsyncClient(httpserver.url_for('/')) response = await client.pull('dummy') - assert isinstance(response, dict) + assert response['status'] == 'success' @pytest.mark.asyncio @@ -761,7 +713,7 @@ def generate(): '/api/pull', method='POST', json={ - 'name': 'dummy', + 'model': 'dummy', 'insecure': False, 'stream': True, }, @@ -781,15 +733,15 @@ async def test_async_client_push(httpserver: HTTPServer): '/api/push', method='POST', json={ - 'name': 'dummy', + 'model': 'dummy', 'insecure': False, 'stream': False, }, - ).respond_with_json({}) + ).respond_with_json({'status': 'success'}) client = AsyncClient(httpserver.url_for('/')) response = await client.push('dummy') - assert isinstance(response, dict) + assert response['status'] == 'success' @pytest.mark.asyncio @@ -806,7 +758,7 @@ def generate(): '/api/push', method='POST', json={ - 'name': 'dummy', + 'model': 'dummy', 'insecure': False, 'stream': True, }, @@ -827,12 +779,11 @@ async def test_async_client_create_path(httpserver: HTTPServer): '/api/create', method='POST', json={ - 'name': 'dummy', + 'model': 'dummy', 'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n', 'stream': False, - 'quantize': None, }, - ).respond_with_json({}) + ).respond_with_json({'status': 'success'}) client = AsyncClient(httpserver.url_for('/')) @@ -842,7 +793,7 @@ async def test_async_client_create_path(httpserver: HTTPServer): modelfile.flush() response = await client.create('dummy', path=modelfile.name) - assert isinstance(response, dict) + assert response['status'] == 'success' @pytest.mark.asyncio @@ -852,12 +803,11 @@ async def test_async_client_create_path_relative(httpserver: HTTPServer): '/api/create', 
method='POST', json={ - 'name': 'dummy', + 'model': 'dummy', 'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n', 'stream': False, - 'quantize': None, }, - ).respond_with_json({}) + ).respond_with_json({'status': 'success'}) client = AsyncClient(httpserver.url_for('/')) @@ -867,7 +817,7 @@ async def test_async_client_create_path_relative(httpserver: HTTPServer): modelfile.flush() response = await client.create('dummy', path=modelfile.name) - assert isinstance(response, dict) + assert response['status'] == 'success' @pytest.mark.asyncio @@ -877,12 +827,11 @@ async def test_async_client_create_path_user_home(httpserver: HTTPServer, userho '/api/create', method='POST', json={ - 'name': 'dummy', + 'model': 'dummy', 'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n', 'stream': False, - 'quantize': None, }, - ).respond_with_json({}) + ).respond_with_json({'status': 'success'}) client = AsyncClient(httpserver.url_for('/')) @@ -892,7 +841,7 @@ async def test_async_client_create_path_user_home(httpserver: HTTPServer, userho modelfile.flush() response = await client.create('dummy', path=modelfile.name) - assert isinstance(response, dict) + assert response['status'] == 'success' @pytest.mark.asyncio @@ -902,18 +851,17 @@ async def test_async_client_create_modelfile(httpserver: HTTPServer): '/api/create', method='POST', json={ - 'name': 'dummy', + 'model': 'dummy', 'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n', 'stream': False, - 'quantize': None, }, - ).respond_with_json({}) + ).respond_with_json({'status': 'success'}) client = AsyncClient(httpserver.url_for('/')) with tempfile.NamedTemporaryFile() as blob: response = await client.create('dummy', modelfile=f'FROM {blob.name}') - assert isinstance(response, dict) + assert response['status'] == 'success' @pytest.mark.asyncio @@ -923,7 +871,7 @@ async def 
test_async_client_create_modelfile_roundtrip(httpserver: HTTPServer): '/api/create', method='POST', json={ - 'name': 'dummy', + 'model': 'dummy', 'modelfile': '''FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 TEMPLATE """[INST] <>{{.System}}<> {{.Prompt}} [/INST]""" @@ -937,9 +885,8 @@ async def test_async_client_create_modelfile_roundtrip(httpserver: HTTPServer): PARAMETER stop <> PARAMETER stop <>''', 'stream': False, - 'quantize': None, }, - ).respond_with_json({}) + ).respond_with_json({'status': 'success'}) client = AsyncClient(httpserver.url_for('/')) @@ -963,7 +910,7 @@ async def test_async_client_create_modelfile_roundtrip(httpserver: HTTPServer): ] ), ) - assert isinstance(response, dict) + assert response['status'] == 'success' @pytest.mark.asyncio @@ -972,17 +919,16 @@ async def test_async_client_create_from_library(httpserver: HTTPServer): '/api/create', method='POST', json={ - 'name': 'dummy', + 'model': 'dummy', 'modelfile': 'FROM llama2', 'stream': False, - 'quantize': None, }, - ).respond_with_json({}) + ).respond_with_json({'status': 'success'}) client = AsyncClient(httpserver.url_for('/')) response = await client.create('dummy', modelfile='FROM llama2') - assert isinstance(response, dict) + assert response['status'] == 'success' @pytest.mark.asyncio @@ -1013,7 +959,7 @@ async def test_async_client_delete(httpserver: HTTPServer): httpserver.expect_ordered_request(PrefixPattern('/api/delete'), method='DELETE').respond_with_response(Response(status=200)) client = AsyncClient(httpserver.url_for('/api/delete')) response = await client.delete('dummy') - assert response == {'status': 'success'} + assert response['status'] == 'success' @pytest.mark.asyncio @@ -1021,4 +967,4 @@ async def test_async_client_copy(httpserver: HTTPServer): httpserver.expect_ordered_request(PrefixPattern('/api/copy'), method='POST').respond_with_response(Response(status=200)) client = AsyncClient(httpserver.url_for('/api/copy')) response = await 
client.copy('dum', 'dummer') - assert response == {'status': 'success'} + assert response['status'] == 'success' From b0da4ff2d87f084c4a9098c554cc30b2817a48c2 Mon Sep 17 00:00:00 2001 From: Michael Yang Date: Mon, 9 Sep 2024 11:46:19 -0700 Subject: [PATCH 09/18] no head this request is unnecessary since the POST will short circuit the request if the blob already exists --- ollama/_client.py | 34 +++++++++++----------------------- tests/test_client.py | 26 ++++++++++++-------------- 2 files changed, 23 insertions(+), 37 deletions(-) diff --git a/ollama/_client.py b/ollama/_client.py index c1f5f95d..723eb651 100644 --- a/ollama/_client.py +++ b/ollama/_client.py @@ -523,14 +523,8 @@ def _create_blob(self, path: Union[str, Path]) -> str: digest = f'sha256:{sha256sum.hexdigest()}' - try: - self._request_raw('HEAD', f'/api/blobs/{digest}') - except ResponseError as e: - if e.status_code != 404: - raise - - with open(path, 'rb') as r: - self._request_raw('POST', f'/api/blobs/{digest}', content=r) + with open(path, 'rb') as r: + self._request_raw('POST', f'/api/blobs/sha256:{digest}', content=r) return digest @@ -1007,21 +1001,15 @@ async def _create_blob(self, path: Union[str, Path]) -> str: digest = f'sha256:{sha256sum.hexdigest()}' - try: - await self._request_raw('HEAD', f'/api/blobs/{digest}') - except ResponseError as e: - if e.status_code != 404: - raise - - async def upload_bytes(): - with open(path, 'rb') as r: - while True: - chunk = r.read(32 * 1024) - if not chunk: - break - yield chunk - - await self._request_raw('POST', f'/api/blobs/{digest}', content=upload_bytes()) + async def upload_bytes(): + with open(path, 'rb') as r: + while True: + chunk = r.read(32 * 1024) + if not chunk: + break + yield chunk + + await self._request_raw('POST', f'/api/blobs/{digest}', content=upload_bytes()) return digest diff --git a/tests/test_client.py b/tests/test_client.py index 3bb451c9..49836108 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -293,7 +293,7 @@ 
def generate(): def test_client_create_path(httpserver: HTTPServer): - httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200)) + httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='POST').respond_with_response(Response(status=200)) httpserver.expect_ordered_request( '/api/create', method='POST', @@ -316,7 +316,7 @@ def test_client_create_path(httpserver: HTTPServer): def test_client_create_path_relative(httpserver: HTTPServer): - httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200)) + httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='POST').respond_with_response(Response(status=200)) httpserver.expect_ordered_request( '/api/create', method='POST', @@ -348,7 +348,7 @@ def userhomedir(): def test_client_create_path_user_home(httpserver: HTTPServer, userhomedir): - httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200)) + httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='POST').respond_with_response(Response(status=200)) httpserver.expect_ordered_request( '/api/create', method='POST', @@ -371,7 +371,7 @@ def test_client_create_path_user_home(httpserver: HTTPServer, userhomedir): def test_client_create_modelfile(httpserver: HTTPServer): - httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200)) + httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='POST').respond_with_response(Response(status=200)) httpserver.expect_ordered_request( '/api/create', method='POST', @@ -390,7 +390,7 @@ def test_client_create_modelfile(httpserver: HTTPServer): def test_client_create_modelfile_roundtrip(httpserver: HTTPServer): - httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), 
method='HEAD').respond_with_response(Response(status=200)) + httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='POST').respond_with_response(Response(status=200)) httpserver.expect_ordered_request( '/api/create', method='POST', @@ -455,7 +455,6 @@ def test_client_create_from_library(httpserver: HTTPServer): def test_client_create_blob(httpserver: HTTPServer): - httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=404)) httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='POST').respond_with_response(Response(status=201)) client = Client(httpserver.url_for('/')) @@ -466,7 +465,7 @@ def test_client_create_blob(httpserver: HTTPServer): def test_client_create_blob_exists(httpserver: HTTPServer): - httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200)) + httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='POST').respond_with_response(Response(status=200)) client = Client(httpserver.url_for('/')) @@ -774,7 +773,7 @@ def generate(): @pytest.mark.asyncio async def test_async_client_create_path(httpserver: HTTPServer): - httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200)) + httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='POST').respond_with_response(Response(status=200)) httpserver.expect_ordered_request( '/api/create', method='POST', @@ -798,7 +797,7 @@ async def test_async_client_create_path(httpserver: HTTPServer): @pytest.mark.asyncio async def test_async_client_create_path_relative(httpserver: HTTPServer): - httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200)) + httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='POST').respond_with_response(Response(status=200)) httpserver.expect_ordered_request( 
'/api/create', method='POST', @@ -822,7 +821,7 @@ async def test_async_client_create_path_relative(httpserver: HTTPServer): @pytest.mark.asyncio async def test_async_client_create_path_user_home(httpserver: HTTPServer, userhomedir): - httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200)) + httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='POST').respond_with_response(Response(status=200)) httpserver.expect_ordered_request( '/api/create', method='POST', @@ -846,7 +845,7 @@ async def test_async_client_create_path_user_home(httpserver: HTTPServer, userho @pytest.mark.asyncio async def test_async_client_create_modelfile(httpserver: HTTPServer): - httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200)) + httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='POST').respond_with_response(Response(status=200)) httpserver.expect_ordered_request( '/api/create', method='POST', @@ -866,7 +865,7 @@ async def test_async_client_create_modelfile(httpserver: HTTPServer): @pytest.mark.asyncio async def test_async_client_create_modelfile_roundtrip(httpserver: HTTPServer): - httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200)) + httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='POST').respond_with_response(Response(status=200)) httpserver.expect_ordered_request( '/api/create', method='POST', @@ -933,7 +932,6 @@ async def test_async_client_create_from_library(httpserver: HTTPServer): @pytest.mark.asyncio async def test_async_client_create_blob(httpserver: HTTPServer): - httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=404)) httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='POST').respond_with_response(Response(status=201)) client = 
AsyncClient(httpserver.url_for('/')) @@ -945,7 +943,7 @@ async def test_async_client_create_blob(httpserver: HTTPServer): @pytest.mark.asyncio async def test_async_client_create_blob_exists(httpserver: HTTPServer): - httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200)) + httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='POST').respond_with_response(Response(status=200)) client = AsyncClient(httpserver.url_for('/')) From f25834217be31fb730c9c8d64a7a2d638d489bf5 Mon Sep 17 00:00:00 2001 From: Parth Sareen Date: Wed, 6 Nov 2024 14:04:56 -0800 Subject: [PATCH 10/18] Pydantic Fixes and Tests (#311) * Added SubscriptableBaseModel to the Model classes and added Image codec test --------- Co-authored-by: Parth Sareen --- ollama/_types.py | 5 +++-- tests/test_type_serialization.py | 15 +++++++++++++++ 2 files changed, 18 insertions(+), 2 deletions(-) create mode 100644 tests/test_type_serialization.py diff --git a/ollama/_types.py b/ollama/_types.py index b223d9cc..968099dc 100644 --- a/ollama/_types.py +++ b/ollama/_types.py @@ -97,6 +97,7 @@ class BaseGenerateRequest(BaseStreamableRequest): class Image(BaseModel): value: Union[FilePath, Base64Str, bytes] + # This overloads the `model_dump` method and returns values depending on the type of the `value` field @model_serializer def serialize_model(self): if isinstance(self.value, Path): @@ -333,7 +334,7 @@ class ModelDetails(SubscriptableBaseModel): class ListResponse(SubscriptableBaseModel): - class Model(BaseModel): + class Model(SubscriptableBaseModel): modified_at: Optional[datetime] = None digest: Optional[str] = None size: Optional[ByteSize] = None @@ -394,7 +395,7 @@ class ShowResponse(SubscriptableBaseModel): class ProcessResponse(SubscriptableBaseModel): - class Model(BaseModel): + class Model(SubscriptableBaseModel): model: Optional[str] = None name: Optional[str] = None digest: Optional[str] = None diff --git 
a/tests/test_type_serialization.py b/tests/test_type_serialization.py new file mode 100644 index 00000000..f127b03f --- /dev/null +++ b/tests/test_type_serialization.py @@ -0,0 +1,15 @@ +from base64 import b64decode, b64encode + +from ollama._types import Image + + +def test_image_serialization(): + # Test bytes serialization + image_bytes = b'test image bytes' + img = Image(value=image_bytes) + assert img.model_dump() == b64encode(image_bytes).decode() + + # Test base64 string serialization + b64_str = 'dGVzdCBiYXNlNjQgc3RyaW5n' + img = Image(value=b64_str) + assert img.model_dump() == b64decode(b64_str).decode() From 72052188c3d0e020bdd12abb3ff9cd12a156a0bf Mon Sep 17 00:00:00 2001 From: Parth Sareen Date: Thu, 7 Nov 2024 13:12:22 -0800 Subject: [PATCH 11/18] Fixing empty header + ensuring security (#313) * Fixing empty header + ensuring security --- ollama/_client.py | 13 +++++++++---- tests/test_client.py | 16 ++++++++++++++++ 2 files changed, 25 insertions(+), 4 deletions(-) diff --git a/ollama/_client.py b/ollama/_client.py index c1f5f95d..3372af45 100644 --- a/ollama/_client.py +++ b/ollama/_client.py @@ -90,11 +90,16 @@ def __init__( base_url=_parse_host(host or os.getenv('OLLAMA_HOST')), follow_redirects=follow_redirects, timeout=timeout, + # Lowercase all headers to ensure override headers={ - 'Content-Type': 'application/json', - 'Accept': 'application/json', - 'User-Agent': f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}', - }.update(headers or {}), + k.lower(): v + for k, v in { + **(headers or {}), + 'Content-Type': 'application/json', + 'Accept': 'application/json', + 'User-Agent': f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}', + }.items() + }, **kwargs, ) diff --git a/tests/test_client.py b/tests/test_client.py index 3bb451c9..124ccfc2 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -968,3 +968,19 
@@ async def test_async_client_copy(httpserver: HTTPServer): client = AsyncClient(httpserver.url_for('/api/copy')) response = await client.copy('dum', 'dummer') assert response['status'] == 'success' + + +def test_headers(): + client = Client() + assert client._client.headers['content-type'] == 'application/json' + assert client._client.headers['accept'] == 'application/json' + assert client._client.headers['user-agent'].startswith('ollama-python/') + + client = Client( + headers={ + 'X-Custom': 'value', + 'Content-Type': 'text/plain', + } + ) + assert client._client.headers['x-custom'] == 'value' + assert client._client.headers['content-type'] == 'application/json' From 425bce23299364d930e945d74051274a85738d33 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 22:30:48 +0000 Subject: [PATCH 12/18] Bump ruff from 0.6.9 to 0.7.4 Bumps [ruff](https://github.com/astral-sh/ruff) from 0.6.9 to 0.7.4. - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/0.6.9...0.7.4) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- poetry.lock | 53 ++++++++++++++++++++------------------------------ pyproject.toml | 2 +- 2 files changed, 22 insertions(+), 33 deletions(-) diff --git a/poetry.lock b/poetry.lock index 83ebc984..a59f458d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] name = "annotated-types" @@ -631,29 +631,29 @@ Werkzeug = ">=2.0.0" [[package]] name = "ruff" -version = "0.6.9" +version = "0.7.4" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.6.9-py3-none-linux_armv6l.whl", hash = "sha256:064df58d84ccc0ac0fcd63bc3090b251d90e2a372558c0f057c3f75ed73e1ccd"}, - {file = "ruff-0.6.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:140d4b5c9f5fc7a7b074908a78ab8d384dd7f6510402267bc76c37195c02a7ec"}, - {file = "ruff-0.6.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:53fd8ca5e82bdee8da7f506d7b03a261f24cd43d090ea9db9a1dc59d9313914c"}, - {file = "ruff-0.6.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645d7d8761f915e48a00d4ecc3686969761df69fb561dd914a773c1a8266e14e"}, - {file = "ruff-0.6.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eae02b700763e3847595b9d2891488989cac00214da7f845f4bcf2989007d577"}, - {file = "ruff-0.6.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d5ccc9e58112441de8ad4b29dcb7a86dc25c5f770e3c06a9d57e0e5eba48829"}, - {file = "ruff-0.6.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:417b81aa1c9b60b2f8edc463c58363075412866ae4e2b9ab0f690dc1e87ac1b5"}, - {file = "ruff-0.6.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c866b631f5fbce896a74a6e4383407ba7507b815ccc52bcedabb6810fdb3ef7"}, - {file = "ruff-0.6.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b118afbb3202f5911486ad52da86d1d52305b59e7ef2031cea3425142b97d6f"}, - {file = "ruff-0.6.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a67267654edc23c97335586774790cde402fb6bbdb3c2314f1fc087dee320bfa"}, - {file = "ruff-0.6.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3ef0cc774b00fec123f635ce5c547dac263f6ee9fb9cc83437c5904183b55ceb"}, - {file = 
"ruff-0.6.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:12edd2af0c60fa61ff31cefb90aef4288ac4d372b4962c2864aeea3a1a2460c0"}, - {file = "ruff-0.6.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:55bb01caeaf3a60b2b2bba07308a02fca6ab56233302406ed5245180a05c5625"}, - {file = "ruff-0.6.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:925d26471fa24b0ce5a6cdfab1bb526fb4159952385f386bdcc643813d472039"}, - {file = "ruff-0.6.9-py3-none-win32.whl", hash = "sha256:eb61ec9bdb2506cffd492e05ac40e5bc6284873aceb605503d8494180d6fc84d"}, - {file = "ruff-0.6.9-py3-none-win_amd64.whl", hash = "sha256:785d31851c1ae91f45b3d8fe23b8ae4b5170089021fbb42402d811135f0b7117"}, - {file = "ruff-0.6.9-py3-none-win_arm64.whl", hash = "sha256:a9641e31476d601f83cd602608739a0840e348bda93fec9f1ee816f8b6798b93"}, - {file = "ruff-0.6.9.tar.gz", hash = "sha256:b076ef717a8e5bc819514ee1d602bbdca5b4420ae13a9cf61a0c0a4f53a2baa2"}, + {file = "ruff-0.7.4-py3-none-linux_armv6l.whl", hash = "sha256:a4919925e7684a3f18e18243cd6bea7cfb8e968a6eaa8437971f681b7ec51478"}, + {file = "ruff-0.7.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfb365c135b830778dda8c04fb7d4280ed0b984e1aec27f574445231e20d6c63"}, + {file = "ruff-0.7.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:63a569b36bc66fbadec5beaa539dd81e0527cb258b94e29e0531ce41bacc1f20"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d06218747d361d06fd2fdac734e7fa92df36df93035db3dc2ad7aa9852cb109"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0cea28d0944f74ebc33e9f934238f15c758841f9f5edd180b5315c203293452"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80094ecd4793c68b2571b128f91754d60f692d64bc0d7272ec9197fdd09bf9ea"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:997512325c6620d1c4c2b15db49ef59543ef9cd0f4aa8065ec2ae5103cedc7e7"}, + {file = 
"ruff-0.7.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00b4cf3a6b5fad6d1a66e7574d78956bbd09abfd6c8a997798f01f5da3d46a05"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7dbdc7d8274e1422722933d1edddfdc65b4336abf0b16dfcb9dedd6e6a517d06"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e92dfb5f00eaedb1501b2f906ccabfd67b2355bdf117fea9719fc99ac2145bc"}, + {file = "ruff-0.7.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3bd726099f277d735dc38900b6a8d6cf070f80828877941983a57bca1cd92172"}, + {file = "ruff-0.7.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2e32829c429dd081ee5ba39aef436603e5b22335c3d3fff013cd585806a6486a"}, + {file = "ruff-0.7.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:662a63b4971807623f6f90c1fb664613f67cc182dc4d991471c23c541fee62dd"}, + {file = "ruff-0.7.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:876f5e09eaae3eb76814c1d3b68879891d6fde4824c015d48e7a7da4cf066a3a"}, + {file = "ruff-0.7.4-py3-none-win32.whl", hash = "sha256:75c53f54904be42dd52a548728a5b572344b50d9b2873d13a3f8c5e3b91f5cac"}, + {file = "ruff-0.7.4-py3-none-win_amd64.whl", hash = "sha256:745775c7b39f914238ed1f1b0bebed0b9155a17cd8bc0b08d3c87e4703b990d6"}, + {file = "ruff-0.7.4-py3-none-win_arm64.whl", hash = "sha256:11bff065102c3ae9d3ea4dc9ecdfe5a5171349cdd0787c1fc64761212fc9cf1f"}, + {file = "ruff-0.7.4.tar.gz", hash = "sha256:cd12e35031f5af6b9b93715d8c4f40360070b2041f81273d0527683d5708fce2"}, ] [[package]] @@ -689,17 +689,6 @@ files = [ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] -[[package]] -name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = 
"sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - [[package]] name = "tzdata" version = "2024.1" @@ -731,4 +720,4 @@ watchdog = ["watchdog (>=2.3)"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "e664c86cc330480eb86239842f55f12b0fba4df5c2fc776d094f37f58320e637" +content-hash = "61443e0ce98d3e24a45da6f9c890699fc44fe98cd191b0eb38e6b59093e8149d" diff --git a/pyproject.toml b/pyproject.toml index ff151d31..afafd2fc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,7 +19,7 @@ pytest-asyncio = ">=0.23.2,<0.25.0" pytest-cov = ">=4.1,<6.0" pytest-httpserver = "^1.0.8" pillow = "^10.2.0" -ruff = ">=0.1.8,<0.7.0" +ruff = ">=0.1.8,<0.8.0" [build-system] requires = ["poetry-core"] From 52510cca53a960b189f18f6e9e0ec27df34706f1 Mon Sep 17 00:00:00 2001 From: Michael Yang Date: Tue, 19 Nov 2024 10:24:54 -0800 Subject: [PATCH 13/18] chore: add python3.13 to test matrix --- .github/workflows/test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 34349b74..deac0fbe 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -7,7 +7,7 @@ jobs: test: strategy: matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 From d0d7894f883f54ac02bc8a3b2102f8438e859d8b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 18:37:53 +0000 Subject: [PATCH 14/18] Bump pydantic from 2.9.0 to 2.9.2 Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.9.0 to 2.9.2. 
- [Release notes](https://github.com/pydantic/pydantic/releases) - [Changelog](https://github.com/pydantic/pydantic/blob/main/HISTORY.md) - [Commits](https://github.com/pydantic/pydantic/compare/v2.9.0...v2.9.2) --- updated-dependencies: - dependency-name: pydantic dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 203 +++++++++++++++++++++++++--------------------------- 1 file changed, 96 insertions(+), 107 deletions(-) diff --git a/poetry.lock b/poetry.lock index a59f458d..a08f7a07 100644 --- a/poetry.lock +++ b/poetry.lock @@ -435,123 +435,123 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pydantic" -version = "2.9.0" +version = "2.9.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.9.0-py3-none-any.whl", hash = "sha256:f66a7073abd93214a20c5f7b32d56843137a7a2e70d02111f3be287035c45370"}, - {file = "pydantic-2.9.0.tar.gz", hash = "sha256:c7a8a9fdf7d100afa49647eae340e2d23efa382466a8d177efcd1381e9be5598"}, + {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, + {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, ] [package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.23.2" +annotated-types = ">=0.6.0" +pydantic-core = "2.23.4" typing-extensions = [ {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, {version = ">=4.6.1", markers = "python_version < \"3.13\""}, ] -tzdata = {version = "*", markers = "python_version >= \"3.9\""} [package.extras] email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.23.2" +version = "2.23.4" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = 
"pydantic_core-2.23.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7d0324a35ab436c9d768753cbc3c47a865a2cbc0757066cb864747baa61f6ece"}, - {file = "pydantic_core-2.23.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:276ae78153a94b664e700ac362587c73b84399bd1145e135287513442e7dfbc7"}, - {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:964c7aa318da542cdcc60d4a648377ffe1a2ef0eb1e996026c7f74507b720a78"}, - {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1cf842265a3a820ebc6388b963ead065f5ce8f2068ac4e1c713ef77a67b71f7c"}, - {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae90b9e50fe1bd115b24785e962b51130340408156d34d67b5f8f3fa6540938e"}, - {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ae65fdfb8a841556b52935dfd4c3f79132dc5253b12c0061b96415208f4d622"}, - {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c8aa40f6ca803f95b1c1c5aeaee6237b9e879e4dfb46ad713229a63651a95fb"}, - {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c53100c8ee5a1e102766abde2158077d8c374bee0639201f11d3032e3555dfbc"}, - {file = "pydantic_core-2.23.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d6b9dd6aa03c812017411734e496c44fef29b43dba1e3dd1fa7361bbacfc1354"}, - {file = "pydantic_core-2.23.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b18cf68255a476b927910c6873d9ed00da692bb293c5b10b282bd48a0afe3ae2"}, - {file = "pydantic_core-2.23.2-cp310-none-win32.whl", hash = "sha256:e460475719721d59cd54a350c1f71c797c763212c836bf48585478c5514d2854"}, - {file = "pydantic_core-2.23.2-cp310-none-win_amd64.whl", hash = "sha256:5f3cf3721eaf8741cffaf092487f1ca80831202ce91672776b02b875580e174a"}, - {file = 
"pydantic_core-2.23.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:7ce8e26b86a91e305858e018afc7a6e932f17428b1eaa60154bd1f7ee888b5f8"}, - {file = "pydantic_core-2.23.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e9b24cca4037a561422bf5dc52b38d390fb61f7bfff64053ce1b72f6938e6b2"}, - {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:753294d42fb072aa1775bfe1a2ba1012427376718fa4c72de52005a3d2a22178"}, - {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:257d6a410a0d8aeb50b4283dea39bb79b14303e0fab0f2b9d617701331ed1515"}, - {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8319e0bd6a7b45ad76166cc3d5d6a36c97d0c82a196f478c3ee5346566eebfd"}, - {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a05c0240f6c711eb381ac392de987ee974fa9336071fb697768dfdb151345ce"}, - {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d5b0ff3218858859910295df6953d7bafac3a48d5cd18f4e3ed9999efd2245f"}, - {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:96ef39add33ff58cd4c112cbac076726b96b98bb8f1e7f7595288dcfb2f10b57"}, - {file = "pydantic_core-2.23.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0102e49ac7d2df3379ef8d658d3bc59d3d769b0bdb17da189b75efa861fc07b4"}, - {file = "pydantic_core-2.23.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a6612c2a844043e4d10a8324c54cdff0042c558eef30bd705770793d70b224aa"}, - {file = "pydantic_core-2.23.2-cp311-none-win32.whl", hash = "sha256:caffda619099cfd4f63d48462f6aadbecee3ad9603b4b88b60cb821c1b258576"}, - {file = "pydantic_core-2.23.2-cp311-none-win_amd64.whl", hash = "sha256:6f80fba4af0cb1d2344869d56430e304a51396b70d46b91a55ed4959993c0589"}, - {file = 
"pydantic_core-2.23.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:4c83c64d05ffbbe12d4e8498ab72bdb05bcc1026340a4a597dc647a13c1605ec"}, - {file = "pydantic_core-2.23.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6294907eaaccf71c076abdd1c7954e272efa39bb043161b4b8aa1cd76a16ce43"}, - {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a801c5e1e13272e0909c520708122496647d1279d252c9e6e07dac216accc41"}, - {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cc0c316fba3ce72ac3ab7902a888b9dc4979162d320823679da270c2d9ad0cad"}, - {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b06c5d4e8701ac2ba99a2ef835e4e1b187d41095a9c619c5b185c9068ed2a49"}, - {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82764c0bd697159fe9947ad59b6db6d7329e88505c8f98990eb07e84cc0a5d81"}, - {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b1a195efd347ede8bcf723e932300292eb13a9d2a3c1f84eb8f37cbbc905b7f"}, - {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7efb12e5071ad8d5b547487bdad489fbd4a5a35a0fc36a1941517a6ad7f23e0"}, - {file = "pydantic_core-2.23.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5dd0ec5f514ed40e49bf961d49cf1bc2c72e9b50f29a163b2cc9030c6742aa73"}, - {file = "pydantic_core-2.23.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:820f6ee5c06bc868335e3b6e42d7ef41f50dfb3ea32fbd523ab679d10d8741c0"}, - {file = "pydantic_core-2.23.2-cp312-none-win32.whl", hash = "sha256:3713dc093d5048bfaedbba7a8dbc53e74c44a140d45ede020dc347dda18daf3f"}, - {file = "pydantic_core-2.23.2-cp312-none-win_amd64.whl", hash = "sha256:e1895e949f8849bc2757c0dbac28422a04be031204df46a56ab34bcf98507342"}, - {file = 
"pydantic_core-2.23.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:da43cbe593e3c87d07108d0ebd73771dc414488f1f91ed2e204b0370b94b37ac"}, - {file = "pydantic_core-2.23.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:64d094ea1aa97c6ded4748d40886076a931a8bf6f61b6e43e4a1041769c39dd2"}, - {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:084414ffe9a85a52940b49631321d636dadf3576c30259607b75516d131fecd0"}, - {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:043ef8469f72609c4c3a5e06a07a1f713d53df4d53112c6d49207c0bd3c3bd9b"}, - {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3649bd3ae6a8ebea7dc381afb7f3c6db237fc7cebd05c8ac36ca8a4187b03b30"}, - {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6db09153d8438425e98cdc9a289c5fade04a5d2128faff8f227c459da21b9703"}, - {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5668b3173bb0b2e65020b60d83f5910a7224027232c9f5dc05a71a1deac9f960"}, - {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1c7b81beaf7c7ebde978377dc53679c6cba0e946426fc7ade54251dfe24a7604"}, - {file = "pydantic_core-2.23.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:ae579143826c6f05a361d9546446c432a165ecf1c0b720bbfd81152645cb897d"}, - {file = "pydantic_core-2.23.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:19f1352fe4b248cae22a89268720fc74e83f008057a652894f08fa931e77dced"}, - {file = "pydantic_core-2.23.2-cp313-none-win32.whl", hash = "sha256:e1a79ad49f346aa1a2921f31e8dbbab4d64484823e813a002679eaa46cba39e1"}, - {file = "pydantic_core-2.23.2-cp313-none-win_amd64.whl", hash = "sha256:582871902e1902b3c8e9b2c347f32a792a07094110c1bca6c2ea89b90150caac"}, - {file = 
"pydantic_core-2.23.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:743e5811b0c377eb830150d675b0847a74a44d4ad5ab8845923d5b3a756d8100"}, - {file = "pydantic_core-2.23.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6650a7bbe17a2717167e3e23c186849bae5cef35d38949549f1c116031b2b3aa"}, - {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56e6a12ec8d7679f41b3750ffa426d22b44ef97be226a9bab00a03365f217b2b"}, - {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:810ca06cca91de9107718dc83d9ac4d2e86efd6c02cba49a190abcaf33fb0472"}, - {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:785e7f517ebb9890813d31cb5d328fa5eda825bb205065cde760b3150e4de1f7"}, - {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ef71ec876fcc4d3bbf2ae81961959e8d62f8d74a83d116668409c224012e3af"}, - {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d50ac34835c6a4a0d456b5db559b82047403c4317b3bc73b3455fefdbdc54b0a"}, - {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16b25a4a120a2bb7dab51b81e3d9f3cde4f9a4456566c403ed29ac81bf49744f"}, - {file = "pydantic_core-2.23.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:41ae8537ad371ec018e3c5da0eb3f3e40ee1011eb9be1da7f965357c4623c501"}, - {file = "pydantic_core-2.23.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07049ec9306ec64e955b2e7c40c8d77dd78ea89adb97a2013d0b6e055c5ee4c5"}, - {file = "pydantic_core-2.23.2-cp38-none-win32.whl", hash = "sha256:086c5db95157dc84c63ff9d96ebb8856f47ce113c86b61065a066f8efbe80acf"}, - {file = "pydantic_core-2.23.2-cp38-none-win_amd64.whl", hash = "sha256:67b6655311b00581914aba481729971b88bb8bc7996206590700a3ac85e457b8"}, - {file = "pydantic_core-2.23.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = 
"sha256:358331e21a897151e54d58e08d0219acf98ebb14c567267a87e971f3d2a3be59"}, - {file = "pydantic_core-2.23.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c4d9f15ffe68bcd3898b0ad7233af01b15c57d91cd1667f8d868e0eacbfe3f87"}, - {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0123655fedacf035ab10c23450163c2f65a4174f2bb034b188240a6cf06bb123"}, - {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e6e3ccebdbd6e53474b0bb7ab8b88e83c0cfe91484b25e058e581348ee5a01a5"}, - {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc535cb898ef88333cf317777ecdfe0faac1c2a3187ef7eb061b6f7ecf7e6bae"}, - {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aab9e522efff3993a9e98ab14263d4e20211e62da088298089a03056980a3e69"}, - {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05b366fb8fe3d8683b11ac35fa08947d7b92be78ec64e3277d03bd7f9b7cda79"}, - {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7568f682c06f10f30ef643a1e8eec4afeecdafde5c4af1b574c6df079e96f96c"}, - {file = "pydantic_core-2.23.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cdd02a08205dc90238669f082747612cb3c82bd2c717adc60f9b9ecadb540f80"}, - {file = "pydantic_core-2.23.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a2ab4f410f4b886de53b6bddf5dd6f337915a29dd9f22f20f3099659536b2f6"}, - {file = "pydantic_core-2.23.2-cp39-none-win32.whl", hash = "sha256:0448b81c3dfcde439551bb04a9f41d7627f676b12701865c8a2574bcea034437"}, - {file = "pydantic_core-2.23.2-cp39-none-win_amd64.whl", hash = "sha256:4cebb9794f67266d65e7e4cbe5dcf063e29fc7b81c79dc9475bd476d9534150e"}, - {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:e758d271ed0286d146cf7c04c539a5169a888dd0b57026be621547e756af55bc"}, - {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f477d26183e94eaafc60b983ab25af2a809a1b48ce4debb57b343f671b7a90b6"}, - {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da3131ef2b940b99106f29dfbc30d9505643f766704e14c5d5e504e6a480c35e"}, - {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329a721253c7e4cbd7aad4a377745fbcc0607f9d72a3cc2102dd40519be75ed2"}, - {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7706e15cdbf42f8fab1e6425247dfa98f4a6f8c63746c995d6a2017f78e619ae"}, - {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e64ffaf8f6e17ca15eb48344d86a7a741454526f3a3fa56bc493ad9d7ec63936"}, - {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dd59638025160056687d598b054b64a79183f8065eae0d3f5ca523cde9943940"}, - {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:12625e69b1199e94b0ae1c9a95d000484ce9f0182f9965a26572f054b1537e44"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5d813fd871b3d5c3005157622ee102e8908ad6011ec915a18bd8fde673c4360e"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1eb37f7d6a8001c0f86dc8ff2ee8d08291a536d76e49e78cda8587bb54d8b329"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ce7eaf9a98680b4312b7cebcdd9352531c43db00fca586115845df388f3c465"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f087879f1ffde024dd2788a30d55acd67959dcf6c431e9d3682d1c491a0eb474"}, - {file = 
"pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ce883906810b4c3bd90e0ada1f9e808d9ecf1c5f0b60c6b8831d6100bcc7dd6"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a8031074a397a5925d06b590121f8339d34a5a74cfe6970f8a1124eb8b83f4ac"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23af245b8f2f4ee9e2c99cb3f93d0e22fb5c16df3f2f643f5a8da5caff12a653"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c57e493a0faea1e4c38f860d6862ba6832723396c884fbf938ff5e9b224200e2"}, - {file = "pydantic_core-2.23.2.tar.gz", hash = "sha256:95d6bf449a1ac81de562d65d180af5d8c19672793c81877a2eda8fde5d08f2fd"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, + {file = 
"pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, + {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, + {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, + {file = 
"pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, + {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, + {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, + {file = 
"pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, + {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, + {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, + {file = 
"pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, + {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, + {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, + {file = 
"pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, + {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, + {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash 
= "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, + {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, + {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, + 
{file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, + {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, ] [package.dependencies] @@ -689,17 +689,6 @@ files = [ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = 
"sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, -] - [[package]] name = "werkzeug" version = "3.0.6" From 5ec4d23893513aea48e7e921360598dfdc2ad689 Mon Sep 17 00:00:00 2001 From: Michael Yang Date: Tue, 19 Nov 2024 10:39:22 -0800 Subject: [PATCH 15/18] chore: update requirements.txt --- requirements.txt | 189 +++++++++++++++++++++++------------------------ 1 file changed, 93 insertions(+), 96 deletions(-) diff --git a/requirements.txt b/requirements.txt index 5b549609..c7bfb080 100644 --- a/requirements.txt +++ b/requirements.txt @@ -22,105 +22,102 @@ httpx==0.27.2 ; python_version >= "3.8" and python_version < "4.0" \ idna==3.10 ; python_version >= "3.8" and python_version < "4.0" \ --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 -pydantic-core==2.23.2 ; python_version >= "3.8" and python_version < "4.0" \ - --hash=sha256:0102e49ac7d2df3379ef8d658d3bc59d3d769b0bdb17da189b75efa861fc07b4 \ - --hash=sha256:0123655fedacf035ab10c23450163c2f65a4174f2bb034b188240a6cf06bb123 \ - --hash=sha256:043ef8469f72609c4c3a5e06a07a1f713d53df4d53112c6d49207c0bd3c3bd9b \ - --hash=sha256:0448b81c3dfcde439551bb04a9f41d7627f676b12701865c8a2574bcea034437 \ - --hash=sha256:05b366fb8fe3d8683b11ac35fa08947d7b92be78ec64e3277d03bd7f9b7cda79 \ - --hash=sha256:07049ec9306ec64e955b2e7c40c8d77dd78ea89adb97a2013d0b6e055c5ee4c5 \ - --hash=sha256:084414ffe9a85a52940b49631321d636dadf3576c30259607b75516d131fecd0 \ - --hash=sha256:086c5db95157dc84c63ff9d96ebb8856f47ce113c86b61065a066f8efbe80acf \ - --hash=sha256:12625e69b1199e94b0ae1c9a95d000484ce9f0182f9965a26572f054b1537e44 \ - --hash=sha256:16b25a4a120a2bb7dab51b81e3d9f3cde4f9a4456566c403ed29ac81bf49744f \ - --hash=sha256:19f1352fe4b248cae22a89268720fc74e83f008057a652894f08fa931e77dced \ - --hash=sha256:1a2ab4f410f4b886de53b6bddf5dd6f337915a29dd9f22f20f3099659536b2f6 \ - 
--hash=sha256:1c7b81beaf7c7ebde978377dc53679c6cba0e946426fc7ade54251dfe24a7604 \ - --hash=sha256:1cf842265a3a820ebc6388b963ead065f5ce8f2068ac4e1c713ef77a67b71f7c \ - --hash=sha256:1eb37f7d6a8001c0f86dc8ff2ee8d08291a536d76e49e78cda8587bb54d8b329 \ - --hash=sha256:23af245b8f2f4ee9e2c99cb3f93d0e22fb5c16df3f2f643f5a8da5caff12a653 \ - --hash=sha256:257d6a410a0d8aeb50b4283dea39bb79b14303e0fab0f2b9d617701331ed1515 \ - --hash=sha256:276ae78153a94b664e700ac362587c73b84399bd1145e135287513442e7dfbc7 \ - --hash=sha256:2b1a195efd347ede8bcf723e932300292eb13a9d2a3c1f84eb8f37cbbc905b7f \ - --hash=sha256:329a721253c7e4cbd7aad4a377745fbcc0607f9d72a3cc2102dd40519be75ed2 \ - --hash=sha256:358331e21a897151e54d58e08d0219acf98ebb14c567267a87e971f3d2a3be59 \ - --hash=sha256:3649bd3ae6a8ebea7dc381afb7f3c6db237fc7cebd05c8ac36ca8a4187b03b30 \ - --hash=sha256:3713dc093d5048bfaedbba7a8dbc53e74c44a140d45ede020dc347dda18daf3f \ - --hash=sha256:3ef71ec876fcc4d3bbf2ae81961959e8d62f8d74a83d116668409c224012e3af \ - --hash=sha256:41ae8537ad371ec018e3c5da0eb3f3e40ee1011eb9be1da7f965357c4623c501 \ - --hash=sha256:4a801c5e1e13272e0909c520708122496647d1279d252c9e6e07dac216accc41 \ - --hash=sha256:4c83c64d05ffbbe12d4e8498ab72bdb05bcc1026340a4a597dc647a13c1605ec \ - --hash=sha256:4cebb9794f67266d65e7e4cbe5dcf063e29fc7b81c79dc9475bd476d9534150e \ - --hash=sha256:5668b3173bb0b2e65020b60d83f5910a7224027232c9f5dc05a71a1deac9f960 \ - --hash=sha256:56e6a12ec8d7679f41b3750ffa426d22b44ef97be226a9bab00a03365f217b2b \ - --hash=sha256:582871902e1902b3c8e9b2c347f32a792a07094110c1bca6c2ea89b90150caac \ - --hash=sha256:5c8aa40f6ca803f95b1c1c5aeaee6237b9e879e4dfb46ad713229a63651a95fb \ - --hash=sha256:5d813fd871b3d5c3005157622ee102e8908ad6011ec915a18bd8fde673c4360e \ - --hash=sha256:5dd0ec5f514ed40e49bf961d49cf1bc2c72e9b50f29a163b2cc9030c6742aa73 \ - --hash=sha256:5f3cf3721eaf8741cffaf092487f1ca80831202ce91672776b02b875580e174a \ - --hash=sha256:6294907eaaccf71c076abdd1c7954e272efa39bb043161b4b8aa1cd76a16ce43 \ - 
--hash=sha256:64d094ea1aa97c6ded4748d40886076a931a8bf6f61b6e43e4a1041769c39dd2 \ - --hash=sha256:6650a7bbe17a2717167e3e23c186849bae5cef35d38949549f1c116031b2b3aa \ - --hash=sha256:67b6655311b00581914aba481729971b88bb8bc7996206590700a3ac85e457b8 \ - --hash=sha256:6b06c5d4e8701ac2ba99a2ef835e4e1b187d41095a9c619c5b185c9068ed2a49 \ - --hash=sha256:6ce883906810b4c3bd90e0ada1f9e808d9ecf1c5f0b60c6b8831d6100bcc7dd6 \ - --hash=sha256:6db09153d8438425e98cdc9a289c5fade04a5d2128faff8f227c459da21b9703 \ - --hash=sha256:6f80fba4af0cb1d2344869d56430e304a51396b70d46b91a55ed4959993c0589 \ - --hash=sha256:743e5811b0c377eb830150d675b0847a74a44d4ad5ab8845923d5b3a756d8100 \ - --hash=sha256:753294d42fb072aa1775bfe1a2ba1012427376718fa4c72de52005a3d2a22178 \ - --hash=sha256:7568f682c06f10f30ef643a1e8eec4afeecdafde5c4af1b574c6df079e96f96c \ - --hash=sha256:7706e15cdbf42f8fab1e6425247dfa98f4a6f8c63746c995d6a2017f78e619ae \ - --hash=sha256:785e7f517ebb9890813d31cb5d328fa5eda825bb205065cde760b3150e4de1f7 \ - --hash=sha256:7a05c0240f6c711eb381ac392de987ee974fa9336071fb697768dfdb151345ce \ - --hash=sha256:7ce7eaf9a98680b4312b7cebcdd9352531c43db00fca586115845df388f3c465 \ - --hash=sha256:7ce8e26b86a91e305858e018afc7a6e932f17428b1eaa60154bd1f7ee888b5f8 \ - --hash=sha256:7d0324a35ab436c9d768753cbc3c47a865a2cbc0757066cb864747baa61f6ece \ - --hash=sha256:7e9b24cca4037a561422bf5dc52b38d390fb61f7bfff64053ce1b72f6938e6b2 \ - --hash=sha256:810ca06cca91de9107718dc83d9ac4d2e86efd6c02cba49a190abcaf33fb0472 \ - --hash=sha256:820f6ee5c06bc868335e3b6e42d7ef41f50dfb3ea32fbd523ab679d10d8741c0 \ - --hash=sha256:82764c0bd697159fe9947ad59b6db6d7329e88505c8f98990eb07e84cc0a5d81 \ - --hash=sha256:8ae65fdfb8a841556b52935dfd4c3f79132dc5253b12c0061b96415208f4d622 \ - --hash=sha256:8d5b0ff3218858859910295df6953d7bafac3a48d5cd18f4e3ed9999efd2245f \ - --hash=sha256:95d6bf449a1ac81de562d65d180af5d8c19672793c81877a2eda8fde5d08f2fd \ - --hash=sha256:964c7aa318da542cdcc60d4a648377ffe1a2ef0eb1e996026c7f74507b720a78 \ - 
--hash=sha256:96ef39add33ff58cd4c112cbac076726b96b98bb8f1e7f7595288dcfb2f10b57 \ - --hash=sha256:a6612c2a844043e4d10a8324c54cdff0042c558eef30bd705770793d70b224aa \ - --hash=sha256:a8031074a397a5925d06b590121f8339d34a5a74cfe6970f8a1124eb8b83f4ac \ - --hash=sha256:aab9e522efff3993a9e98ab14263d4e20211e62da088298089a03056980a3e69 \ - --hash=sha256:ae579143826c6f05a361d9546446c432a165ecf1c0b720bbfd81152645cb897d \ - --hash=sha256:ae90b9e50fe1bd115b24785e962b51130340408156d34d67b5f8f3fa6540938e \ - --hash=sha256:b18cf68255a476b927910c6873d9ed00da692bb293c5b10b282bd48a0afe3ae2 \ - --hash=sha256:b7efb12e5071ad8d5b547487bdad489fbd4a5a35a0fc36a1941517a6ad7f23e0 \ - --hash=sha256:c4d9f15ffe68bcd3898b0ad7233af01b15c57d91cd1667f8d868e0eacbfe3f87 \ - --hash=sha256:c53100c8ee5a1e102766abde2158077d8c374bee0639201f11d3032e3555dfbc \ - --hash=sha256:c57e493a0faea1e4c38f860d6862ba6832723396c884fbf938ff5e9b224200e2 \ - --hash=sha256:c8319e0bd6a7b45ad76166cc3d5d6a36c97d0c82a196f478c3ee5346566eebfd \ - --hash=sha256:caffda619099cfd4f63d48462f6aadbecee3ad9603b4b88b60cb821c1b258576 \ - --hash=sha256:cc0c316fba3ce72ac3ab7902a888b9dc4979162d320823679da270c2d9ad0cad \ - --hash=sha256:cdd02a08205dc90238669f082747612cb3c82bd2c717adc60f9b9ecadb540f80 \ - --hash=sha256:d50ac34835c6a4a0d456b5db559b82047403c4317b3bc73b3455fefdbdc54b0a \ - --hash=sha256:d6b9dd6aa03c812017411734e496c44fef29b43dba1e3dd1fa7361bbacfc1354 \ - --hash=sha256:da3131ef2b940b99106f29dfbc30d9505643f766704e14c5d5e504e6a480c35e \ - --hash=sha256:da43cbe593e3c87d07108d0ebd73771dc414488f1f91ed2e204b0370b94b37ac \ - --hash=sha256:dd59638025160056687d598b054b64a79183f8065eae0d3f5ca523cde9943940 \ - --hash=sha256:e1895e949f8849bc2757c0dbac28422a04be031204df46a56ab34bcf98507342 \ - --hash=sha256:e1a79ad49f346aa1a2921f31e8dbbab4d64484823e813a002679eaa46cba39e1 \ - --hash=sha256:e460475719721d59cd54a350c1f71c797c763212c836bf48585478c5514d2854 \ - --hash=sha256:e64ffaf8f6e17ca15eb48344d86a7a741454526f3a3fa56bc493ad9d7ec63936 \ - 
--hash=sha256:e6e3ccebdbd6e53474b0bb7ab8b88e83c0cfe91484b25e058e581348ee5a01a5 \ - --hash=sha256:e758d271ed0286d146cf7c04c539a5169a888dd0b57026be621547e756af55bc \ - --hash=sha256:f087879f1ffde024dd2788a30d55acd67959dcf6c431e9d3682d1c491a0eb474 \ - --hash=sha256:f477d26183e94eaafc60b983ab25af2a809a1b48ce4debb57b343f671b7a90b6 \ - --hash=sha256:fc535cb898ef88333cf317777ecdfe0faac1c2a3187ef7eb061b6f7ecf7e6bae -pydantic==2.9.0 ; python_version >= "3.8" and python_version < "4.0" \ - --hash=sha256:c7a8a9fdf7d100afa49647eae340e2d23efa382466a8d177efcd1381e9be5598 \ - --hash=sha256:f66a7073abd93214a20c5f7b32d56843137a7a2e70d02111f3be287035c45370 +pydantic-core==2.23.4 ; python_version >= "3.8" and python_version < "4.0" \ + --hash=sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36 \ + --hash=sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05 \ + --hash=sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071 \ + --hash=sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327 \ + --hash=sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c \ + --hash=sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36 \ + --hash=sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29 \ + --hash=sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744 \ + --hash=sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d \ + --hash=sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec \ + --hash=sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e \ + --hash=sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e \ + --hash=sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577 \ + --hash=sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232 \ + --hash=sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863 \ + 
--hash=sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6 \ + --hash=sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368 \ + --hash=sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480 \ + --hash=sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2 \ + --hash=sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2 \ + --hash=sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6 \ + --hash=sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769 \ + --hash=sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d \ + --hash=sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2 \ + --hash=sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84 \ + --hash=sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166 \ + --hash=sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271 \ + --hash=sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5 \ + --hash=sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb \ + --hash=sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13 \ + --hash=sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323 \ + --hash=sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556 \ + --hash=sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665 \ + --hash=sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef \ + --hash=sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb \ + --hash=sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119 \ + --hash=sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126 \ + --hash=sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510 \ + --hash=sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b \ + 
--hash=sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87 \ + --hash=sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f \ + --hash=sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc \ + --hash=sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8 \ + --hash=sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21 \ + --hash=sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f \ + --hash=sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6 \ + --hash=sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658 \ + --hash=sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b \ + --hash=sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3 \ + --hash=sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb \ + --hash=sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59 \ + --hash=sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24 \ + --hash=sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9 \ + --hash=sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3 \ + --hash=sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd \ + --hash=sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753 \ + --hash=sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55 \ + --hash=sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad \ + --hash=sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a \ + --hash=sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605 \ + --hash=sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e \ + --hash=sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b \ + --hash=sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433 \ + 
--hash=sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8 \ + --hash=sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07 \ + --hash=sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728 \ + --hash=sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0 \ + --hash=sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327 \ + --hash=sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555 \ + --hash=sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64 \ + --hash=sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6 \ + --hash=sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea \ + --hash=sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b \ + --hash=sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df \ + --hash=sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e \ + --hash=sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd \ + --hash=sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068 \ + --hash=sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3 \ + --hash=sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040 \ + --hash=sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12 \ + --hash=sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916 \ + --hash=sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f \ + --hash=sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f \ + --hash=sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801 \ + --hash=sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231 \ + --hash=sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5 \ + --hash=sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8 \ + 
--hash=sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee \ + --hash=sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607 +pydantic==2.9.2 ; python_version >= "3.8" and python_version < "4.0" \ + --hash=sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f \ + --hash=sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12 sniffio==1.3.1 ; python_version >= "3.8" and python_version < "4.0" \ --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \ --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc typing-extensions==4.12.2 ; python_version >= "3.8" and python_version < "4.0" \ --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 -tzdata==2024.1 ; python_version >= "3.9" and python_version < "4.0" \ - --hash=sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd \ - --hash=sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252 From 139c89e833c38e522f6f646b0895729bdb0378ad Mon Sep 17 00:00:00 2001 From: Parth Sareen Date: Wed, 20 Nov 2024 15:49:50 -0800 Subject: [PATCH 16/18] Passing Functions as Tools (#321) * Functions can now be passed as tools --- ollama/_client.py | 62 ++++++- ollama/_types.py | 64 ++++---- ollama/_utils.py | 87 ++++++++++ tests/test_client.py | 56 ++++++- tests/test_type_serialization.py | 45 +++++- tests/test_utils.py | 270 +++++++++++++++++++++++++++++++ 6 files changed, 545 insertions(+), 39 deletions(-) create mode 100644 ollama/_utils.py create mode 100644 tests/test_utils.py diff --git a/ollama/_client.py b/ollama/_client.py index 095d901d..a8a19d35 100644 --- a/ollama/_client.py +++ b/ollama/_client.py @@ -10,6 +10,7 @@ from typing import ( Any, + Callable, Literal, Mapping, Optional, @@ -22,6 +23,9 @@ import sys + +from ollama._utils import convert_function_to_tool + if 
sys.version_info < (3, 9): from typing import Iterator, AsyncIterator else: @@ -284,7 +288,7 @@ def chat( model: str = '', messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None, *, - tools: Optional[Sequence[Union[Mapping[str, Any], Tool]]] = None, + tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None, stream: bool = False, format: Optional[Literal['', 'json']] = None, options: Optional[Union[Mapping[str, Any], Options]] = None, @@ -293,6 +297,30 @@ def chat( """ Create a chat response using the requested model. + Args: + tools: + A JSON schema as a dict, an Ollama Tool or a Python Function. + Python functions need to follow Google style docstrings to be converted to an Ollama Tool. + For more information, see: https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings + stream: Whether to stream the response. + format: The format of the response. + + Example: + def add_two_numbers(a: int, b: int) -> int: + ''' + Add two numbers together. + + Args: + a: First number to add + b: Second number to add + + Returns: + int: The sum of a and b + ''' + return a + b + + client.chat(model='llama3.1:8b', tools=[add_two_numbers], messages=[...]) + Raises `RequestError` if a model is not provided. Raises `ResponseError` if the request could not be fulfilled. @@ -750,7 +778,7 @@ async def chat( model: str = '', messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None, *, - tools: Optional[Sequence[Union[Mapping[str, Any], Tool]]] = None, + tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None, stream: Literal[True] = True, format: Optional[Literal['', 'json']] = None, options: Optional[Union[Mapping[str, Any], Options]] = None, @@ -771,6 +799,30 @@ async def chat( """ Create a chat response using the requested model. + Args: + tools: + A JSON schema as a dict, an Ollama Tool or a Python Function. + Python functions need to follow Google style docstrings to be converted to an Ollama Tool. 
+ For more information, see: https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings + stream: Whether to stream the response. + format: The format of the response. + + Example: + def add_two_numbers(a: int, b: int) -> int: + ''' + Add two numbers together. + + Args: + a: First number to add + b: Second number to add + + Returns: + int: The sum of a and b + ''' + return a + b + + await client.chat(model='llama3.1:8b', tools=[add_two_numbers], messages=[...]) + Raises `RequestError` if a model is not provided. Raises `ResponseError` if the request could not be fulfilled. @@ -1075,9 +1127,9 @@ def _copy_messages(messages: Optional[Sequence[Union[Mapping[str, Any], Message] ) -def _copy_tools(tools: Optional[Sequence[Union[Mapping[str, Any], Tool]]]) -> Iterator[Tool]: - for tool in tools or []: - yield Tool.model_validate(tool) +def _copy_tools(tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None) -> Iterator[Tool]: + for unprocessed_tool in tools or []: + yield convert_function_to_tool(unprocessed_tool) if callable(unprocessed_tool) else Tool.model_validate(unprocessed_tool) def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]: diff --git a/ollama/_types.py b/ollama/_types.py index 968099dc..bcf88969 100644 --- a/ollama/_types.py +++ b/ollama/_types.py @@ -1,26 +1,18 @@ import json -from base64 import b64encode +from base64 import b64decode, b64encode from pathlib import Path from datetime import datetime -from typing import ( - Any, - Literal, - Mapping, - Optional, - Sequence, - Union, -) -from typing_extensions import Annotated +from typing import Any, Mapping, Optional, Union, Sequence + +from typing_extensions import Annotated, Literal from pydantic import ( BaseModel, ByteSize, + ConfigDict, Field, - FilePath, - Base64Str, model_serializer, ) -from pydantic.json_schema import JsonSchemaValue class SubscriptableBaseModel(BaseModel): @@ -95,16 +87,26 @@ class BaseGenerateRequest(BaseStreamableRequest): 
class Image(BaseModel): - value: Union[FilePath, Base64Str, bytes] + value: Union[str, bytes, Path] - # This overloads the `model_dump` method and returns values depending on the type of the `value` field @model_serializer def serialize_model(self): - if isinstance(self.value, Path): - return b64encode(self.value.read_bytes()).decode() - elif isinstance(self.value, bytes): - return b64encode(self.value).decode() - return self.value + if isinstance(self.value, (Path, bytes)): + return b64encode(self.value.read_bytes() if isinstance(self.value, Path) else self.value).decode() + + if isinstance(self.value, str): + if Path(self.value).exists(): + return b64encode(Path(self.value).read_bytes()).decode() + + if self.value.split('.')[-1] in ('png', 'jpg', 'jpeg', 'webp'): + raise ValueError(f'File {self.value} does not exist') + + try: + # Try to decode to check if it's already base64 + b64decode(self.value) + return self.value + except Exception: + raise ValueError('Invalid image data, expected base64 string or path to image file') from Exception class GenerateRequest(BaseGenerateRequest): @@ -222,20 +224,27 @@ class Function(SubscriptableBaseModel): class Tool(SubscriptableBaseModel): - type: Literal['function'] = 'function' + type: Optional[Literal['function']] = 'function' class Function(SubscriptableBaseModel): - name: str - description: str + name: Optional[str] = None + description: Optional[str] = None class Parameters(SubscriptableBaseModel): - type: str + type: Optional[Literal['object']] = 'object' required: Optional[Sequence[str]] = None - properties: Optional[JsonSchemaValue] = None - parameters: Parameters + class Property(SubscriptableBaseModel): + model_config = ConfigDict(arbitrary_types_allowed=True) + + type: Optional[str] = None + description: Optional[str] = None + + properties: Optional[Mapping[str, Property]] = None - function: Function + parameters: Optional[Parameters] = None + + function: Optional[Function] = None class 
ChatRequest(BaseGenerateRequest): @@ -335,6 +344,7 @@ class ModelDetails(SubscriptableBaseModel): class ListResponse(SubscriptableBaseModel): class Model(SubscriptableBaseModel): + model: Optional[str] = None modified_at: Optional[datetime] = None digest: Optional[str] = None size: Optional[ByteSize] = None diff --git a/ollama/_utils.py b/ollama/_utils.py new file mode 100644 index 00000000..c0b67c99 --- /dev/null +++ b/ollama/_utils.py @@ -0,0 +1,87 @@ +from __future__ import annotations +from collections import defaultdict +import inspect +from typing import Callable, Union +import re + +import pydantic +from ollama._types import Tool + + +def _parse_docstring(doc_string: Union[str, None]) -> dict[str, str]: + parsed_docstring = defaultdict(str) + if not doc_string: + return parsed_docstring + + key = hash(doc_string) + for line in doc_string.splitlines(): + lowered_line = line.lower().strip() + if lowered_line.startswith('args:'): + key = 'args' + elif lowered_line.startswith('returns:') or lowered_line.startswith('yields:') or lowered_line.startswith('raises:'): + key = '_' + + else: + # maybe change to a list and join later + parsed_docstring[key] += f'{line.strip()}\n' + + last_key = None + for line in parsed_docstring['args'].splitlines(): + line = line.strip() + if ':' in line: + # Split the line on either: + # 1. A parenthetical expression like (integer) - captured in group 1 + # 2. A colon : + # Followed by optional whitespace. Only split on first occurrence. 
+ parts = re.split(r'(?:\(([^)]*)\)|:)\s*', line, maxsplit=1) + + arg_name = parts[0].strip() + last_key = arg_name + + # Get the description - will be in parts[1] if parenthetical or parts[-1] if after colon + arg_description = parts[-1].strip() + if len(parts) > 2 and parts[1]: # Has parenthetical content + arg_description = parts[-1].split(':', 1)[-1].strip() + + parsed_docstring[last_key] = arg_description + + elif last_key and line: + parsed_docstring[last_key] += ' ' + line + + return parsed_docstring + + +def convert_function_to_tool(func: Callable) -> Tool: + doc_string_hash = hash(inspect.getdoc(func)) + parsed_docstring = _parse_docstring(inspect.getdoc(func)) + schema = type( + func.__name__, + (pydantic.BaseModel,), + { + '__annotations__': {k: v.annotation if v.annotation != inspect._empty else str for k, v in inspect.signature(func).parameters.items()}, + '__signature__': inspect.signature(func), + '__doc__': parsed_docstring[doc_string_hash], + }, + ).model_json_schema() + + for k, v in schema.get('properties', {}).items(): + # If type is missing, the default is string + types = {t.get('type', 'string') for t in v.get('anyOf')} if 'anyOf' in v else {v.get('type', 'string')} + if 'null' in types: + schema['required'].remove(k) + types.discard('null') + + schema['properties'][k] = { + 'description': parsed_docstring[k], + 'type': ', '.join(types), + } + + tool = Tool( + function=Tool.Function( + name=func.__name__, + description=schema.get('description', ''), + parameters=Tool.Function.Parameters(**schema), + ) + ) + + return Tool.model_validate(tool) diff --git a/tests/test_client.py b/tests/test_client.py index 1dd22925..fbd01bda 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -1,6 +1,7 @@ import os import io import json +from pydantic import ValidationError import pytest import tempfile from pathlib import Path @@ -8,7 +9,7 @@ from werkzeug.wrappers import Request, Response from PIL import Image -from ollama._client import Client, 
AsyncClient +from ollama._client import Client, AsyncClient, _copy_tools class PrefixPattern(URIPattern): @@ -982,3 +983,56 @@ def test_headers(): ) assert client._client.headers['x-custom'] == 'value' assert client._client.headers['content-type'] == 'application/json' + + +def test_copy_tools(): + def func1(x: int) -> str: + """Simple function 1. + Args: + x (integer): A number + """ + pass + + def func2(y: str) -> int: + """Simple function 2. + Args: + y (string): A string + """ + pass + + # Test with list of functions + tools = list(_copy_tools([func1, func2])) + assert len(tools) == 2 + assert tools[0].function.name == 'func1' + assert tools[1].function.name == 'func2' + + # Test with empty input + assert list(_copy_tools()) == [] + assert list(_copy_tools(None)) == [] + assert list(_copy_tools([])) == [] + + # Test with mix of functions and tool dicts + tool_dict = { + 'type': 'function', + 'function': { + 'name': 'test', + 'description': 'Test function', + 'parameters': { + 'type': 'object', + 'properties': {'x': {'type': 'string', 'description': 'A string'}}, + 'required': ['x'], + }, + }, + } + + tools = list(_copy_tools([func1, tool_dict])) + assert len(tools) == 2 + assert tools[0].function.name == 'func1' + assert tools[1].function.name == 'test' + + +def test_tool_validation(): + # Raises ValidationError when used as it is a generator + with pytest.raises(ValidationError): + invalid_tool = {'type': 'invalid_type', 'function': {'name': 'test'}} + list(_copy_tools([invalid_tool])) diff --git a/tests/test_type_serialization.py b/tests/test_type_serialization.py index f127b03f..e3e8268c 100644 --- a/tests/test_type_serialization.py +++ b/tests/test_type_serialization.py @@ -1,15 +1,48 @@ -from base64 import b64decode, b64encode +from base64 import b64encode +from pathlib import Path +import pytest from ollama._types import Image +import tempfile -def test_image_serialization(): - # Test bytes serialization +def test_image_serialization_bytes(): image_bytes 
= b'test image bytes' + encoded_string = b64encode(image_bytes).decode() img = Image(value=image_bytes) - assert img.model_dump() == b64encode(image_bytes).decode() + assert img.model_dump() == encoded_string - # Test base64 string serialization + +def test_image_serialization_base64_string(): b64_str = 'dGVzdCBiYXNlNjQgc3RyaW5n' img = Image(value=b64_str) - assert img.model_dump() == b64decode(b64_str).decode() + assert img.model_dump() == b64_str # Should return as-is if valid base64 + + +def test_image_serialization_plain_string(): + img = Image(value='not a path or base64') + assert img.model_dump() == 'not a path or base64' # Should return as-is + + +def test_image_serialization_path(): + with tempfile.NamedTemporaryFile() as temp_file: + temp_file.write(b'test file content') + temp_file.flush() + img = Image(value=Path(temp_file.name)) + assert img.model_dump() == b64encode(b'test file content').decode() + + +def test_image_serialization_string_path(): + with tempfile.NamedTemporaryFile() as temp_file: + temp_file.write(b'test file content') + temp_file.flush() + img = Image(value=temp_file.name) + assert img.model_dump() == b64encode(b'test file content').decode() + + with pytest.raises(ValueError): + img = Image(value='some_path/that/does/not/exist.png') + img.model_dump() + + with pytest.raises(ValueError): + img = Image(value='not an image') + img.model_dump() diff --git a/tests/test_utils.py b/tests/test_utils.py new file mode 100644 index 00000000..9fb1e3b2 --- /dev/null +++ b/tests/test_utils.py @@ -0,0 +1,270 @@ +import json +import sys +from typing import Dict, List, Mapping, Sequence, Set, Tuple, Union + + +from ollama._utils import convert_function_to_tool + + +def test_function_to_tool_conversion(): + def add_numbers(x: int, y: Union[int, None] = None) -> int: + """Add two numbers together. 
+ args: + x (integer): The first number + y (integer, optional): The second number + + Returns: + integer: The sum of x and y + """ + return x + y + + tool = convert_function_to_tool(add_numbers).model_dump() + + assert tool['type'] == 'function' + assert tool['function']['name'] == 'add_numbers' + assert tool['function']['description'] == 'Add two numbers together.' + assert tool['function']['parameters']['type'] == 'object' + assert tool['function']['parameters']['properties']['x']['type'] == 'integer' + assert tool['function']['parameters']['properties']['x']['description'] == 'The first number' + assert tool['function']['parameters']['required'] == ['x'] + + +def test_function_with_no_args(): + def simple_func(): + """ + A simple function with no arguments. + Args: + None + Returns: + None + """ + pass + + tool = convert_function_to_tool(simple_func).model_dump() + assert tool['function']['name'] == 'simple_func' + assert tool['function']['description'] == 'A simple function with no arguments.' + assert tool['function']['parameters']['properties'] == {} + + +def test_function_with_all_types(): + if sys.version_info >= (3, 10): + + def all_types( + x: int, + y: str, + z: list[int], + w: dict[str, int], + v: int | str | None, + ) -> int | dict[str, int] | str | list[int] | None: + """ + A function with all types. + Args: + x (integer): The first number + y (string): The second number + z (array): The third number + w (object): The fourth number + v (integer | string | None): The fifth number + """ + pass + else: + + def all_types( + x: int, + y: str, + z: Sequence, + w: Mapping[str, int], + d: Dict[str, int], + s: Set[int], + t: Tuple[int, str], + l: List[int], # noqa: E741 + o: Union[int, None], + ) -> Union[Mapping[str, int], str, None]: + """ + A function with all types. 
+ Args: + x (integer): The first number + y (string): The second number + z (array): The third number + w (object): The fourth number + d (object): The fifth number + s (array): The sixth number + t (array): The seventh number + l (array): The eighth number + o (integer | None): The ninth number + """ + pass + + tool_json = convert_function_to_tool(all_types).model_dump_json() + tool = json.loads(tool_json) + assert tool['function']['parameters']['properties']['x']['type'] == 'integer' + assert tool['function']['parameters']['properties']['y']['type'] == 'string' + + if sys.version_info >= (3, 10): + assert tool['function']['parameters']['properties']['z']['type'] == 'array' + assert tool['function']['parameters']['properties']['w']['type'] == 'object' + assert set(x.strip().strip("'") for x in tool['function']['parameters']['properties']['v']['type'].removeprefix('[').removesuffix(']').split(',')) == {'string', 'integer'} + assert tool['function']['parameters']['properties']['v']['type'] != 'null' + assert tool['function']['parameters']['required'] == ['x', 'y', 'z', 'w'] + else: + assert tool['function']['parameters']['properties']['z']['type'] == 'array' + assert tool['function']['parameters']['properties']['w']['type'] == 'object' + assert tool['function']['parameters']['properties']['d']['type'] == 'object' + assert tool['function']['parameters']['properties']['s']['type'] == 'array' + assert tool['function']['parameters']['properties']['t']['type'] == 'array' + assert tool['function']['parameters']['properties']['l']['type'] == 'array' + assert tool['function']['parameters']['properties']['o']['type'] == 'integer' + assert tool['function']['parameters']['properties']['o']['type'] != 'null' + assert tool['function']['parameters']['required'] == ['x', 'y', 'z', 'w', 'd', 's', 't', 'l'] + + +def test_function_docstring_parsing(): + from typing import List, Dict, Any + + def func_with_complex_docs(x: int, y: List[str]) -> Dict[str, Any]: + """ + Test function 
with complex docstring. + + Args: + x (integer): A number + with multiple lines + y (array of string): A list + with multiple lines + + Returns: + object: A dictionary + with multiple lines + """ + pass + + tool = convert_function_to_tool(func_with_complex_docs).model_dump() + assert tool['function']['description'] == 'Test function with complex docstring.' + assert tool['function']['parameters']['properties']['x']['description'] == 'A number with multiple lines' + assert tool['function']['parameters']['properties']['y']['description'] == 'A list with multiple lines' + + +def test_skewed_docstring_parsing(): + def add_two_numbers(x: int, y: int) -> int: + """ + Add two numbers together. + Args: + x (integer): : The first number + + + + + y (integer ): The second number + Returns: + integer: The sum of x and y + """ + pass + + tool = convert_function_to_tool(add_two_numbers).model_dump() + assert tool['function']['parameters']['properties']['x']['description'] == ': The first number' + assert tool['function']['parameters']['properties']['y']['description'] == 'The second number' + + +def test_function_with_no_docstring(): + def no_docstring(): + pass + + def no_docstring_with_args(x: int, y: int): + pass + + tool = convert_function_to_tool(no_docstring).model_dump() + assert tool['function']['description'] == '' + + tool = convert_function_to_tool(no_docstring_with_args).model_dump() + assert tool['function']['description'] == '' + assert tool['function']['parameters']['properties']['x']['description'] == '' + assert tool['function']['parameters']['properties']['y']['description'] == '' + + +def test_function_with_only_description(): + def only_description(): + """ + A function with only a description. + """ + pass + + tool = convert_function_to_tool(only_description).model_dump() + assert tool['function']['description'] == 'A function with only a description.' 
+ assert tool['function']['parameters'] == {'type': 'object', 'properties': {}, 'required': None} + + def only_description_with_args(x: int, y: int): + """ + A function with only a description. + """ + pass + + tool = convert_function_to_tool(only_description_with_args).model_dump() + assert tool['function']['description'] == 'A function with only a description.' + assert tool['function']['parameters'] == { + 'type': 'object', + 'properties': { + 'x': {'type': 'integer', 'description': ''}, + 'y': {'type': 'integer', 'description': ''}, + }, + 'required': ['x', 'y'], + } + + +def test_function_with_yields(): + def function_with_yields(x: int, y: int): + """ + A function with yields section. + + Args: + x: the first number + y: the second number + + Yields: + The sum of x and y + """ + pass + + tool = convert_function_to_tool(function_with_yields).model_dump() + assert tool['function']['description'] == 'A function with yields section.' + assert tool['function']['parameters']['properties']['x']['description'] == 'the first number' + assert tool['function']['parameters']['properties']['y']['description'] == 'the second number' + + +def test_function_with_no_types(): + def no_types(a, b): + """ + A function with no types. + """ + pass + + tool = convert_function_to_tool(no_types).model_dump() + assert tool['function']['parameters']['properties']['a']['type'] == 'string' + assert tool['function']['parameters']['properties']['b']['type'] == 'string' + + +def test_function_with_parentheses(): + def func_with_parentheses(a: int, b: int) -> int: + """ + A function with parentheses. + Args: + a: First (:thing) number to add + b: Second number to add + Returns: + int: The sum of a and b + """ + pass + + def func_with_parentheses_and_args(a: int, b: int): + """ + A function with parentheses and args. 
+ Args: + a(integer) : First (:thing) number to add + b(integer) :Second number to add + """ + pass + + tool = convert_function_to_tool(func_with_parentheses).model_dump() + assert tool['function']['parameters']['properties']['a']['description'] == 'First (:thing) number to add' + assert tool['function']['parameters']['properties']['b']['description'] == 'Second number to add' + + tool = convert_function_to_tool(func_with_parentheses_and_args).model_dump() + assert tool['function']['parameters']['properties']['a']['description'] == 'First (:thing) number to add' + assert tool['function']['parameters']['properties']['b']['description'] == 'Second number to add' From 64c1eb78fff4b7594398e3e8e993f27ffde137b2 Mon Sep 17 00:00:00 2001 From: Parth Sareen Date: Thu, 21 Nov 2024 15:14:59 -0800 Subject: [PATCH 17/18] Examples refactor (#329) * Examples and README updates --------- Co-authored-by: fujitatomoya Co-authored-by: Michael Yang --- README.md | 139 ++++++++++-------- examples/README.md | 57 +++++++ examples/async-chat-stream/README.md | 3 - examples/async-chat-stream/main.py | 59 -------- examples/async-chat.py | 19 +++ examples/async-generate.py | 15 ++ examples/async-tools.py | 78 ++++++++++ .../{chat-stream/main.py => chat-stream.py} | 3 +- examples/chat-with-history.py | 38 +++++ examples/{chat/main.py => chat.py} | 3 +- examples/create.py | 30 ++++ examples/create/main.py | 20 --- examples/embed.py | 4 + .../main.py => fill-in-middle.py} | 0 .../main.py => generate-stream.py} | 2 +- examples/{generate/main.py => generate.py} | 2 +- examples/list.py | 14 ++ examples/multimodal-chat.py | 23 +++ .../main.py => multimodal-generate.py} | 0 examples/ps.py | 27 ++++ examples/ps/main.py | 31 ---- examples/pull-progress/README.md | 9 -- examples/pull-progress/requirements.txt | 1 - examples/{pull-progress/main.py => pull.py} | 2 +- examples/tools.py | 66 +++++++++ examples/tools/README.md | 3 - examples/tools/main.py | 87 ----------- ollama/_client.py | 4 +- 28 files 
changed, 457 insertions(+), 282 deletions(-) create mode 100644 examples/README.md delete mode 100644 examples/async-chat-stream/README.md delete mode 100644 examples/async-chat-stream/main.py create mode 100644 examples/async-chat.py create mode 100644 examples/async-generate.py create mode 100644 examples/async-tools.py rename examples/{chat-stream/main.py => chat-stream.py} (68%) create mode 100644 examples/chat-with-history.py rename examples/{chat/main.py => chat.py} (75%) create mode 100644 examples/create.py delete mode 100644 examples/create/main.py create mode 100644 examples/embed.py rename examples/{fill-in-middle/main.py => fill-in-middle.py} (100%) rename examples/{generate-stream/main.py => generate-stream.py} (51%) rename examples/{generate/main.py => generate.py} (50%) create mode 100644 examples/list.py create mode 100644 examples/multimodal-chat.py rename examples/{multimodal/main.py => multimodal-generate.py} (100%) create mode 100644 examples/ps.py delete mode 100644 examples/ps/main.py delete mode 100644 examples/pull-progress/README.md delete mode 100644 examples/pull-progress/requirements.txt rename examples/{pull-progress/main.py => pull.py} (92%) create mode 100644 examples/tools.py delete mode 100644 examples/tools/README.md delete mode 100644 examples/tools/main.py diff --git a/README.md b/README.md index e03ea00a..454c1595 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,12 @@ The Ollama Python library provides the easiest way to integrate Python 3.8+ projects with [Ollama](https://github.com/ollama/ollama). +## Prerequisites + +- [Ollama](https://ollama.com/download) should be installed and running +- Pull a model to use with the library: `ollama pull ` e.g. `ollama pull llama3.2` + - See [Ollama.com](https://ollama.com/search) for more information on the models available. 
+ ## Install ```sh @@ -11,25 +17,34 @@ pip install ollama ## Usage ```python -import ollama -response = ollama.chat(model='llama3.1', messages=[ +from ollama import chat +from ollama import ChatResponse + +response: ChatResponse = chat(model='llama3.2', messages=[ { 'role': 'user', 'content': 'Why is the sky blue?', }, ]) print(response['message']['content']) +# or access fields directly from the response object +print(response.message.content) ``` +See [_types.py](ollama/_types.py) for more information on the response types. + ## Streaming responses -Response streaming can be enabled by setting `stream=True`, modifying function calls to return a Python generator where each part is an object in the stream. +Response streaming can be enabled by setting `stream=True`. + +> [!NOTE] +> Streaming Tool/Function calling is not yet supported. ```python -import ollama +from ollama import chat -stream = ollama.chat( - model='llama3.1', +stream = chat( + model='llama3.2', messages=[{'role': 'user', 'content': 'Why is the sky blue?'}], stream=True, ) @@ -38,6 +53,54 @@ for chunk in stream: print(chunk['message']['content'], end='', flush=True) ``` +## Custom client +A custom client can be created by instantiating `Client` or `AsyncClient` from `ollama`. + +All extra keyword arguments are passed into the [`httpx.Client`](https://www.python-httpx.org/api/#client). + +```python +from ollama import Client +client = Client( + host='http://localhost:11434', + headers={'x-some-header': 'some-value'} +) +response = client.chat(model='llama3.2', messages=[ + { + 'role': 'user', + 'content': 'Why is the sky blue?', + }, +]) +``` + +## Async client + +The `AsyncClient` class is used to make asynchronous requests. It can be configured with the same fields as the `Client` class. 
+ +```python +import asyncio +from ollama import AsyncClient + +async def chat(): + message = {'role': 'user', 'content': 'Why is the sky blue?'} + response = await AsyncClient().chat(model='llama3.2', messages=[message]) + +asyncio.run(chat()) +``` + +Setting `stream=True` modifies functions to return a Python asynchronous generator: + +```python +import asyncio +from ollama import AsyncClient + +async def chat(): + message = {'role': 'user', 'content': 'Why is the sky blue?'} + async for part in await AsyncClient().chat(model='llama3.2', messages=[message], stream=True): + print(part['message']['content'], end='', flush=True) + +asyncio.run(chat()) +``` + ## API The Ollama Python library's API is designed around the [Ollama REST API](https://github.com/ollama/ollama/blob/main/docs/api.md) @@ -45,13 +108,13 @@ The Ollama Python library's API is designed around the [Ollama REST API](https:/ ### Chat ```python -ollama.chat(model='llama3.1', messages=[{'role': 'user', 'content': 'Why is the sky blue?'}]) +ollama.chat(model='llama3.2', messages=[{'role': 'user', 'content': 'Why is the sky blue?'}]) ``` ### Generate ```python -ollama.generate(model='llama3.1', prompt='Why is the sky blue?') +ollama.generate(model='llama3.2', prompt='Why is the sky blue?') ``` ### List @@ -63,14 +126,14 @@ ollama.list() ### Show ```python -ollama.show('llama3.1') +ollama.show('llama3.2') ``` ### Create ```python modelfile=''' -FROM llama3.1 +FROM llama3.2 SYSTEM You are mario from super mario bros. 
''' @@ -80,37 +143,37 @@ ollama.create(model='example', modelfile=modelfile) ### Copy ```python -ollama.copy('llama3.1', 'user/llama3.1') +ollama.copy('llama3.2', 'user/llama3.2') ``` ### Delete ```python -ollama.delete('llama3.1') +ollama.delete('llama3.2') ``` ### Pull ```python -ollama.pull('llama3.1') +ollama.pull('llama3.2') ``` ### Push ```python -ollama.push('user/llama3.1') +ollama.push('user/llama3.2') ``` ### Embed ```python -ollama.embed(model='llama3.1', input='The sky is blue because of rayleigh scattering') +ollama.embed(model='llama3.2', input='The sky is blue because of rayleigh scattering') ``` ### Embed (batch) ```python -ollama.embed(model='llama3.1', input=['The sky is blue because of rayleigh scattering', 'Grass is green because of chlorophyll']) +ollama.embed(model='llama3.2', input=['The sky is blue because of rayleigh scattering', 'Grass is green because of chlorophyll']) ``` ### Ps @@ -119,50 +182,6 @@ ollama.embed(model='llama3.1', input=['The sky is blue because of rayleigh scatt ollama.ps() ``` -## Custom client - -A custom client can be created with the following fields: - -- `host`: The Ollama host to connect to -- `timeout`: The timeout for requests - -```python -from ollama import Client -client = Client(host='http://localhost:11434') -response = client.chat(model='llama3.1', messages=[ - { - 'role': 'user', - 'content': 'Why is the sky blue?', - }, -]) -``` - -## Async client - -```python -import asyncio -from ollama import AsyncClient - -async def chat(): - message = {'role': 'user', 'content': 'Why is the sky blue?'} - response = await AsyncClient().chat(model='llama3.1', messages=[message]) - -asyncio.run(chat()) -``` - -Setting `stream=True` modifies functions to return a Python asynchronous generator: - -```python -import asyncio -from ollama import AsyncClient - -async def chat(): - message = {'role': 'user', 'content': 'Why is the sky blue?'} - async for part in await AsyncClient().chat(model='llama3.1', messages=[message], 
stream=True):
-    print(part['message']['content'], end='', flush=True)
-
-asyncio.run(chat())
-```

 ## Errors
diff --git a/examples/README.md b/examples/README.md
new file mode 100644
index 00000000..a455c602
--- /dev/null
+++ b/examples/README.md
@@ -0,0 +1,57 @@
+# Running Examples
+
+Run the examples in this directory with:
+```sh
+# Run example
+python3 examples/<example>.py
+```
+
+### Chat - Chat with a model
+- [chat.py](chat.py)
+- [async-chat.py](async-chat.py)
+- [chat-stream.py](chat-stream.py) - Streamed outputs
+- [chat-with-history.py](chat-with-history.py) - Chat with model and maintain history of the conversation
+
+
+### Generate - Generate text with a model
+- [generate.py](generate.py)
+- [async-generate.py](async-generate.py)
+- [generate-stream.py](generate-stream.py) - Streamed outputs
+- [fill-in-middle.py](fill-in-middle.py) - Given a prefix and suffix, fill in the middle
+
+
+### Tools/Function Calling - Call a function with a model
+- [tools.py](tools.py) - Simple example of Tools/Function Calling
+- [async-tools.py](async-tools.py)
+
+
+### Multimodal with Images - Chat with a multimodal (image chat) model
+- [multimodal-chat.py](multimodal-chat.py)
+- [multimodal-generate.py](multimodal-generate.py)
+
+
+### Ollama List - List all downloaded models and their properties
+- [list.py](list.py)
+
+
+### Ollama ps - Show model status with CPU/GPU usage
+- [ps.py](ps.py)
+
+
+### Ollama Pull - Pull a model from Ollama
+Requirement: `pip install tqdm`
+- [pull.py](pull.py)
+
+
+### Ollama Create - Create a model from a Modelfile
+```python
+python create.py <model name> <filepath>
+```
+- [create.py](create.py)
+
+See [ollama/docs/modelfile.md](https://github.com/ollama/ollama/blob/main/docs/modelfile.md) for more information on the Modelfile format.
+ + +### Ollama Embed - Generate embeddings with a model +- [embed.py](embed.py) + diff --git a/examples/async-chat-stream/README.md b/examples/async-chat-stream/README.md deleted file mode 100644 index 611295a6..00000000 --- a/examples/async-chat-stream/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# async-chat-stream - -This example demonstrates how to create a conversation history using an asynchronous Ollama client and the chat endpoint. The streaming response is outputted to `stdout` as well as a TTS if enabled with `--speak` and available. Supported TTS are `say` on macOS and `espeak` on Linux. diff --git a/examples/async-chat-stream/main.py b/examples/async-chat-stream/main.py deleted file mode 100644 index 65047767..00000000 --- a/examples/async-chat-stream/main.py +++ /dev/null @@ -1,59 +0,0 @@ -import shutil -import asyncio -import argparse - -import ollama - - -async def speak(speaker, content): - if speaker: - p = await asyncio.create_subprocess_exec(speaker, content) - await p.communicate() - - -async def main(): - parser = argparse.ArgumentParser() - parser.add_argument('--speak', default=False, action='store_true') - args = parser.parse_args() - - speaker = None - if not args.speak: - ... 
- elif say := shutil.which('say'): - speaker = say - elif (espeak := shutil.which('espeak')) or (espeak := shutil.which('espeak-ng')): - speaker = espeak - - client = ollama.AsyncClient() - - messages = [] - - while True: - if content_in := input('>>> '): - messages.append({'role': 'user', 'content': content_in}) - - content_out = '' - message = {'role': 'assistant', 'content': ''} - async for response in await client.chat(model='mistral', messages=messages, stream=True): - if response['done']: - messages.append(message) - - content = response['message']['content'] - print(content, end='', flush=True) - - content_out += content - if content in ['.', '!', '?', '\n']: - await speak(speaker, content_out) - content_out = '' - - message['content'] += content - - if content_out: - await speak(speaker, content_out) - print() - - -try: - asyncio.run(main()) -except (KeyboardInterrupt, EOFError): - ... diff --git a/examples/async-chat.py b/examples/async-chat.py new file mode 100644 index 00000000..81a50d9a --- /dev/null +++ b/examples/async-chat.py @@ -0,0 +1,19 @@ +import asyncio +from ollama import AsyncClient + + +async def main(): + messages = [ + { + 'role': 'user', + 'content': 'Why is the sky blue?', + }, + ] + + client = AsyncClient() + response = await client.chat('llama3.2', messages=messages) + print(response['message']['content']) + + +if __name__ == '__main__': + asyncio.run(main()) diff --git a/examples/async-generate.py b/examples/async-generate.py new file mode 100644 index 00000000..0097af16 --- /dev/null +++ b/examples/async-generate.py @@ -0,0 +1,15 @@ +import asyncio +import ollama + + +async def main(): + client = ollama.AsyncClient() + response = await client.generate('llama3.2', 'Why is the sky blue?') + print(response['response']) + + +if __name__ == '__main__': + try: + asyncio.run(main()) + except KeyboardInterrupt: + print('\nGoodbye!') diff --git a/examples/async-tools.py b/examples/async-tools.py new file mode 100644 index 00000000..07b3c4a8 
--- /dev/null +++ b/examples/async-tools.py @@ -0,0 +1,78 @@ +import asyncio +from ollama import ChatResponse +import ollama + + +def add_two_numbers(a: int, b: int) -> int: + """ + Add two numbers + + Args: + a (int): The first number + b (int): The second number + + Returns: + int: The sum of the two numbers + """ + return a + b + + +def subtract_two_numbers(a: int, b: int) -> int: + """ + Subtract two numbers + """ + return a - b + + +# Tools can still be manually defined and passed into chat +subtract_two_numbers_tool = { + 'type': 'function', + 'function': { + 'name': 'subtract_two_numbers', + 'description': 'Subtract two numbers', + 'parameters': { + 'type': 'object', + 'required': ['a', 'b'], + 'properties': { + 'a': {'type': 'integer', 'description': 'The first number'}, + 'b': {'type': 'integer', 'description': 'The second number'}, + }, + }, + }, +} + + +async def main(): + client = ollama.AsyncClient() + + prompt = 'What is three plus one?' + print('Prompt:', prompt) + + available_functions = { + 'add_two_numbers': add_two_numbers, + 'subtract_two_numbers': subtract_two_numbers, + } + + response: ChatResponse = await client.chat( + 'llama3.1', + messages=[{'role': 'user', 'content': prompt}], + tools=[add_two_numbers, subtract_two_numbers_tool], + ) + + if response.message.tool_calls: + # There may be multiple tool calls in the response + for tool in response.message.tool_calls: + # Ensure the function is available, and then call it + if function_to_call := available_functions.get(tool.function.name): + print('Calling function:', tool.function.name) + print('Arguments:', tool.function.arguments) + print('Function output:', function_to_call(**tool.function.arguments)) + else: + print('Function', tool.function.name, 'not found') + + +if __name__ == '__main__': + try: + asyncio.run(main()) + except KeyboardInterrupt: + print('\nGoodbye!') diff --git a/examples/chat-stream/main.py b/examples/chat-stream.py similarity index 68% rename from 
examples/chat-stream/main.py rename to examples/chat-stream.py index 2a573466..cccab01a 100644 --- a/examples/chat-stream/main.py +++ b/examples/chat-stream.py @@ -8,8 +8,7 @@ }, ] -for part in chat('mistral', messages=messages, stream=True): +for part in chat('llama3.2', messages=messages, stream=True): print(part['message']['content'], end='', flush=True) -# end with a newline print() diff --git a/examples/chat-with-history.py b/examples/chat-with-history.py new file mode 100644 index 00000000..e98d15f6 --- /dev/null +++ b/examples/chat-with-history.py @@ -0,0 +1,38 @@ +from ollama import chat + + +messages = [ + { + 'role': 'user', + 'content': 'Why is the sky blue?', + }, + { + 'role': 'assistant', + 'content': "The sky is blue because of the way the Earth's atmosphere scatters sunlight.", + }, + { + 'role': 'user', + 'content': 'What is the weather in Tokyo?', + }, + { + 'role': 'assistant', + 'content': 'The weather in Tokyo is typically warm and humid during the summer months, with temperatures often exceeding 30°C (86°F). The city experiences a rainy season from June to September, with heavy rainfall and occasional typhoons. Winter is mild, with temperatures rarely dropping below freezing. 
The city is known for its high-tech and vibrant culture, with many popular tourist attractions such as the Tokyo Tower, Senso-ji Temple, and the bustling Shibuya district.',
+  },
+]
+
+while True:
+  user_input = input('Chat with history: ')
+  response = chat(
+    'llama3.2',
+    messages=messages
+    + [
+      {'role': 'user', 'content': user_input},
+    ],
+  )
+
+  # Add the response to the messages to maintain the history
+  messages.extend([
+    {'role': 'user', 'content': user_input},
+    {'role': 'assistant', 'content': response.message.content},
+  ])
+  print(response.message.content + '\n')
diff --git a/examples/chat/main.py b/examples/chat.py
similarity index 75%
rename from examples/chat/main.py
rename to examples/chat.py
index 90c5f90a..2a30f8a2 100644
--- a/examples/chat/main.py
+++ b/examples/chat.py
@@ -1,6 +1,5 @@
 from ollama import chat

-
 messages = [
   {
     'role': 'user',
@@ -8,5 +7,5 @@
   },
 ]

-response = chat('mistral', messages=messages)
+response = chat('llama3.2', messages=messages)
 print(response['message']['content'])
diff --git a/examples/create.py b/examples/create.py
new file mode 100644
index 00000000..d4b5b1f3
--- /dev/null
+++ b/examples/create.py
@@ -0,0 +1,30 @@
+import sys
+
+from ollama import create
+
+
+args = sys.argv[1:]
+if len(args) == 2:
+  # create from local file
+  path = args[1]
+else:
+  print('usage: python create.py <model name> <filepath>')
+  sys.exit(1)
+
+# TODO: update to real Modelfile values
+modelfile = f"""
+FROM {path}
+"""
+example_modelfile = """
+FROM llama3.2
+# sets the temperature to 1 [higher is more creative, lower is more coherent]
+PARAMETER temperature 1
+# sets the context window size to 4096, this controls how many tokens the LLM can use as context to generate the next token
+PARAMETER num_ctx 4096
+
+# sets a custom system message to specify the behavior of the chat assistant
+SYSTEM You are Mario from super mario bros, acting as an assistant.
+""" + +for response in create(model=args[0], modelfile=modelfile, stream=True): + print(response['status']) diff --git a/examples/create/main.py b/examples/create/main.py deleted file mode 100644 index 0a1161d9..00000000 --- a/examples/create/main.py +++ /dev/null @@ -1,20 +0,0 @@ -import sys - -from ollama import create - - -args = sys.argv[1:] -if len(args) == 2: - # create from local file - path = args[1] -else: - print('usage: python main.py ') - sys.exit(1) - -# TODO: update to real Modelfile values -modelfile = f""" -FROM {path} -""" - -for response in create(model=args[0], modelfile=modelfile, stream=True): - print(response['status']) diff --git a/examples/embed.py b/examples/embed.py new file mode 100644 index 00000000..5af145ea --- /dev/null +++ b/examples/embed.py @@ -0,0 +1,4 @@ +from ollama import embed + +response = embed(model='llama3.2', input='Hello, world!') +print(response['embeddings']) diff --git a/examples/fill-in-middle/main.py b/examples/fill-in-middle.py similarity index 100% rename from examples/fill-in-middle/main.py rename to examples/fill-in-middle.py diff --git a/examples/generate-stream/main.py b/examples/generate-stream.py similarity index 51% rename from examples/generate-stream/main.py rename to examples/generate-stream.py index a24b4106..10b7dc76 100644 --- a/examples/generate-stream/main.py +++ b/examples/generate-stream.py @@ -1,5 +1,5 @@ from ollama import generate -for part in generate('mistral', 'Why is the sky blue?', stream=True): +for part in generate('llama3.2', 'Why is the sky blue?', stream=True): print(part['response'], end='', flush=True) diff --git a/examples/generate/main.py b/examples/generate.py similarity index 50% rename from examples/generate/main.py rename to examples/generate.py index e39e2950..1a2311dc 100644 --- a/examples/generate/main.py +++ b/examples/generate.py @@ -1,5 +1,5 @@ from ollama import generate -response = generate('mistral', 'Why is the sky blue?') +response = generate('llama3.2', 'Why is 
the sky blue?') print(response['response']) diff --git a/examples/list.py b/examples/list.py new file mode 100644 index 00000000..32d45257 --- /dev/null +++ b/examples/list.py @@ -0,0 +1,14 @@ +from ollama import list +from ollama import ListResponse + +response: ListResponse = list() + +for model in response.models: + print('Name:', model.model) + print(' Size (MB):', f'{(model.size.real / 1024 / 1024):.2f}') + if model.details: + print(' Format:', model.details.format) + print(' Family:', model.details.family) + print(' Parameter Size:', model.details.parameter_size) + print(' Quantization Level:', model.details.quantization_level) + print('\n') diff --git a/examples/multimodal-chat.py b/examples/multimodal-chat.py new file mode 100644 index 00000000..8aff9f46 --- /dev/null +++ b/examples/multimodal-chat.py @@ -0,0 +1,23 @@ +from ollama import chat +# from pathlib import Path + +# Pass in the path to the image +path = input('Please enter the path to the image: ') + +# You can also pass in base64 encoded image data +# img = base64.b64encode(Path(path).read_bytes()).decode() +# or the raw bytes +# img = Path(path).read_bytes() + +response = chat( + model='llama3.2-vision', + messages=[ + { + 'role': 'user', + 'content': 'What is in this image? 
Be concise.', + 'images': [path], + } + ], +) + +print(response.message.content) diff --git a/examples/multimodal/main.py b/examples/multimodal-generate.py similarity index 100% rename from examples/multimodal/main.py rename to examples/multimodal-generate.py diff --git a/examples/ps.py b/examples/ps.py new file mode 100644 index 00000000..34d5230a --- /dev/null +++ b/examples/ps.py @@ -0,0 +1,27 @@ +from ollama import ps, pull, chat +from ollama import ProcessResponse + +# Ensure at least one model is loaded +response = pull('llama3.2', stream=True) +progress_states = set() +for progress in response: + if progress.get('status') in progress_states: + continue + progress_states.add(progress.get('status')) + print(progress.get('status')) + +print('\n') + +print('Waiting for model to load... \n') +chat(model='llama3.2', messages=[{'role': 'user', 'content': 'Why is the sky blue?'}]) + + +response: ProcessResponse = ps() +for model in response.models: + print('Model: ', model.model) + print(' Digest: ', model.digest) + print(' Expires at: ', model.expires_at) + print(' Size: ', model.size) + print(' Size vram: ', model.size_vram) + print(' Details: ', model.details) + print('\n') diff --git a/examples/ps/main.py b/examples/ps/main.py deleted file mode 100644 index 822d09aa..00000000 --- a/examples/ps/main.py +++ /dev/null @@ -1,31 +0,0 @@ -from ollama import ps, pull, chat - -response = pull('mistral', stream=True) -progress_states = set() -for progress in response: - if progress.get('status') in progress_states: - continue - progress_states.add(progress.get('status')) - print(progress.get('status')) - -print('\n') - -response = chat('mistral', messages=[{'role': 'user', 'content': 'Hello!'}]) -print(response['message']['content']) - -print('\n') - -response = ps() - -name = response['models'][0]['name'] -size = response['models'][0]['size'] -size_vram = response['models'][0]['size_vram'] - -if size == size_vram: - print(f'{name}: 100% GPU') -elif not size_vram: - 
print(f'{name}: 100% CPU') -else: - size_cpu = size - size_vram - cpu_percent = round(size_cpu / size * 100) - print(f'{name}: {cpu_percent}% CPU/{100 - cpu_percent}% GPU') diff --git a/examples/pull-progress/README.md b/examples/pull-progress/README.md deleted file mode 100644 index 8a44f60c..00000000 --- a/examples/pull-progress/README.md +++ /dev/null @@ -1,9 +0,0 @@ -# pull-progress - -This example emulates `ollama pull` using the Python library and [`tqdm`](https://tqdm.github.io/). - -## Setup - -```shell -pip install -r requirements.txt -``` diff --git a/examples/pull-progress/requirements.txt b/examples/pull-progress/requirements.txt deleted file mode 100644 index ae3df91e..00000000 --- a/examples/pull-progress/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -tqdm==4.66.1 diff --git a/examples/pull-progress/main.py b/examples/pull.py similarity index 92% rename from examples/pull-progress/main.py rename to examples/pull.py index 89b2f3a1..e24f2e94 100644 --- a/examples/pull-progress/main.py +++ b/examples/pull.py @@ -3,7 +3,7 @@ current_digest, bars = '', {} -for progress in pull('mistral', stream=True): +for progress in pull('llama3.2', stream=True): digest = progress.get('digest', '') if digest != current_digest and current_digest in bars: bars[current_digest].close() diff --git a/examples/tools.py b/examples/tools.py new file mode 100644 index 00000000..6151cd96 --- /dev/null +++ b/examples/tools.py @@ -0,0 +1,66 @@ +from ollama import chat +from ollama import ChatResponse + + +def add_two_numbers(a: int, b: int) -> int: + """ + Add two numbers + + Args: + a (int): The first number + b (int): The second number + + Returns: + int: The sum of the two numbers + """ + return a + b + + +def subtract_two_numbers(a: int, b: int) -> int: + """ + Subtract two numbers + """ + return a - b + + +# Tools can still be manually defined and passed into chat +subtract_two_numbers_tool = { + 'type': 'function', + 'function': { + 'name': 'subtract_two_numbers', + 
'description': 'Subtract two numbers', + 'parameters': { + 'type': 'object', + 'required': ['a', 'b'], + 'properties': { + 'a': {'type': 'integer', 'description': 'The first number'}, + 'b': {'type': 'integer', 'description': 'The second number'}, + }, + }, + }, +} + +prompt = 'What is three plus one?' +print('Prompt:', prompt) + +available_functions = { + 'add_two_numbers': add_two_numbers, + 'subtract_two_numbers': subtract_two_numbers, +} + +response: ChatResponse = chat( + 'llama3.1', + messages=[{'role': 'user', 'content': prompt}], + tools=[add_two_numbers, subtract_two_numbers_tool], +) + +if response.message.tool_calls: + # There may be multiple tool calls in the response + for tool in response.message.tool_calls: + # Ensure the function is available, and then call it + if function_to_call := available_functions.get(tool.function.name): + print('Calling function:', tool.function.name) + print('Arguments:', tool.function.arguments) + print('Function output:', function_to_call(**tool.function.arguments)) + else: + print('Function', tool.function.name, 'not found') diff --git a/examples/tools/README.md b/examples/tools/README.md deleted file mode 100644 index 85ca5dd8..00000000 --- a/examples/tools/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# tools - -This example demonstrates how to utilize tool calls with an asynchronous Ollama client and the chat endpoint. 
diff --git a/examples/tools/main.py b/examples/tools/main.py deleted file mode 100644 index 133b2384..00000000 --- a/examples/tools/main.py +++ /dev/null @@ -1,87 +0,0 @@ -import json -import ollama -import asyncio - - -# Simulates an API call to get flight times -# In a real application, this would fetch data from a live database or API -def get_flight_times(departure: str, arrival: str) -> str: - flights = { - 'NYC-LAX': {'departure': '08:00 AM', 'arrival': '11:30 AM', 'duration': '5h 30m'}, - 'LAX-NYC': {'departure': '02:00 PM', 'arrival': '10:30 PM', 'duration': '5h 30m'}, - 'LHR-JFK': {'departure': '10:00 AM', 'arrival': '01:00 PM', 'duration': '8h 00m'}, - 'JFK-LHR': {'departure': '09:00 PM', 'arrival': '09:00 AM', 'duration': '7h 00m'}, - 'CDG-DXB': {'departure': '11:00 AM', 'arrival': '08:00 PM', 'duration': '6h 00m'}, - 'DXB-CDG': {'departure': '03:00 AM', 'arrival': '07:30 AM', 'duration': '7h 30m'}, - } - - key = f'{departure}-{arrival}'.upper() - return json.dumps(flights.get(key, {'error': 'Flight not found'})) - - -async def run(model: str): - client = ollama.AsyncClient() - # Initialize conversation with a user query - messages = [{'role': 'user', 'content': 'What is the flight time from New York (NYC) to Los Angeles (LAX)?'}] - - # First API call: Send the query and function description to the model - response = await client.chat( - model=model, - messages=messages, - tools=[ - { - 'type': 'function', - 'function': { - 'name': 'get_flight_times', - 'description': 'Get the flight times between two cities', - 'parameters': { - 'type': 'object', - 'properties': { - 'departure': { - 'type': 'string', - 'description': 'The departure city (airport code)', - }, - 'arrival': { - 'type': 'string', - 'description': 'The arrival city (airport code)', - }, - }, - 'required': ['departure', 'arrival'], - }, - }, - }, - ], - ) - - # Add the model's response to the conversation history - messages.append(response['message']) - - # Check if the model decided to use 
the provided function - if not response['message'].get('tool_calls'): - print("The model didn't use the function. Its response was:") - print(response['message']['content']) - return - - # Process function calls made by the model - if response['message'].get('tool_calls'): - available_functions = { - 'get_flight_times': get_flight_times, - } - for tool in response['message']['tool_calls']: - function_to_call = available_functions[tool['function']['name']] - function_response = function_to_call(tool['function']['arguments']['departure'], tool['function']['arguments']['arrival']) - # Add function response to the conversation - messages.append( - { - 'role': 'tool', - 'content': function_response, - } - ) - - # Second API call: Get final response from the model - final_response = await client.chat(model=model, messages=messages) - print(final_response['message']['content']) - - -# Run the async function -asyncio.run(run('mistral')) diff --git a/ollama/_client.py b/ollama/_client.py index a8a19d35..548f3432 100644 --- a/ollama/_client.py +++ b/ollama/_client.py @@ -319,7 +319,7 @@ def add_two_numbers(a: int, b: int) -> int: ''' return a + b - client.chat(model='llama3.1:8b', tools=[add_two_numbers], messages=[...]) + client.chat(model='llama3.2', tools=[add_two_numbers], messages=[...]) Raises `RequestError` if a model is not provided. @@ -821,7 +821,7 @@ def add_two_numbers(a: int, b: int) -> int: ''' return a + b - await client.chat(model='llama3.1:8b', tools=[add_two_numbers], messages=[...]) + await client.chat(model='llama3.2', tools=[add_two_numbers], messages=[...]) Raises `RequestError` if a model is not provided. 
From bfea240300b3ffd2dfe57bec225974ec07e01f53 Mon Sep 17 00:00:00 2001 From: ParthSareen Date: Thu, 21 Nov 2024 15:16:40 -0800 Subject: [PATCH 18/18] Disable tests for readmes and examples --- .github/workflows/test.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index deac0fbe..c35a470c 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -2,6 +2,9 @@ name: test on: pull_request: + paths: + - 'examples/**' + - '**/README.md' jobs: test: