diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 2a2ee2b8f3..4bf1b35ae4 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "1.92.0"
+  ".": "1.92.1"
 }
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 60ab8eb6a8..02c63e5813 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,14 @@
 # Changelog
 
+## 1.92.1 (2025-06-26)
+
+Full Changelog: [v1.92.0...v1.92.1](https://github.com/openai/openai-python/compare/v1.92.0...v1.92.1)
+
+### Chores
+
+* **client:** sync stream/parse methods over ([e2536cf](https://github.com/openai/openai-python/commit/e2536cfd74224047cece9c2ad86f0ffe51c0667c))
+* **docs:** update README to include links to docs on Webhooks ([ddbf9f1](https://github.com/openai/openai-python/commit/ddbf9f1dc47a32257716189f2056b45933328c9c))
+
 ## 1.92.0 (2025-06-26)
 
 Full Changelog: [v1.91.0...v1.92.0](https://github.com/openai/openai-python/compare/v1.91.0...v1.92.0)
diff --git a/README.md b/README.md
index 763428ddc8..b38ef578d2 100644
--- a/README.md
+++ b/README.md
@@ -410,6 +410,8 @@ The async client uses the exact same interface. If you pass a [`PathLike`](https
 
 Verifying webhook signatures is _optional but encouraged_.
 
+For more information about webhooks, see [the API docs](https://platform.openai.com/docs/guides/webhooks).
+
 ### Parsing webhook payloads
 
 For most use cases, you will likely want to verify the webhook and parse the payload at the same time. To achieve this, we provide the method `client.webhooks.unwrap()`, which parses a webhook request and verifies that it was sent by OpenAI. This method will raise an error if the signature is invalid.
diff --git a/pyproject.toml b/pyproject.toml
index eb9008a3a6..802229939c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "openai"
-version = "1.92.0"
+version = "1.92.1"
 description = "The official Python library for the openai API"
 dynamic = ["readme"]
 license = "Apache-2.0"
diff --git a/src/openai/_version.py b/src/openai/_version.py
index 64bc847523..9392416ade 100644
--- a/src/openai/_version.py
+++ b/src/openai/_version.py
@@ -1,4 +1,4 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
 __title__ = "openai"
-__version__ = "1.92.0"  # x-release-please-version
+__version__ = "1.92.1"  # x-release-please-version
diff --git a/src/openai/resources/chat/completions/completions.py b/src/openai/resources/chat/completions/completions.py
index 2a5622b092..5806296773 100644
--- a/src/openai/resources/chat/completions/completions.py
+++ b/src/openai/resources/chat/completions/completions.py
@@ -103,7 +103,7 @@ def parse(
         presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         reasoning_effort: Optional[ReasoningEffort] | NotGiven = NOT_GIVEN,
         seed: Optional[int] | NotGiven = NOT_GIVEN,
-        service_tier: Optional[Literal["auto", "default", "flex", "scale"]] | NotGiven = NOT_GIVEN,
+        service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
         stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
         store: Optional[bool] | NotGiven = NOT_GIVEN,
         stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
@@ -1334,7 +1334,7 @@ def stream(
         presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         reasoning_effort: Optional[ReasoningEffort] | NotGiven = NOT_GIVEN,
         seed: Optional[int] | NotGiven = NOT_GIVEN,
-        service_tier: Optional[Literal["auto", "default", "flex", "scale"]] | NotGiven = NOT_GIVEN,
+        service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
         stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
         store: Optional[bool] | NotGiven = NOT_GIVEN,
         stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
@@ -1470,7 +1470,7 @@ async def parse(
         presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         reasoning_effort: Optional[ReasoningEffort] | NotGiven = NOT_GIVEN,
         seed: Optional[int] | NotGiven = NOT_GIVEN,
-        service_tier: Optional[Literal["auto", "default", "flex", "scale"]] | NotGiven = NOT_GIVEN,
+        service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
         stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
         store: Optional[bool] | NotGiven = NOT_GIVEN,
         stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
@@ -2701,7 +2701,7 @@ def stream(
         presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         reasoning_effort: Optional[ReasoningEffort] | NotGiven = NOT_GIVEN,
         seed: Optional[int] | NotGiven = NOT_GIVEN,
-        service_tier: Optional[Literal["auto", "default", "flex", "scale"]] | NotGiven = NOT_GIVEN,
+        service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
         stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
         store: Optional[bool] | NotGiven = NOT_GIVEN,
         stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
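
The `completions.py` hunks above widen the `service_tier` literal on the `parse()` and `stream()` helpers to accept `"priority"`. A minimal sketch of passing the new value through `parse()`, assuming a valid `OPENAI_API_KEY` in the environment; the model name and prompt are placeholders, not part of this diff:

```python
from openai import OpenAI

client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment

# "priority" now type-checks against the widened Literal in 1.92.1;
# the model and message content here are illustrative placeholders.
completion = client.chat.completions.parse(
    model="gpt-4o",
    messages=[{"role": "user", "content": "Say hello."}],
    service_tier="priority",
)
print(completion.choices[0].message.content)
```

The same keyword applies to `client.chat.completions.stream(...)`, per the `stream()` hunks in this diff.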
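The README hunk also links to the webhooks guide and describes `client.webhooks.unwrap()`, which parses a webhook request and verifies it was sent by OpenAI, raising an error if the signature is invalid. A rough sketch of that flow, assuming a Flask app and that the webhook secret is provided via the `OPENAI_WEBHOOK_SECRET` environment variable; the framework, route name, and error handling are illustrative assumptions, not part of this diff:

```python
import flask

from openai import OpenAI

app = flask.Flask(__name__)
client = OpenAI()  # assumes OPENAI_WEBHOOK_SECRET is set in the environment

@app.route("/webhook", methods=["POST"])
def webhook():
    raw_body = flask.request.get_data(as_text=True)
    try:
        # Verifies the signature and parses the payload in one step; raises
        # on an invalid signature, per the README text in this diff.
        event = client.webhooks.unwrap(raw_body, flask.request.headers)
    except Exception:
        return "invalid signature", 400
    print(f"received event: {event.type}")
    return "ok", 200
```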