diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 1f41588..8edf5a6 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -7,12 +7,17 @@ on:
- 'integrated/**'
- 'stl-preview-head/**'
- 'stl-preview-base/**'
+ pull_request:
+ branches-ignore:
+ - 'stl-preview-head/**'
+ - 'stl-preview-base/**'
jobs:
lint:
timeout-minutes: 10
name: lint
runs-on: ${{ github.repository == 'stainless-sdks/browserbase-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }}
+ if: github.event_name == 'push' || github.event.pull_request.head.repo.fork
steps:
- uses: actions/checkout@v4
@@ -30,24 +35,40 @@ jobs:
- name: Run lints
run: ./scripts/lint
- upload:
- if: github.repository == 'stainless-sdks/browserbase-python'
+ build:
+ if: github.event_name == 'push' || github.event.pull_request.head.repo.fork
timeout-minutes: 10
- name: upload
+ name: build
permissions:
contents: read
id-token: write
- runs-on: depot-ubuntu-24.04
+ runs-on: ${{ github.repository == 'stainless-sdks/browserbase-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }}
steps:
- uses: actions/checkout@v4
+ - name: Install Rye
+ run: |
+ curl -sSf https://rye.astral.sh/get | bash
+ echo "$HOME/.rye/shims" >> $GITHUB_PATH
+ env:
+ RYE_VERSION: '0.44.0'
+ RYE_INSTALL_OPTION: '--yes'
+
+ - name: Install dependencies
+ run: rye sync --all-features
+
+ - name: Run build
+ run: rye build
+
- name: Get GitHub OIDC Token
+ if: github.repository == 'stainless-sdks/browserbase-python'
id: github-oidc
uses: actions/github-script@v6
with:
script: core.setOutput('github_token', await core.getIDToken());
- name: Upload tarball
+ if: github.repository == 'stainless-sdks/browserbase-python'
env:
URL: https://pkg.stainless.com/s
AUTH: ${{ steps.github-oidc.outputs.github_token }}
@@ -58,6 +79,7 @@ jobs:
timeout-minutes: 10
name: test
runs-on: ${{ github.repository == 'stainless-sdks/browserbase-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }}
+ if: github.event_name == 'push' || github.event.pull_request.head.repo.fork
steps:
- uses: actions/checkout@v4
diff --git a/.gitignore b/.gitignore
index 4615233..117701e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,4 @@
.prism.log
-.vscode
_dev
__pycache__
diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 3e9af1b..7a22c4a 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "1.4.0"
+ ".": "1.5.0-alpha.0"
}
\ No newline at end of file
diff --git a/.stats.yml b/.stats.yml
index 38c95a8..a50ccc0 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
configured_endpoints: 18
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/browserbase%2Fbrowserbase-e2ed1b5267eeff92982918505349017b9155da2c7ab948787ab11cf9068af1b8.yml
-openapi_spec_hash: 6639c21dccb52ca610cae833227a9791
-config_hash: 74882e23a455dece33e43a27e67f0fbb
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/browserbase%2Fbrowserbase-a9ab6f9017f7645722d220eb8172516a7a5400e86542c28fc7e121adcd1f344f.yml
+openapi_spec_hash: e29347aba2697d4efa3dce7794810dbd
+config_hash: ec077c0d8cde29588ca4ff30d49575a4
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000..5b01030
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,3 @@
+{
+ "python.analysis.importFormat": "relative",
+}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index fb20598..b1b9474 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,79 @@
# Changelog
+## 1.5.0-alpha.0 (2025-09-05)
+
+Full Changelog: [v1.4.0...v1.5.0-alpha.0](https://github.com/browserbase/sdk-python/compare/v1.4.0...v1.5.0-alpha.0)
+
+### Features
+
+* **api:** api update ([e94ddbd](https://github.com/browserbase/sdk-python/commit/e94ddbd8777b97d4e8ab193e1bf3eaad983ecec9))
+* **api:** api update ([28115fb](https://github.com/browserbase/sdk-python/commit/28115fb584336dbf5b08043ad8f9cf1d911240ea))
+* **api:** api update ([3209287](https://github.com/browserbase/sdk-python/commit/32092872a3d4d48824b4d77d517ffdb06470ad95))
+* **api:** api update ([f38e029](https://github.com/browserbase/sdk-python/commit/f38e02981ae0777cb3d922845902b2673dc832fa))
+* **api:** api update ([1d9f769](https://github.com/browserbase/sdk-python/commit/1d9f7694bc0d465ce758ddcec41359e9cd1a08ad))
+* **api:** api update ([d72f39f](https://github.com/browserbase/sdk-python/commit/d72f39fbe29342cfc77e9b224f2ad0a5a77aaae4))
+* **api:** api update ([6d449b3](https://github.com/browserbase/sdk-python/commit/6d449b3deb284a72528877a8729f4cf7a418275d))
+* **api:** api update ([8bd5f8b](https://github.com/browserbase/sdk-python/commit/8bd5f8bcca3a2e5baadfc06009546692e63eb744))
+* **api:** api update ([1ce99ef](https://github.com/browserbase/sdk-python/commit/1ce99efe89c1d0757ca3100cca8619faa4082f74))
+* **api:** api update ([1cbb849](https://github.com/browserbase/sdk-python/commit/1cbb8498bf70c15c001f620b821519216cbadd97))
+* **api:** manual updates ([5893fc6](https://github.com/browserbase/sdk-python/commit/5893fc6165cfd88378d6725317e30c7cb6faf8df))
+* **api:** manual updates ([074f06d](https://github.com/browserbase/sdk-python/commit/074f06d0dfb08554229348828afd2cc1defe94ee))
+* clean up environment call outs ([82c38c4](https://github.com/browserbase/sdk-python/commit/82c38c494a175c1b6b38bab3615916c30ba25d14))
+* **client:** add follow_redirects request option ([a8b0b5e](https://github.com/browserbase/sdk-python/commit/a8b0b5e4c6445e0e8c0d3673a090aabab09a50fd))
+* **client:** add support for aiohttp ([3516092](https://github.com/browserbase/sdk-python/commit/35160921e262f147cc723a754f14cfd9875603f5))
+* **client:** support file upload requests ([2f338f0](https://github.com/browserbase/sdk-python/commit/2f338f009e556ef9be05f49816b17cef138bda17))
+* improve future compat with pydantic v3 ([8b5256c](https://github.com/browserbase/sdk-python/commit/8b5256c801e1423a4daf6bf49de7509a32ebfde2))
+* **types:** replace List[str] with SequenceNotStr in params ([55083f6](https://github.com/browserbase/sdk-python/commit/55083f678b68020fae835af5cd58e0e5deea2888))
+
+
+### Bug Fixes
+
+* avoid newer type syntax ([85f597b](https://github.com/browserbase/sdk-python/commit/85f597b34d149138f1b5afdc52062cb131e3a30a))
+* **ci:** correct conditional ([a36b873](https://github.com/browserbase/sdk-python/commit/a36b87379b404613673720dd9f498ed76dfe5c3a))
+* **ci:** release-doctor — report correct token name ([61b97ff](https://github.com/browserbase/sdk-python/commit/61b97fff5ea92bade293c5f5f4a84b0d991375e7))
+* **client:** correctly parse binary response | stream ([9614c4c](https://github.com/browserbase/sdk-python/commit/9614c4c05bc57ea60100aec9a194aee7a39e701b))
+* **client:** don't send Content-Type header on GET requests ([c4c4185](https://github.com/browserbase/sdk-python/commit/c4c4185de32b28c09565b6fe84efd65fd411abb9))
+* fix extension types in playwright_extensions ([8b652e7](https://github.com/browserbase/sdk-python/commit/8b652e78be1493d03e13d2a116cbc6969a880e58))
+* **parsing:** correctly handle nested discriminated unions ([d020678](https://github.com/browserbase/sdk-python/commit/d0206786894ecfb22e0924edb8a227414b17788d))
+* **parsing:** ignore empty metadata ([118c4d4](https://github.com/browserbase/sdk-python/commit/118c4d41bda811d2d942793d8ab029b272c7a5c6))
+* **parsing:** parse extra field types ([c7ef875](https://github.com/browserbase/sdk-python/commit/c7ef87549e324fb06fab945e1754ef7b56b30031))
+* **tests:** fix: tests which call HTTP endpoints directly with the example parameters ([e298407](https://github.com/browserbase/sdk-python/commit/e2984077537fd6dee0191329a083ad0ccf9fd76f))
+
+
+### Chores
+
+* **ci:** change upload type ([e42da7c](https://github.com/browserbase/sdk-python/commit/e42da7c1fed216ff2b15223c49f1111bc0ef16e5))
+* **ci:** enable for pull requests ([03a6db7](https://github.com/browserbase/sdk-python/commit/03a6db72e98bf1606bf68928b2ac5029cba088df))
+* **ci:** only run for pushes and fork pull requests ([c8cb51f](https://github.com/browserbase/sdk-python/commit/c8cb51f311f4d39863127fab189c95d84a186bc6))
+* **docs:** grammar improvements ([f32a9e2](https://github.com/browserbase/sdk-python/commit/f32a9e258a9b0b4d29c24137d5a7207907f00f9b))
+* **docs:** remove reference to rye shell ([07d129a](https://github.com/browserbase/sdk-python/commit/07d129a04211037d123b06d36347741960e75323))
+* **docs:** remove unnecessary param examples ([62209dc](https://github.com/browserbase/sdk-python/commit/62209dcac034f40ac8b3b8a119e532201a227680))
+* **internal:** add Sequence related utils ([34b0dd6](https://github.com/browserbase/sdk-python/commit/34b0dd6b4297fafc2bcb9e8243c8d3c2e2e435fc))
+* **internal:** bump pinned h11 dep ([5e3270d](https://github.com/browserbase/sdk-python/commit/5e3270da2e4f41efdd345d073a42d6791eb22a84))
+* **internal:** change ci workflow machines ([14c0ac4](https://github.com/browserbase/sdk-python/commit/14c0ac49a6d9d42f5401a5c24ddb8586b3998fb2))
+* **internal:** codegen related update ([f979aff](https://github.com/browserbase/sdk-python/commit/f979aff605c0d74efb561e0b169ad39b486ab5a0))
+* **internal:** codegen related update ([12de9f3](https://github.com/browserbase/sdk-python/commit/12de9f324fbb40bec91cd7c6b16af1440c4f7373))
+* **internal:** codegen related update ([c4157cb](https://github.com/browserbase/sdk-python/commit/c4157cb8470b1d0ca67e6757f4fe9146a630cc82))
+* **internal:** codegen related update ([ccb2c95](https://github.com/browserbase/sdk-python/commit/ccb2c95002bb6a38e1eb8b9a84e4a335d5ee1a13))
+* **internal:** fix ruff target version ([e6a3df4](https://github.com/browserbase/sdk-python/commit/e6a3df40564b4ba3d23514e0b42221010d465bf6))
+* **internal:** update comment in script ([a7aec17](https://github.com/browserbase/sdk-python/commit/a7aec17c02632684dfeb7759dd6a5322efe092ce))
+* **internal:** update conftest.py ([5d3a2b1](https://github.com/browserbase/sdk-python/commit/5d3a2b1906ca5fca5c84c6d6684a8a62b6700479))
+* **internal:** update pyright exclude list ([33ba4b4](https://github.com/browserbase/sdk-python/commit/33ba4b47ddeb8c0aa19a11f35a7cea9aa9a0966d))
+* **package:** mark python 3.13 as supported ([2450b8e](https://github.com/browserbase/sdk-python/commit/2450b8eb2349adde689febd09269915d41e7a590))
+* **project:** add settings file for vscode ([a406241](https://github.com/browserbase/sdk-python/commit/a4062413b2fce397d59ea9ceaec7ed0565880fe2))
+* **readme:** fix version rendering on pypi ([a8afe1a](https://github.com/browserbase/sdk-python/commit/a8afe1a67c48080ef202cac88da9b5d59534799a))
+* **readme:** update badges ([869a3f4](https://github.com/browserbase/sdk-python/commit/869a3f4dd7e6f19225b697aeee89ce98a2174c0a))
+* **tests:** add tests for httpx client instantiation & proxies ([9c5d88c](https://github.com/browserbase/sdk-python/commit/9c5d88cb4cbbda5aa618cba2f5217bacd4a228cc))
+* **tests:** run tests in parallel ([94308de](https://github.com/browserbase/sdk-python/commit/94308dea065f54268145b175a13e0dbfd2a9cc81))
+* **tests:** skip some failing tests on the latest python versions ([7bc40f0](https://github.com/browserbase/sdk-python/commit/7bc40f068d290a479a0d4070ef54e8f8c4ef598d))
+* update @stainless-api/prism-cli to v5.15.0 ([b48933b](https://github.com/browserbase/sdk-python/commit/b48933b2f68eafaa554662eb7f41bf960a74d8b6))
+* update github action ([d57dc03](https://github.com/browserbase/sdk-python/commit/d57dc0398b083556ed7ceee265efcf282062005d))
+
+
+### Documentation
+
+* **client:** fix httpx.Timeout documentation reference ([4bbda56](https://github.com/browserbase/sdk-python/commit/4bbda56cdb4adf677f67011f42f5c3e324a5f60e))
+
## 1.4.0 (2025-05-16)
Full Changelog: [v1.3.0...v1.4.0](https://github.com/browserbase/sdk-python/compare/v1.3.0...v1.4.0)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 45a7298..5f8bfea 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -17,8 +17,7 @@ $ rye sync --all-features
You can then run scripts using `rye run python script.py` or by activating the virtual environment:
```sh
-$ rye shell
-# or manually activate - https://docs.python.org/3/library/venv.html#how-venvs-work
+# Activate the virtual environment - https://docs.python.org/3/library/venv.html#how-venvs-work
$ source .venv/bin/activate
# now you can omit the `rye run` prefix
diff --git a/README.md b/README.md
index 43a407e..5c1155d 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,7 @@
# Browserbase Python API library
-[![PyPI version](https://img.shields.io/pypi/v/browserbase.svg)](https://pypi.org/project/browserbase/)
+
+[![PyPI version](https://img.shields.io/pypi/v/browserbase.svg?label=pypi%20(stable))](https://pypi.org/project/browserbase/)
The Browserbase Python library provides convenient access to the Browserbase REST API from any Python 3.8+
application. The library includes type definitions for all request params and response fields,
@@ -16,7 +17,7 @@ The REST API documentation can be found on [docs.browserbase.com](https://docs.b
```sh
# install from PyPI
-pip install browserbase
+pip install --pre browserbase
```
## Usage
@@ -81,6 +82,39 @@ rye run example playwright_basic # replace with the example you want to run
> [!NOTE]
> Make sure you have a `.env` file that matches the [.env.example](.env.example) file in the root of this repository.
+### With aiohttp
+
+By default, the async client uses `httpx` for HTTP requests. However, for improved concurrency performance you may also use `aiohttp` as the HTTP backend.
+
+You can enable this by installing `aiohttp`:
+
+```sh
+# install from PyPI
+pip install --pre browserbase[aiohttp]
+```
+
+Then you can enable it by instantiating the client with `http_client=DefaultAioHttpClient()`:
+
+```python
+import asyncio
+from browserbase import DefaultAioHttpClient
+from browserbase import AsyncBrowserbase
+
+
+async def main() -> None:
+ async with AsyncBrowserbase(
+ api_key="My API Key",
+ http_client=DefaultAioHttpClient(),
+ ) as client:
+ session = await client.sessions.create(
+ project_id="your_project_id",
+ )
+ print(session.id)
+
+
+asyncio.run(main())
+```
+
## Using types
Nested request parameters are [TypedDicts](https://docs.python.org/3/library/typing.html#typing.TypedDict). Responses are [Pydantic models](https://docs.pydantic.dev) which also provide helper methods for things like:
@@ -101,37 +135,7 @@ client = Browserbase()
session = client.sessions.create(
project_id="projectId",
- browser_settings={
- "advanced_stealth": True,
- "block_ads": True,
- "captcha_image_selector": "captchaImageSelector",
- "captcha_input_selector": "captchaInputSelector",
- "context": {
- "id": "id",
- "persist": True,
- },
- "extension_id": "extensionId",
- "fingerprint": {
- "browsers": ["chrome"],
- "devices": ["desktop"],
- "http_version": "1",
- "locales": ["string"],
- "operating_systems": ["android"],
- "screen": {
- "max_height": 0,
- "max_width": 0,
- "min_height": 0,
- "min_width": 0,
- },
- },
- "log_session": True,
- "record_session": True,
- "solve_captchas": True,
- "viewport": {
- "height": 0,
- "width": 0,
- },
- },
+ browser_settings={},
)
print(session.browser_settings)
```
@@ -222,7 +226,7 @@ client.with_options(max_retries=5).sessions.create(
### Timeouts
By default requests time out after 1 minute. You can configure this with a `timeout` option,
-which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/#fine-tuning-the-configuration) object:
+which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/timeouts/#fine-tuning-the-configuration) object:
```python
from browserbase import Browserbase
diff --git a/SECURITY.md b/SECURITY.md
index e10eb19..ad64e4b 100644
--- a/SECURITY.md
+++ b/SECURITY.md
@@ -16,11 +16,11 @@ before making any information public.
## Reporting Non-SDK Related Security Issues
If you encounter security issues that are not directly related to SDKs but pertain to the services
-or products provided by Browserbase please follow the respective company's security reporting guidelines.
+or products provided by Browserbase, please follow the respective company's security reporting guidelines.
### Browserbase Terms and Policies
-Please contact support@browserbase.com for any questions or concerns regarding security of our services.
+Please contact support@browserbase.com for any questions or concerns regarding the security of our services.
---
diff --git a/api.md b/api.md
index dbb776f..0145485 100644
--- a/api.md
+++ b/api.md
@@ -3,13 +3,13 @@
Types:
```python
-from browserbase.types import Context, ContextCreateResponse, ContextUpdateResponse
+from browserbase.types import ContextCreateResponse, ContextRetrieveResponse, ContextUpdateResponse
```
Methods:
- client.contexts.create(\*\*params) -> ContextCreateResponse
-- client.contexts.retrieve(id) -> Context
+- client.contexts.retrieve(id) -> ContextRetrieveResponse
- client.contexts.update(id) -> ContextUpdateResponse
# Extensions
@@ -17,13 +17,13 @@ Methods:
Types:
```python
-from browserbase.types import Extension
+from browserbase.types import ExtensionCreateResponse, ExtensionRetrieveResponse
```
Methods:
-- client.extensions.create(\*\*params) -> Extension
-- client.extensions.retrieve(id) -> Extension
+- client.extensions.create(\*\*params) -> ExtensionCreateResponse
+- client.extensions.retrieve(id) -> ExtensionRetrieveResponse
- client.extensions.delete(id) -> None
# Projects
@@ -31,14 +31,14 @@ Methods:
Types:
```python
-from browserbase.types import Project, ProjectUsage, ProjectListResponse
+from browserbase.types import ProjectRetrieveResponse, ProjectListResponse, ProjectUsageResponse
```
Methods:
-- client.projects.retrieve(id) -> Project
+- client.projects.retrieve(id) -> ProjectRetrieveResponse
- client.projects.list() -> ProjectListResponse
-- client.projects.usage(id) -> ProjectUsage
+- client.projects.usage(id) -> ProjectUsageResponse
# Sessions
@@ -46,11 +46,11 @@ Types:
```python
from browserbase.types import (
- Session,
- SessionLiveURLs,
SessionCreateResponse,
SessionRetrieveResponse,
+ SessionUpdateResponse,
SessionListResponse,
+ SessionDebugResponse,
)
```
@@ -58,9 +58,9 @@ Methods:
- client.sessions.create(\*\*params) -> SessionCreateResponse
- client.sessions.retrieve(id) -> SessionRetrieveResponse
-- client.sessions.update(id, \*\*params) -> Session
+- client.sessions.update(id, \*\*params) -> SessionUpdateResponse
- client.sessions.list(\*\*params) -> SessionListResponse
-- client.sessions.debug(id) -> SessionLiveURLs
+- client.sessions.debug(id) -> SessionDebugResponse
## Downloads
@@ -73,7 +73,7 @@ Methods:
Types:
```python
-from browserbase.types.sessions import SessionLog, LogListResponse
+from browserbase.types.sessions import LogListResponse
```
Methods:
@@ -85,7 +85,7 @@ Methods:
Types:
```python
-from browserbase.types.sessions import SessionRecording, RecordingRetrieveResponse
+from browserbase.types.sessions import RecordingRetrieveResponse
```
Methods:
diff --git a/bin/check-release-environment b/bin/check-release-environment
index 6ad04d3..b845b0f 100644
--- a/bin/check-release-environment
+++ b/bin/check-release-environment
@@ -3,7 +3,7 @@
errors=()
if [ -z "${PYPI_TOKEN}" ]; then
- errors+=("The BROWSERBASE_PYPI_TOKEN secret has not been set. Please set it in either this repository's secrets or your organization secrets.")
+ errors+=("The PYPI_TOKEN secret has not been set. Please set it in either this repository's secrets or your organization secrets.")
fi
lenErrors=${#errors[@]}
diff --git a/examples/playwright_extensions.py b/examples/playwright_extensions.py
index 6ef1f98..f2c2f7f 100644
--- a/examples/playwright_extensions.py
+++ b/examples/playwright_extensions.py
@@ -10,7 +10,7 @@
BROWSERBASE_PROJECT_ID,
bb,
)
-from browserbase.types import Extension, SessionCreateResponse
+from browserbase.types import SessionCreateResponse, ExtensionRetrieveResponse
PATH_TO_EXTENSION = Path.cwd() / "examples" / "packages" / "extensions" / "browserbase-test"
@@ -47,11 +47,11 @@ def zip_extension(path: Path = PATH_TO_EXTENSION, save_local: bool = False) -> B
def create_extension() -> str:
zip_data = zip_extension(save_local=True)
- extension: Extension = bb.extensions.create(file=("extension.zip", zip_data.getvalue()))
+ extension = bb.extensions.create(file=("extension.zip", zip_data.getvalue()))
return extension.id
-def get_extension(id: str) -> Extension:
+def get_extension(id: str) -> ExtensionRetrieveResponse:
return bb.extensions.retrieve(id)
diff --git a/pyproject.toml b/pyproject.toml
index e5e2f6c..d42ea80 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "browserbase"
-version = "1.4.0"
+version = "1.5.0-alpha.0"
description = "The official Python library for the Browserbase API"
dynamic = ["readme"]
license = "Apache-2.0"
@@ -24,6 +24,7 @@ classifiers = [
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
"Operating System :: OS Independent",
"Operating System :: POSIX",
"Operating System :: MacOS",
@@ -37,6 +38,8 @@ classifiers = [
Homepage = "https://github.com/browserbase/sdk-python"
Repository = "https://github.com/browserbase/sdk-python"
+[project.optional-dependencies]
+aiohttp = ["aiohttp", "httpx_aiohttp>=0.1.8"]
[tool.rye]
managed = true
@@ -55,6 +58,7 @@ dev-dependencies = [
"importlib-metadata>=6.7.0",
"rich>=13.7.1",
"nest_asyncio==1.6.0",
+ "pytest-xdist>=3.6.1",
"python-dotenv",
"playwright",
"selenium",
@@ -134,7 +138,7 @@ replacement = '[\1](https://github.com/browserbase/sdk-python/tree/main/\g<2>)'
[tool.pytest.ini_options]
testpaths = ["tests"]
-addopts = "--tb=short"
+addopts = "--tb=short -n auto"
xfail_strict = true
asyncio_mode = "auto"
asyncio_default_fixture_loop_scope = "session"
@@ -153,6 +157,7 @@ exclude = [
"_dev",
".venv",
".nox",
+ ".git",
]
reportImplicitOverride = true
@@ -164,7 +169,7 @@ reportPrivateUsage = false
[tool.ruff]
line-length = 120
output-format = "grouped"
-target-version = "py37"
+target-version = "py38"
[tool.ruff.format]
docstring-code-format = true
diff --git a/requirements-dev.lock b/requirements-dev.lock
index b0045a6..23d3fed 100644
--- a/requirements-dev.lock
+++ b/requirements-dev.lock
@@ -10,6 +10,13 @@
# universal: false
-e file:.
+aiohappyeyeballs==2.6.1
+ # via aiohttp
+aiohttp==3.12.15
+ # via browserbase
+ # via httpx-aiohttp
+aiosignal==1.4.0
+ # via aiohttp
annotated-types==0.7.0
# via pydantic
anyio==4.6.2.post1
@@ -17,7 +24,10 @@ anyio==4.6.2.post1
# via httpx
argcomplete==3.5.1
# via nox
+async-timeout==5.0.1
+ # via aiohttp
attrs==24.2.0
+ # via aiohttp
# via outcome
# via trio
certifi==2024.8.30
@@ -39,8 +49,13 @@ exceptiongroup==1.2.2
# via pytest
# via trio
# via trio-websocket
+execnet==2.1.1
+ # via pytest-xdist
filelock==3.16.1
# via virtualenv
+frozenlist==1.7.0
+ # via aiohttp
+ # via aiosignal
greenlet==3.1.1
# via playwright
h11==0.14.0
@@ -50,12 +65,16 @@ httpcore==1.0.6
# via httpx
httpx==0.28.1
# via browserbase
+ # via httpx-aiohttp
# via respx
+httpx-aiohttp==0.1.8
+ # via browserbase
idna==3.10
# via anyio
# via httpx
# via requests
# via trio
+ # via yarl
importlib-metadata==8.5.0
iniconfig==2.0.0
# via pytest
@@ -63,6 +82,9 @@ markdown-it-py==3.0.0
# via rich
mdurl==0.1.2
# via markdown-it-py
+multidict==6.6.4
+ # via aiohttp
+ # via yarl
mypy==1.14.1
mypy-extensions==1.0.0
# via mypy
@@ -81,6 +103,9 @@ playwright==1.48.0
# via pytest-playwright
pluggy==1.5.0
# via pytest
+propcache==0.3.2
+ # via aiohttp
+ # via yarl
pydantic==2.10.3
# via browserbase
pydantic-core==2.27.1
@@ -89,17 +114,19 @@ pyee==12.0.0
# via playwright
pygments==2.18.0
# via rich
+pyright==1.1.399
pysocks==1.7.1
# via urllib3
-pyright==1.1.399
pytest==8.3.3
# via pytest-asyncio
# via pytest-base-url
# via pytest-playwright
+ # via pytest-xdist
pytest-asyncio==0.24.0
pytest-base-url==2.1.0
# via pytest-playwright
pytest-playwright==0.5.2
+pytest-xdist==3.8.0
python-dateutil==2.9.0.post0
# via time-machine
python-dotenv==1.0.1
@@ -132,8 +159,10 @@ trio==0.27.0
trio-websocket==0.11.1
# via selenium
typing-extensions==4.12.2
+ # via aiosignal
# via anyio
# via browserbase
+ # via multidict
# via mypy
# via pydantic
# via pydantic-core
@@ -150,5 +179,7 @@ websocket-client==1.8.0
# via selenium
wsproto==1.2.0
# via trio-websocket
+yarl==1.20.1
+ # via aiohttp
zipp==3.20.2
# via importlib-metadata
diff --git a/requirements.lock b/requirements.lock
index 9efa54d..6f4c4c9 100644
--- a/requirements.lock
+++ b/requirements.lock
@@ -10,11 +10,22 @@
# universal: false
-e file:.
+aiohappyeyeballs==2.6.1
+ # via aiohttp
+aiohttp==3.12.8
+ # via browserbase
+ # via httpx-aiohttp
+aiosignal==1.3.2
+ # via aiohttp
annotated-types==0.6.0
# via pydantic
anyio==4.4.0
# via browserbase
# via httpx
+async-timeout==5.0.1
+ # via aiohttp
+attrs==25.3.0
+ # via aiohttp
certifi==2023.7.22
# via httpcore
# via httpx
@@ -22,15 +33,28 @@ distro==1.8.0
# via browserbase
exceptiongroup==1.2.2
# via anyio
-h11==0.14.0
+frozenlist==1.6.2
+ # via aiohttp
+ # via aiosignal
+h11==0.16.0
# via httpcore
-httpcore==1.0.2
+httpcore==1.0.9
# via httpx
httpx==0.28.1
# via browserbase
+ # via httpx-aiohttp
+httpx-aiohttp==0.1.8
+ # via browserbase
idna==3.4
# via anyio
# via httpx
+ # via yarl
+multidict==6.4.4
+ # via aiohttp
+ # via yarl
+propcache==0.3.1
+ # via aiohttp
+ # via yarl
pydantic==2.10.3
# via browserbase
pydantic-core==2.27.1
@@ -41,5 +65,8 @@ sniffio==1.3.0
typing-extensions==4.12.2
# via anyio
# via browserbase
+ # via multidict
# via pydantic
# via pydantic-core
+yarl==1.20.0
+ # via aiohttp
diff --git a/scripts/mock b/scripts/mock
index d2814ae..0b28f6e 100755
--- a/scripts/mock
+++ b/scripts/mock
@@ -21,7 +21,7 @@ echo "==> Starting mock server with URL ${URL}"
# Run prism mock on the given spec
if [ "$1" == "--daemon" ]; then
- npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL" &> .prism.log &
+ npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock "$URL" &> .prism.log &
# Wait for server to come online
echo -n "Waiting for server"
@@ -37,5 +37,5 @@ if [ "$1" == "--daemon" ]; then
echo
else
- npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL"
+ npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock "$URL"
fi
diff --git a/scripts/test b/scripts/test
index 2b87845..dbeda2d 100755
--- a/scripts/test
+++ b/scripts/test
@@ -43,7 +43,7 @@ elif ! prism_is_running ; then
echo -e "To run the server, pass in the path or url of your OpenAPI"
echo -e "spec to the prism command:"
echo
- echo -e " \$ ${YELLOW}npm exec --package=@stoplight/prism-cli@~5.3.2 -- prism mock path/to/your.openapi.yml${NC}"
+ echo -e " \$ ${YELLOW}npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock path/to/your.openapi.yml${NC}"
echo
exit 1
diff --git a/scripts/utils/upload-artifact.sh b/scripts/utils/upload-artifact.sh
index 7c3d028..4fa5766 100755
--- a/scripts/utils/upload-artifact.sh
+++ b/scripts/utils/upload-artifact.sh
@@ -1,7 +1,9 @@
#!/usr/bin/env bash
set -exuo pipefail
-RESPONSE=$(curl -X POST "$URL" \
+FILENAME=$(basename dist/*.whl)
+
+RESPONSE=$(curl -X POST "$URL?filename=$FILENAME" \
-H "Authorization: Bearer $AUTH" \
-H "Content-Type: application/json")
@@ -12,13 +14,13 @@ if [[ "$SIGNED_URL" == "null" ]]; then
exit 1
fi
-UPLOAD_RESPONSE=$(tar -cz . | curl -v -X PUT \
- -H "Content-Type: application/gzip" \
- --data-binary @- "$SIGNED_URL" 2>&1)
+UPLOAD_RESPONSE=$(curl -v -X PUT \
+ -H "Content-Type: binary/octet-stream" \
+ --data-binary "@dist/$FILENAME" "$SIGNED_URL" 2>&1)
if echo "$UPLOAD_RESPONSE" | grep -q "HTTP/[0-9.]* 200"; then
echo -e "\033[32mUploaded build to Stainless storage.\033[0m"
- echo -e "\033[32mInstallation: pip install 'https://pkg.stainless.com/s/browserbase-python/$SHA'\033[0m"
+ echo -e "\033[32mInstallation: pip install 'https://pkg.stainless.com/s/browserbase-python/$SHA/$FILENAME'\033[0m"
else
echo -e "\033[31mFailed to upload artifact.\033[0m"
exit 1
diff --git a/src/browserbase/__init__.py b/src/browserbase/__init__.py
index e954b0e..8e12884 100644
--- a/src/browserbase/__init__.py
+++ b/src/browserbase/__init__.py
@@ -36,7 +36,7 @@
UnprocessableEntityError,
APIResponseValidationError,
)
-from ._base_client import DefaultHttpxClient, DefaultAsyncHttpxClient
+from ._base_client import DefaultHttpxClient, DefaultAioHttpClient, DefaultAsyncHttpxClient
from ._utils._logs import setup_logging as _setup_logging
__all__ = [
@@ -78,6 +78,7 @@
"DEFAULT_CONNECTION_LIMITS",
"DefaultHttpxClient",
"DefaultAsyncHttpxClient",
+ "DefaultAioHttpClient",
]
if not _t.TYPE_CHECKING:
diff --git a/src/browserbase/_base_client.py b/src/browserbase/_base_client.py
index 82e76c9..8954933 100644
--- a/src/browserbase/_base_client.py
+++ b/src/browserbase/_base_client.py
@@ -59,7 +59,7 @@
ModelBuilderProtocol,
)
from ._utils import is_dict, is_list, asyncify, is_given, lru_cache, is_mapping
-from ._compat import PYDANTIC_V2, model_copy, model_dump
+from ._compat import PYDANTIC_V1, model_copy, model_dump
from ._models import GenericModel, FinalRequestOptions, validate_type, construct_type
from ._response import (
APIResponse,
@@ -232,7 +232,7 @@ def _set_private_attributes(
model: Type[_T],
options: FinalRequestOptions,
) -> None:
- if PYDANTIC_V2 and getattr(self, "__pydantic_private__", None) is None:
+ if (not PYDANTIC_V1) and getattr(self, "__pydantic_private__", None) is None:
self.__pydantic_private__ = {}
self._model = model
@@ -320,7 +320,7 @@ def _set_private_attributes(
client: AsyncAPIClient,
options: FinalRequestOptions,
) -> None:
- if PYDANTIC_V2 and getattr(self, "__pydantic_private__", None) is None:
+ if (not PYDANTIC_V1) and getattr(self, "__pydantic_private__", None) is None:
self.__pydantic_private__ = {}
self._model = model
@@ -529,6 +529,18 @@ def _build_request(
# work around https://github.com/encode/httpx/discussions/2880
kwargs["extensions"] = {"sni_hostname": prepared_url.host.replace("_", "-")}
+ is_body_allowed = options.method.lower() != "get"
+
+ if is_body_allowed:
+ if isinstance(json_data, bytes):
+ kwargs["content"] = json_data
+ else:
+ kwargs["json"] = json_data if is_given(json_data) else None
+ kwargs["files"] = files
+ else:
+ headers.pop("Content-Type", None)
+ kwargs.pop("data", None)
+
# TODO: report this error to httpx
return self._client.build_request( # pyright: ignore[reportUnknownMemberType]
headers=headers,
@@ -540,8 +552,6 @@ def _build_request(
# so that passing a `TypedDict` doesn't cause an error.
# https://github.com/microsoft/pyright/issues/3526#event-6715453066
params=self.qs.stringify(cast(Mapping[str, Any], params)) if params else None,
- json=json_data if is_given(json_data) else None,
- files=files,
**kwargs,
)
@@ -960,6 +970,9 @@ def request(
if self.custom_auth is not None:
kwargs["auth"] = self.custom_auth
+ if options.follow_redirects is not None:
+ kwargs["follow_redirects"] = options.follow_redirects
+
log.debug("Sending HTTP Request: %s %s", request.method, request.url)
response = None
@@ -1068,7 +1081,14 @@ def _process_response(
) -> ResponseT:
origin = get_origin(cast_to) or cast_to
- if inspect.isclass(origin) and issubclass(origin, BaseAPIResponse):
+ if (
+ inspect.isclass(origin)
+ and issubclass(origin, BaseAPIResponse)
+ # we only want to actually return the custom BaseAPIResponse class if we're
+ # returning the raw response, or if we're not streaming SSE, as if we're streaming
+ # SSE then `cast_to` doesn't actively reflect the type we need to parse into
+ and (not stream or bool(response.request.headers.get(RAW_RESPONSE_HEADER)))
+ ):
if not issubclass(origin, APIResponse):
raise TypeError(f"API Response types must subclass {APIResponse}; Received {origin}")
@@ -1279,6 +1299,24 @@ def __init__(self, **kwargs: Any) -> None:
super().__init__(**kwargs)
+try:
+ import httpx_aiohttp
+except ImportError:
+
+ class _DefaultAioHttpClient(httpx.AsyncClient):
+ def __init__(self, **_kwargs: Any) -> None:
+ raise RuntimeError("To use the aiohttp client you must have installed the package with the `aiohttp` extra")
+else:
+
+ class _DefaultAioHttpClient(httpx_aiohttp.HttpxAiohttpClient): # type: ignore
+ def __init__(self, **kwargs: Any) -> None:
+ kwargs.setdefault("timeout", DEFAULT_TIMEOUT)
+ kwargs.setdefault("limits", DEFAULT_CONNECTION_LIMITS)
+ kwargs.setdefault("follow_redirects", True)
+
+ super().__init__(**kwargs)
+
+
if TYPE_CHECKING:
DefaultAsyncHttpxClient = httpx.AsyncClient
"""An alias to `httpx.AsyncClient` that provides the same defaults that this SDK
@@ -1287,8 +1325,12 @@ def __init__(self, **kwargs: Any) -> None:
This is useful because overriding the `http_client` with your own instance of
`httpx.AsyncClient` will result in httpx's defaults being used, not ours.
"""
+
+ DefaultAioHttpClient = httpx.AsyncClient
+ """An alias to `httpx.AsyncClient` that changes the default HTTP transport to `aiohttp`."""
else:
DefaultAsyncHttpxClient = _DefaultAsyncHttpxClient
+ DefaultAioHttpClient = _DefaultAioHttpClient
class AsyncHttpxClientWrapper(DefaultAsyncHttpxClient):
@@ -1460,6 +1502,9 @@ async def request(
if self.custom_auth is not None:
kwargs["auth"] = self.custom_auth
+ if options.follow_redirects is not None:
+ kwargs["follow_redirects"] = options.follow_redirects
+
log.debug("Sending HTTP Request: %s %s", request.method, request.url)
response = None
@@ -1568,7 +1613,14 @@ async def _process_response(
) -> ResponseT:
origin = get_origin(cast_to) or cast_to
- if inspect.isclass(origin) and issubclass(origin, BaseAPIResponse):
+ if (
+ inspect.isclass(origin)
+ and issubclass(origin, BaseAPIResponse)
+ # we only want to actually return the custom BaseAPIResponse class if we're
+ # returning the raw response, or if we're not streaming SSE, as if we're streaming
+ # SSE then `cast_to` doesn't actively reflect the type we need to parse into
+ and (not stream or bool(response.request.headers.get(RAW_RESPONSE_HEADER)))
+ ):
if not issubclass(origin, AsyncAPIResponse):
raise TypeError(f"API Response types must subclass {AsyncAPIResponse}; Received {origin}")
diff --git a/src/browserbase/_compat.py b/src/browserbase/_compat.py
index 92d9ee6..bdef67f 100644
--- a/src/browserbase/_compat.py
+++ b/src/browserbase/_compat.py
@@ -12,14 +12,13 @@
_T = TypeVar("_T")
_ModelT = TypeVar("_ModelT", bound=pydantic.BaseModel)
-# --------------- Pydantic v2 compatibility ---------------
+# --------------- Pydantic v2, v3 compatibility ---------------
# Pyright incorrectly reports some of our functions as overriding a method when they don't
# pyright: reportIncompatibleMethodOverride=false
-PYDANTIC_V2 = pydantic.VERSION.startswith("2.")
+PYDANTIC_V1 = pydantic.VERSION.startswith("1.")
-# v1 re-exports
if TYPE_CHECKING:
def parse_date(value: date | StrBytesIntFloat) -> date: # noqa: ARG001
@@ -44,90 +43,92 @@ def is_typeddict(type_: type[Any]) -> bool: # noqa: ARG001
...
else:
- if PYDANTIC_V2:
- from pydantic.v1.typing import (
+ # v1 re-exports
+ if PYDANTIC_V1:
+ from pydantic.typing import (
get_args as get_args,
is_union as is_union,
get_origin as get_origin,
is_typeddict as is_typeddict,
is_literal_type as is_literal_type,
)
- from pydantic.v1.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
+ from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
else:
- from pydantic.typing import (
+ from ._utils import (
get_args as get_args,
is_union as is_union,
get_origin as get_origin,
+ parse_date as parse_date,
is_typeddict as is_typeddict,
+ parse_datetime as parse_datetime,
is_literal_type as is_literal_type,
)
- from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
# refactored config
if TYPE_CHECKING:
from pydantic import ConfigDict as ConfigDict
else:
- if PYDANTIC_V2:
- from pydantic import ConfigDict
- else:
+ if PYDANTIC_V1:
# TODO: provide an error message here?
ConfigDict = None
+ else:
+ from pydantic import ConfigDict as ConfigDict
# renamed methods / properties
def parse_obj(model: type[_ModelT], value: object) -> _ModelT:
- if PYDANTIC_V2:
- return model.model_validate(value)
- else:
+ if PYDANTIC_V1:
return cast(_ModelT, model.parse_obj(value)) # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
+ else:
+ return model.model_validate(value)
def field_is_required(field: FieldInfo) -> bool:
- if PYDANTIC_V2:
- return field.is_required()
- return field.required # type: ignore
+ if PYDANTIC_V1:
+ return field.required # type: ignore
+ return field.is_required()
def field_get_default(field: FieldInfo) -> Any:
value = field.get_default()
- if PYDANTIC_V2:
- from pydantic_core import PydanticUndefined
-
- if value == PydanticUndefined:
- return None
+ if PYDANTIC_V1:
return value
+ from pydantic_core import PydanticUndefined
+
+ if value == PydanticUndefined:
+ return None
return value
def field_outer_type(field: FieldInfo) -> Any:
- if PYDANTIC_V2:
- return field.annotation
- return field.outer_type_ # type: ignore
+ if PYDANTIC_V1:
+ return field.outer_type_ # type: ignore
+ return field.annotation
def get_model_config(model: type[pydantic.BaseModel]) -> Any:
- if PYDANTIC_V2:
- return model.model_config
- return model.__config__ # type: ignore
+ if PYDANTIC_V1:
+ return model.__config__ # type: ignore
+ return model.model_config
def get_model_fields(model: type[pydantic.BaseModel]) -> dict[str, FieldInfo]:
- if PYDANTIC_V2:
- return model.model_fields
- return model.__fields__ # type: ignore
+ if PYDANTIC_V1:
+ return model.__fields__ # type: ignore
+ return model.model_fields
def model_copy(model: _ModelT, *, deep: bool = False) -> _ModelT:
- if PYDANTIC_V2:
- return model.model_copy(deep=deep)
- return model.copy(deep=deep) # type: ignore
+ if PYDANTIC_V1:
+ return model.copy(deep=deep) # type: ignore
+ return model.model_copy(deep=deep)
def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str:
- if PYDANTIC_V2:
- return model.model_dump_json(indent=indent)
- return model.json(indent=indent) # type: ignore
+ if PYDANTIC_V1:
+ return model.json(indent=indent) # type: ignore
+ return model.model_dump_json(indent=indent)
def model_dump(
@@ -139,14 +140,14 @@ def model_dump(
warnings: bool = True,
mode: Literal["json", "python"] = "python",
) -> dict[str, Any]:
- if PYDANTIC_V2 or hasattr(model, "model_dump"):
+ if (not PYDANTIC_V1) or hasattr(model, "model_dump"):
return model.model_dump(
mode=mode,
exclude=exclude,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
# warnings are not supported in Pydantic v1
- warnings=warnings if PYDANTIC_V2 else True,
+ warnings=True if PYDANTIC_V1 else warnings,
)
return cast(
"dict[str, Any]",
@@ -159,9 +160,9 @@ def model_dump(
def model_parse(model: type[_ModelT], data: Any) -> _ModelT:
- if PYDANTIC_V2:
- return model.model_validate(data)
- return model.parse_obj(data) # pyright: ignore[reportDeprecated]
+ if PYDANTIC_V1:
+ return model.parse_obj(data) # pyright: ignore[reportDeprecated]
+ return model.model_validate(data)
# generic models
@@ -170,17 +171,16 @@ def model_parse(model: type[_ModelT], data: Any) -> _ModelT:
class GenericModel(pydantic.BaseModel): ...
else:
- if PYDANTIC_V2:
+ if PYDANTIC_V1:
+ import pydantic.generics
+
+ class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ...
+ else:
# there no longer needs to be a distinction in v2 but
# we still have to create our own subclass to avoid
# inconsistent MRO ordering errors
class GenericModel(pydantic.BaseModel): ...
- else:
- import pydantic.generics
-
- class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ...
-
# cached properties
if TYPE_CHECKING:
diff --git a/src/browserbase/_files.py b/src/browserbase/_files.py
index c690226..ff951be 100644
--- a/src/browserbase/_files.py
+++ b/src/browserbase/_files.py
@@ -69,12 +69,12 @@ def _transform_file(file: FileTypes) -> HttpxFileTypes:
return file
if is_tuple_t(file):
- return (file[0], _read_file_content(file[1]), *file[2:])
+ return (file[0], read_file_content(file[1]), *file[2:])
raise TypeError(f"Expected file types input to be a FileContent type or to be a tuple")
-def _read_file_content(file: FileContent) -> HttpxFileContent:
+def read_file_content(file: FileContent) -> HttpxFileContent:
if isinstance(file, os.PathLike):
return pathlib.Path(file).read_bytes()
return file
@@ -111,12 +111,12 @@ async def _async_transform_file(file: FileTypes) -> HttpxFileTypes:
return file
if is_tuple_t(file):
- return (file[0], await _async_read_file_content(file[1]), *file[2:])
+ return (file[0], await async_read_file_content(file[1]), *file[2:])
raise TypeError(f"Expected file types input to be a FileContent type or to be a tuple")
-async def _async_read_file_content(file: FileContent) -> HttpxFileContent:
+async def async_read_file_content(file: FileContent) -> HttpxFileContent:
if isinstance(file, os.PathLike):
return await anyio.Path(file).read_bytes()
diff --git a/src/browserbase/_models.py b/src/browserbase/_models.py
index 798956f..3a6017e 100644
--- a/src/browserbase/_models.py
+++ b/src/browserbase/_models.py
@@ -2,9 +2,10 @@
import os
import inspect
-from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, cast
+from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, Optional, cast
from datetime import date, datetime
from typing_extensions import (
+ List,
Unpack,
Literal,
ClassVar,
@@ -49,7 +50,7 @@
strip_annotated_type,
)
from ._compat import (
- PYDANTIC_V2,
+ PYDANTIC_V1,
ConfigDict,
GenericModel as BaseGenericModel,
get_args,
@@ -80,11 +81,7 @@ class _ConfigProtocol(Protocol):
class BaseModel(pydantic.BaseModel):
- if PYDANTIC_V2:
- model_config: ClassVar[ConfigDict] = ConfigDict(
- extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true"))
- )
- else:
+ if PYDANTIC_V1:
@property
@override
@@ -94,6 +91,10 @@ def model_fields_set(self) -> set[str]:
class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated]
extra: Any = pydantic.Extra.allow # type: ignore
+ else:
+ model_config: ClassVar[ConfigDict] = ConfigDict(
+ extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true"))
+ )
def to_dict(
self,
@@ -207,28 +208,32 @@ def construct( # pyright: ignore[reportIncompatibleMethodOverride]
else:
fields_values[name] = field_get_default(field)
+ extra_field_type = _get_extra_fields_type(__cls)
+
_extra = {}
for key, value in values.items():
if key not in model_fields:
- if PYDANTIC_V2:
- _extra[key] = value
- else:
+ parsed = construct_type(value=value, type_=extra_field_type) if extra_field_type is not None else value
+
+ if PYDANTIC_V1:
_fields_set.add(key)
- fields_values[key] = value
+ fields_values[key] = parsed
+ else:
+ _extra[key] = parsed
object.__setattr__(m, "__dict__", fields_values)
- if PYDANTIC_V2:
- # these properties are copied from Pydantic's `model_construct()` method
- object.__setattr__(m, "__pydantic_private__", None)
- object.__setattr__(m, "__pydantic_extra__", _extra)
- object.__setattr__(m, "__pydantic_fields_set__", _fields_set)
- else:
+ if PYDANTIC_V1:
# init_private_attributes() does not exist in v2
m._init_private_attributes() # type: ignore
# copied from Pydantic v1's `construct()` method
object.__setattr__(m, "__fields_set__", _fields_set)
+ else:
+ # these properties are copied from Pydantic's `model_construct()` method
+ object.__setattr__(m, "__pydantic_private__", None)
+ object.__setattr__(m, "__pydantic_extra__", _extra)
+ object.__setattr__(m, "__pydantic_fields_set__", _fields_set)
return m
@@ -238,7 +243,7 @@ def construct( # pyright: ignore[reportIncompatibleMethodOverride]
# although not in practice
model_construct = construct
- if not PYDANTIC_V2:
+ if PYDANTIC_V1:
# we define aliases for some of the new pydantic v2 methods so
# that we can just document these methods without having to specify
# a specific pydantic version as some users may not know which
@@ -299,7 +304,7 @@ def model_dump(
exclude_none=exclude_none,
)
- return cast(dict[str, Any], json_safe(dumped)) if mode == "json" else dumped
+ return cast("dict[str, Any]", json_safe(dumped)) if mode == "json" else dumped
@override
def model_dump_json(
@@ -358,15 +363,32 @@ def _construct_field(value: object, field: FieldInfo, key: str) -> object:
if value is None:
return field_get_default(field)
- if PYDANTIC_V2:
- type_ = field.annotation
- else:
+ if PYDANTIC_V1:
type_ = cast(type, field.outer_type_) # type: ignore
+ else:
+ type_ = field.annotation # type: ignore
if type_ is None:
raise RuntimeError(f"Unexpected field type is None for {key}")
- return construct_type(value=value, type_=type_)
+ return construct_type(value=value, type_=type_, metadata=getattr(field, "metadata", None))
+
+
+def _get_extra_fields_type(cls: type[pydantic.BaseModel]) -> type | None:
+ if PYDANTIC_V1:
+ # TODO
+ return None
+
+ schema = cls.__pydantic_core_schema__
+ if schema["type"] == "model":
+ fields = schema["schema"]
+ if fields["type"] == "model-fields":
+ extras = fields.get("extras_schema")
+ if extras and "cls" in extras:
+ # mypy can't narrow the type
+ return extras["cls"] # type: ignore[no-any-return]
+
+ return None
def is_basemodel(type_: type) -> bool:
@@ -420,7 +442,7 @@ def construct_type_unchecked(*, value: object, type_: type[_T]) -> _T:
return cast(_T, construct_type(value=value, type_=type_))
-def construct_type(*, value: object, type_: object) -> object:
+def construct_type(*, value: object, type_: object, metadata: Optional[List[Any]] = None) -> object:
"""Loose coercion to the expected type with construction of nested values.
If the given value does not match the expected type then it is returned as-is.
@@ -438,8 +460,10 @@ def construct_type(*, value: object, type_: object) -> object:
type_ = type_.__value__ # type: ignore[unreachable]
# unwrap `Annotated[T, ...]` -> `T`
- if is_annotated_type(type_):
- meta: tuple[Any, ...] = get_args(type_)[1:]
+ if metadata is not None and len(metadata) > 0:
+ meta: tuple[Any, ...] = tuple(metadata)
+ elif is_annotated_type(type_):
+ meta = get_args(type_)[1:]
type_ = extract_type_arg(type_, 0)
else:
meta = tuple()
@@ -604,30 +628,30 @@ def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any,
for variant in get_args(union):
variant = strip_annotated_type(variant)
if is_basemodel_type(variant):
- if PYDANTIC_V2:
- field = _extract_field_schema_pv2(variant, discriminator_field_name)
- if not field:
+ if PYDANTIC_V1:
+ field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name) # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
+ if not field_info:
continue
# Note: if one variant defines an alias then they all should
- discriminator_alias = field.get("serialization_alias")
-
- field_schema = field["schema"]
+ discriminator_alias = field_info.alias
- if field_schema["type"] == "literal":
- for entry in cast("LiteralSchema", field_schema)["expected"]:
+ if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation):
+ for entry in get_args(annotation):
if isinstance(entry, str):
mapping[entry] = variant
else:
- field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name) # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
- if not field_info:
+ field = _extract_field_schema_pv2(variant, discriminator_field_name)
+ if not field:
continue
# Note: if one variant defines an alias then they all should
- discriminator_alias = field_info.alias
+ discriminator_alias = field.get("serialization_alias")
- if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation):
- for entry in get_args(annotation):
+ field_schema = field["schema"]
+
+ if field_schema["type"] == "literal":
+ for entry in cast("LiteralSchema", field_schema)["expected"]:
if isinstance(entry, str):
mapping[entry] = variant
@@ -690,7 +714,7 @@ class GenericModel(BaseGenericModel, BaseModel):
pass
-if PYDANTIC_V2:
+if not PYDANTIC_V1:
from pydantic import TypeAdapter as _TypeAdapter
_CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=None)(_TypeAdapter))
@@ -737,6 +761,7 @@ class FinalRequestOptionsInput(TypedDict, total=False):
idempotency_key: str
json_data: Body
extra_json: AnyMapping
+ follow_redirects: bool
@final
@@ -750,18 +775,19 @@ class FinalRequestOptions(pydantic.BaseModel):
files: Union[HttpxRequestFiles, None] = None
idempotency_key: Union[str, None] = None
post_parser: Union[Callable[[Any], Any], NotGiven] = NotGiven()
+ follow_redirects: Union[bool, None] = None
# It should be noted that we cannot use `json` here as that would override
# a BaseModel method in an incompatible fashion.
json_data: Union[Body, None] = None
extra_json: Union[AnyMapping, None] = None
- if PYDANTIC_V2:
- model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True)
- else:
+ if PYDANTIC_V1:
class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated]
arbitrary_types_allowed: bool = True
+ else:
+ model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True)
def get_max_retries(self, max_retries: int) -> int:
if isinstance(self.max_retries, NotGiven):
@@ -794,9 +820,9 @@ def construct( # type: ignore
key: strip_not_given(value)
for key, value in values.items()
}
- if PYDANTIC_V2:
- return super().model_construct(_fields_set, **kwargs)
- return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs)) # pyright: ignore[reportDeprecated]
+ if PYDANTIC_V1:
+ return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs)) # pyright: ignore[reportDeprecated]
+ return super().model_construct(_fields_set, **kwargs)
if not TYPE_CHECKING:
# type checkers incorrectly complain about this assignment
diff --git a/src/browserbase/_types.py b/src/browserbase/_types.py
index a8833dc..b954306 100644
--- a/src/browserbase/_types.py
+++ b/src/browserbase/_types.py
@@ -13,10 +13,21 @@
Mapping,
TypeVar,
Callable,
+ Iterator,
Optional,
Sequence,
)
-from typing_extensions import Set, Literal, Protocol, TypeAlias, TypedDict, override, runtime_checkable
+from typing_extensions import (
+ Set,
+ Literal,
+ Protocol,
+ TypeAlias,
+ TypedDict,
+ SupportsIndex,
+ overload,
+ override,
+ runtime_checkable,
+)
import httpx
import pydantic
@@ -100,6 +111,7 @@ class RequestOptions(TypedDict, total=False):
params: Query
extra_json: AnyMapping
idempotency_key: str
+ follow_redirects: bool
# Sentinel class used until PEP 0661 is accepted
@@ -215,3 +227,27 @@ class _GenericAlias(Protocol):
class HttpxSendArgs(TypedDict, total=False):
auth: httpx.Auth
+ follow_redirects: bool
+
+
+_T_co = TypeVar("_T_co", covariant=True)
+
+
+if TYPE_CHECKING:
+ # This works because str.__contains__ does not accept object (either in typeshed or at runtime)
+ # https://github.com/hauntsaninja/useful_types/blob/5e9710f3875107d068e7679fd7fec9cfab0eff3b/useful_types/__init__.py#L285
+ class SequenceNotStr(Protocol[_T_co]):
+ @overload
+ def __getitem__(self, index: SupportsIndex, /) -> _T_co: ...
+ @overload
+ def __getitem__(self, index: slice, /) -> Sequence[_T_co]: ...
+ def __contains__(self, value: object, /) -> bool: ...
+ def __len__(self) -> int: ...
+ def __iter__(self) -> Iterator[_T_co]: ...
+ def index(self, value: Any, start: int = 0, stop: int = ..., /) -> int: ...
+ def count(self, value: Any, /) -> int: ...
+ def __reversed__(self) -> Iterator[_T_co]: ...
+else:
+ # just point this to a normal `Sequence` at runtime to avoid having to special case
+ # deserializing our custom sequence type
+ SequenceNotStr = Sequence
diff --git a/src/browserbase/_utils/__init__.py b/src/browserbase/_utils/__init__.py
index d4fda26..dc64e29 100644
--- a/src/browserbase/_utils/__init__.py
+++ b/src/browserbase/_utils/__init__.py
@@ -10,7 +10,6 @@
lru_cache as lru_cache,
is_mapping as is_mapping,
is_tuple_t as is_tuple_t,
- parse_date as parse_date,
is_iterable as is_iterable,
is_sequence as is_sequence,
coerce_float as coerce_float,
@@ -23,7 +22,6 @@
coerce_boolean as coerce_boolean,
coerce_integer as coerce_integer,
file_from_path as file_from_path,
- parse_datetime as parse_datetime,
strip_not_given as strip_not_given,
deepcopy_minimal as deepcopy_minimal,
get_async_library as get_async_library,
@@ -32,12 +30,20 @@
maybe_coerce_boolean as maybe_coerce_boolean,
maybe_coerce_integer as maybe_coerce_integer,
)
+from ._compat import (
+ get_args as get_args,
+ is_union as is_union,
+ get_origin as get_origin,
+ is_typeddict as is_typeddict,
+ is_literal_type as is_literal_type,
+)
from ._typing import (
is_list_type as is_list_type,
is_union_type as is_union_type,
extract_type_arg as extract_type_arg,
is_iterable_type as is_iterable_type,
is_required_type as is_required_type,
+ is_sequence_type as is_sequence_type,
is_annotated_type as is_annotated_type,
is_type_alias_type as is_type_alias_type,
strip_annotated_type as strip_annotated_type,
@@ -55,3 +61,4 @@
function_has_argument as function_has_argument,
assert_signatures_in_sync as assert_signatures_in_sync,
)
+from ._datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
diff --git a/src/browserbase/_utils/_compat.py b/src/browserbase/_utils/_compat.py
new file mode 100644
index 0000000..dd70323
--- /dev/null
+++ b/src/browserbase/_utils/_compat.py
@@ -0,0 +1,45 @@
+from __future__ import annotations
+
+import sys
+import typing_extensions
+from typing import Any, Type, Union, Literal, Optional
+from datetime import date, datetime
+from typing_extensions import get_args as _get_args, get_origin as _get_origin
+
+from .._types import StrBytesIntFloat
+from ._datetime_parse import parse_date as _parse_date, parse_datetime as _parse_datetime
+
+_LITERAL_TYPES = {Literal, typing_extensions.Literal}
+
+
+def get_args(tp: type[Any]) -> tuple[Any, ...]:
+ return _get_args(tp)
+
+
+def get_origin(tp: type[Any]) -> type[Any] | None:
+ return _get_origin(tp)
+
+
+def is_union(tp: Optional[Type[Any]]) -> bool:
+ if sys.version_info < (3, 10):
+ return tp is Union # type: ignore[comparison-overlap]
+ else:
+ import types
+
+ return tp is Union or tp is types.UnionType
+
+
+def is_typeddict(tp: Type[Any]) -> bool:
+ return typing_extensions.is_typeddict(tp)
+
+
+def is_literal_type(tp: Type[Any]) -> bool:
+ return get_origin(tp) in _LITERAL_TYPES
+
+
+def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
+ return _parse_date(value)
+
+
+def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
+ return _parse_datetime(value)
diff --git a/src/browserbase/_utils/_datetime_parse.py b/src/browserbase/_utils/_datetime_parse.py
new file mode 100644
index 0000000..7cb9d9e
--- /dev/null
+++ b/src/browserbase/_utils/_datetime_parse.py
@@ -0,0 +1,136 @@
+"""
+This file contains code from https://github.com/pydantic/pydantic/blob/main/pydantic/v1/datetime_parse.py
+without the Pydantic v1 specific errors.
+"""
+
+from __future__ import annotations
+
+import re
+from typing import Dict, Union, Optional
+from datetime import date, datetime, timezone, timedelta
+
+from .._types import StrBytesIntFloat
+
+date_expr = r"(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})"
+time_expr = (
+    r"(?P<hour>\d{1,2}):(?P<minute>\d{1,2})"
+    r"(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?"
+    r"(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$"
+)
+
+date_re = re.compile(f"{date_expr}$")
+datetime_re = re.compile(f"{date_expr}[T ]{time_expr}")
+
+
+EPOCH = datetime(1970, 1, 1)
+# if greater than this, the number is in ms, if less than or equal it's in seconds
+# (in seconds this is 11th October 2603, in ms it's 20th August 1970)
+MS_WATERSHED = int(2e10)
+# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9
+MAX_NUMBER = int(3e20)
+
+
+def _get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]:
+ if isinstance(value, (int, float)):
+ return value
+ try:
+ return float(value)
+ except ValueError:
+ return None
+ except TypeError:
+ raise TypeError(f"invalid type; expected {native_expected_type}, string, bytes, int or float") from None
+
+
+def _from_unix_seconds(seconds: Union[int, float]) -> datetime:
+ if seconds > MAX_NUMBER:
+ return datetime.max
+ elif seconds < -MAX_NUMBER:
+ return datetime.min
+
+ while abs(seconds) > MS_WATERSHED:
+ seconds /= 1000
+ dt = EPOCH + timedelta(seconds=seconds)
+ return dt.replace(tzinfo=timezone.utc)
+
+
+def _parse_timezone(value: Optional[str]) -> Union[None, int, timezone]:
+ if value == "Z":
+ return timezone.utc
+ elif value is not None:
+ offset_mins = int(value[-2:]) if len(value) > 3 else 0
+ offset = 60 * int(value[1:3]) + offset_mins
+ if value[0] == "-":
+ offset = -offset
+ return timezone(timedelta(minutes=offset))
+ else:
+ return None
+
+
+def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
+ """
+ Parse a datetime/int/float/string and return a datetime.datetime.
+
+ This function supports time zone offsets. When the input contains one,
+ the output uses a timezone with a fixed offset from UTC.
+
+ Raise ValueError if the input is well formatted but not a valid datetime.
+ Raise ValueError if the input isn't well formatted.
+ """
+ if isinstance(value, datetime):
+ return value
+
+ number = _get_numeric(value, "datetime")
+ if number is not None:
+ return _from_unix_seconds(number)
+
+ if isinstance(value, bytes):
+ value = value.decode()
+
+ assert not isinstance(value, (float, int))
+
+ match = datetime_re.match(value)
+ if match is None:
+ raise ValueError("invalid datetime format")
+
+ kw = match.groupdict()
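+ # pad fractional seconds to six digits so e.g. ".5" parses as 500000 microseconds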
+ if kw["microsecond"]:
+ kw["microsecond"] = kw["microsecond"].ljust(6, "0")
+
+ tzinfo = _parse_timezone(kw.pop("tzinfo"))
+ kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None}
+ kw_["tzinfo"] = tzinfo
+
+ return datetime(**kw_) # type: ignore
+
+
+def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
+ """
+ Parse a date/int/float/string and return a datetime.date.
+
+ Raise ValueError if the input is well formatted but not a valid date.
+ Raise ValueError if the input isn't well formatted.
+ """
+ if isinstance(value, date):
+ if isinstance(value, datetime):
+ return value.date()
+ else:
+ return value
+
+ number = _get_numeric(value, "date")
+ if number is not None:
+ return _from_unix_seconds(number).date()
+
+ if isinstance(value, bytes):
+ value = value.decode()
+
+ assert not isinstance(value, (float, int))
+ match = date_re.match(value)
+ if match is None:
+ raise ValueError("invalid date format")
+
+ kw = {k: int(v) for k, v in match.groupdict().items()}
+
+ try:
+ return date(**kw)
+ except ValueError:
+ raise ValueError("invalid date format") from None
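As a quick illustration (not part of the patch): the vendored parser accepts ISO 8601 strings, Unix seconds, and Unix milliseconds. The sketch below assumes only the internal module path shown above; it is a sanity check against underscore-prefixed helpers, not public API, and all values are placeholders.

    from browserbase._utils._datetime_parse import parse_date, parse_datetime

    dt = parse_datetime("2025-09-05T12:30:15.5+02:00")  # aware datetime with a fixed +02:00 offset, 500000 microseconds
    dt_s = parse_datetime(1757075415)                   # below MS_WATERSHED: treated as Unix seconds, returned in UTC
    dt_ms = parse_datetime(1757075415000)               # above MS_WATERSHED: treated as milliseconds, scaled to seconds
    d = parse_date("2024-02-29")                        # datetime.date(2024, 2, 29)
    parse_date("2024-02-30")                            # raises ValueError("invalid date format")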
diff --git a/src/browserbase/_utils/_transform.py b/src/browserbase/_utils/_transform.py
index b0cc20a..c19124f 100644
--- a/src/browserbase/_utils/_transform.py
+++ b/src/browserbase/_utils/_transform.py
@@ -16,18 +16,20 @@
lru_cache,
is_mapping,
is_iterable,
+ is_sequence,
)
from .._files import is_base64_file_input
+from ._compat import get_origin, is_typeddict
from ._typing import (
is_list_type,
is_union_type,
extract_type_arg,
is_iterable_type,
is_required_type,
+ is_sequence_type,
is_annotated_type,
strip_annotated_type,
)
-from .._compat import get_origin, model_dump, is_typeddict
_T = TypeVar("_T")
@@ -167,6 +169,8 @@ def _transform_recursive(
Defaults to the same value as the `annotation` argument.
"""
+ from .._compat import model_dump
+
if inner_type is None:
inner_type = annotation
@@ -184,6 +188,8 @@ def _transform_recursive(
(is_list_type(stripped_type) and is_list(data))
# Iterable[T]
or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str))
+ # Sequence[T]
+ or (is_sequence_type(stripped_type) and is_sequence(data) and not isinstance(data, str))
):
# dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually
# intended as an iterable, so we don't transform it.
@@ -329,6 +335,8 @@ async def _async_transform_recursive(
Defaults to the same value as the `annotation` argument.
"""
+ from .._compat import model_dump
+
if inner_type is None:
inner_type = annotation
@@ -346,6 +354,8 @@ async def _async_transform_recursive(
(is_list_type(stripped_type) and is_list(data))
# Iterable[T]
or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str))
+ # Sequence[T]
+ or (is_sequence_type(stripped_type) and is_sequence(data) and not isinstance(data, str))
):
# dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually
# intended as an iterable, so we don't transform it.
diff --git a/src/browserbase/_utils/_typing.py b/src/browserbase/_utils/_typing.py
index 1bac954..193109f 100644
--- a/src/browserbase/_utils/_typing.py
+++ b/src/browserbase/_utils/_typing.py
@@ -15,7 +15,7 @@
from ._utils import lru_cache
from .._types import InheritsGeneric
-from .._compat import is_union as _is_union
+from ._compat import is_union as _is_union
def is_annotated_type(typ: type) -> bool:
@@ -26,6 +26,11 @@ def is_list_type(typ: type) -> bool:
return (get_origin(typ) or typ) == list
+def is_sequence_type(typ: type) -> bool:
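+ """If the given type is `typing.Sequence[T]` (or the `typing_extensions`/`collections.abc` equivalent)"""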
+ origin = get_origin(typ) or typ
+ return origin == typing_extensions.Sequence or origin == typing.Sequence or origin == _c_abc.Sequence
+
+
def is_iterable_type(typ: type) -> bool:
"""If the given type is `typing.Iterable[T]`"""
origin = get_origin(typ) or typ
diff --git a/src/browserbase/_utils/_utils.py b/src/browserbase/_utils/_utils.py
index ea3cf3f..f081859 100644
--- a/src/browserbase/_utils/_utils.py
+++ b/src/browserbase/_utils/_utils.py
@@ -22,7 +22,6 @@
import sniffio
from .._types import NotGiven, FileTypes, NotGivenOr, HeadersLike
-from .._compat import parse_date as parse_date, parse_datetime as parse_datetime
_T = TypeVar("_T")
_TupleT = TypeVar("_TupleT", bound=Tuple[object, ...])
diff --git a/src/browserbase/_version.py b/src/browserbase/_version.py
index 3c0492e..25f1956 100644
--- a/src/browserbase/_version.py
+++ b/src/browserbase/_version.py
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
__title__ = "browserbase"
-__version__ = "1.4.0" # x-release-please-version
+__version__ = "1.5.0-alpha.0" # x-release-please-version
diff --git a/src/browserbase/resources/contexts.py b/src/browserbase/resources/contexts.py
index 0a438ed..bc4d1cc 100644
--- a/src/browserbase/resources/contexts.py
+++ b/src/browserbase/resources/contexts.py
@@ -16,9 +16,9 @@
async_to_streamed_response_wrapper,
)
from .._base_client import make_request_options
-from ..types.context import Context
from ..types.context_create_response import ContextCreateResponse
from ..types.context_update_response import ContextUpdateResponse
+from ..types.context_retrieve_response import ContextRetrieveResponse
__all__ = ["ContextsResource", "AsyncContextsResource"]
@@ -89,9 +89,9 @@ def retrieve(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Context:
+ ) -> ContextRetrieveResponse:
"""
- Context
+ Get a Context
Args:
extra_headers: Send extra headers
@@ -109,7 +109,7 @@ def retrieve(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=Context,
+ cast_to=ContextRetrieveResponse,
)
def update(
@@ -124,7 +124,7 @@ def update(
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> ContextUpdateResponse:
"""
- Update Context
+ Update a Context
Args:
extra_headers: Send extra headers
@@ -212,9 +212,9 @@ async def retrieve(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Context:
+ ) -> ContextRetrieveResponse:
"""
- Context
+ Get a Context
Args:
extra_headers: Send extra headers
@@ -232,7 +232,7 @@ async def retrieve(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=Context,
+ cast_to=ContextRetrieveResponse,
)
async def update(
@@ -247,7 +247,7 @@ async def update(
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> ContextUpdateResponse:
"""
- Update Context
+ Update a Context
Args:
extra_headers: Send extra headers
diff --git a/src/browserbase/resources/extensions.py b/src/browserbase/resources/extensions.py
index c7b0fae..4dcd248 100644
--- a/src/browserbase/resources/extensions.py
+++ b/src/browserbase/resources/extensions.py
@@ -18,7 +18,8 @@
async_to_streamed_response_wrapper,
)
from .._base_client import make_request_options
-from ..types.extension import Extension
+from ..types.extension_create_response import ExtensionCreateResponse
+from ..types.extension_retrieve_response import ExtensionRetrieveResponse
__all__ = ["ExtensionsResource", "AsyncExtensionsResource"]
@@ -53,7 +54,7 @@ def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Extension:
+ ) -> ExtensionCreateResponse:
"""
Upload an Extension
@@ -79,7 +80,7 @@ def create(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=Extension,
+ cast_to=ExtensionCreateResponse,
)
def retrieve(
@@ -92,9 +93,9 @@ def retrieve(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Extension:
+ ) -> ExtensionRetrieveResponse:
"""
- Extension
+ Get an Extension
Args:
extra_headers: Send extra headers
@@ -112,7 +113,7 @@ def retrieve(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=Extension,
+ cast_to=ExtensionRetrieveResponse,
)
def delete(
@@ -127,7 +128,7 @@ def delete(
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> None:
"""
- Delete Extension
+ Delete an Extension
Args:
extra_headers: Send extra headers
@@ -180,7 +181,7 @@ async def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Extension:
+ ) -> ExtensionCreateResponse:
"""
Upload an Extension
@@ -206,7 +207,7 @@ async def create(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=Extension,
+ cast_to=ExtensionCreateResponse,
)
async def retrieve(
@@ -219,9 +220,9 @@ async def retrieve(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Extension:
+ ) -> ExtensionRetrieveResponse:
"""
- Extension
+ Get an Extension
Args:
extra_headers: Send extra headers
@@ -239,7 +240,7 @@ async def retrieve(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=Extension,
+ cast_to=ExtensionRetrieveResponse,
)
async def delete(
@@ -254,7 +255,7 @@ async def delete(
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> None:
"""
- Delete Extension
+ Delete an Extension
Args:
extra_headers: Send extra headers
diff --git a/src/browserbase/resources/projects.py b/src/browserbase/resources/projects.py
index fb337a0..e0e73b4 100644
--- a/src/browserbase/resources/projects.py
+++ b/src/browserbase/resources/projects.py
@@ -14,9 +14,9 @@
async_to_streamed_response_wrapper,
)
from .._base_client import make_request_options
-from ..types.project import Project
-from ..types.project_usage import ProjectUsage
from ..types.project_list_response import ProjectListResponse
+from ..types.project_usage_response import ProjectUsageResponse
+from ..types.project_retrieve_response import ProjectRetrieveResponse
__all__ = ["ProjectsResource", "AsyncProjectsResource"]
@@ -51,9 +51,9 @@ def retrieve(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Project:
+ ) -> ProjectRetrieveResponse:
"""
- Project
+ Get a Project
Args:
extra_headers: Send extra headers
@@ -71,7 +71,7 @@ def retrieve(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=Project,
+ cast_to=ProjectRetrieveResponse,
)
def list(
@@ -84,7 +84,7 @@ def list(
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> ProjectListResponse:
- """List projects"""
+ """List Projects"""
return self._get(
"/v1/projects",
options=make_request_options(
@@ -103,9 +103,9 @@ def usage(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> ProjectUsage:
+ ) -> ProjectUsageResponse:
"""
- Project Usage
+ Get Project Usage
Args:
extra_headers: Send extra headers
@@ -123,7 +123,7 @@ def usage(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=ProjectUsage,
+ cast_to=ProjectUsageResponse,
)
@@ -157,9 +157,9 @@ async def retrieve(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Project:
+ ) -> ProjectRetrieveResponse:
"""
- Project
+ Get a Project
Args:
extra_headers: Send extra headers
@@ -177,7 +177,7 @@ async def retrieve(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=Project,
+ cast_to=ProjectRetrieveResponse,
)
async def list(
@@ -190,7 +190,7 @@ async def list(
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> ProjectListResponse:
- """List projects"""
+ """List Projects"""
return await self._get(
"/v1/projects",
options=make_request_options(
@@ -209,9 +209,9 @@ async def usage(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> ProjectUsage:
+ ) -> ProjectUsageResponse:
"""
- Project Usage
+ Get Project Usage
Args:
extra_headers: Send extra headers
@@ -229,7 +229,7 @@ async def usage(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=ProjectUsage,
+ cast_to=ProjectUsageResponse,
)
diff --git a/src/browserbase/resources/sessions/sessions.py b/src/browserbase/resources/sessions/sessions.py
index bf3314a..01a4943 100644
--- a/src/browserbase/resources/sessions/sessions.py
+++ b/src/browserbase/resources/sessions/sessions.py
@@ -51,10 +51,10 @@
async_to_streamed_response_wrapper,
)
from ..._base_client import make_request_options
-from ...types.session import Session
-from ...types.session_live_urls import SessionLiveURLs
from ...types.session_list_response import SessionListResponse
+from ...types.session_debug_response import SessionDebugResponse
from ...types.session_create_response import SessionCreateResponse
+from ...types.session_update_response import SessionUpdateResponse
from ...types.session_retrieve_response import SessionRetrieveResponse
__all__ = ["SessionsResource", "AsyncSessionsResource"]
@@ -103,7 +103,7 @@ def create(
browser_settings: session_create_params.BrowserSettings | NotGiven = NOT_GIVEN,
extension_id: str | NotGiven = NOT_GIVEN,
keep_alive: bool | NotGiven = NOT_GIVEN,
- proxies: Union[bool, Iterable[session_create_params.ProxiesUnionMember1]] | NotGiven = NOT_GIVEN,
+ proxies: Union[Iterable[session_create_params.ProxiesUnionMember0], bool] | NotGiven = NOT_GIVEN,
region: Literal["us-west-2", "us-east-1", "eu-central-1", "ap-southeast-1"] | NotGiven = NOT_GIVEN,
api_timeout: int | NotGiven = NOT_GIVEN,
user_metadata: Dict[str, object] | NotGiven = NOT_GIVEN,
@@ -180,7 +180,7 @@ def retrieve(
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> SessionRetrieveResponse:
"""
- Session
+ Get a Session
Args:
extra_headers: Send extra headers
@@ -213,8 +213,8 @@ def update(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Session:
- """Update Session
+ ) -> SessionUpdateResponse:
+ """Update a Session
Args:
project_id: The Project ID.
@@ -247,7 +247,7 @@ def update(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=Session,
+ cast_to=SessionUpdateResponse,
)
def list(
@@ -307,7 +307,7 @@ def debug(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> SessionLiveURLs:
+ ) -> SessionDebugResponse:
"""
Session Live URLs
@@ -327,7 +327,7 @@ def debug(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=SessionLiveURLs,
+ cast_to=SessionDebugResponse,
)
@@ -374,7 +374,7 @@ async def create(
browser_settings: session_create_params.BrowserSettings | NotGiven = NOT_GIVEN,
extension_id: str | NotGiven = NOT_GIVEN,
keep_alive: bool | NotGiven = NOT_GIVEN,
- proxies: Union[bool, Iterable[session_create_params.ProxiesUnionMember1]] | NotGiven = NOT_GIVEN,
+ proxies: Union[Iterable[session_create_params.ProxiesUnionMember0], bool] | NotGiven = NOT_GIVEN,
region: Literal["us-west-2", "us-east-1", "eu-central-1", "ap-southeast-1"] | NotGiven = NOT_GIVEN,
api_timeout: int | NotGiven = NOT_GIVEN,
user_metadata: Dict[str, object] | NotGiven = NOT_GIVEN,
@@ -451,7 +451,7 @@ async def retrieve(
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> SessionRetrieveResponse:
"""
- Session
+ Get a Session
Args:
extra_headers: Send extra headers
@@ -484,8 +484,8 @@ async def update(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Session:
- """Update Session
+ ) -> SessionUpdateResponse:
+ """Update a Session
Args:
project_id: The Project ID.
@@ -518,7 +518,7 @@ async def update(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=Session,
+ cast_to=SessionUpdateResponse,
)
async def list(
@@ -578,7 +578,7 @@ async def debug(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> SessionLiveURLs:
+ ) -> SessionDebugResponse:
"""
Session Live URLs
@@ -598,7 +598,7 @@ async def debug(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=SessionLiveURLs,
+ cast_to=SessionDebugResponse,
)
diff --git a/src/browserbase/types/__init__.py b/src/browserbase/types/__init__.py
index 4dd85dd..20e2f90 100644
--- a/src/browserbase/types/__init__.py
+++ b/src/browserbase/types/__init__.py
@@ -2,20 +2,21 @@
from __future__ import annotations
-from .context import Context as Context
-from .project import Project as Project
-from .session import Session as Session
-from .extension import Extension as Extension
-from .project_usage import ProjectUsage as ProjectUsage
-from .session_live_urls import SessionLiveURLs as SessionLiveURLs
from .session_list_params import SessionListParams as SessionListParams
from .context_create_params import ContextCreateParams as ContextCreateParams
from .project_list_response import ProjectListResponse as ProjectListResponse
from .session_create_params import SessionCreateParams as SessionCreateParams
from .session_list_response import SessionListResponse as SessionListResponse
from .session_update_params import SessionUpdateParams as SessionUpdateParams
+from .project_usage_response import ProjectUsageResponse as ProjectUsageResponse
+from .session_debug_response import SessionDebugResponse as SessionDebugResponse
from .context_create_response import ContextCreateResponse as ContextCreateResponse
from .context_update_response import ContextUpdateResponse as ContextUpdateResponse
from .extension_create_params import ExtensionCreateParams as ExtensionCreateParams
from .session_create_response import SessionCreateResponse as SessionCreateResponse
+from .session_update_response import SessionUpdateResponse as SessionUpdateResponse
+from .context_retrieve_response import ContextRetrieveResponse as ContextRetrieveResponse
+from .extension_create_response import ExtensionCreateResponse as ExtensionCreateResponse
+from .project_retrieve_response import ProjectRetrieveResponse as ProjectRetrieveResponse
from .session_retrieve_response import SessionRetrieveResponse as SessionRetrieveResponse
+from .extension_retrieve_response import ExtensionRetrieveResponse as ExtensionRetrieveResponse
diff --git a/src/browserbase/types/context.py b/src/browserbase/types/context_retrieve_response.py
similarity index 84%
rename from src/browserbase/types/context.py
rename to src/browserbase/types/context_retrieve_response.py
index cb5c32f..c2cd692 100644
--- a/src/browserbase/types/context.py
+++ b/src/browserbase/types/context_retrieve_response.py
@@ -6,10 +6,10 @@
from .._models import BaseModel
-__all__ = ["Context"]
+__all__ = ["ContextRetrieveResponse"]
-class Context(BaseModel):
+class ContextRetrieveResponse(BaseModel):
id: str
created_at: datetime = FieldInfo(alias="createdAt")
diff --git a/src/browserbase/types/extension.py b/src/browserbase/types/extension_create_response.py
similarity index 85%
rename from src/browserbase/types/extension.py
rename to src/browserbase/types/extension_create_response.py
index 94582c3..d2b74f4 100644
--- a/src/browserbase/types/extension.py
+++ b/src/browserbase/types/extension_create_response.py
@@ -6,10 +6,10 @@
from .._models import BaseModel
-__all__ = ["Extension"]
+__all__ = ["ExtensionCreateResponse"]
-class Extension(BaseModel):
+class ExtensionCreateResponse(BaseModel):
id: str
created_at: datetime = FieldInfo(alias="createdAt")
diff --git a/src/browserbase/types/extension_retrieve_response.py b/src/browserbase/types/extension_retrieve_response.py
new file mode 100644
index 0000000..c786348
--- /dev/null
+++ b/src/browserbase/types/extension_retrieve_response.py
@@ -0,0 +1,22 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from datetime import datetime
+
+from pydantic import Field as FieldInfo
+
+from .._models import BaseModel
+
+__all__ = ["ExtensionRetrieveResponse"]
+
+
+class ExtensionRetrieveResponse(BaseModel):
+ id: str
+
+ created_at: datetime = FieldInfo(alias="createdAt")
+
+ file_name: str = FieldInfo(alias="fileName")
+
+ project_id: str = FieldInfo(alias="projectId")
+ """The Project ID linked to the uploaded Extension."""
+
+ updated_at: datetime = FieldInfo(alias="updatedAt")
diff --git a/src/browserbase/types/project_list_response.py b/src/browserbase/types/project_list_response.py
index 2d05a23..e364b52 100644
--- a/src/browserbase/types/project_list_response.py
+++ b/src/browserbase/types/project_list_response.py
@@ -1,10 +1,31 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List
+from datetime import datetime
from typing_extensions import TypeAlias
-from .project import Project
+from pydantic import Field as FieldInfo
-__all__ = ["ProjectListResponse"]
+from .._models import BaseModel
-ProjectListResponse: TypeAlias = List[Project]
+__all__ = ["ProjectListResponse", "ProjectListResponseItem"]
+
+
+class ProjectListResponseItem(BaseModel):
+ id: str
+
+ concurrency: int
+ """The maximum number of sessions that this project can run concurrently."""
+
+ created_at: datetime = FieldInfo(alias="createdAt")
+
+ default_timeout: int = FieldInfo(alias="defaultTimeout")
+
+ name: str
+
+ owner_id: str = FieldInfo(alias="ownerId")
+
+ updated_at: datetime = FieldInfo(alias="updatedAt")
+
+
+ProjectListResponse: TypeAlias = List[ProjectListResponseItem]
diff --git a/src/browserbase/types/project.py b/src/browserbase/types/project_retrieve_response.py
similarity index 87%
rename from src/browserbase/types/project.py
rename to src/browserbase/types/project_retrieve_response.py
index dc3cf33..7812667 100644
--- a/src/browserbase/types/project.py
+++ b/src/browserbase/types/project_retrieve_response.py
@@ -6,10 +6,10 @@
from .._models import BaseModel
-__all__ = ["Project"]
+__all__ = ["ProjectRetrieveResponse"]
-class Project(BaseModel):
+class ProjectRetrieveResponse(BaseModel):
id: str
concurrency: int
diff --git a/src/browserbase/types/project_usage.py b/src/browserbase/types/project_usage_response.py
similarity index 78%
rename from src/browserbase/types/project_usage.py
rename to src/browserbase/types/project_usage_response.py
index c8a03f5..b52fccf 100644
--- a/src/browserbase/types/project_usage.py
+++ b/src/browserbase/types/project_usage_response.py
@@ -4,10 +4,10 @@
from .._models import BaseModel
-__all__ = ["ProjectUsage"]
+__all__ = ["ProjectUsageResponse"]
-class ProjectUsage(BaseModel):
+class ProjectUsageResponse(BaseModel):
browser_minutes: int = FieldInfo(alias="browserMinutes")
proxy_bytes: int = FieldInfo(alias="proxyBytes")
diff --git a/src/browserbase/types/session_create_params.py b/src/browserbase/types/session_create_params.py
index 1f5324f..31a08ce 100644
--- a/src/browserbase/types/session_create_params.py
+++ b/src/browserbase/types/session_create_params.py
@@ -5,6 +5,7 @@
from typing import Dict, List, Union, Iterable
from typing_extensions import Literal, Required, Annotated, TypeAlias, TypedDict
+from .._types import SequenceNotStr
from .._utils import PropertyInfo
__all__ = [
@@ -14,10 +15,10 @@
"BrowserSettingsFingerprint",
"BrowserSettingsFingerprintScreen",
"BrowserSettingsViewport",
- "ProxiesUnionMember1",
- "ProxiesUnionMember1BrowserbaseProxyConfig",
- "ProxiesUnionMember1BrowserbaseProxyConfigGeolocation",
- "ProxiesUnionMember1ExternalProxyConfig",
+ "ProxiesUnionMember0",
+ "ProxiesUnionMember0UnionMember0",
+ "ProxiesUnionMember0UnionMember0Geolocation",
+ "ProxiesUnionMember0UnionMember1",
]
@@ -42,7 +43,7 @@ class SessionCreateParams(TypedDict, total=False):
Available on the Hobby Plan and above.
"""
- proxies: Union[bool, Iterable[ProxiesUnionMember1]]
+ proxies: Union[Iterable[ProxiesUnionMember0], bool]
"""Proxy configuration.
Can be true for default proxy, or an array of proxy configurations.
@@ -90,27 +91,21 @@ class BrowserSettingsFingerprint(TypedDict, total=False):
http_version: Annotated[Literal["1", "2"], PropertyInfo(alias="httpVersion")]
- locales: List[str]
- """
- Full list of locales is available
- [here](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Language).
- """
+ locales: SequenceNotStr[str]
operating_systems: Annotated[
List[Literal["android", "ios", "linux", "macos", "windows"]], PropertyInfo(alias="operatingSystems")
]
- """
- Note: `operatingSystems` set to `ios` or `android` requires `devices` to include
- `"mobile"`.
- """
screen: BrowserSettingsFingerprintScreen
class BrowserSettingsViewport(TypedDict, total=False):
height: int
+ """The height of the browser."""
width: int
+ """The width of the browser."""
class BrowserSettings(TypedDict, total=False):
@@ -143,12 +138,21 @@ class BrowserSettings(TypedDict, total=False):
fingerprint: BrowserSettingsFingerprint
"""
See usage examples
- [in the Stealth Mode page](/features/stealth-mode#fingerprinting).
+ [on the Stealth Mode page](/features/stealth-mode#fingerprinting)
"""
+ headful: bool
+ """[NOT IN DOCS] Enable or disable headful mode. Defaults to `false`."""
+
log_session: Annotated[bool, PropertyInfo(alias="logSession")]
"""Enable or disable session logging. Defaults to `true`."""
+ os: Literal["windows", "mac", "linux", "mobile", "tablet"]
+ """Operating system for stealth mode.
+
+ Valid values: windows, mac, linux, mobile, tablet
+ """
+
record_session: Annotated[bool, PropertyInfo(alias="recordSession")]
"""Enable or disable session recording. Defaults to `true`."""
@@ -158,7 +162,7 @@ class BrowserSettings(TypedDict, total=False):
viewport: BrowserSettingsViewport
-class ProxiesUnionMember1BrowserbaseProxyConfigGeolocation(TypedDict, total=False):
+class ProxiesUnionMember0UnionMember0Geolocation(TypedDict, total=False):
country: Required[str]
"""Country code in ISO 3166-1 alpha-2 format"""
@@ -169,7 +173,7 @@ class ProxiesUnionMember1BrowserbaseProxyConfigGeolocation(TypedDict, total=Fals
"""US state code (2 characters). Must also specify US as the country. Optional."""
-class ProxiesUnionMember1BrowserbaseProxyConfig(TypedDict, total=False):
+class ProxiesUnionMember0UnionMember0(TypedDict, total=False):
type: Required[Literal["browserbase"]]
"""Type of proxy.
@@ -182,11 +186,11 @@ class ProxiesUnionMember1BrowserbaseProxyConfig(TypedDict, total=False):
If omitted, defaults to all domains. Optional.
"""
- geolocation: ProxiesUnionMember1BrowserbaseProxyConfigGeolocation
- """Configuration for geolocation"""
+ geolocation: ProxiesUnionMember0UnionMember0Geolocation
+ """Geographic location for the proxy. Optional."""
-class ProxiesUnionMember1ExternalProxyConfig(TypedDict, total=False):
+class ProxiesUnionMember0UnionMember1(TypedDict, total=False):
server: Required[str]
"""Server URL for external proxy. Required."""
@@ -206,6 +210,4 @@ class ProxiesUnionMember1ExternalProxyConfig(TypedDict, total=False):
"""Username for external proxy authentication. Optional."""
-ProxiesUnionMember1: TypeAlias = Union[
- ProxiesUnionMember1BrowserbaseProxyConfig, ProxiesUnionMember1ExternalProxyConfig
-]
+ProxiesUnionMember0: TypeAlias = Union[ProxiesUnionMember0UnionMember0, ProxiesUnionMember0UnionMember1]
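For orientation only (illustrative, not part of the patch): with the reordered union, an explicit proxy list passed to `sessions.create` takes the shape used in the updated test fixtures later in this diff. All identifiers and values below are placeholders.

    from browserbase import Browserbase

    client = Browserbase(api_key="bb_live_...")  # placeholder credential
    session = client.sessions.create(
        project_id="your_project_id",  # placeholder Project ID
        proxies=[
            {
                "type": "browserbase",
                "geolocation": {"country": "US", "state": "NY", "city": "New York"},
            }
        ],
    )
    # proxies=True remains valid and selects the default proxy configuration.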
diff --git a/src/browserbase/types/session_live_urls.py b/src/browserbase/types/session_debug_response.py
similarity index 88%
rename from src/browserbase/types/session_live_urls.py
rename to src/browserbase/types/session_debug_response.py
index 3c7ba32..9cee7a7 100644
--- a/src/browserbase/types/session_live_urls.py
+++ b/src/browserbase/types/session_debug_response.py
@@ -6,7 +6,7 @@
from .._models import BaseModel
-__all__ = ["SessionLiveURLs", "Page"]
+__all__ = ["SessionDebugResponse", "Page"]
class Page(BaseModel):
@@ -23,7 +23,7 @@ class Page(BaseModel):
url: str
-class SessionLiveURLs(BaseModel):
+class SessionDebugResponse(BaseModel):
debugger_fullscreen_url: str = FieldInfo(alias="debuggerFullscreenUrl")
debugger_url: str = FieldInfo(alias="debuggerUrl")
diff --git a/src/browserbase/types/session_list_response.py b/src/browserbase/types/session_list_response.py
index ca162dd..4c1bd88 100644
--- a/src/browserbase/types/session_list_response.py
+++ b/src/browserbase/types/session_list_response.py
@@ -1,10 +1,58 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import List
-from typing_extensions import TypeAlias
+from typing import Dict, List, Optional
+from datetime import datetime
+from typing_extensions import Literal, TypeAlias
-from .session import Session
+from pydantic import Field as FieldInfo
-__all__ = ["SessionListResponse"]
+from .._models import BaseModel
-SessionListResponse: TypeAlias = List[Session]
+__all__ = ["SessionListResponse", "SessionListResponseItem"]
+
+
+class SessionListResponseItem(BaseModel):
+ id: str
+
+ created_at: datetime = FieldInfo(alias="createdAt")
+
+ expires_at: datetime = FieldInfo(alias="expiresAt")
+
+ keep_alive: bool = FieldInfo(alias="keepAlive")
+ """Indicates if the Session was created to be kept alive upon disconnections"""
+
+ project_id: str = FieldInfo(alias="projectId")
+ """The Project ID linked to the Session."""
+
+ proxy_bytes: int = FieldInfo(alias="proxyBytes")
+ """Bytes used via the [Proxy](/features/stealth-mode#proxies-and-residential-ips)"""
+
+ region: Literal["us-west-2", "us-east-1", "eu-central-1", "ap-southeast-1"]
+ """The region where the Session is running."""
+
+ started_at: datetime = FieldInfo(alias="startedAt")
+
+ status: Literal["RUNNING", "ERROR", "TIMED_OUT", "COMPLETED"]
+
+ updated_at: datetime = FieldInfo(alias="updatedAt")
+
+ avg_cpu_usage: Optional[int] = FieldInfo(alias="avgCpuUsage", default=None)
+ """CPU used by the Session"""
+
+ context_id: Optional[str] = FieldInfo(alias="contextId", default=None)
+ """Optional. The Context linked to the Session."""
+
+ ended_at: Optional[datetime] = FieldInfo(alias="endedAt", default=None)
+
+ memory_usage: Optional[int] = FieldInfo(alias="memoryUsage", default=None)
+ """Memory used by the Session"""
+
+ user_metadata: Optional[Dict[str, object]] = FieldInfo(alias="userMetadata", default=None)
+ """Arbitrary user metadata to attach to the session.
+
+ To learn more about user metadata, see
+ [User Metadata](/features/sessions#user-metadata).
+ """
+
+
+SessionListResponse: TypeAlias = List[SessionListResponseItem]
diff --git a/src/browserbase/types/session.py b/src/browserbase/types/session_update_response.py
similarity index 95%
rename from src/browserbase/types/session.py
rename to src/browserbase/types/session_update_response.py
index 16450e2..67a1371 100644
--- a/src/browserbase/types/session.py
+++ b/src/browserbase/types/session_update_response.py
@@ -8,10 +8,10 @@
from .._models import BaseModel
-__all__ = ["Session"]
+__all__ = ["SessionUpdateResponse"]
-class Session(BaseModel):
+class SessionUpdateResponse(BaseModel):
id: str
created_at: datetime = FieldInfo(alias="createdAt")
diff --git a/src/browserbase/types/sessions/__init__.py b/src/browserbase/types/sessions/__init__.py
index 0cef6b1..69d5470 100644
--- a/src/browserbase/types/sessions/__init__.py
+++ b/src/browserbase/types/sessions/__init__.py
@@ -2,9 +2,7 @@
from __future__ import annotations
-from .session_log import SessionLog as SessionLog
from .log_list_response import LogListResponse as LogListResponse
-from .session_recording import SessionRecording as SessionRecording
from .upload_create_params import UploadCreateParams as UploadCreateParams
from .upload_create_response import UploadCreateResponse as UploadCreateResponse
from .recording_retrieve_response import RecordingRetrieveResponse as RecordingRetrieveResponse
diff --git a/src/browserbase/types/sessions/log_list_response.py b/src/browserbase/types/sessions/log_list_response.py
index 2b325a8..efd848a 100644
--- a/src/browserbase/types/sessions/log_list_response.py
+++ b/src/browserbase/types/sessions/log_list_response.py
@@ -1,10 +1,50 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import List
+from typing import Dict, List, Optional
from typing_extensions import TypeAlias
-from .session_log import SessionLog
+from pydantic import Field as FieldInfo
-__all__ = ["LogListResponse"]
+from ..._models import BaseModel
-LogListResponse: TypeAlias = List[SessionLog]
+__all__ = ["LogListResponse", "LogListResponseItem", "LogListResponseItemRequest", "LogListResponseItemResponse"]
+
+
+class LogListResponseItemRequest(BaseModel):
+ params: Dict[str, object]
+
+ raw_body: str = FieldInfo(alias="rawBody")
+
+ timestamp: Optional[int] = None
+ """milliseconds that have elapsed since the UNIX epoch"""
+
+
+class LogListResponseItemResponse(BaseModel):
+ raw_body: str = FieldInfo(alias="rawBody")
+
+ result: Dict[str, object]
+
+ timestamp: Optional[int] = None
+ """milliseconds that have elapsed since the UNIX epoch"""
+
+
+class LogListResponseItem(BaseModel):
+ method: str
+
+ page_id: int = FieldInfo(alias="pageId")
+
+ session_id: str = FieldInfo(alias="sessionId")
+
+ frame_id: Optional[str] = FieldInfo(alias="frameId", default=None)
+
+ loader_id: Optional[str] = FieldInfo(alias="loaderId", default=None)
+
+ request: Optional[LogListResponseItemRequest] = None
+
+ response: Optional[LogListResponseItemResponse] = None
+
+ timestamp: Optional[int] = None
+ """milliseconds that have elapsed since the UNIX epoch"""
+
+
+LogListResponse: TypeAlias = List[LogListResponseItem]
diff --git a/src/browserbase/types/sessions/recording_retrieve_response.py b/src/browserbase/types/sessions/recording_retrieve_response.py
index 951969b..d3613b8 100644
--- a/src/browserbase/types/sessions/recording_retrieve_response.py
+++ b/src/browserbase/types/sessions/recording_retrieve_response.py
@@ -1,10 +1,28 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import List
+from typing import Dict, List
from typing_extensions import TypeAlias
-from .session_recording import SessionRecording
+from pydantic import Field as FieldInfo
-__all__ = ["RecordingRetrieveResponse"]
+from ..._models import BaseModel
-RecordingRetrieveResponse: TypeAlias = List[SessionRecording]
+__all__ = ["RecordingRetrieveResponse", "RecordingRetrieveResponseItem"]
+
+
+class RecordingRetrieveResponseItem(BaseModel):
+ data: Dict[str, object]
+ """
+ See
+ [rrweb documentation](https://github.com/rrweb-io/rrweb/blob/master/docs/recipes/dive-into-event.md).
+ """
+
+ session_id: str = FieldInfo(alias="sessionId")
+
+ timestamp: int
+ """milliseconds that have elapsed since the UNIX epoch"""
+
+ type: int
+
+
+RecordingRetrieveResponse: TypeAlias = List[RecordingRetrieveResponseItem]
diff --git a/src/browserbase/types/sessions/session_log.py b/src/browserbase/types/sessions/session_log.py
deleted file mode 100644
index 428f518..0000000
--- a/src/browserbase/types/sessions/session_log.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import Dict, Optional
-
-from pydantic import Field as FieldInfo
-
-from ..._models import BaseModel
-
-__all__ = ["SessionLog", "Request", "Response"]
-
-
-class Request(BaseModel):
- params: Dict[str, object]
-
- raw_body: str = FieldInfo(alias="rawBody")
-
- timestamp: Optional[int] = None
- """milliseconds that have elapsed since the UNIX epoch"""
-
-
-class Response(BaseModel):
- raw_body: str = FieldInfo(alias="rawBody")
-
- result: Dict[str, object]
-
- timestamp: Optional[int] = None
- """milliseconds that have elapsed since the UNIX epoch"""
-
-
-class SessionLog(BaseModel):
- method: str
-
- page_id: int = FieldInfo(alias="pageId")
-
- session_id: str = FieldInfo(alias="sessionId")
-
- frame_id: Optional[str] = FieldInfo(alias="frameId", default=None)
-
- loader_id: Optional[str] = FieldInfo(alias="loaderId", default=None)
-
- request: Optional[Request] = None
-
- response: Optional[Response] = None
-
- timestamp: Optional[int] = None
- """milliseconds that have elapsed since the UNIX epoch"""
diff --git a/src/browserbase/types/sessions/session_recording.py b/src/browserbase/types/sessions/session_recording.py
deleted file mode 100644
index c847137..0000000
--- a/src/browserbase/types/sessions/session_recording.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import Dict
-
-from pydantic import Field as FieldInfo
-
-from ..._models import BaseModel
-
-__all__ = ["SessionRecording"]
-
-
-class SessionRecording(BaseModel):
- data: Dict[str, object]
- """
- See
- [rrweb documentation](https://github.com/rrweb-io/rrweb/blob/master/docs/recipes/dive-into-event.md).
- """
-
- session_id: str = FieldInfo(alias="sessionId")
-
- timestamp: int
- """milliseconds that have elapsed since the UNIX epoch"""
-
- type: int
diff --git a/tests/api_resources/sessions/test_downloads.py b/tests/api_resources/sessions/test_downloads.py
index 825ff78..10e84fd 100644
--- a/tests/api_resources/sessions/test_downloads.py
+++ b/tests/api_resources/sessions/test_downloads.py
@@ -75,7 +75,9 @@ def test_path_params_list(self, client: Browserbase) -> None:
class TestAsyncDownloads:
- parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
+ parametrize = pytest.mark.parametrize(
+ "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
+ )
@parametrize
@pytest.mark.respx(base_url=base_url)
diff --git a/tests/api_resources/sessions/test_logs.py b/tests/api_resources/sessions/test_logs.py
index c72002b..eadde72 100644
--- a/tests/api_resources/sessions/test_logs.py
+++ b/tests/api_resources/sessions/test_logs.py
@@ -57,7 +57,9 @@ def test_path_params_list(self, client: Browserbase) -> None:
class TestAsyncLogs:
- parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
+ parametrize = pytest.mark.parametrize(
+ "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
+ )
@parametrize
async def test_method_list(self, async_client: AsyncBrowserbase) -> None:
diff --git a/tests/api_resources/sessions/test_recording.py b/tests/api_resources/sessions/test_recording.py
index 0d7a542..f1e97d0 100644
--- a/tests/api_resources/sessions/test_recording.py
+++ b/tests/api_resources/sessions/test_recording.py
@@ -57,7 +57,9 @@ def test_path_params_retrieve(self, client: Browserbase) -> None:
class TestAsyncRecording:
- parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
+ parametrize = pytest.mark.parametrize(
+ "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
+ )
@parametrize
async def test_method_retrieve(self, async_client: AsyncBrowserbase) -> None:
diff --git a/tests/api_resources/sessions/test_uploads.py b/tests/api_resources/sessions/test_uploads.py
index f193256..748b92e 100644
--- a/tests/api_resources/sessions/test_uploads.py
+++ b/tests/api_resources/sessions/test_uploads.py
@@ -61,7 +61,9 @@ def test_path_params_create(self, client: Browserbase) -> None:
class TestAsyncUploads:
- parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
+ parametrize = pytest.mark.parametrize(
+ "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
+ )
@parametrize
async def test_method_create(self, async_client: AsyncBrowserbase) -> None:
diff --git a/tests/api_resources/test_contexts.py b/tests/api_resources/test_contexts.py
index e53b7e1..4ad2773 100644
--- a/tests/api_resources/test_contexts.py
+++ b/tests/api_resources/test_contexts.py
@@ -9,7 +9,11 @@
from browserbase import Browserbase, AsyncBrowserbase
from tests.utils import assert_matches_type
-from browserbase.types import Context, ContextCreateResponse, ContextUpdateResponse
+from browserbase.types import (
+ ContextCreateResponse,
+ ContextUpdateResponse,
+ ContextRetrieveResponse,
+)
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -53,7 +57,7 @@ def test_method_retrieve(self, client: Browserbase) -> None:
context = client.contexts.retrieve(
"id",
)
- assert_matches_type(Context, context, path=["response"])
+ assert_matches_type(ContextRetrieveResponse, context, path=["response"])
@parametrize
def test_raw_response_retrieve(self, client: Browserbase) -> None:
@@ -64,7 +68,7 @@ def test_raw_response_retrieve(self, client: Browserbase) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
context = response.parse()
- assert_matches_type(Context, context, path=["response"])
+ assert_matches_type(ContextRetrieveResponse, context, path=["response"])
@parametrize
def test_streaming_response_retrieve(self, client: Browserbase) -> None:
@@ -75,7 +79,7 @@ def test_streaming_response_retrieve(self, client: Browserbase) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
context = response.parse()
- assert_matches_type(Context, context, path=["response"])
+ assert_matches_type(ContextRetrieveResponse, context, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -126,7 +130,9 @@ def test_path_params_update(self, client: Browserbase) -> None:
class TestAsyncContexts:
- parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
+ parametrize = pytest.mark.parametrize(
+ "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
+ )
@parametrize
async def test_method_create(self, async_client: AsyncBrowserbase) -> None:
@@ -164,7 +170,7 @@ async def test_method_retrieve(self, async_client: AsyncBrowserbase) -> None:
context = await async_client.contexts.retrieve(
"id",
)
- assert_matches_type(Context, context, path=["response"])
+ assert_matches_type(ContextRetrieveResponse, context, path=["response"])
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncBrowserbase) -> None:
@@ -175,7 +181,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncBrowserbase) -> No
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
context = await response.parse()
- assert_matches_type(Context, context, path=["response"])
+ assert_matches_type(ContextRetrieveResponse, context, path=["response"])
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncBrowserbase) -> None:
@@ -186,7 +192,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncBrowserbase)
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
context = await response.parse()
- assert_matches_type(Context, context, path=["response"])
+ assert_matches_type(ContextRetrieveResponse, context, path=["response"])
assert cast(Any, response.is_closed) is True
diff --git a/tests/api_resources/test_extensions.py b/tests/api_resources/test_extensions.py
index b7fec7a..e32ae9b 100644
--- a/tests/api_resources/test_extensions.py
+++ b/tests/api_resources/test_extensions.py
@@ -9,7 +9,7 @@
from browserbase import Browserbase, AsyncBrowserbase
from tests.utils import assert_matches_type
-from browserbase.types import Extension
+from browserbase.types import ExtensionCreateResponse, ExtensionRetrieveResponse
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -22,7 +22,7 @@ def test_method_create(self, client: Browserbase) -> None:
extension = client.extensions.create(
file=b"raw file contents",
)
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionCreateResponse, extension, path=["response"])
@parametrize
def test_raw_response_create(self, client: Browserbase) -> None:
@@ -33,7 +33,7 @@ def test_raw_response_create(self, client: Browserbase) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
extension = response.parse()
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionCreateResponse, extension, path=["response"])
@parametrize
def test_streaming_response_create(self, client: Browserbase) -> None:
@@ -44,7 +44,7 @@ def test_streaming_response_create(self, client: Browserbase) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
extension = response.parse()
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionCreateResponse, extension, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -53,7 +53,7 @@ def test_method_retrieve(self, client: Browserbase) -> None:
extension = client.extensions.retrieve(
"id",
)
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionRetrieveResponse, extension, path=["response"])
@parametrize
def test_raw_response_retrieve(self, client: Browserbase) -> None:
@@ -64,7 +64,7 @@ def test_raw_response_retrieve(self, client: Browserbase) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
extension = response.parse()
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionRetrieveResponse, extension, path=["response"])
@parametrize
def test_streaming_response_retrieve(self, client: Browserbase) -> None:
@@ -75,7 +75,7 @@ def test_streaming_response_retrieve(self, client: Browserbase) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
extension = response.parse()
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionRetrieveResponse, extension, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -126,14 +126,16 @@ def test_path_params_delete(self, client: Browserbase) -> None:
class TestAsyncExtensions:
- parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
+ parametrize = pytest.mark.parametrize(
+ "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
+ )
@parametrize
async def test_method_create(self, async_client: AsyncBrowserbase) -> None:
extension = await async_client.extensions.create(
file=b"raw file contents",
)
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionCreateResponse, extension, path=["response"])
@parametrize
async def test_raw_response_create(self, async_client: AsyncBrowserbase) -> None:
@@ -144,7 +146,7 @@ async def test_raw_response_create(self, async_client: AsyncBrowserbase) -> None
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
extension = await response.parse()
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionCreateResponse, extension, path=["response"])
@parametrize
async def test_streaming_response_create(self, async_client: AsyncBrowserbase) -> None:
@@ -155,7 +157,7 @@ async def test_streaming_response_create(self, async_client: AsyncBrowserbase) -
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
extension = await response.parse()
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionCreateResponse, extension, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -164,7 +166,7 @@ async def test_method_retrieve(self, async_client: AsyncBrowserbase) -> None:
extension = await async_client.extensions.retrieve(
"id",
)
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionRetrieveResponse, extension, path=["response"])
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncBrowserbase) -> None:
@@ -175,7 +177,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncBrowserbase) -> No
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
extension = await response.parse()
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionRetrieveResponse, extension, path=["response"])
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncBrowserbase) -> None:
@@ -186,7 +188,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncBrowserbase)
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
extension = await response.parse()
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionRetrieveResponse, extension, path=["response"])
assert cast(Any, response.is_closed) is True
diff --git a/tests/api_resources/test_projects.py b/tests/api_resources/test_projects.py
index 9e70d03..0d8e3c9 100644
--- a/tests/api_resources/test_projects.py
+++ b/tests/api_resources/test_projects.py
@@ -9,7 +9,7 @@
from browserbase import Browserbase, AsyncBrowserbase
from tests.utils import assert_matches_type
-from browserbase.types import Project, ProjectUsage, ProjectListResponse
+from browserbase.types import ProjectListResponse, ProjectUsageResponse, ProjectRetrieveResponse
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -22,7 +22,7 @@ def test_method_retrieve(self, client: Browserbase) -> None:
project = client.projects.retrieve(
"id",
)
- assert_matches_type(Project, project, path=["response"])
+ assert_matches_type(ProjectRetrieveResponse, project, path=["response"])
@parametrize
def test_raw_response_retrieve(self, client: Browserbase) -> None:
@@ -33,7 +33,7 @@ def test_raw_response_retrieve(self, client: Browserbase) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
project = response.parse()
- assert_matches_type(Project, project, path=["response"])
+ assert_matches_type(ProjectRetrieveResponse, project, path=["response"])
@parametrize
def test_streaming_response_retrieve(self, client: Browserbase) -> None:
@@ -44,7 +44,7 @@ def test_streaming_response_retrieve(self, client: Browserbase) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
project = response.parse()
- assert_matches_type(Project, project, path=["response"])
+ assert_matches_type(ProjectRetrieveResponse, project, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -85,7 +85,7 @@ def test_method_usage(self, client: Browserbase) -> None:
project = client.projects.usage(
"id",
)
- assert_matches_type(ProjectUsage, project, path=["response"])
+ assert_matches_type(ProjectUsageResponse, project, path=["response"])
@parametrize
def test_raw_response_usage(self, client: Browserbase) -> None:
@@ -96,7 +96,7 @@ def test_raw_response_usage(self, client: Browserbase) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
project = response.parse()
- assert_matches_type(ProjectUsage, project, path=["response"])
+ assert_matches_type(ProjectUsageResponse, project, path=["response"])
@parametrize
def test_streaming_response_usage(self, client: Browserbase) -> None:
@@ -107,7 +107,7 @@ def test_streaming_response_usage(self, client: Browserbase) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
project = response.parse()
- assert_matches_type(ProjectUsage, project, path=["response"])
+ assert_matches_type(ProjectUsageResponse, project, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -120,14 +120,16 @@ def test_path_params_usage(self, client: Browserbase) -> None:
class TestAsyncProjects:
- parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
+ parametrize = pytest.mark.parametrize(
+ "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
+ )
@parametrize
async def test_method_retrieve(self, async_client: AsyncBrowserbase) -> None:
project = await async_client.projects.retrieve(
"id",
)
- assert_matches_type(Project, project, path=["response"])
+ assert_matches_type(ProjectRetrieveResponse, project, path=["response"])
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncBrowserbase) -> None:
@@ -138,7 +140,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncBrowserbase) -> No
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
project = await response.parse()
- assert_matches_type(Project, project, path=["response"])
+ assert_matches_type(ProjectRetrieveResponse, project, path=["response"])
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncBrowserbase) -> None:
@@ -149,7 +151,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncBrowserbase)
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
project = await response.parse()
- assert_matches_type(Project, project, path=["response"])
+ assert_matches_type(ProjectRetrieveResponse, project, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -190,7 +192,7 @@ async def test_method_usage(self, async_client: AsyncBrowserbase) -> None:
project = await async_client.projects.usage(
"id",
)
- assert_matches_type(ProjectUsage, project, path=["response"])
+ assert_matches_type(ProjectUsageResponse, project, path=["response"])
@parametrize
async def test_raw_response_usage(self, async_client: AsyncBrowserbase) -> None:
@@ -201,7 +203,7 @@ async def test_raw_response_usage(self, async_client: AsyncBrowserbase) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
project = await response.parse()
- assert_matches_type(ProjectUsage, project, path=["response"])
+ assert_matches_type(ProjectUsageResponse, project, path=["response"])
@parametrize
async def test_streaming_response_usage(self, async_client: AsyncBrowserbase) -> None:
@@ -212,7 +214,7 @@ async def test_streaming_response_usage(self, async_client: AsyncBrowserbase) ->
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
project = await response.parse()
- assert_matches_type(ProjectUsage, project, path=["response"])
+ assert_matches_type(ProjectUsageResponse, project, path=["response"])
assert cast(Any, response.is_closed) is True
diff --git a/tests/api_resources/test_sessions.py b/tests/api_resources/test_sessions.py
index 4a17c4f..d7d6a90 100644
--- a/tests/api_resources/test_sessions.py
+++ b/tests/api_resources/test_sessions.py
@@ -10,10 +10,10 @@
from browserbase import Browserbase, AsyncBrowserbase
from tests.utils import assert_matches_type
from browserbase.types import (
- Session,
- SessionLiveURLs,
SessionListResponse,
+ SessionDebugResponse,
SessionCreateResponse,
+ SessionUpdateResponse,
SessionRetrieveResponse,
)
@@ -57,7 +57,9 @@ def test_method_create_with_all_params(self, client: Browserbase) -> None:
"min_width": 0,
},
},
+ "headful": True,
"log_session": True,
+ "os": "windows",
"record_session": True,
"solve_captchas": True,
"viewport": {
@@ -67,7 +69,17 @@ def test_method_create_with_all_params(self, client: Browserbase) -> None:
},
extension_id="extensionId",
keep_alive=True,
- proxies=True,
+ proxies=[
+ {
+ "type": "browserbase",
+ "domain_pattern": "domainPattern",
+ "geolocation": {
+ "country": "xx",
+ "city": "city",
+ "state": "xx",
+ },
+ }
+ ],
region="us-west-2",
api_timeout=60,
user_metadata={"foo": "bar"},
@@ -143,7 +155,7 @@ def test_method_update(self, client: Browserbase) -> None:
project_id="projectId",
status="REQUEST_RELEASE",
)
- assert_matches_type(Session, session, path=["response"])
+ assert_matches_type(SessionUpdateResponse, session, path=["response"])
@parametrize
def test_raw_response_update(self, client: Browserbase) -> None:
@@ -156,7 +168,7 @@ def test_raw_response_update(self, client: Browserbase) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
session = response.parse()
- assert_matches_type(Session, session, path=["response"])
+ assert_matches_type(SessionUpdateResponse, session, path=["response"])
@parametrize
def test_streaming_response_update(self, client: Browserbase) -> None:
@@ -169,7 +181,7 @@ def test_streaming_response_update(self, client: Browserbase) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
session = response.parse()
- assert_matches_type(Session, session, path=["response"])
+ assert_matches_type(SessionUpdateResponse, session, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -220,7 +232,7 @@ def test_method_debug(self, client: Browserbase) -> None:
session = client.sessions.debug(
"id",
)
- assert_matches_type(SessionLiveURLs, session, path=["response"])
+ assert_matches_type(SessionDebugResponse, session, path=["response"])
@parametrize
def test_raw_response_debug(self, client: Browserbase) -> None:
@@ -231,7 +243,7 @@ def test_raw_response_debug(self, client: Browserbase) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
session = response.parse()
- assert_matches_type(SessionLiveURLs, session, path=["response"])
+ assert_matches_type(SessionDebugResponse, session, path=["response"])
@parametrize
def test_streaming_response_debug(self, client: Browserbase) -> None:
@@ -242,7 +254,7 @@ def test_streaming_response_debug(self, client: Browserbase) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
session = response.parse()
- assert_matches_type(SessionLiveURLs, session, path=["response"])
+ assert_matches_type(SessionDebugResponse, session, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -255,7 +267,9 @@ def test_path_params_debug(self, client: Browserbase) -> None:
class TestAsyncSessions:
- parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
+ parametrize = pytest.mark.parametrize(
+ "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
+ )
@parametrize
async def test_method_create(self, async_client: AsyncBrowserbase) -> None:
@@ -291,7 +305,9 @@ async def test_method_create_with_all_params(self, async_client: AsyncBrowserbas
"min_width": 0,
},
},
+ "headful": True,
"log_session": True,
+ "os": "windows",
"record_session": True,
"solve_captchas": True,
"viewport": {
@@ -301,7 +317,17 @@ async def test_method_create_with_all_params(self, async_client: AsyncBrowserbas
},
extension_id="extensionId",
keep_alive=True,
- proxies=True,
+ proxies=[
+ {
+ "type": "browserbase",
+ "domain_pattern": "domainPattern",
+ "geolocation": {
+ "country": "xx",
+ "city": "city",
+ "state": "xx",
+ },
+ }
+ ],
region="us-west-2",
api_timeout=60,
user_metadata={"foo": "bar"},
@@ -377,7 +403,7 @@ async def test_method_update(self, async_client: AsyncBrowserbase) -> None:
project_id="projectId",
status="REQUEST_RELEASE",
)
- assert_matches_type(Session, session, path=["response"])
+ assert_matches_type(SessionUpdateResponse, session, path=["response"])
@parametrize
async def test_raw_response_update(self, async_client: AsyncBrowserbase) -> None:
@@ -390,7 +416,7 @@ async def test_raw_response_update(self, async_client: AsyncBrowserbase) -> None
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
session = await response.parse()
- assert_matches_type(Session, session, path=["response"])
+ assert_matches_type(SessionUpdateResponse, session, path=["response"])
@parametrize
async def test_streaming_response_update(self, async_client: AsyncBrowserbase) -> None:
@@ -403,7 +429,7 @@ async def test_streaming_response_update(self, async_client: AsyncBrowserbase) -
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
session = await response.parse()
- assert_matches_type(Session, session, path=["response"])
+ assert_matches_type(SessionUpdateResponse, session, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -454,7 +480,7 @@ async def test_method_debug(self, async_client: AsyncBrowserbase) -> None:
session = await async_client.sessions.debug(
"id",
)
- assert_matches_type(SessionLiveURLs, session, path=["response"])
+ assert_matches_type(SessionDebugResponse, session, path=["response"])
@parametrize
async def test_raw_response_debug(self, async_client: AsyncBrowserbase) -> None:
@@ -465,7 +491,7 @@ async def test_raw_response_debug(self, async_client: AsyncBrowserbase) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
session = await response.parse()
- assert_matches_type(SessionLiveURLs, session, path=["response"])
+ assert_matches_type(SessionDebugResponse, session, path=["response"])
@parametrize
async def test_streaming_response_debug(self, async_client: AsyncBrowserbase) -> None:
@@ -476,7 +502,7 @@ async def test_streaming_response_debug(self, async_client: AsyncBrowserbase) ->
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
session = await response.parse()
- assert_matches_type(SessionLiveURLs, session, path=["response"])
+ assert_matches_type(SessionDebugResponse, session, path=["response"])
assert cast(Any, response.is_closed) is True
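
Note: the sessions tests now send `proxies` as a list of proxy configuration objects rather than a boolean, and exercise the new `headful` and `os` browser settings. A hedged sketch of a create call with the list form, mirroring the payload above; `"your_project_id"` and the geolocation value are placeholders, and the `browser_settings` wrapper name is assumed from the create params these settings sit under:

```python
# Sketch only: mirrors the new proxies shape exercised by the tests above.
from browserbase import Browserbase

client = Browserbase()  # reads BROWSERBASE_API_KEY from the environment

session = client.sessions.create(
    project_id="your_project_id",
    proxies=[
        {
            "type": "browserbase",
            "geolocation": {"country": "US"},  # illustrative value
        }
    ],
    browser_settings={"headful": True, "os": "windows"},
)
print(session.id)
```
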
diff --git a/tests/conftest.py b/tests/conftest.py
index 94b8e72..7fc31c4 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,13 +1,17 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
from __future__ import annotations
import os
import logging
from typing import TYPE_CHECKING, Iterator, AsyncIterator
+import httpx
import pytest
from pytest_asyncio import is_async_test
-from browserbase import Browserbase, AsyncBrowserbase
+from browserbase import Browserbase, AsyncBrowserbase, DefaultAioHttpClient
+from browserbase._utils import is_dict
if TYPE_CHECKING:
from _pytest.fixtures import FixtureRequest # pyright: ignore[reportPrivateImportUsage]
@@ -25,6 +29,19 @@ def pytest_collection_modifyitems(items: list[pytest.Function]) -> None:
for async_test in pytest_asyncio_tests:
async_test.add_marker(session_scope_marker, append=False)
+ # We skip tests that use both the aiohttp client and respx_mock as respx_mock
+ # doesn't support custom transports.
+ for item in items:
+ if "async_client" not in item.fixturenames or "respx_mock" not in item.fixturenames:
+ continue
+
+ if not hasattr(item, "callspec"):
+ continue
+
+ async_client_param = item.callspec.params.get("async_client")
+ if is_dict(async_client_param) and async_client_param.get("http_client") == "aiohttp":
+ item.add_marker(pytest.mark.skip(reason="aiohttp client is not compatible with respx_mock"))
+
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -43,9 +60,25 @@ def client(request: FixtureRequest) -> Iterator[Browserbase]:
@pytest.fixture(scope="session")
async def async_client(request: FixtureRequest) -> AsyncIterator[AsyncBrowserbase]:
- strict = getattr(request, "param", True)
- if not isinstance(strict, bool):
- raise TypeError(f"Unexpected fixture parameter type {type(strict)}, expected {bool}")
-
- async with AsyncBrowserbase(base_url=base_url, api_key=api_key, _strict_response_validation=strict) as client:
+ param = getattr(request, "param", True)
+
+ # defaults
+ strict = True
+ http_client: None | httpx.AsyncClient = None
+
+ if isinstance(param, bool):
+ strict = param
+ elif is_dict(param):
+ strict = param.get("strict", True)
+ assert isinstance(strict, bool)
+
+ http_client_type = param.get("http_client", "httpx")
+ if http_client_type == "aiohttp":
+ http_client = DefaultAioHttpClient()
+ else:
+ raise TypeError(f"Unexpected fixture parameter type {type(param)}, expected bool or dict")
+
+ async with AsyncBrowserbase(
+ base_url=base_url, api_key=api_key, _strict_response_validation=strict, http_client=http_client
+ ) as client:
yield client
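
Note: with the conftest changes above, the `async_client` fixture accepts either a bare bool (strict mode) or a dict such as `{"http_client": "aiohttp"}`, and respx-mocked tests are skipped for the aiohttp transport. Outside the test suite, the same opt-in looks roughly like this; a sketch assuming the aiohttp extra is installed (e.g. `pip install browserbase[aiohttp]`, if the package exposes that extra) and `BROWSERBASE_API_KEY` is set:

```python
# Sketch of selecting the aiohttp-backed transport with the same
# DefaultAioHttpClient that conftest.py now imports.
import asyncio

from browserbase import AsyncBrowserbase, DefaultAioHttpClient


async def main() -> None:
    async with AsyncBrowserbase(http_client=DefaultAioHttpClient()) as client:
        sessions = await client.sessions.list()
        print(sessions)


asyncio.run(main())
```
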
diff --git a/tests/test_client.py b/tests/test_client.py
index d03654d..bf05825 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -23,17 +23,16 @@
from browserbase import Browserbase, AsyncBrowserbase, APIResponseValidationError
from browserbase._types import Omit
-from browserbase._utils import maybe_transform
from browserbase._models import BaseModel, FinalRequestOptions
-from browserbase._constants import RAW_RESPONSE_HEADER
from browserbase._exceptions import APIStatusError, APITimeoutError, BrowserbaseError, APIResponseValidationError
from browserbase._base_client import (
DEFAULT_TIMEOUT,
HTTPX_DEFAULT_TIMEOUT,
BaseClient,
+ DefaultHttpxClient,
+ DefaultAsyncHttpxClient,
make_request_options,
)
-from browserbase.types.session_create_params import SessionCreateParams
from .utils import update_env
@@ -192,6 +191,7 @@ def test_copy_signature(self) -> None:
copy_param = copy_signature.parameters.get(name)
assert copy_param is not None, f"copy() signature is missing the {name} param"
+ @pytest.mark.skipif(sys.version_info >= (3, 10), reason="fails because of a memory leak that started from 3.12")
def test_copy_build_request(self) -> None:
options = FinalRequestOptions(method="get", url="/foo")
@@ -464,7 +464,7 @@ def test_request_extra_query(self) -> None:
def test_multipart_repeating_array(self, client: Browserbase) -> None:
request = client._build_request(
FinalRequestOptions.construct(
- method="get",
+ method="post",
url="/foo",
headers={"Content-Type": "multipart/form-data; boundary=6b7ba517decee4a450543ea6ae821c82"},
json_data={"array": ["foo", "bar"]},
@@ -723,32 +723,21 @@ def test_parse_retry_after_header(self, remaining_retries: int, retry_after: str
@mock.patch("browserbase._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout)
@pytest.mark.respx(base_url=base_url)
- def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter) -> None:
+ def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter, client: Browserbase) -> None:
respx_mock.post("/v1/sessions").mock(side_effect=httpx.TimeoutException("Test timeout error"))
with pytest.raises(APITimeoutError):
- self.client.post(
- "/v1/sessions",
- body=cast(object, maybe_transform(dict(project_id="your_project_id"), SessionCreateParams)),
- cast_to=httpx.Response,
- options={"headers": {RAW_RESPONSE_HEADER: "stream"}},
- )
+ client.sessions.with_streaming_response.create(project_id="projectId").__enter__()
assert _get_open_connections(self.client) == 0
@mock.patch("browserbase._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout)
@pytest.mark.respx(base_url=base_url)
- def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter) -> None:
+ def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter, client: Browserbase) -> None:
respx_mock.post("/v1/sessions").mock(return_value=httpx.Response(500))
with pytest.raises(APIStatusError):
- self.client.post(
- "/v1/sessions",
- body=cast(object, maybe_transform(dict(project_id="your_project_id"), SessionCreateParams)),
- cast_to=httpx.Response,
- options={"headers": {RAW_RESPONSE_HEADER: "stream"}},
- )
-
+ client.sessions.with_streaming_response.create(project_id="projectId").__enter__()
assert _get_open_connections(self.client) == 0
@pytest.mark.parametrize("failures_before_success", [0, 2, 4])
@@ -832,6 +821,55 @@ def retry_handler(_request: httpx.Request) -> httpx.Response:
assert response.http_request.headers.get("x-stainless-retry-count") == "42"
+ def test_proxy_environment_variables(self, monkeypatch: pytest.MonkeyPatch) -> None:
+ # Test that proxy environment variables are picked up by the default client
+ monkeypatch.setenv("HTTPS_PROXY", "https://example.org")
+
+ client = DefaultHttpxClient()
+
+ mounts = tuple(client._mounts.items())
+ assert len(mounts) == 1
+ assert mounts[0][0].pattern == "https://"
+
+ @pytest.mark.filterwarnings("ignore:.*deprecated.*:DeprecationWarning")
+ def test_default_client_creation(self) -> None:
+ # Ensure that the client can be initialized without any exceptions
+ DefaultHttpxClient(
+ verify=True,
+ cert=None,
+ trust_env=True,
+ http1=True,
+ http2=False,
+ limits=httpx.Limits(max_connections=100, max_keepalive_connections=20),
+ )
+
+ @pytest.mark.respx(base_url=base_url)
+ def test_follow_redirects(self, respx_mock: MockRouter) -> None:
+ # Test that the default follow_redirects=True allows following redirects
+ respx_mock.post("/redirect").mock(
+ return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"})
+ )
+ respx_mock.get("/redirected").mock(return_value=httpx.Response(200, json={"status": "ok"}))
+
+ response = self.client.post("/redirect", body={"key": "value"}, cast_to=httpx.Response)
+ assert response.status_code == 200
+ assert response.json() == {"status": "ok"}
+
+ @pytest.mark.respx(base_url=base_url)
+ def test_follow_redirects_disabled(self, respx_mock: MockRouter) -> None:
+ # Test that follow_redirects=False prevents following redirects
+ respx_mock.post("/redirect").mock(
+ return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"})
+ )
+
+ with pytest.raises(APIStatusError) as exc_info:
+ self.client.post(
+ "/redirect", body={"key": "value"}, options={"follow_redirects": False}, cast_to=httpx.Response
+ )
+
+ assert exc_info.value.response.status_code == 302
+ assert exc_info.value.response.headers["Location"] == f"{base_url}/redirected"
+
class TestAsyncBrowserbase:
client = AsyncBrowserbase(base_url=base_url, api_key=api_key, _strict_response_validation=True)
@@ -968,6 +1006,7 @@ def test_copy_signature(self) -> None:
copy_param = copy_signature.parameters.get(name)
assert copy_param is not None, f"copy() signature is missing the {name} param"
+ @pytest.mark.skipif(sys.version_info >= (3, 10), reason="fails because of a memory leak that started from 3.12")
def test_copy_build_request(self) -> None:
options = FinalRequestOptions(method="get", url="/foo")
@@ -1240,7 +1279,7 @@ def test_request_extra_query(self) -> None:
def test_multipart_repeating_array(self, async_client: AsyncBrowserbase) -> None:
request = async_client._build_request(
FinalRequestOptions.construct(
- method="get",
+ method="post",
url="/foo",
headers={"Content-Type": "multipart/form-data; boundary=6b7ba517decee4a450543ea6ae821c82"},
json_data={"array": ["foo", "bar"]},
@@ -1503,32 +1542,25 @@ async def test_parse_retry_after_header(self, remaining_retries: int, retry_afte
@mock.patch("browserbase._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout)
@pytest.mark.respx(base_url=base_url)
- async def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter) -> None:
+ async def test_retrying_timeout_errors_doesnt_leak(
+ self, respx_mock: MockRouter, async_client: AsyncBrowserbase
+ ) -> None:
respx_mock.post("/v1/sessions").mock(side_effect=httpx.TimeoutException("Test timeout error"))
with pytest.raises(APITimeoutError):
- await self.client.post(
- "/v1/sessions",
- body=cast(object, maybe_transform(dict(project_id="your_project_id"), SessionCreateParams)),
- cast_to=httpx.Response,
- options={"headers": {RAW_RESPONSE_HEADER: "stream"}},
- )
+ await async_client.sessions.with_streaming_response.create(project_id="projectId").__aenter__()
assert _get_open_connections(self.client) == 0
@mock.patch("browserbase._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout)
@pytest.mark.respx(base_url=base_url)
- async def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter) -> None:
+ async def test_retrying_status_errors_doesnt_leak(
+ self, respx_mock: MockRouter, async_client: AsyncBrowserbase
+ ) -> None:
respx_mock.post("/v1/sessions").mock(return_value=httpx.Response(500))
with pytest.raises(APIStatusError):
- await self.client.post(
- "/v1/sessions",
- body=cast(object, maybe_transform(dict(project_id="your_project_id"), SessionCreateParams)),
- cast_to=httpx.Response,
- options={"headers": {RAW_RESPONSE_HEADER: "stream"}},
- )
-
+ await async_client.sessions.with_streaming_response.create(project_id="projectId").__aenter__()
assert _get_open_connections(self.client) == 0
@pytest.mark.parametrize("failures_before_success", [0, 2, 4])
@@ -1659,3 +1691,52 @@ async def test_main() -> None:
raise AssertionError("calling get_platform using asyncify resulted in a hung process")
time.sleep(0.1)
+
+ async def test_proxy_environment_variables(self, monkeypatch: pytest.MonkeyPatch) -> None:
+ # Test that proxy environment variables are picked up by the default client
+ monkeypatch.setenv("HTTPS_PROXY", "https://example.org")
+
+ client = DefaultAsyncHttpxClient()
+
+ mounts = tuple(client._mounts.items())
+ assert len(mounts) == 1
+ assert mounts[0][0].pattern == "https://"
+
+ @pytest.mark.filterwarnings("ignore:.*deprecated.*:DeprecationWarning")
+ async def test_default_client_creation(self) -> None:
+ # Ensure that the client can be initialized without any exceptions
+ DefaultAsyncHttpxClient(
+ verify=True,
+ cert=None,
+ trust_env=True,
+ http1=True,
+ http2=False,
+ limits=httpx.Limits(max_connections=100, max_keepalive_connections=20),
+ )
+
+ @pytest.mark.respx(base_url=base_url)
+ async def test_follow_redirects(self, respx_mock: MockRouter) -> None:
+ # Test that the default follow_redirects=True allows following redirects
+ respx_mock.post("/redirect").mock(
+ return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"})
+ )
+ respx_mock.get("/redirected").mock(return_value=httpx.Response(200, json={"status": "ok"}))
+
+ response = await self.client.post("/redirect", body={"key": "value"}, cast_to=httpx.Response)
+ assert response.status_code == 200
+ assert response.json() == {"status": "ok"}
+
+ @pytest.mark.respx(base_url=base_url)
+ async def test_follow_redirects_disabled(self, respx_mock: MockRouter) -> None:
+ # Test that follow_redirects=False prevents following redirects
+ respx_mock.post("/redirect").mock(
+ return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"})
+ )
+
+ with pytest.raises(APIStatusError) as exc_info:
+ await self.client.post(
+ "/redirect", body={"key": "value"}, options={"follow_redirects": False}, cast_to=httpx.Response
+ )
+
+ assert exc_info.value.response.status_code == 302
+ assert exc_info.value.response.headers["Location"] == f"{base_url}/redirected"
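
Note: the client tests gain coverage for proxy environment variables, default client construction, and redirect handling (redirects are followed by default; a per-request `follow_redirects: False` option surfaces the 302 as an `APIStatusError`). A sketch of the proxy behaviour with a customized `httpx`-based client, assuming `BROWSERBASE_API_KEY` is set and that `DefaultHttpxClient` is re-exported at the top level like the `DefaultAioHttpClient` used in conftest.py; the proxy URL is a placeholder:

```python
# Sketch only: DefaultHttpxClient honours HTTPS_PROXY via trust_env, which is
# what test_proxy_environment_variables asserts through the mounted transports.
import os

import httpx

from browserbase import Browserbase, DefaultHttpxClient

os.environ["HTTPS_PROXY"] = "https://proxy.example.org"  # placeholder proxy

http_client = DefaultHttpxClient(
    limits=httpx.Limits(max_connections=100, max_keepalive_connections=20),
)
client = Browserbase(http_client=http_client)  # reads BROWSERBASE_API_KEY from env
```
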
diff --git a/tests/test_models.py b/tests/test_models.py
index b5335f9..34f8733 100644
--- a/tests/test_models.py
+++ b/tests/test_models.py
@@ -1,5 +1,5 @@
import json
-from typing import Any, Dict, List, Union, Optional, cast
+from typing import TYPE_CHECKING, Any, Dict, List, Union, Optional, cast
from datetime import datetime, timezone
from typing_extensions import Literal, Annotated, TypeAliasType
@@ -8,7 +8,7 @@
from pydantic import Field
from browserbase._utils import PropertyInfo
-from browserbase._compat import PYDANTIC_V2, parse_obj, model_dump, model_json
+from browserbase._compat import PYDANTIC_V1, parse_obj, model_dump, model_json
from browserbase._models import BaseModel, construct_type
@@ -294,12 +294,12 @@ class Model(BaseModel):
assert cast(bool, m.foo) is True
m = Model.construct(foo={"name": 3})
- if PYDANTIC_V2:
- assert isinstance(m.foo, Submodel1)
- assert m.foo.name == 3 # type: ignore
- else:
+ if PYDANTIC_V1:
assert isinstance(m.foo, Submodel2)
assert m.foo.name == "3"
+ else:
+ assert isinstance(m.foo, Submodel1)
+ assert m.foo.name == 3 # type: ignore
def test_list_of_unions() -> None:
@@ -426,10 +426,10 @@ class Model(BaseModel):
expected = datetime(2019, 12, 27, 18, 11, 19, 117000, tzinfo=timezone.utc)
- if PYDANTIC_V2:
- expected_json = '{"created_at":"2019-12-27T18:11:19.117000Z"}'
- else:
+ if PYDANTIC_V1:
expected_json = '{"created_at": "2019-12-27T18:11:19.117000+00:00"}'
+ else:
+ expected_json = '{"created_at":"2019-12-27T18:11:19.117000Z"}'
model = Model.construct(created_at="2019-12-27T18:11:19.117Z")
assert model.created_at == expected
@@ -531,7 +531,7 @@ class Model2(BaseModel):
assert m4.to_dict(mode="python") == {"created_at": datetime.fromisoformat(time_str)}
assert m4.to_dict(mode="json") == {"created_at": time_str}
- if not PYDANTIC_V2:
+ if PYDANTIC_V1:
with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"):
m.to_dict(warnings=False)
@@ -556,7 +556,7 @@ class Model(BaseModel):
assert m3.model_dump() == {"foo": None}
assert m3.model_dump(exclude_none=True) == {}
- if not PYDANTIC_V2:
+ if PYDANTIC_V1:
with pytest.raises(ValueError, match="round_trip is only supported in Pydantic v2"):
m.model_dump(round_trip=True)
@@ -580,10 +580,10 @@ class Model(BaseModel):
assert json.loads(m.to_json()) == {"FOO": "hello"}
assert json.loads(m.to_json(use_api_names=False)) == {"foo": "hello"}
- if PYDANTIC_V2:
- assert m.to_json(indent=None) == '{"FOO":"hello"}'
- else:
+ if PYDANTIC_V1:
assert m.to_json(indent=None) == '{"FOO": "hello"}'
+ else:
+ assert m.to_json(indent=None) == '{"FOO":"hello"}'
m2 = Model()
assert json.loads(m2.to_json()) == {}
@@ -595,7 +595,7 @@ class Model(BaseModel):
assert json.loads(m3.to_json()) == {"FOO": None}
assert json.loads(m3.to_json(exclude_none=True)) == {}
- if not PYDANTIC_V2:
+ if PYDANTIC_V1:
with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"):
m.to_json(warnings=False)
@@ -622,7 +622,7 @@ class Model(BaseModel):
assert json.loads(m3.model_dump_json()) == {"foo": None}
assert json.loads(m3.model_dump_json(exclude_none=True)) == {}
- if not PYDANTIC_V2:
+ if PYDANTIC_V1:
with pytest.raises(ValueError, match="round_trip is only supported in Pydantic v2"):
m.model_dump_json(round_trip=True)
@@ -679,12 +679,12 @@ class B(BaseModel):
)
assert isinstance(m, A)
assert m.type == "a"
- if PYDANTIC_V2:
- assert m.data == 100 # type: ignore[comparison-overlap]
- else:
+ if PYDANTIC_V1:
# pydantic v1 automatically converts inputs to strings
# if the expected type is a str
assert m.data == "100"
+ else:
+ assert m.data == 100 # type: ignore[comparison-overlap]
def test_discriminated_unions_unknown_variant() -> None:
@@ -768,12 +768,12 @@ class B(BaseModel):
)
assert isinstance(m, A)
assert m.foo_type == "a"
- if PYDANTIC_V2:
- assert m.data == 100 # type: ignore[comparison-overlap]
- else:
+ if PYDANTIC_V1:
# pydantic v1 automatically converts inputs to strings
# if the expected type is a str
assert m.data == "100"
+ else:
+ assert m.data == 100 # type: ignore[comparison-overlap]
def test_discriminated_unions_overlapping_discriminators_invalid_data() -> None:
@@ -833,7 +833,7 @@ class B(BaseModel):
assert UnionType.__discriminator__ is discriminator
-@pytest.mark.skipif(not PYDANTIC_V2, reason="TypeAliasType is not supported in Pydantic v1")
+@pytest.mark.skipif(PYDANTIC_V1, reason="TypeAliasType is not supported in Pydantic v1")
def test_type_alias_type() -> None:
Alias = TypeAliasType("Alias", str) # pyright: ignore
@@ -849,7 +849,7 @@ class Model(BaseModel):
assert m.union == "bar"
-@pytest.mark.skipif(not PYDANTIC_V2, reason="TypeAliasType is not supported in Pydantic v1")
+@pytest.mark.skipif(PYDANTIC_V1, reason="TypeAliasType is not supported in Pydantic v1")
def test_field_named_cls() -> None:
class Model(BaseModel):
cls: str
@@ -889,3 +889,75 @@ class ModelB(BaseModel):
)
assert isinstance(m, ModelB)
+
+
+def test_nested_discriminated_union() -> None:
+ class InnerType1(BaseModel):
+ type: Literal["type_1"]
+
+ class InnerModel(BaseModel):
+ inner_value: str
+
+ class InnerType2(BaseModel):
+ type: Literal["type_2"]
+ some_inner_model: InnerModel
+
+ class Type1(BaseModel):
+ base_type: Literal["base_type_1"]
+ value: Annotated[
+ Union[
+ InnerType1,
+ InnerType2,
+ ],
+ PropertyInfo(discriminator="type"),
+ ]
+
+ class Type2(BaseModel):
+ base_type: Literal["base_type_2"]
+
+ T = Annotated[
+ Union[
+ Type1,
+ Type2,
+ ],
+ PropertyInfo(discriminator="base_type"),
+ ]
+
+ model = construct_type(
+ type_=T,
+ value={
+ "base_type": "base_type_1",
+ "value": {
+ "type": "type_2",
+ },
+ },
+ )
+ assert isinstance(model, Type1)
+ assert isinstance(model.value, InnerType2)
+
+
+@pytest.mark.skipif(PYDANTIC_V1, reason="this is only supported in pydantic v2 for now")
+def test_extra_properties() -> None:
+ class Item(BaseModel):
+ prop: int
+
+ class Model(BaseModel):
+ __pydantic_extra__: Dict[str, Item] = Field(init=False) # pyright: ignore[reportIncompatibleVariableOverride]
+
+ other: str
+
+ if TYPE_CHECKING:
+
+ def __getattr__(self, attr: str) -> Item: ...
+
+ model = construct_type(
+ type_=Model,
+ value={
+ "a": {"prop": 1},
+ "other": "foo",
+ },
+ )
+ assert isinstance(model, Model)
+ assert model.a.prop == 1
+ assert isinstance(model.a, Item)
+ assert model.other == "foo"
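
Note: test_models.py swaps its `PYDANTIC_V2` branches for the new `PYDANTIC_V1` flag, so pydantic v2 is the default path and v1 the special case, and adds coverage for nested discriminated unions and typed extra properties. A tiny sketch of the inverted branching, reusing the serialization expectations from the hunks above:

```python
# Sketch of the flipped compat flag: branch on PYDANTIC_V1 and treat v2+ as
# the default, matching the expectations asserted in the tests above.
from browserbase._compat import PYDANTIC_V1

if PYDANTIC_V1:
    expected_json = '{"created_at": "2019-12-27T18:11:19.117000+00:00"}'
else:
    expected_json = '{"created_at":"2019-12-27T18:11:19.117000Z"}'
print(expected_json)
```
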
diff --git a/tests/test_transform.py b/tests/test_transform.py
index cba80b2..498d0d9 100644
--- a/tests/test_transform.py
+++ b/tests/test_transform.py
@@ -15,7 +15,7 @@
parse_datetime,
async_transform as _async_transform,
)
-from browserbase._compat import PYDANTIC_V2
+from browserbase._compat import PYDANTIC_V1
from browserbase._models import BaseModel
_T = TypeVar("_T")
@@ -189,7 +189,7 @@ class DateModel(BaseModel):
@pytest.mark.asyncio
async def test_iso8601_format(use_async: bool) -> None:
dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00")
- tz = "Z" if PYDANTIC_V2 else "+00:00"
+ tz = "+00:00" if PYDANTIC_V1 else "Z"
assert await transform({"foo": dt}, DatetimeDict, use_async) == {"foo": "2023-02-23T14:16:36.337692+00:00"} # type: ignore[comparison-overlap]
assert await transform(DatetimeModel(foo=dt), Any, use_async) == {"foo": "2023-02-23T14:16:36.337692" + tz} # type: ignore[comparison-overlap]
@@ -297,11 +297,11 @@ async def test_pydantic_unknown_field(use_async: bool) -> None:
@pytest.mark.asyncio
async def test_pydantic_mismatched_types(use_async: bool) -> None:
model = MyModel.construct(foo=True)
- if PYDANTIC_V2:
+ if PYDANTIC_V1:
+ params = await transform(model, Any, use_async)
+ else:
with pytest.warns(UserWarning):
params = await transform(model, Any, use_async)
- else:
- params = await transform(model, Any, use_async)
assert cast(Any, params) == {"foo": True}
@@ -309,11 +309,11 @@ async def test_pydantic_mismatched_types(use_async: bool) -> None:
@pytest.mark.asyncio
async def test_pydantic_mismatched_object_type(use_async: bool) -> None:
model = MyModel.construct(foo=MyModel.construct(hello="world"))
- if PYDANTIC_V2:
+ if PYDANTIC_V1:
+ params = await transform(model, Any, use_async)
+ else:
with pytest.warns(UserWarning):
params = await transform(model, Any, use_async)
- else:
- params = await transform(model, Any, use_async)
assert cast(Any, params) == {"foo": {"hello": "world"}}
diff --git a/tests/test_utils/test_datetime_parse.py b/tests/test_utils/test_datetime_parse.py
new file mode 100644
index 0000000..2834c47
--- /dev/null
+++ b/tests/test_utils/test_datetime_parse.py
@@ -0,0 +1,110 @@
+"""
+Copied from https://github.com/pydantic/pydantic/blob/v1.10.22/tests/test_datetime_parse.py
+with modifications so it works without pydantic v1 imports.
+"""
+
+from typing import Type, Union
+from datetime import date, datetime, timezone, timedelta
+
+import pytest
+
+from browserbase._utils import parse_date, parse_datetime
+
+
+def create_tz(minutes: int) -> timezone:
+ return timezone(timedelta(minutes=minutes))
+
+
+@pytest.mark.parametrize(
+ "value,result",
+ [
+ # Valid inputs
+ ("1494012444.883309", date(2017, 5, 5)),
+ (b"1494012444.883309", date(2017, 5, 5)),
+ (1_494_012_444.883_309, date(2017, 5, 5)),
+ ("1494012444", date(2017, 5, 5)),
+ (1_494_012_444, date(2017, 5, 5)),
+ (0, date(1970, 1, 1)),
+ ("2012-04-23", date(2012, 4, 23)),
+ (b"2012-04-23", date(2012, 4, 23)),
+ ("2012-4-9", date(2012, 4, 9)),
+ (date(2012, 4, 9), date(2012, 4, 9)),
+ (datetime(2012, 4, 9, 12, 15), date(2012, 4, 9)),
+ # Invalid inputs
+ ("x20120423", ValueError),
+ ("2012-04-56", ValueError),
+ (19_999_999_999, date(2603, 10, 11)), # just before watershed
+ (20_000_000_001, date(1970, 8, 20)), # just after watershed
+ (1_549_316_052, date(2019, 2, 4)), # nowish in s
+ (1_549_316_052_104, date(2019, 2, 4)), # nowish in ms
+ (1_549_316_052_104_324, date(2019, 2, 4)), # nowish in μs
+ (1_549_316_052_104_324_096, date(2019, 2, 4)), # nowish in ns
+ ("infinity", date(9999, 12, 31)),
+ ("inf", date(9999, 12, 31)),
+ (float("inf"), date(9999, 12, 31)),
+ ("infinity ", date(9999, 12, 31)),
+ (int("1" + "0" * 100), date(9999, 12, 31)),
+ (1e1000, date(9999, 12, 31)),
+ ("-infinity", date(1, 1, 1)),
+ ("-inf", date(1, 1, 1)),
+ ("nan", ValueError),
+ ],
+)
+def test_date_parsing(value: Union[str, bytes, int, float], result: Union[date, Type[Exception]]) -> None:
+ if type(result) == type and issubclass(result, Exception): # pyright: ignore[reportUnnecessaryIsInstance]
+ with pytest.raises(result):
+ parse_date(value)
+ else:
+ assert parse_date(value) == result
+
+
+@pytest.mark.parametrize(
+ "value,result",
+ [
+ # Valid inputs
+ # values in seconds
+ ("1494012444.883309", datetime(2017, 5, 5, 19, 27, 24, 883_309, tzinfo=timezone.utc)),
+ (1_494_012_444.883_309, datetime(2017, 5, 5, 19, 27, 24, 883_309, tzinfo=timezone.utc)),
+ ("1494012444", datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)),
+ (b"1494012444", datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)),
+ (1_494_012_444, datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)),
+ # values in ms
+ ("1494012444000.883309", datetime(2017, 5, 5, 19, 27, 24, 883, tzinfo=timezone.utc)),
+ ("-1494012444000.883309", datetime(1922, 8, 29, 4, 32, 35, 999117, tzinfo=timezone.utc)),
+ (1_494_012_444_000, datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)),
+ ("2012-04-23T09:15:00", datetime(2012, 4, 23, 9, 15)),
+ ("2012-4-9 4:8:16", datetime(2012, 4, 9, 4, 8, 16)),
+ ("2012-04-23T09:15:00Z", datetime(2012, 4, 23, 9, 15, 0, 0, timezone.utc)),
+ ("2012-4-9 4:8:16-0320", datetime(2012, 4, 9, 4, 8, 16, 0, create_tz(-200))),
+ ("2012-04-23T10:20:30.400+02:30", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(150))),
+ ("2012-04-23T10:20:30.400+02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(120))),
+ ("2012-04-23T10:20:30.400-02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(-120))),
+ (b"2012-04-23T10:20:30.400-02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(-120))),
+ (datetime(2017, 5, 5), datetime(2017, 5, 5)),
+ (0, datetime(1970, 1, 1, 0, 0, 0, tzinfo=timezone.utc)),
+ # Invalid inputs
+ ("x20120423091500", ValueError),
+ ("2012-04-56T09:15:90", ValueError),
+ ("2012-04-23T11:05:00-25:00", ValueError),
+ (19_999_999_999, datetime(2603, 10, 11, 11, 33, 19, tzinfo=timezone.utc)), # just before watershed
+ (20_000_000_001, datetime(1970, 8, 20, 11, 33, 20, 1000, tzinfo=timezone.utc)), # just after watershed
+ (1_549_316_052, datetime(2019, 2, 4, 21, 34, 12, 0, tzinfo=timezone.utc)), # nowish in s
+ (1_549_316_052_104, datetime(2019, 2, 4, 21, 34, 12, 104_000, tzinfo=timezone.utc)), # nowish in ms
+ (1_549_316_052_104_324, datetime(2019, 2, 4, 21, 34, 12, 104_324, tzinfo=timezone.utc)), # nowish in μs
+ (1_549_316_052_104_324_096, datetime(2019, 2, 4, 21, 34, 12, 104_324, tzinfo=timezone.utc)), # nowish in ns
+ ("infinity", datetime(9999, 12, 31, 23, 59, 59, 999999)),
+ ("inf", datetime(9999, 12, 31, 23, 59, 59, 999999)),
+ ("inf ", datetime(9999, 12, 31, 23, 59, 59, 999999)),
+ (1e50, datetime(9999, 12, 31, 23, 59, 59, 999999)),
+ (float("inf"), datetime(9999, 12, 31, 23, 59, 59, 999999)),
+ ("-infinity", datetime(1, 1, 1, 0, 0)),
+ ("-inf", datetime(1, 1, 1, 0, 0)),
+ ("nan", ValueError),
+ ],
+)
+def test_datetime_parsing(value: Union[str, bytes, int, float], result: Union[datetime, Type[Exception]]) -> None:
+ if type(result) == type and issubclass(result, Exception): # pyright: ignore[reportUnnecessaryIsInstance]
+ with pytest.raises(result):
+ parse_datetime(value)
+ else:
+ assert parse_datetime(value) == result
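
Note: the new test module pins down the SDK's own `parse_date`/`parse_datetime` helpers (ported from pydantic v1's test suite) so date handling no longer depends on a pydantic v1 install. A quick usage sketch; these helpers live in the private `browserbase._utils` package, so treat them as internal:

```python
# Expected values are taken from the parametrized cases above.
from browserbase._utils import parse_date, parse_datetime

print(parse_date("2012-04-23"))                # 2012-04-23
print(parse_datetime("2012-04-23T09:15:00Z"))  # 2012-04-23 09:15:00+00:00
print(parse_datetime(1_494_012_444))           # 2017-05-05 19:27:24+00:00
```
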
diff --git a/tests/utils.py b/tests/utils.py
index ac183a7..55521a9 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -4,7 +4,7 @@
import inspect
import traceback
import contextlib
-from typing import Any, TypeVar, Iterator, cast
+from typing import Any, TypeVar, Iterator, Sequence, cast
from datetime import date, datetime
from typing_extensions import Literal, get_args, get_origin, assert_type
@@ -15,10 +15,11 @@
is_list_type,
is_union_type,
extract_type_arg,
+ is_sequence_type,
is_annotated_type,
is_type_alias_type,
)
-from browserbase._compat import PYDANTIC_V2, field_outer_type, get_model_fields
+from browserbase._compat import PYDANTIC_V1, field_outer_type, get_model_fields
from browserbase._models import BaseModel
BaseModelT = TypeVar("BaseModelT", bound=BaseModel)
@@ -27,12 +28,12 @@
def assert_matches_model(model: type[BaseModelT], value: BaseModelT, *, path: list[str]) -> bool:
for name, field in get_model_fields(model).items():
field_value = getattr(value, name)
- if PYDANTIC_V2:
- allow_none = False
- else:
+ if PYDANTIC_V1:
# in v1 nullability was structured differently
# https://docs.pydantic.dev/2.0/migration/#required-optional-and-nullable-fields
allow_none = getattr(field, "allow_none", False)
+ else:
+ allow_none = False
assert_matches_type(
field_outer_type(field),
@@ -71,6 +72,13 @@ def assert_matches_type(
if is_list_type(type_):
return _assert_list_type(type_, value)
+ if is_sequence_type(type_):
+ assert isinstance(value, Sequence)
+ inner_type = get_args(type_)[0]
+ for entry in value: # type: ignore
+ assert_type(inner_type, entry) # type: ignore
+ return
+
if origin == str:
assert isinstance(value, str)
elif origin == int: