diff --git a/.coveragerc b/.coveragerc
index 098720f67..b178b094a 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -1,3 +1,4 @@
+# Generated by synthtool. DO NOT EDIT!
[run]
branch = True
@@ -14,3 +15,5 @@ exclude_lines =
omit =
*/gapic/*.py
*/proto/*.py
+ */core/*.py
+ */site-packages/*.py
\ No newline at end of file
diff --git a/.flake8 b/.flake8
index 61766fa84..0268ecc9c 100644
--- a/.flake8
+++ b/.flake8
@@ -1,3 +1,4 @@
+# Generated by synthtool. DO NOT EDIT!
[flake8]
ignore = E203, E266, E501, W503
exclude =
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
new file mode 100644
index 000000000..939e5341e
--- /dev/null
+++ b/.github/CONTRIBUTING.md
@@ -0,0 +1,28 @@
+# How to Contribute
+
+We'd love to accept your patches and contributions to this project. There are
+just a few small guidelines you need to follow.
+
+## Contributor License Agreement
+
+Contributions to this project must be accompanied by a Contributor License
+Agreement. You (or your employer) retain the copyright to your contribution;
+this simply gives us permission to use and redistribute your contributions as
+part of the project. Head over to <https://cla.developers.google.com/> to see
+your current agreements on file or to sign a new one.
+
+You generally only need to submit a CLA once, so if you've already submitted one
+(even if it was for a different project), you probably don't need to do it
+again.
+
+## Code reviews
+
+All submissions, including submissions by project members, require review. We
+use GitHub pull requests for this purpose. Consult
+[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
+information on using pull requests.
+
+## Community Guidelines
+
+This project follows [Google's Open Source Community
+Guidelines](https://opensource.google.com/conduct/).
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 000000000..1f2a8381f
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,44 @@
+---
+name: Bug report
+about: Create a report to help us improve
+
+---
+
+Thanks for stopping by to let us know something could be better!
+
+**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response.
+
+Please run down the following list and make sure you've tried the usual "quick fixes":
+
+ - Search the issues already opened: https://github.com/googleapis/python-storage/issues
+ - Search the issues on our "catch-all" repository: https://github.com/googleapis/google-cloud-python
+ - Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+python
+
+If you are still having issues, please be sure to include as much information as possible:
+
+#### Environment details
+
+ - OS type and version:
+ - Python version: `python --version`
+ - pip version: `pip --version`
+ - `google-cloud-storage` version: `pip show google-cloud-storage`
+
+#### Steps to reproduce
+
+ 1. ?
+ 2. ?
+
+#### Code example
+
+```python
+# example
+```
+
+#### Stack trace
+```
+# example
+```
+
+Making sure to follow these steps will guarantee the quickest resolution possible.
+
+Thanks!
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 000000000..6365857f3
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,18 @@
+---
+name: Feature request
+about: Suggest an idea for this library
+
+---
+
+Thanks for stopping by to let us know something could be better!
+
+**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response.
+
+ **Is your feature request related to a problem? Please describe.**
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+ **Describe the solution you'd like**
+A clear and concise description of what you want to happen.
+ **Describe alternatives you've considered**
+A clear and concise description of any alternative solutions or features you've considered.
+ **Additional context**
+Add any other context or screenshots about the feature request here.
diff --git a/.github/ISSUE_TEMPLATE/support_request.md b/.github/ISSUE_TEMPLATE/support_request.md
new file mode 100644
index 000000000..995869032
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/support_request.md
@@ -0,0 +1,7 @@
+---
+name: Support request
+about: If you have a support contract with Google, please create an issue in the Google Cloud Support console.
+
+---
+
+**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response.
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 000000000..a1074d8c9
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,7 @@
+Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly:
+- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-storage/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea
+- [ ] Ensure the tests and linter pass
+- [ ] Code coverage does not decrease (if any source code was changed)
+- [ ] Appropriate docs were updated (if necessary)
+
+Fixes # 🦕
diff --git a/.github/release-please.yml b/.github/release-please.yml
new file mode 100644
index 000000000..4507ad059
--- /dev/null
+++ b/.github/release-please.yml
@@ -0,0 +1 @@
+releaseType: python
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 000000000..3fb06e09c
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,58 @@
+*.py[cod]
+*.sw[op]
+
+# C extensions
+*.so
+
+# Packages
+*.egg
+*.egg-info
+dist
+build
+eggs
+parts
+bin
+var
+sdist
+develop-eggs
+.installed.cfg
+lib
+lib64
+__pycache__
+
+# Installer logs
+pip-log.txt
+
+# Unit test / coverage reports
+.coverage
+.nox
+.cache
+.pytest_cache
+
+
+# Mac
+.DS_Store
+
+# JetBrains
+.idea
+
+# VS Code
+.vscode
+
+# emacs
+*~
+
+# Built documentation
+docs/_build
+bigquery/docs/generated
+
+# Virtual environment
+env/
+coverage.xml
+
+# System test environment variables.
+system_tests/local_test_setup
+
+# Make sure a generated file isn't accidentally committed.
+pylintrc
+pylintrc.test
\ No newline at end of file
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
new file mode 100755
index 000000000..e8b4e8bfa
--- /dev/null
+++ b/.kokoro/build.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+cd github/python-storage
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Setup service account credentials.
+export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
+
+# Setup project id.
+export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
+
+# Remove old nox
+python3.6 -m pip uninstall --yes --quiet nox-automation
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+python3.6 -m nox --version
+
+python3.6 -m nox
diff --git a/.kokoro/continuous/common.cfg b/.kokoro/continuous/common.cfg
new file mode 100644
index 000000000..51201dfab
--- /dev/null
+++ b/.kokoro/continuous/common.cfg
@@ -0,0 +1,27 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the trampoline script to run in docker.
+build_file: "python-storage/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-storage/.kokoro/build.sh"
+}
diff --git a/.kokoro/continuous/continuous.cfg b/.kokoro/continuous/continuous.cfg
new file mode 100644
index 000000000..8f43917d9
--- /dev/null
+++ b/.kokoro/continuous/continuous.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
new file mode 100644
index 000000000..b1f35bff3
--- /dev/null
+++ b/.kokoro/docs/common.cfg
@@ -0,0 +1,48 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-storage/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-storage/.kokoro/publish-docs.sh"
+}
+
+env_vars: {
+ key: "STAGING_BUCKET"
+ value: "docs-staging"
+}
+
+# Fetch the token needed for reporting release status to GitHub
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "yoshi-automation-github-key"
+ }
+ }
+}
+
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "docuploader_service_account"
+ }
+ }
+}
\ No newline at end of file
diff --git a/.kokoro/docs/docs.cfg b/.kokoro/docs/docs.cfg
new file mode 100644
index 000000000..8f43917d9
--- /dev/null
+++ b/.kokoro/docs/docs.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/presubmit/common.cfg b/.kokoro/presubmit/common.cfg
new file mode 100644
index 000000000..51201dfab
--- /dev/null
+++ b/.kokoro/presubmit/common.cfg
@@ -0,0 +1,27 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the trampoline script to run in docker.
+build_file: "python-storage/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-storage/.kokoro/build.sh"
+}
diff --git a/.kokoro/presubmit/presubmit.cfg b/.kokoro/presubmit/presubmit.cfg
new file mode 100644
index 000000000..8f43917d9
--- /dev/null
+++ b/.kokoro/presubmit/presubmit.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh
new file mode 100755
index 000000000..893925df9
--- /dev/null
+++ b/.kokoro/publish-docs.sh
@@ -0,0 +1,57 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#!/bin/bash
+
+set -eo pipefail
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+cd github/python-storage
+
+# Remove old nox
+python3.6 -m pip uninstall --yes --quiet nox-automation
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+python3.6 -m nox --version
+
+# build docs
+nox -s docs
+
+python3 -m pip install gcp-docuploader
+
+# install a json parser
+sudo apt-get update
+sudo apt-get -y install software-properties-common
+sudo add-apt-repository universe
+sudo apt-get update
+sudo apt-get -y install jq
+
+# create metadata
+python3 -m docuploader create-metadata \
+ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
+ --version=$(python3 setup.py --version) \
+ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
+ --distribution-name=$(python3 setup.py --name) \
+ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
+ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
+ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
+
+cat docs.metadata
+
+# upload docs
+python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging
diff --git a/.kokoro/release.sh b/.kokoro/release.sh
new file mode 100755
index 000000000..73f9310b6
--- /dev/null
+++ b/.kokoro/release.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#!/bin/bash
+
+set -eo pipefail
+
+# Start the releasetool reporter
+python3 -m pip install gcp-releasetool
+python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script
+
+# Ensure that we have the latest versions of Twine, Wheel, and Setuptools.
+python3 -m pip install --upgrade twine wheel setuptools
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Move into the package, build the distribution and upload.
+TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password")
+cd github/python-storage
+python3 setup.py sdist bdist_wheel
+twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/*
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
new file mode 100644
index 000000000..b2dfeefd5
--- /dev/null
+++ b/.kokoro/release/common.cfg
@@ -0,0 +1,64 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-storage/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-storage/.kokoro/release.sh"
+}
+
+# Fetch the token needed for reporting release status to GitHub
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "yoshi-automation-github-key"
+ }
+ }
+}
+
+# Fetch PyPI password
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "google_cloud_pypi_password"
+ }
+ }
+}
+
+# Fetch magictoken to use with Magic Github Proxy
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "releasetool-magictoken"
+ }
+ }
+}
+
+# Fetch api key to use with Magic Github Proxy
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "magic-github-proxy-api-key"
+ }
+ }
+}
diff --git a/.kokoro/release/release.cfg b/.kokoro/release/release.cfg
new file mode 100644
index 000000000..8f43917d9
--- /dev/null
+++ b/.kokoro/release/release.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh
new file mode 100755
index 000000000..e8c4251f3
--- /dev/null
+++ b/.kokoro/trampoline.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$?
+
+chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true
+
+exit ${ret_code}
diff --git a/.repo-metadata.json b/.repo-metadata.json
index 0468fe0d5..499d6158c 100644
--- a/.repo-metadata.json
+++ b/.repo-metadata.json
@@ -6,7 +6,7 @@
"issue_tracker": "https://issuetracker.google.com/savedsearches/559782",
"release_level": "ga",
"language": "python",
- "repo": "googleapis/google-cloud-python",
+ "repo": "googleapis/python-storage",
"distribution_name": "google-cloud-storage",
"api_id": "storage.googleapis.com",
"requires_billing": true
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4cd1577f6..237a2362b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,21 @@
[1]: https://pypi.org/project/google-cloud-storage/#history
+## [1.26.0](https://www.github.com/googleapis/python-storage/compare/v1.25.0...v1.26.0) (2020-02-12)
+
+
+### Features
+
+* **storage:** add support for signing URLs using token ([#9889](https://www.github.com/googleapis/python-storage/issues/9889)) ([ad280bf](https://www.github.com/googleapis/python-storage/commit/ad280bf506d3d7a37c402d06eac07422a5fe80af))
+* add timeout parameter to public methods ([#44](https://www.github.com/googleapis/python-storage/issues/44)) ([63abf07](https://www.github.com/googleapis/python-storage/commit/63abf0778686df1caa001270dd22f9df0daf0c78))
+
+
+### Bug Fixes
+
+* **storage:** fix documentation of max_result parameter in list_blob ([#43](https://www.github.com/googleapis/python-storage/issues/43)) ([ff15f19](https://www.github.com/googleapis/python-storage/commit/ff15f19d3a5830acdd540181dc6e9d07ca7d88ee))
+* **storage:** fix system test and change scope for iam access token ([#47](https://www.github.com/googleapis/python-storage/issues/47)) ([bc5375f](https://www.github.com/googleapis/python-storage/commit/bc5375f4c88f7e6ad1afbe7667c49d9a846e9757))
+* **tests:** remove low version error assertion from iam conditions system tests ([#53](https://www.github.com/googleapis/python-storage/issues/53)) ([8904aee](https://www.github.com/googleapis/python-storage/commit/8904aee9ad5dc01ab83e1460b6f186a739668eb7))
+
## 1.25.0
01-16-2020 11:00 PST
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..b3d1f6029
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1,44 @@
+
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project,
+and in the interest of fostering an open and welcoming community,
+we pledge to respect all people who contribute through reporting issues,
+posting feature requests, updating documentation,
+submitting pull requests or patches, and other activities.
+
+We are committed to making participation in this project
+a harassment-free experience for everyone,
+regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance,
+body size, race, ethnicity, age, religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery
+* Personal attacks
+* Trolling or insulting/derogatory comments
+* Public or private harassment
+* Publishing others' private information,
+such as physical or electronic
+addresses, without explicit permission
+* Other unethical or unprofessional conduct.
+
+Project maintainers have the right and responsibility to remove, edit, or reject
+comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct.
+By adopting this Code of Conduct,
+project maintainers commit themselves to fairly and consistently
+applying these principles to every aspect of managing this project.
+Project maintainers who do not follow or enforce the Code of Conduct
+may be permanently removed from the project team.
+
+This code of conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior
+may be reported by opening an issue
+or contacting one or more of the project maintainers.
+
+This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0,
+available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
new file mode 100644
index 000000000..6b5dae5c4
--- /dev/null
+++ b/CONTRIBUTING.rst
@@ -0,0 +1,279 @@
+.. Generated by synthtool. DO NOT EDIT!
+############
+Contributing
+############
+
+#. **Please sign one of the contributor license agreements below.**
+#. Fork the repo, develop and test your code changes, add docs.
+#. Make sure that your commit messages clearly describe the changes.
+#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_)
+
+.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews
+
+.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries.
+
+***************
+Adding Features
+***************
+
+In order to add a feature:
+
+- The feature must be documented in both the API and narrative
+ documentation.
+
+- The feature must work fully on the following CPython versions: 2.7,
+ 3.5, 3.6, and 3.7 on both UNIX and Windows.
+
+- The feature must not add unnecessary dependencies (where
+ "unnecessary" is of course subjective, but new dependencies should
+ be discussed).
+
+****************************
+Using a Development Checkout
+****************************
+
+You'll have to create a development environment using a Git checkout:
+
+- While logged into your GitHub account, navigate to the
+ ``python-storage`` `repo`_ on GitHub.
+
+- Fork and clone the ``python-storage`` repository to your GitHub account by
+ clicking the "Fork" button.
+
+- Clone your fork of ``python-storage`` from your GitHub account to your local
+ computer, substituting your account username and specifying the destination
+ as ``hack-on-python-storage``. E.g.::
+
+ $ cd ${HOME}
+ $ git clone git@github.com:USERNAME/python-storage.git hack-on-python-storage
+ $ cd hack-on-python-storage
+ # Configure remotes such that you can pull changes from the googleapis/python-storage
+ # repository into your local repository.
+ $ git remote add upstream git@github.com:googleapis/python-storage.git
+ # fetch and merge changes from upstream into master
+ $ git fetch upstream
+ $ git merge upstream/master
+
+Now your local repo is set up such that you will push changes to your GitHub
+repo, from which you can submit a pull request.
+
+To work on the codebase and run the tests, we recommend using ``nox``,
+but you can also use a ``virtualenv`` of your own creation.
+
+.. _repo: https://github.com/googleapis/python-storage
+
+Using ``nox``
+=============
+
+We use `nox <https://pypi.org/project/nox/>`__ to instrument our tests.
+
+- To test your changes, run unit tests with ``nox``::
+
+ $ nox -s unit-2.7
+ $ nox -s unit-3.7
+ $ ...
+
+ .. note::
+
+ The unit tests and system tests are described in the
+ ``noxfile.py`` files in each directory.
+
+.. nox: https://pypi.org/project/nox/
+
+Note on Editable Installs / Develop Mode
+========================================
+
+- As mentioned previously, using ``setuptools`` in `develop mode`_
+ or a ``pip`` `editable install`_ is not possible with this
+ library. This is because this library uses `namespace packages`_.
+ For context see `Issue #2316`_ and the relevant `PyPA issue`_.
+
+ Since ``editable`` / ``develop`` mode can't be used, packages
+ need to be installed directly. Hence your changes to the source
+ tree don't get incorporated into the **already installed**
+ package.
+
+.. _namespace packages: https://www.python.org/dev/peps/pep-0420/
+.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316
+.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12
+.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode
+.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs
+
+*****************************************
+I'm getting weird errors... Can you help?
+*****************************************
+
+If the error mentions ``Python.h`` not being found,
+install ``python-dev`` and try again.
+On Debian/Ubuntu::
+
+ $ sudo apt-get install python-dev
+
+************
+Coding Style
+************
+
+- PEP8 compliance, with exceptions defined in the linter configuration.
+ If you have ``nox`` installed, you can test that you have not introduced
+ any non-compliant code via::
+
+ $ nox -s lint
+
+- In order to make ``nox -s lint`` run faster, you can set some environment
+ variables::
+
+ export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
+ export GOOGLE_CLOUD_TESTING_BRANCH="master"
+
+ By doing this, you are specifying the location of the most up-to-date
+ version of ``python-storage``. The suggested remote name ``upstream``
+ should point to the official ``googleapis`` checkout and the
+ branch should be the main branch on that remote (``master``).
+
+Exceptions to PEP8:
+
+- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
+ "Function-Under-Test"), which is PEP8-incompliant, but more readable.
+ Some also use a local variable, ``MUT`` (short for "Module-Under-Test").
+
+********************
+Running System Tests
+********************
+
+- To run system tests, you can execute::
+
+ $ nox -s system-3.7
+ $ nox -s system-2.7
+
+ .. note::
+
+ System tests are only configured to run under Python 2.7 and
+ Python 3.7. For expediency, we do not run them in older versions
+ of Python 3.
+
+ This alone will not run the tests. You'll need to change some local
+ auth settings and change some configuration in your project to
+ run all the tests.
+
+- System tests will be run against an actual project and
+ so you'll need to provide some environment variables to facilitate
+ authentication to your project:
+
+ - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file;
+ Such a file can be downloaded directly from the developer's console by clicking
+ "Generate new JSON key". See private key
+ `docs <https://cloud.google.com/storage/docs/authentication#generating-a-private-key>`__
+ for more details.
+
+- Once you have downloaded your json keys, set the environment variable
+ ``GOOGLE_APPLICATION_CREDENTIALS`` to the absolute path of the json file::
+
+ $ export GOOGLE_APPLICATION_CREDENTIALS="/Users/<username>/path/to/app_credentials.json"
+
+
+*************
+Test Coverage
+*************
+
+- The codebase *must* have 100% test statement coverage after each commit.
+ You can test coverage via ``nox -s cover``.
+
+******************************************************
+Documentation Coverage and Building HTML Documentation
+******************************************************
+
+If you fix a bug, and the bug requires an API or behavior modification, all
+documentation in this package which references that API or behavior must be
+changed to reflect the bug fix, ideally in the same commit that fixes the bug
+or adds the feature.
+
+Build the docs via:
+
+ $ nox -s docs
+
+********************************************
+Note About ``README`` as it pertains to PyPI
+********************************************
+
+The `description on PyPI`_ for the project comes directly from the
+``README``. Due to the reStructuredText (``rst``) parser used by
+PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
+instead of
+``https://github.com/googleapis/python-storage/blob/master/CONTRIBUTING.rst``)
+may cause problems creating links or rendering the description.
+
+.. _description on PyPI: https://pypi.org/project/google-cloud-storage
+
+
+*************************
+Supported Python Versions
+*************************
+
+We support:
+
+- `Python 3.5`_
+- `Python 3.6`_
+- `Python 3.7`_
+
+.. _Python 3.5: https://docs.python.org/3.5/
+.. _Python 3.6: https://docs.python.org/3.6/
+.. _Python 3.7: https://docs.python.org/3.7/
+
+
+Supported versions can be found in our ``noxfile.py`` `config`_.
+
+.. _config: https://github.com/googleapis/python-storage/blob/master/noxfile.py
+
+We explicitly decided not to support `Python 2.5`_ due to `decreased usage`_
+and lack of continuous integration `support`_.
+
+.. _Python 2.5: https://docs.python.org/2.5/
+.. _decreased usage: https://caremad.io/2013/10/a-look-at-pypi-downloads/
+.. _support: https://blog.travis-ci.com/2013-11-18-upcoming-build-environment-updates/
+
+We have `dropped 2.6`_ as a supported version as well since Python 2.6 is no
+longer supported by the core development team.
+
+Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020.
+
+We also explicitly decided to support Python 3 beginning with version
+3.5. Reasons for this include:
+
+- Encouraging use of newest versions of Python 3
+- Taking the lead of `prominent`_ open-source `projects`_
+- `Unicode literal support`_ which allows for a cleaner codebase that
+ works in both Python 2 and Python 3
+
+.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django
+.. _projects: http://flask.pocoo.org/docs/0.10/python3/
+.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/
+.. _dropped 2.6: https://github.com/googleapis/google-cloud-python/issues/995
+
+**********
+Versioning
+**********
+
+This library follows `Semantic Versioning`_.
+
+.. _Semantic Versioning: http://semver.org/
+
+Some packages are currently in major version zero (``0.y.z``), which means that
+anything may change at any time and the public API should not be considered
+stable.
+
+******************************
+Contributor License Agreements
+******************************
+
+Before we can accept your pull requests you'll need to sign a Contributor
+License Agreement (CLA):
+
+- **If you are an individual writing original source code** and **you own the
+ intellectual property**, then you'll need to sign an
+ `individual CLA <https://developers.google.com/open-source/cla/individual>`__.
+- **If you work for a company that wants to allow you to contribute your work**,
+ then you'll need to sign a
+ `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__.
+
+You can sign these electronically (just scroll to the bottom). After that,
+we'll be able to accept your pull requests.
diff --git a/LICENSE b/LICENSE
index d64569567..a8ee855de 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,7 +1,6 @@
-
- Apache License
+ Apache License
Version 2.0, January 2004
- http://www.apache.org/licenses/
+ https://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
@@ -193,7 +192,7 @@
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
+ https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
diff --git a/MANIFEST.in b/MANIFEST.in
index fc77f8c82..cd011be27 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,4 +1,6 @@
+# Generated by synthtool. DO NOT EDIT!
include README.rst LICENSE
recursive-include google *.json *.proto
recursive-include tests *
-global-exclude *.pyc __pycache__
+global-exclude *.py[co]
+global-exclude __pycache__
diff --git a/docs/_static/custom.css b/docs/_static/custom.css
index 9a6f9f8dd..0abaf229f 100644
--- a/docs/_static/custom.css
+++ b/docs/_static/custom.css
@@ -1,4 +1,4 @@
div#python2-eol {
border-color: red;
border-width: medium;
-}
\ No newline at end of file
+}
\ No newline at end of file
diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html
index de457b2c2..228529efe 100644
--- a/docs/_templates/layout.html
+++ b/docs/_templates/layout.html
@@ -1,3 +1,4 @@
+
{% extends "!layout.html" %}
{%- block content %}
{%- if theme_fixed_sidebar|lower == 'true' %}
diff --git a/docs/conf.py b/docs/conf.py
index cdf8c7c62..4f6c1ec3a 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -20,7 +20,7 @@
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(".."))
-__version__ = "0.1.0"
+__version__ = ""
# -- General configuration ------------------------------------------------
@@ -66,7 +66,7 @@
# General information about the project.
project = u"google-cloud-storage"
-copyright = u"2017, Google"
+copyright = u"2019, Google"
author = u"Google APIs"
# The version info for the project you're documenting, acts as replacement for
@@ -133,9 +133,9 @@
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
- "description": "Google Cloud Client Libraries for Python",
+ "description": "Google Cloud Client Libraries for google-cloud-storage",
"github_user": "googleapis",
- "github_repo": "google-cloud-python",
+ "github_repo": "python-storage",
"github_banner": True,
"font_family": "'Roboto', Georgia, sans",
"head_font_family": "'Roboto', Georgia, serif",
@@ -318,7 +318,7 @@
u"google-cloud-storage Documentation",
author,
"google-cloud-storage",
- "GAPIC library for the Storage API",
+ "google-cloud-storage Library",
"APIs",
)
]
@@ -340,9 +340,8 @@
intersphinx_mapping = {
"python": ("http://python.readthedocs.org/en/latest/", None),
"google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
- "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None),
+ "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None),
"grpc": ("https://grpc.io/grpc/python/", None),
- "requests": ("https://requests.kennethreitz.org/en/stable/", None),
}
diff --git a/google/cloud/storage/_helpers.py b/google/cloud/storage/_helpers.py
index 5bfa13e31..ae53a7b65 100644
--- a/google/cloud/storage/_helpers.py
+++ b/google/cloud/storage/_helpers.py
@@ -21,6 +21,9 @@
from hashlib import md5
import os
+from google.cloud.storage.constants import _DEFAULT_TIMEOUT
+
+
STORAGE_EMULATOR_ENV_VAR = "STORAGE_EMULATOR_HOST"
"""Environment variable defining host for Storage emulator."""
@@ -117,7 +120,7 @@ def _query_params(self):
params["userProject"] = self.user_project
return params
- def reload(self, client=None):
+ def reload(self, client=None, timeout=_DEFAULT_TIMEOUT):
"""Reload properties from Cloud Storage.
If :attr:`user_project` is set, bills the API request to that project.
@@ -126,6 +129,12 @@ def reload(self, client=None):
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current object.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
"""
client = self._require_client(client)
query_params = self._query_params
@@ -138,6 +147,7 @@ def reload(self, client=None):
query_params=query_params,
headers=self._encryption_headers(),
_target_object=self,
+ timeout=timeout,
)
self._set_properties(api_response)
@@ -169,7 +179,7 @@ def _set_properties(self, value):
# If the values are reset, the changes must as well.
self._changes = set()
- def patch(self, client=None):
+ def patch(self, client=None, timeout=_DEFAULT_TIMEOUT):
"""Sends all changed properties in a PATCH request.
Updates the ``_properties`` with the response from the backend.
@@ -180,6 +190,12 @@ def patch(self, client=None):
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current object.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
"""
client = self._require_client(client)
query_params = self._query_params
@@ -195,10 +211,11 @@ def patch(self, client=None):
data=update_properties,
query_params=query_params,
_target_object=self,
+ timeout=timeout,
)
self._set_properties(api_response)
- def update(self, client=None):
+ def update(self, client=None, timeout=_DEFAULT_TIMEOUT):
"""Sends all properties in a PUT request.
Updates the ``_properties`` with the response from the backend.
@@ -209,6 +226,12 @@ def update(self, client=None):
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current object.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
"""
client = self._require_client(client)
query_params = self._query_params
@@ -219,6 +242,7 @@ def update(self, client=None):
data=self._properties,
query_params=query_params,
_target_object=self,
+ timeout=timeout,
)
self._set_properties(api_response)
diff --git a/google/cloud/storage/_signing.py b/google/cloud/storage/_signing.py
index 9fafcaca1..e7c8e3328 100644
--- a/google/cloud/storage/_signing.py
+++ b/google/cloud/storage/_signing.py
@@ -19,10 +19,14 @@
import datetime
import hashlib
import re
+import json
import six
import google.auth.credentials
+
+from google.auth import exceptions
+from google.auth.transport import requests
from google.cloud import _helpers
@@ -265,6 +269,8 @@ def generate_signed_url_v2(
generation=None,
headers=None,
query_parameters=None,
+ service_account_email=None,
+ access_token=None,
):
"""Generate a V2 signed URL to provide query-string auth'n to a resource.
@@ -340,6 +346,12 @@ def generate_signed_url_v2(
Requests using the signed URL *must* pass the specified header
(name and value) with each request for the URL.
+ :type service_account_email: str
+ :param service_account_email: (Optional) E-mail address of the service account.
+
+ :type access_token: str
+ :param access_token: (Optional) Access token for a service account.
+
:type query_parameters: dict
:param query_parameters:
+        (Optional) Additional query parameters to be included as part of the
@@ -370,9 +382,17 @@ def generate_signed_url_v2(
string_to_sign = "\n".join(elements_to_sign)
# Set the right query parameters.
- signed_query_params = get_signed_query_params_v2(
- credentials, expiration_stamp, string_to_sign
- )
+ if access_token and service_account_email:
+ signature = _sign_message(string_to_sign, access_token, service_account_email)
+ signed_query_params = {
+ "GoogleAccessId": service_account_email,
+ "Expires": str(expiration),
+ "Signature": signature,
+ }
+ else:
+ signed_query_params = get_signed_query_params_v2(
+ credentials, expiration_stamp, string_to_sign
+ )
if response_type is not None:
signed_query_params["response-content-type"] = response_type
@@ -409,6 +429,8 @@ def generate_signed_url_v4(
generation=None,
headers=None,
query_parameters=None,
+ service_account_email=None,
+ access_token=None,
_request_timestamp=None, # for testing only
):
"""Generate a V4 signed URL to provide query-string auth'n to a resource.
@@ -492,6 +514,12 @@ def generate_signed_url_v4(
signed URLs. See:
https://cloud.google.com/storage/docs/xml-api/reference-headers#query
+ :type service_account_email: str
+ :param service_account_email: (Optional) E-mail address of the service account.
+
+ :type access_token: str
+ :param access_token: (Optional) Access token for a service account.
+
:raises: :exc:`TypeError` when expiration is not a valid type.
:raises: :exc:`AttributeError` if credentials is not an instance
of :class:`google.auth.credentials.Signing`.
@@ -583,9 +611,58 @@ def generate_signed_url_v4(
]
string_to_sign = "\n".join(string_elements)
- signature_bytes = credentials.sign_bytes(string_to_sign.encode("ascii"))
- signature = binascii.hexlify(signature_bytes).decode("ascii")
+ if access_token and service_account_email:
+ signature = _sign_message(string_to_sign, access_token, service_account_email)
+ signature_bytes = base64.b64decode(signature)
+ signature = binascii.hexlify(signature_bytes).decode("ascii")
+ else:
+ signature_bytes = credentials.sign_bytes(string_to_sign.encode("ascii"))
+ signature = binascii.hexlify(signature_bytes).decode("ascii")
return "{}{}?{}&X-Goog-Signature={}".format(
api_access_endpoint, resource, canonical_query_string, signature
)
+
+
+def _sign_message(message, access_token, service_account_email):
+
+ """Signs a message.
+
+ :type message: str
+ :param message: The message to be signed.
+
+ :type access_token: str
+ :param access_token: Access token for a service account.
+
+
+ :type service_account_email: str
+ :param service_account_email: E-mail address of the service account.
+
+ :raises: :exc:`TransportError` if an `access_token` is unauthorized.
+
+ :rtype: str
+ :returns: The signature of the message.
+
+ """
+ message = _helpers._to_bytes(message)
+
+ method = "POST"
+ url = "https://iam.googleapis.com/v1/projects/-/serviceAccounts/{}:signBlob?alt=json".format(
+ service_account_email
+ )
+ headers = {
+ "Authorization": "Bearer " + access_token,
+ "Content-type": "application/json",
+ }
+ body = json.dumps({"bytesToSign": base64.b64encode(message).decode("utf-8")})
+
+ request = requests.Request()
+ response = request(url=url, method=method, body=body, headers=headers)
+
+ if response.status != six.moves.http_client.OK:
+ raise exceptions.TransportError(
+ "Error calling the IAM signBytes API: {}".format(response.data)
+ )
+
+ data = json.loads(response.data.decode("utf-8"))
+ return data["signature"]
diff --git a/google/cloud/storage/acl.py b/google/cloud/storage/acl.py
index 9b1af1d87..7260c50b0 100644
--- a/google/cloud/storage/acl.py
+++ b/google/cloud/storage/acl.py
@@ -79,6 +79,8 @@
when sending metadata for ACLs to the API.
"""
+from google.cloud.storage.constants import _DEFAULT_TIMEOUT
+
class _ACLEntity(object):
"""Class representing a set of roles for an entity.
@@ -206,10 +208,18 @@ class ACL(object):
def __init__(self):
self.entities = {}
- def _ensure_loaded(self):
- """Load if not already loaded."""
+ def _ensure_loaded(self, timeout=_DEFAULT_TIMEOUT):
+ """Load if not already loaded.
+
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+ """
if not self.loaded:
- self.reload()
+ self.reload(timeout=timeout)
@classmethod
def validate_predefined(cls, predefined):
@@ -415,7 +425,7 @@ def _require_client(self, client):
client = self.client
return client
- def reload(self, client=None):
+ def reload(self, client=None, timeout=_DEFAULT_TIMEOUT):
"""Reload the ACL data from Cloud Storage.
If :attr:`user_project` is set, bills the API request to that project.
@@ -424,6 +434,12 @@ def reload(self, client=None):
``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the ACL's parent.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
"""
path = self.reload_path
client = self._require_client(client)
@@ -435,13 +451,13 @@ def reload(self, client=None):
self.entities.clear()
found = client._connection.api_request(
- method="GET", path=path, query_params=query_params
+ method="GET", path=path, query_params=query_params, timeout=timeout
)
self.loaded = True
for entry in found.get("items", ()):
self.add_entity(self.entity_from_dict(entry))
- def _save(self, acl, predefined, client):
+ def _save(self, acl, predefined, client, timeout=_DEFAULT_TIMEOUT):
"""Helper for :meth:`save` and :meth:`save_predefined`.
:type acl: :class:`google.cloud.storage.acl.ACL`, or a compatible list.
@@ -457,6 +473,12 @@ def _save(self, acl, predefined, client):
``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the ACL's parent.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
"""
query_params = {"projection": "full"}
if predefined is not None:
@@ -474,13 +496,14 @@ def _save(self, acl, predefined, client):
path=path,
data={self._URL_PATH_ELEM: list(acl)},
query_params=query_params,
+ timeout=timeout,
)
self.entities.clear()
for entry in result.get(self._URL_PATH_ELEM, ()):
self.add_entity(self.entity_from_dict(entry))
self.loaded = True
- def save(self, acl=None, client=None):
+ def save(self, acl=None, client=None, timeout=_DEFAULT_TIMEOUT):
"""Save this ACL for the current bucket.
If :attr:`user_project` is set, bills the API request to that project.
@@ -493,6 +516,12 @@ def save(self, acl=None, client=None):
``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the ACL's parent.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
"""
if acl is None:
acl = self
@@ -501,9 +530,9 @@ def save(self, acl=None, client=None):
save_to_backend = True
if save_to_backend:
- self._save(acl, None, client)
+ self._save(acl, None, client, timeout=timeout)
- def save_predefined(self, predefined, client=None):
+ def save_predefined(self, predefined, client=None, timeout=_DEFAULT_TIMEOUT):
"""Save this ACL for the current bucket using a predefined ACL.
If :attr:`user_project` is set, bills the API request to that project.
@@ -519,11 +548,17 @@ def save_predefined(self, predefined, client=None):
``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the ACL's parent.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
"""
predefined = self.validate_predefined(predefined)
- self._save(None, predefined, client)
+ self._save(None, predefined, client, timeout=timeout)
- def clear(self, client=None):
+ def clear(self, client=None, timeout=_DEFAULT_TIMEOUT):
"""Remove all ACL entries.
If :attr:`user_project` is set, bills the API request to that project.
@@ -537,8 +572,14 @@ def clear(self, client=None):
``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the ACL's parent.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
"""
- self.save([], client=client)
+ self.save([], client=client, timeout=timeout)
class BucketACL(ACL):
diff --git a/google/cloud/storage/batch.py b/google/cloud/storage/batch.py
index 92f1a18d1..89425f9b8 100644
--- a/google/cloud/storage/batch.py
+++ b/google/cloud/storage/batch.py
@@ -29,6 +29,7 @@
from google.cloud import _helpers
from google.cloud import exceptions
from google.cloud.storage._http import Connection
+from google.cloud.storage.constants import _DEFAULT_TIMEOUT
class MIMEApplicationHTTP(MIMEApplication):
@@ -150,7 +151,9 @@ def __init__(self, client):
self._requests = []
self._target_objects = []
- def _do_request(self, method, url, headers, data, target_object, timeout=None):
+ def _do_request(
+ self, method, url, headers, data, target_object, timeout=_DEFAULT_TIMEOUT
+ ):
"""Override Connection: defer actual HTTP request.
Only allow up to ``_MAX_BATCH_SIZE`` requests to be deferred.
@@ -175,7 +178,8 @@ def _do_request(self, method, url, headers, data, target_object, timeout=None):
:type timeout: float or tuple
:param timeout: (optional) The amount of time, in seconds, to wait
- for the server response. By default, the method waits indefinitely.
+ for the server response.
+
Can also be passed as a tuple (connect_timeout, read_timeout).
See :meth:`requests.Session.request` documentation for details.
@@ -206,8 +210,8 @@ def _prepare_batch_request(self):
multi = MIMEMultipart()
- # Use timeout of last request, default to None (indefinite)
- timeout = None
+ # Use timeout of last request, default to _DEFAULT_TIMEOUT
+ timeout = _DEFAULT_TIMEOUT
for method, uri, headers, body, _timeout in self._requests:
subrequest = MIMEApplicationHTTP(method, uri, headers, body)
multi.attach(subrequest)
diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py
index 68832b718..93beced90 100644
--- a/google/cloud/storage/blob.py
+++ b/google/cloud/storage/blob.py
@@ -61,6 +61,7 @@
from google.cloud.storage._signing import generate_signed_url_v4
from google.cloud.storage.acl import ACL
from google.cloud.storage.acl import ObjectACL
+from google.cloud.storage.constants import _DEFAULT_TIMEOUT
from google.cloud.storage.constants import ARCHIVE_STORAGE_CLASS
from google.cloud.storage.constants import COLDLINE_STORAGE_CLASS
from google.cloud.storage.constants import MULTI_REGIONAL_LEGACY_STORAGE_CLASS
@@ -358,6 +359,8 @@ def generate_signed_url(
client=None,
credentials=None,
version=None,
+ service_account_email=None,
+ access_token=None,
):
"""Generates a signed URL for this blob.
@@ -445,6 +448,12 @@ def generate_signed_url(
:param version: (Optional) The version of signed credential to create.
Must be one of 'v2' | 'v4'.
+ :type service_account_email: str
+ :param service_account_email: (Optional) E-mail address of the service account.
+
+ :type access_token: str
+ :param access_token: (Optional) Access token for a service account.
+
:raises: :exc:`ValueError` when version is invalid.
:raises: :exc:`TypeError` when expiration is not a valid type.
:raises: :exc:`AttributeError` if credentials is not an instance
@@ -497,9 +506,11 @@ def generate_signed_url(
generation=generation,
headers=headers,
query_parameters=query_parameters,
+ service_account_email=service_account_email,
+ access_token=access_token,
)
- def exists(self, client=None):
+ def exists(self, client=None, timeout=_DEFAULT_TIMEOUT):
"""Determines whether or not this blob exists.
If :attr:`user_project` is set on the bucket, bills the API request
@@ -509,6 +520,12 @@ def exists(self, client=None):
``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the blob's bucket.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
:rtype: bool
:returns: True if the blob exists in Cloud Storage.
@@ -527,6 +544,7 @@ def exists(self, client=None):
path=self.path,
query_params=query_params,
_target_object=None,
+ timeout=timeout,
)
# NOTE: This will not fail immediately in a batch. However, when
# Batch.finish() is called, the resulting `NotFound` will be
@@ -535,7 +553,7 @@ def exists(self, client=None):
except NotFound:
return False
- def delete(self, client=None):
+ def delete(self, client=None, timeout=_DEFAULT_TIMEOUT):
"""Deletes a blob from Cloud Storage.
If :attr:`user_project` is set on the bucket, bills the API request
@@ -545,12 +563,20 @@ def delete(self, client=None):
``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the blob's bucket.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
:raises: :class:`google.cloud.exceptions.NotFound`
(propagated from
:meth:`google.cloud.storage.bucket.Bucket.delete_blob`).
"""
- self.bucket.delete_blob(self.name, client=client, generation=self.generation)
+ self.bucket.delete_blob(
+ self.name, client=client, generation=self.generation, timeout=timeout
+ )
def _get_transport(self, client):
"""Return the client's transport.
@@ -1454,7 +1480,9 @@ def create_resumable_upload_session(
except resumable_media.InvalidResponse as exc:
_raise_from_invalid_response(exc)
- def get_iam_policy(self, client=None, requested_policy_version=None):
+ def get_iam_policy(
+ self, client=None, requested_policy_version=None, timeout=_DEFAULT_TIMEOUT
+ ):
"""Retrieve the IAM policy for the object.
.. note:
@@ -1484,6 +1512,12 @@ def get_iam_policy(self, client=None, requested_policy_version=None):
The service might return a policy with version lower
than the one that was requested, based on the
feature syntax in the policy fetched.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
:rtype: :class:`google.api_core.iam.Policy`
:returns: the policy instance, based on the resource returned from
@@ -1504,10 +1538,11 @@ def get_iam_policy(self, client=None, requested_policy_version=None):
path="%s/iam" % (self.path,),
query_params=query_params,
_target_object=None,
+ timeout=timeout,
)
return Policy.from_api_repr(info)
- def set_iam_policy(self, policy, client=None):
+ def set_iam_policy(self, policy, client=None, timeout=_DEFAULT_TIMEOUT):
"""Update the IAM policy for the bucket.
.. note:
@@ -1528,6 +1563,12 @@ def set_iam_policy(self, policy, client=None):
``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the current bucket.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
:rtype: :class:`google.api_core.iam.Policy`
:returns: the policy instance, based on the resource returned from
@@ -1548,10 +1589,11 @@ def set_iam_policy(self, policy, client=None):
query_params=query_params,
data=resource,
_target_object=None,
+ timeout=timeout,
)
return Policy.from_api_repr(info)
- def test_iam_permissions(self, permissions, client=None):
+ def test_iam_permissions(self, permissions, client=None, timeout=_DEFAULT_TIMEOUT):
"""API call: test permissions
.. note:
@@ -1572,6 +1614,12 @@ def test_iam_permissions(self, permissions, client=None):
``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the current bucket.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
:rtype: list of string
:returns: the permissions returned by the ``testIamPermissions`` API
@@ -1585,7 +1633,7 @@ def test_iam_permissions(self, permissions, client=None):
path = "%s/iam/testPermissions" % (self.path,)
resp = client._connection.api_request(
- method="GET", path=path, query_params=query_params
+ method="GET", path=path, query_params=query_params, timeout=timeout
)
return resp.get("permissions", [])
@@ -1612,7 +1660,7 @@ def make_private(self, client=None):
self.acl.all().revoke_read()
self.acl.save(client=client)
- def compose(self, sources, client=None):
+ def compose(self, sources, client=None, timeout=_DEFAULT_TIMEOUT):
"""Concatenate source blobs into this one.
If :attr:`user_project` is set on the bucket, bills the API request
@@ -1625,6 +1673,12 @@ def compose(self, sources, client=None):
``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the blob's bucket.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
"""
client = self._require_client(client)
query_params = {}
@@ -1642,10 +1696,11 @@ def compose(self, sources, client=None):
query_params=query_params,
data=request,
_target_object=self,
+ timeout=timeout,
)
self._set_properties(api_response)
- def rewrite(self, source, token=None, client=None):
+ def rewrite(self, source, token=None, client=None, timeout=_DEFAULT_TIMEOUT):
"""Rewrite source blob into this one.
If :attr:`user_project` is set on the bucket, bills the API request
@@ -1664,6 +1719,13 @@ def rewrite(self, source, token=None, client=None):
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the blob's bucket.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:rtype: tuple
:returns: ``(token, bytes_rewritten, total_bytes)``, where ``token``
is a rewrite token (``None`` if the rewrite is complete),
@@ -1695,6 +1757,7 @@ def rewrite(self, source, token=None, client=None):
data=self._properties,
headers=headers,
_target_object=self,
+ timeout=timeout,
)
rewritten = int(api_response["totalBytesRewritten"])
size = int(api_response["objectSize"])
diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py
index ed275e88f..4914844dd 100644
--- a/google/cloud/storage/bucket.py
+++ b/google/cloud/storage/bucket.py
@@ -17,6 +17,7 @@
import base64
import copy
import datetime
+import functools
import json
import warnings
@@ -39,6 +40,7 @@
from google.cloud.storage.acl import BucketACL
from google.cloud.storage.acl import DefaultObjectACL
from google.cloud.storage.blob import Blob
+from google.cloud.storage.constants import _DEFAULT_TIMEOUT
from google.cloud.storage.constants import ARCHIVE_STORAGE_CLASS
from google.cloud.storage.constants import COLDLINE_STORAGE_CLASS
from google.cloud.storage.constants import DUAL_REGION_LOCATION_TYPE
@@ -638,7 +640,7 @@ def notification(
payload_format=payload_format,
)
- def exists(self, client=None):
+ def exists(self, client=None, timeout=_DEFAULT_TIMEOUT):
"""Determines whether or not this bucket exists.
If :attr:`user_project` is set, bills the API request to that project.
@@ -647,6 +649,12 @@ def exists(self, client=None):
``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the current bucket.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
:rtype: bool
:returns: True if the bucket exists in Cloud Storage.
@@ -667,6 +675,7 @@ def exists(self, client=None):
path=self.path,
query_params=query_params,
_target_object=None,
+ timeout=timeout,
)
# NOTE: This will not fail immediately in a batch. However, when
# Batch.finish() is called, the resulting `NotFound` will be
@@ -682,6 +691,7 @@ def create(
location=None,
predefined_acl=None,
predefined_default_object_acl=None,
+ timeout=_DEFAULT_TIMEOUT,
):
"""Creates current bucket.
@@ -719,6 +729,13 @@ def create(
:param predefined_default_object_acl:
Optional. Name of predefined ACL to apply to bucket's objects. See:
https://cloud.google.com/storage/docs/access-control/lists#predefined-acl
+
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
"""
if self.user_project is not None:
raise ValueError("Cannot create bucket with 'user_project' set.")
@@ -755,10 +772,11 @@ def create(
query_params=query_params,
data=properties,
_target_object=self,
+ timeout=timeout,
)
self._set_properties(api_response)
- def patch(self, client=None):
+ def patch(self, client=None, timeout=_DEFAULT_TIMEOUT):
"""Sends all changed properties in a PATCH request.
Updates the ``_properties`` with the response from the backend.
@@ -769,6 +787,12 @@ def patch(self, client=None):
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current object.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
"""
# Special case: For buckets, it is possible that labels are being
# removed; this requires special handling.
@@ -779,7 +803,7 @@ def patch(self, client=None):
self._properties["labels"][removed_label] = None
# Call the superclass method.
- return super(Bucket, self).patch(client=client)
+ return super(Bucket, self).patch(client=client, timeout=timeout)
@property
def acl(self):
@@ -812,7 +836,13 @@ def path(self):
return self.path_helper(self.name)
def get_blob(
- self, blob_name, client=None, encryption_key=None, generation=None, **kwargs
+ self,
+ blob_name,
+ client=None,
+ encryption_key=None,
+ generation=None,
+ timeout=_DEFAULT_TIMEOUT,
+ **kwargs
):
"""Get a blob object by name.
@@ -842,6 +872,13 @@ def get_blob(
:param generation: Optional. If present, selects a specific revision of
this object.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:param kwargs: Keyword arguments to pass to the
:class:`~google.cloud.storage.blob.Blob` constructor.
@@ -859,7 +896,7 @@ def get_blob(
# NOTE: This will not fail immediately in a batch. However, when
# Batch.finish() is called, the resulting `NotFound` will be
# raised.
- blob.reload(client=client)
+ blob.reload(client=client, timeout=timeout)
except NotFound:
return None
else:
@@ -875,6 +912,7 @@ def list_blobs(
projection="noAcl",
fields=None,
client=None,
+ timeout=_DEFAULT_TIMEOUT,
):
"""Return an iterator used to find blobs in the bucket.
@@ -885,9 +923,7 @@ def list_blobs(
:type max_results: int
:param max_results:
- (Optional) The maximum number of blobs in each page of results
- from this request. Non-positive values are ignored. Defaults to
- a sensible value set by the API.
+ (Optional) The maximum number of blobs to return.
:type page_token: str
:param page_token:
@@ -926,6 +962,13 @@ def list_blobs(
:param client: (Optional) The client to use. If not passed, falls back
to the ``client`` stored on the current bucket.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:rtype: :class:`~google.api_core.page_iterator.Iterator`
:returns: Iterator of all :class:`~google.cloud.storage.blob.Blob`
in this bucket matching the arguments.
@@ -949,9 +992,10 @@ def list_blobs(
client = self._require_client(client)
path = self.path + "/o"
+ api_request = functools.partial(client._connection.api_request, timeout=timeout)
iterator = page_iterator.HTTPIterator(
client=client,
- api_request=client._connection.api_request,
+ api_request=api_request,
path=path,
item_to_value=_item_to_blob,
page_token=page_token,
@@ -963,7 +1007,7 @@ def list_blobs(
iterator.prefixes = set()
return iterator
- def list_notifications(self, client=None):
+ def list_notifications(self, client=None, timeout=_DEFAULT_TIMEOUT):
"""List Pub / Sub notifications for this bucket.
See:
@@ -975,22 +1019,29 @@ def list_notifications(self, client=None):
``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the current bucket.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
:rtype: list of :class:`.BucketNotification`
:returns: notification instances
"""
client = self._require_client(client)
path = self.path + "/notificationConfigs"
+ api_request = functools.partial(client._connection.api_request, timeout=timeout)
iterator = page_iterator.HTTPIterator(
client=client,
- api_request=client._connection.api_request,
+ api_request=api_request,
path=path,
item_to_value=_item_to_notification,
)
iterator.bucket = self
return iterator
- def delete(self, force=False, client=None):
+ def delete(self, force=False, client=None, timeout=_DEFAULT_TIMEOUT):
"""Delete this bucket.
The bucket **must** be empty in order to submit a delete request. If
@@ -1016,6 +1067,12 @@ def delete(self, force=False, client=None):
``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the current bucket.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response on each request.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
:raises: :class:`ValueError` if ``force`` is ``True`` and the bucket
contains more than 256 objects / blobs.
@@ -1029,7 +1086,9 @@ def delete(self, force=False, client=None):
if force:
blobs = list(
self.list_blobs(
- max_results=self._MAX_OBJECTS_FOR_ITERATION + 1, client=client
+ max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
+ client=client,
+ timeout=timeout,
)
)
if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
@@ -1042,7 +1101,9 @@ def delete(self, force=False, client=None):
raise ValueError(message)
# Ignore 404 errors on delete.
- self.delete_blobs(blobs, on_error=lambda blob: None, client=client)
+ self.delete_blobs(
+ blobs, on_error=lambda blob: None, client=client, timeout=timeout
+ )
# We intentionally pass `_target_object=None` since a DELETE
# request has no response value (whether in a standard request or
@@ -1052,9 +1113,12 @@ def delete(self, force=False, client=None):
path=self.path,
query_params=query_params,
_target_object=None,
+ timeout=timeout,
)
- def delete_blob(self, blob_name, client=None, generation=None):
+ def delete_blob(
+ self, blob_name, client=None, generation=None, timeout=_DEFAULT_TIMEOUT
+ ):
"""Deletes a blob from the current bucket.
If the blob isn't found (backend 404), raises a
@@ -1080,6 +1144,13 @@ def delete_blob(self, blob_name, client=None, generation=None):
:param generation: Optional. If present, permanently deletes a specific
revision of this object.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:raises: :class:`google.cloud.exceptions.NotFound` (to suppress
the exception, call ``delete_blobs``, passing a no-op
``on_error`` callback, e.g.:
@@ -1100,9 +1171,10 @@ def delete_blob(self, blob_name, client=None, generation=None):
path=blob.path,
query_params=blob._query_params,
_target_object=None,
+ timeout=timeout,
)
- def delete_blobs(self, blobs, on_error=None, client=None):
+ def delete_blobs(self, blobs, on_error=None, client=None, timeout=_DEFAULT_TIMEOUT):
"""Deletes a list of blobs from the current bucket.
Uses :meth:`delete_blob` to delete each individual blob.
@@ -1123,6 +1195,14 @@ def delete_blobs(self, blobs, on_error=None, client=None):
:param client: (Optional) The client to use. If not passed, falls back
to the ``client`` stored on the current bucket.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response. The timeout applies to each individual
+ blob delete request.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:raises: :class:`~google.cloud.exceptions.NotFound` (if
`on_error` is not passed).
"""
@@ -1131,7 +1211,7 @@ def delete_blobs(self, blobs, on_error=None, client=None):
blob_name = blob
if not isinstance(blob_name, six.string_types):
blob_name = blob.name
- self.delete_blob(blob_name, client=client)
+ self.delete_blob(blob_name, client=client, timeout=timeout)
except NotFound:
if on_error is not None:
on_error(blob)
@@ -1146,6 +1226,7 @@ def copy_blob(
client=None,
preserve_acl=True,
source_generation=None,
+ timeout=_DEFAULT_TIMEOUT,
):
"""Copy the given blob to the given bucket, optionally with a new name.
@@ -1174,6 +1255,13 @@ def copy_blob(
:param source_generation: Optional. The generation of the blob to be
copied.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:rtype: :class:`google.cloud.storage.blob.Blob`
:returns: The new Blob.
"""
@@ -1196,15 +1284,16 @@ def copy_blob(
path=api_path,
query_params=query_params,
_target_object=new_blob,
+ timeout=timeout,
)
if not preserve_acl:
- new_blob.acl.save(acl={}, client=client)
+ new_blob.acl.save(acl={}, client=client, timeout=timeout)
new_blob._set_properties(copy_result)
return new_blob
- def rename_blob(self, blob, new_name, client=None):
+ def rename_blob(self, blob, new_name, client=None, timeout=_DEFAULT_TIMEOUT):
"""Rename the given blob using copy and delete operations.
If :attr:`user_project` is set, bills the API request to that project.
@@ -1229,15 +1318,23 @@ def rename_blob(self, blob, new_name, client=None):
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the current bucket.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response. The timeout applies to each individual
+ request.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:rtype: :class:`Blob`
:returns: The newly-renamed blob.
"""
same_name = blob.name == new_name
- new_blob = self.copy_blob(blob, self, new_name, client=client)
+ new_blob = self.copy_blob(blob, self, new_name, client=client, timeout=timeout)
if not same_name:
- blob.delete(client=client)
+ blob.delete(client=client, timeout=timeout)
return new_blob
@@ -1865,7 +1962,9 @@ def disable_website(self):
"""
return self.configure_website(None, None)
- def get_iam_policy(self, client=None, requested_policy_version=None):
+ def get_iam_policy(
+ self, client=None, requested_policy_version=None, timeout=_DEFAULT_TIMEOUT
+ ):
"""Retrieve the IAM policy for the bucket.
See
@@ -1890,6 +1989,13 @@ def get_iam_policy(self, client=None, requested_policy_version=None):
than the one that was requested, based on the
feature syntax in the policy fetched.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:rtype: :class:`google.api_core.iam.Policy`
:returns: the policy instance, based on the resource returned from
the ``getIamPolicy`` API request.
@@ -1932,10 +2038,11 @@ def get_iam_policy(self, client=None, requested_policy_version=None):
path="%s/iam" % (self.path,),
query_params=query_params,
_target_object=None,
+ timeout=timeout,
)
return Policy.from_api_repr(info)
- def set_iam_policy(self, policy, client=None):
+ def set_iam_policy(self, policy, client=None, timeout=_DEFAULT_TIMEOUT):
"""Update the IAM policy for the bucket.
See
@@ -1951,6 +2058,13 @@ def set_iam_policy(self, policy, client=None):
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the current bucket.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:rtype: :class:`google.api_core.iam.Policy`
:returns: the policy instance, based on the resource returned from
the ``setIamPolicy`` API request.
@@ -1969,10 +2083,11 @@ def set_iam_policy(self, policy, client=None):
query_params=query_params,
data=resource,
_target_object=None,
+ timeout=timeout,
)
return Policy.from_api_repr(info)
- def test_iam_permissions(self, permissions, client=None):
+ def test_iam_permissions(self, permissions, client=None, timeout=_DEFAULT_TIMEOUT):
"""API call: test permissions
See
@@ -1988,6 +2103,13 @@ def test_iam_permissions(self, permissions, client=None):
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the current bucket.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:rtype: list of string
:returns: the permissions returned by the ``testIamPermissions`` API
request.
@@ -2000,11 +2122,13 @@ def test_iam_permissions(self, permissions, client=None):
path = "%s/iam/testPermissions" % (self.path,)
resp = client._connection.api_request(
- method="GET", path=path, query_params=query_params
+ method="GET", path=path, query_params=query_params, timeout=timeout
)
return resp.get("permissions", [])
- def make_public(self, recursive=False, future=False, client=None):
+ def make_public(
+ self, recursive=False, future=False, client=None, timeout=_DEFAULT_TIMEOUT
+ ):
"""Update bucket's ACL, granting read access to anonymous users.
:type recursive: bool
@@ -2019,6 +2143,13 @@ def make_public(self, recursive=False, future=False, client=None):
``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the current bucket.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response. The timeout applies to each underlying
+ request.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
:raises ValueError:
If ``recursive`` is True, and the bucket contains more than 256
@@ -2029,14 +2160,14 @@ def make_public(self, recursive=False, future=False, client=None):
for each blob.
"""
self.acl.all().grant_read()
- self.acl.save(client=client)
+ self.acl.save(client=client, timeout=timeout)
if future:
doa = self.default_object_acl
if not doa.loaded:
- doa.reload(client=client)
+ doa.reload(client=client, timeout=timeout)
doa.all().grant_read()
- doa.save(client=client)
+ doa.save(client=client, timeout=timeout)
if recursive:
blobs = list(
@@ -2044,6 +2175,7 @@ def make_public(self, recursive=False, future=False, client=None):
projection="full",
max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
client=client,
+ timeout=timeout,
)
)
if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
@@ -2058,9 +2190,11 @@ def make_public(self, recursive=False, future=False, client=None):
for blob in blobs:
blob.acl.all().grant_read()
- blob.acl.save(client=client)
+ blob.acl.save(client=client, timeout=timeout)
- def make_private(self, recursive=False, future=False, client=None):
+ def make_private(
+ self, recursive=False, future=False, client=None, timeout=_DEFAULT_TIMEOUT
+ ):
"""Update bucket's ACL, revoking read access for anonymous users.
:type recursive: bool
@@ -2076,6 +2210,14 @@ def make_private(self, recursive=False, future=False, client=None):
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the current bucket.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response. The timeout applies to each underlying
+ request.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:raises ValueError:
If ``recursive`` is True, and the bucket contains more than 256
blobs. This is to prevent extremely long runtime of this
@@ -2085,14 +2227,14 @@ def make_private(self, recursive=False, future=False, client=None):
for each blob.
"""
self.acl.all().revoke_read()
- self.acl.save(client=client)
+ self.acl.save(client=client, timeout=timeout)
if future:
doa = self.default_object_acl
if not doa.loaded:
- doa.reload(client=client)
+ doa.reload(client=client, timeout=timeout)
doa.all().revoke_read()
- doa.save(client=client)
+ doa.save(client=client, timeout=timeout)
if recursive:
blobs = list(
@@ -2100,6 +2242,7 @@ def make_private(self, recursive=False, future=False, client=None):
projection="full",
max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
client=client,
+ timeout=timeout,
)
)
if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
@@ -2114,7 +2257,7 @@ def make_private(self, recursive=False, future=False, client=None):
for blob in blobs:
blob.acl.all().revoke_read()
- blob.acl.save(client=client)
+ blob.acl.save(client=client, timeout=timeout)
def generate_upload_policy(self, conditions, expiration=None, client=None):
"""Create a signed upload policy for uploading objects.
@@ -2178,9 +2321,16 @@ def generate_upload_policy(self, conditions, expiration=None, client=None):
return fields
- def lock_retention_policy(self, client=None):
+ def lock_retention_policy(self, client=None, timeout=_DEFAULT_TIMEOUT):
"""Lock the bucket's retention policy.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:raises ValueError:
if the bucket has no metageneration (i.e., new or never reloaded);
if the bucket has no retention policy assigned;
@@ -2206,7 +2356,11 @@ def lock_retention_policy(self, client=None):
path = "/b/{}/lockRetentionPolicy".format(self.name)
api_response = client._connection.api_request(
- method="POST", path=path, query_params=query_params, _target_object=self
+ method="POST",
+ path=path,
+ query_params=query_params,
+ _target_object=self,
+ timeout=timeout,
)
self._set_properties(api_response)
diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py
index 9c89b342d..41c123880 100644
--- a/google/cloud/storage/client.py
+++ b/google/cloud/storage/client.py
@@ -14,6 +14,8 @@
"""Client for interacting with the Google Cloud Storage API."""
+import functools
+
import google.api_core.client_options
from google.auth.credentials import AnonymousCredentials
@@ -30,6 +32,7 @@
from google.cloud.storage.hmac_key import HMACKeyMetadata
from google.cloud.storage.acl import BucketACL
from google.cloud.storage.acl import DefaultObjectACL
+from google.cloud.storage.constants import _DEFAULT_TIMEOUT
_marker = object()
@@ -211,13 +214,19 @@ def current_batch(self):
"""
return self._batch_stack.top
- def get_service_account_email(self, project=None):
+ def get_service_account_email(self, project=None, timeout=_DEFAULT_TIMEOUT):
"""Get the email address of the project's GCS service account
:type project: str
:param project:
(Optional) Project ID to use for retreiving GCS service account
email address. Defaults to the client's project.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
:rtype: str
:returns: service account email address
@@ -225,7 +234,9 @@ def get_service_account_email(self, project=None):
if project is None:
project = self.project
path = "/projects/%s/serviceAccount" % (project,)
- api_response = self._base_connection.api_request(method="GET", path=path)
+ api_response = self._base_connection.api_request(
+ method="GET", path=path, timeout=timeout
+ )
return api_response["email_address"]
def bucket(self, bucket_name, user_project=None):
@@ -259,7 +270,7 @@ def batch(self):
"""
return Batch(client=self)
- def get_bucket(self, bucket_or_name):
+ def get_bucket(self, bucket_or_name, timeout=_DEFAULT_TIMEOUT):
"""API call: retrieve a bucket via a GET request.
See
@@ -272,6 +283,12 @@ def get_bucket(self, bucket_or_name):
]):
The bucket resource to pass or name to create.
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The amount of time, in seconds, to wait for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
Returns:
google.cloud.storage.bucket.Bucket
The bucket matching the name provided.
@@ -302,10 +319,10 @@ def get_bucket(self, bucket_or_name):
"""
bucket = self._bucket_arg_to_bucket(bucket_or_name)
- bucket.reload(client=self)
+ bucket.reload(client=self, timeout=timeout)
return bucket
- def lookup_bucket(self, bucket_name):
+ def lookup_bucket(self, bucket_name, timeout=_DEFAULT_TIMEOUT):
"""Get a bucket by name, returning None if not found.
You can use this if you would rather check for a None value
@@ -318,11 +335,18 @@ def lookup_bucket(self, bucket_name):
:type bucket_name: str
:param bucket_name: The name of the bucket to get.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:rtype: :class:`google.cloud.storage.bucket.Bucket`
:returns: The bucket matching the name provided or None if not found.
"""
try:
- return self.get_bucket(bucket_name)
+ return self.get_bucket(bucket_name, timeout=timeout)
except NotFound:
return None
@@ -335,6 +359,7 @@ def create_bucket(
location=None,
predefined_acl=None,
predefined_default_object_acl=None,
+ timeout=_DEFAULT_TIMEOUT,
):
"""API call: create a new bucket via a POST request.
@@ -366,6 +391,11 @@ def create_bucket(
predefined_default_object_acl (str):
Optional. Name of predefined ACL to apply to bucket's objects. See:
https://cloud.google.com/storage/docs/access-control/lists#predefined-acl
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The amount of time, in seconds, to wait for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
Returns:
google.cloud.storage.bucket.Bucket
@@ -434,6 +464,7 @@ def create_bucket(
query_params=query_params,
data=properties,
_target_object=bucket,
+ timeout=timeout,
)
bucket._set_properties(api_response)
@@ -495,6 +526,7 @@ def list_blobs(
versions=None,
projection="noAcl",
fields=None,
+ timeout=_DEFAULT_TIMEOUT,
):
"""Return an iterator used to find blobs in the bucket.
@@ -508,9 +540,7 @@ def list_blobs(
The bucket resource to pass or name to create.
max_results (int):
- (Optional) The maximum number of blobs in each page of results
- from this request. Non-positive values are ignored. Defaults to
- a sensible value set by the API.
+ (Optional) The maximum number of blobs to return.
page_token (str):
(Optional) If present, return the next batch of blobs, using the
@@ -543,6 +573,12 @@ def list_blobs(
``'items(name,contentLanguage),nextPageToken'``.
See: https://cloud.google.com/storage/docs/json_api/v1/parameters#fields
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The amount of time, in seconds, to wait for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
Returns:
Iterator of all :class:`~google.cloud.storage.blob.Blob`
in this bucket matching the arguments.
@@ -557,6 +593,7 @@ def list_blobs(
projection=projection,
fields=fields,
client=self,
+ timeout=timeout,
)
def list_buckets(
@@ -567,6 +604,7 @@ def list_buckets(
projection="noAcl",
fields=None,
project=None,
+ timeout=_DEFAULT_TIMEOUT,
):
"""Get all buckets in the project associated to the client.
@@ -610,6 +648,13 @@ def list_buckets(
:param project: (Optional) the project whose buckets are to be listed.
If not passed, uses the project set on the client.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:rtype: :class:`~google.api_core.page_iterator.Iterator`
:raises ValueError: if both ``project`` is ``None`` and the client's
project is also ``None``.
@@ -632,9 +677,11 @@ def list_buckets(
if fields is not None:
extra_params["fields"] = fields
+ api_request = functools.partial(self._connection.api_request, timeout=timeout)
+
return page_iterator.HTTPIterator(
client=self,
- api_request=self._connection.api_request,
+ api_request=api_request,
path="/b",
item_to_value=_item_to_bucket,
page_token=page_token,
@@ -643,7 +690,11 @@ def list_buckets(
)
def create_hmac_key(
- self, service_account_email, project_id=None, user_project=None
+ self,
+ service_account_email,
+ project_id=None,
+ user_project=None,
+ timeout=_DEFAULT_TIMEOUT,
):
"""Create an HMAC key for a service account.
@@ -657,6 +708,13 @@ def create_hmac_key(
:type user_project: str
:param user_project: (Optional) This parameter is currently ignored.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:rtype:
Tuple[:class:`~google.cloud.storage.hmac_key.HMACKeyMetadata`, str]
:returns: metadata for the created key, plus the bytes of the key's secret, which is an 40-character base64-encoded string.
@@ -671,7 +729,7 @@ def create_hmac_key(
qs_params["userProject"] = user_project
api_response = self._connection.api_request(
- method="POST", path=path, query_params=qs_params
+ method="POST", path=path, query_params=qs_params, timeout=timeout
)
metadata = HMACKeyMetadata(self)
metadata._properties = api_response["metadata"]
@@ -685,6 +743,7 @@ def list_hmac_keys(
show_deleted_keys=None,
project_id=None,
user_project=None,
+ timeout=_DEFAULT_TIMEOUT,
):
"""List HMAC keys for a project.
@@ -708,6 +767,13 @@ def list_hmac_keys(
:type user_project: str
:param user_project: (Optional) This parameter is currently ignored.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:rtype:
Tuple[:class:`~google.cloud.storage.hmac_key.HMACKeyMetadata`, str]
:returns: metadata for the created key, plus the bytes of the key's secret, which is an 40-character base64-encoded string.
@@ -727,16 +793,20 @@ def list_hmac_keys(
if user_project is not None:
extra_params["userProject"] = user_project
+ api_request = functools.partial(self._connection.api_request, timeout=timeout)
+
return page_iterator.HTTPIterator(
client=self,
- api_request=self._connection.api_request,
+ api_request=api_request,
path=path,
item_to_value=_item_to_hmac_key_metadata,
max_results=max_results,
extra_params=extra_params,
)
- def get_hmac_key_metadata(self, access_id, project_id=None, user_project=None):
+ def get_hmac_key_metadata(
+ self, access_id, project_id=None, user_project=None, timeout=_DEFAULT_TIMEOUT
+ ):
"""Return a metadata instance for the given HMAC key.
:type access_id: str
@@ -746,11 +816,18 @@ def get_hmac_key_metadata(self, access_id, project_id=None, user_project=None):
:param project_id: (Optional) project ID of an existing key.
Defaults to client's project.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:type user_project: str
:param user_project: (Optional) This parameter is currently ignored.
"""
metadata = HMACKeyMetadata(self, access_id, project_id, user_project)
- metadata.reload() # raises NotFound for missing key
+ metadata.reload(timeout=timeout) # raises NotFound for missing key
return metadata
diff --git a/google/cloud/storage/constants.py b/google/cloud/storage/constants.py
index faadff1f0..621508669 100644
--- a/google/cloud/storage/constants.py
+++ b/google/cloud/storage/constants.py
@@ -89,3 +89,10 @@
Provides high availability and low latency across two regions.
"""
+
+
+# Internal constants
+
+_DEFAULT_TIMEOUT = 60 # in seconds
+"""The default request timeout in seconds if a timeout is not explicitly given.
+"""
diff --git a/google/cloud/storage/hmac_key.py b/google/cloud/storage/hmac_key.py
index 96ccbcaed..296b38e92 100644
--- a/google/cloud/storage/hmac_key.py
+++ b/google/cloud/storage/hmac_key.py
@@ -15,6 +15,8 @@
from google.cloud.exceptions import NotFound
from google.cloud._helpers import _rfc3339_to_datetime
+from google.cloud.storage.constants import _DEFAULT_TIMEOUT
+
class HMACKeyMetadata(object):
"""Metadata about an HMAC service account key withn Cloud Storage.
@@ -185,9 +187,16 @@ def user_project(self):
"""
return self._user_project
- def exists(self):
+ def exists(self, timeout=_DEFAULT_TIMEOUT):
"""Determine whether or not the key for this metadata exists.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:rtype: bool
:returns: True if the key exists in Cloud Storage.
"""
@@ -198,16 +207,23 @@ def exists(self):
qs_params["userProject"] = self.user_project
self._client._connection.api_request(
- method="GET", path=self.path, query_params=qs_params
+ method="GET", path=self.path, query_params=qs_params, timeout=timeout
)
except NotFound:
return False
else:
return True
- def reload(self):
+ def reload(self, timeout=_DEFAULT_TIMEOUT):
"""Reload properties from Cloud Storage.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:raises :class:`~google.api_core.exceptions.NotFound`:
if the key does not exist on the back-end.
"""
@@ -217,12 +233,19 @@ def reload(self):
qs_params["userProject"] = self.user_project
self._properties = self._client._connection.api_request(
- method="GET", path=self.path, query_params=qs_params
+ method="GET", path=self.path, query_params=qs_params, timeout=timeout
)
- def update(self):
+ def update(self, timeout=_DEFAULT_TIMEOUT):
"""Save writable properties to Cloud Storage.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:raises :class:`~google.api_core.exceptions.NotFound`:
if the key does not exist on the back-end.
"""
@@ -232,12 +255,23 @@ def update(self):
payload = {"state": self.state}
self._properties = self._client._connection.api_request(
- method="PUT", path=self.path, data=payload, query_params=qs_params
+ method="PUT",
+ path=self.path,
+ data=payload,
+ query_params=qs_params,
+ timeout=timeout,
)
- def delete(self):
+ def delete(self, timeout=_DEFAULT_TIMEOUT):
"""Delete the key from Cloud Storage.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:raises :class:`~google.api_core.exceptions.NotFound`:
if the key does not exist on the back-end.
"""
@@ -249,5 +283,5 @@ def delete(self):
qs_params["userProject"] = self.user_project
self._client._connection.api_request(
- method="DELETE", path=self.path, query_params=qs_params
+ method="DELETE", path=self.path, query_params=qs_params, timeout=timeout
)
diff --git a/google/cloud/storage/notification.py b/google/cloud/storage/notification.py
index 982dc16c0..e9618f668 100644
--- a/google/cloud/storage/notification.py
+++ b/google/cloud/storage/notification.py
@@ -18,6 +18,8 @@
from google.api_core.exceptions import NotFound
+from google.cloud.storage.constants import _DEFAULT_TIMEOUT
+
OBJECT_FINALIZE_EVENT_TYPE = "OBJECT_FINALIZE"
OBJECT_METADATA_UPDATE_EVENT_TYPE = "OBJECT_METADATA_UPDATE"
@@ -221,7 +223,7 @@ def _set_properties(self, response):
self._properties.clear()
self._properties.update(response)
- def create(self, client=None):
+ def create(self, client=None, timeout=_DEFAULT_TIMEOUT):
"""API wrapper: create the notification.
See:
@@ -233,6 +235,12 @@ def create(self, client=None):
:type client: :class:`~google.cloud.storage.client.Client`
:param client: (Optional) the client to use. If not passed, falls back
to the ``client`` stored on the notification's bucket.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
"""
if self.notification_id is not None:
raise ValueError(
@@ -249,10 +257,14 @@ def create(self, client=None):
properties = self._properties.copy()
properties["topic"] = _TOPIC_REF_FMT.format(self.topic_project, self.topic_name)
self._properties = client._connection.api_request(
- method="POST", path=path, query_params=query_params, data=properties
+ method="POST",
+ path=path,
+ query_params=query_params,
+ data=properties,
+ timeout=timeout,
)
- def exists(self, client=None):
+ def exists(self, client=None, timeout=_DEFAULT_TIMEOUT):
"""Test whether this notification exists.
See:
@@ -265,6 +277,12 @@ def exists(self, client=None):
``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the current bucket.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
:rtype: bool
:returns: True, if the notification exists, else False.
@@ -281,14 +299,14 @@ def exists(self, client=None):
try:
client._connection.api_request(
- method="GET", path=self.path, query_params=query_params
+ method="GET", path=self.path, query_params=query_params, timeout=timeout
)
except NotFound:
return False
else:
return True
- def reload(self, client=None):
+ def reload(self, client=None, timeout=_DEFAULT_TIMEOUT):
"""Update this notification from the server configuration.
See:
@@ -301,6 +319,12 @@ def reload(self, client=None):
``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the current bucket.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
:rtype: bool
:returns: True, if the notification exists, else False.
@@ -316,11 +340,11 @@ def reload(self, client=None):
query_params["userProject"] = self.bucket.user_project
response = client._connection.api_request(
- method="GET", path=self.path, query_params=query_params
+ method="GET", path=self.path, query_params=query_params, timeout=timeout
)
self._set_properties(response)
- def delete(self, client=None):
+ def delete(self, client=None, timeout=_DEFAULT_TIMEOUT):
"""Delete this notification.
See:
@@ -333,6 +357,12 @@ def delete(self, client=None):
``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the current bucket.
+ :type timeout: float or tuple
+ :param timeout: (optional) The amount of time, in seconds, to wait
+ for the server response.
+
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
:raises: :class:`google.api_core.exceptions.NotFound`:
if the notification does not exist.
@@ -348,7 +378,7 @@ def delete(self, client=None):
query_params["userProject"] = self.bucket.user_project
client._connection.api_request(
- method="DELETE", path=self.path, query_params=query_params
+ method="DELETE", path=self.path, query_params=query_params, timeout=timeout
)
diff --git a/noxfile.py b/noxfile.py
index a391c6732..1b44b309f 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -1,10 +1,12 @@
-# Copyright 2016 Google LLC
+# -*- coding: utf-8 -*-
+#
+# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
@@ -12,15 +14,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from __future__ import absolute_import
+# Generated by synthtool. DO NOT EDIT!
+from __future__ import absolute_import
import os
import shutil
import nox
-LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core"))
BLACK_VERSION = "black==19.3b0"
BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"]
@@ -35,7 +37,7 @@ def lint(session):
Returns a failure if the linters find linting errors or sufficiently
serious code quality issues.
"""
- session.install("flake8", BLACK_VERSION, *LOCAL_DEPS)
+ session.install("flake8", BLACK_VERSION)
session.run("black", "--check", *BLACK_PATHS)
session.run("flake8", "google", "tests")
@@ -57,23 +59,13 @@ def blacken(session):
@nox.session(python="3.7")
def lint_setup_py(session):
"""Verify that setup.py is valid (including RST check)."""
- session.install("docutils", "Pygments")
+ session.install("docutils", "pygments")
session.run("python", "setup.py", "check", "--restructuredtext", "--strict")
-@nox.session
def default(session):
- """Default unit test session.
-
- This is intended to be run **without** an interpreter set, so
- that the current ``python`` (on the ``PATH``) or the version of
- Python corresponding to the ``nox`` binary the ``PATH`` can
- run the tests.
- """
- # Install all test dependencies, then install local packages in-place.
+ # Install all test dependencies, then install this package in-place.
session.install("mock", "pytest", "pytest-cov")
- for local_dep in LOCAL_DEPS:
- session.install("-e", local_dep)
session.install("-e", ".")
# Run py.test against the unit tests.
@@ -85,43 +77,54 @@ def default(session):
"--cov-append",
"--cov-config=.coveragerc",
"--cov-report=",
- "--cov-fail-under=97",
- "tests/unit",
- *session.posargs
+ "--cov-fail-under=0",
+ os.path.join("tests", "unit"),
+ *session.posargs,
)
-@nox.session(python=["2.7", "3.5", "3.6", "3.7"])
+@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8"])
def unit(session):
"""Run the unit test suite."""
default(session)
-@nox.session(python=["2.7", "3.6"])
+@nox.session(python=["2.7", "3.7"])
def system(session):
"""Run the system test suite."""
-
- # Sanity check: Only run system tests if the environment variable is set.
+ system_test_path = os.path.join("tests", "system.py")
+ system_test_folder_path = os.path.join("tests", "system")
+ # Sanity check: Only run tests if the environment variable is set.
if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""):
- session.skip("Credentials must be set via environment variable.")
+ session.skip("Credentials must be set via environment variable")
+
+ system_test_exists = os.path.exists(system_test_path)
+ system_test_folder_exists = os.path.exists(system_test_folder_path)
+ # Sanity check: only run tests if found.
+ if not system_test_exists and not system_test_folder_exists:
+ session.skip("System tests were not found")
+ session.install("google-cloud-iam")
+ session.install("google-cloud-pubsub")
+ session.install("google-cloud-kms")
# Use pre-release gRPC for system tests.
session.install("--pre", "grpcio")
- # Install all test dependencies, then install local packages in-place.
+ # Install all test dependencies, then install this package into the
+ # virtualenv's dist-packages.
session.install("mock", "pytest")
- for local_dep in LOCAL_DEPS:
- session.install("-e", local_dep)
- systest_deps = ["../test_utils/", "../pubsub", "../kms"]
- for systest_dep in systest_deps:
- session.install("-e", systest_dep)
+
session.install("-e", ".")
+ session.install("-e", "test_utils")
# Run py.test against the system tests.
- session.run("py.test", "--quiet", "tests/system.py", *session.posargs)
+ if system_test_exists:
+ session.run("py.test", "--quiet", system_test_path, *session.posargs)
+ if system_test_folder_exists:
+ session.run("py.test", "--quiet", system_test_folder_path, *session.posargs)
-@nox.session(python="3.6")
+@nox.session(python="3.7")
def cover(session):
"""Run the final coverage report.
@@ -130,6 +133,7 @@ def cover(session):
"""
session.install("coverage", "pytest-cov")
session.run("coverage", "report", "--show-missing", "--fail-under=100")
+
session.run("coverage", "erase")
diff --git a/renovate.json b/renovate.json
new file mode 100644
index 000000000..4fa949311
--- /dev/null
+++ b/renovate.json
@@ -0,0 +1,5 @@
+{
+ "extends": [
+ "config:base", ":preserveSemverRanges"
+ ]
+}
diff --git a/setup.cfg b/setup.cfg
index 2a9acf13d..3bd555500 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,2 +1,3 @@
+# Generated by synthtool. DO NOT EDIT!
[bdist_wheel]
universal = 1
diff --git a/setup.py b/setup.py
index 50d526f2a..f1a344032 100644
--- a/setup.py
+++ b/setup.py
@@ -22,14 +22,14 @@
name = "google-cloud-storage"
description = "Google Cloud Storage API client library"
-version = "1.25.0"
+version = "1.26.0"
# Should be one of:
# 'Development Status :: 3 - Alpha'
# 'Development Status :: 4 - Beta'
# 'Development Status :: 5 - Production/Stable'
release_status = "Development Status :: 5 - Production/Stable"
dependencies = [
- "google-auth >= 1.9.0, < 2.0dev",
+ "google-auth >= 1.11.0, < 2.0dev",
"google-cloud-core >= 1.2.0, < 2.0dev",
"google-resumable-media >= 0.5.0, < 0.6dev",
]
@@ -64,7 +64,7 @@
author="Google LLC",
author_email="googleapis-packages@google.com",
license="Apache 2.0",
- url="https://github.com/GoogleCloudPlatform/google-cloud-python",
+ url="https://github.com/googleapis/python-storage",
classifiers=[
release_status,
"Intended Audience :: Developers",
diff --git a/synth.metadata b/synth.metadata
new file mode 100644
index 000000000..cc98759ac
--- /dev/null
+++ b/synth.metadata
@@ -0,0 +1,12 @@
+{
+ "updateTime": "2020-01-31T20:56:14.590164Z",
+ "sources": [
+ {
+ "template": {
+ "name": "python_split_library",
+ "origin": "synthtool.gcp",
+ "version": "2019.10.17"
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/synth.py b/synth.py
new file mode 100644
index 000000000..2ca8c4287
--- /dev/null
+++ b/synth.py
@@ -0,0 +1,30 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This script is used to synthesize generated parts of this library."""
+
+import re
+
+import synthtool as s
+from synthtool import gcp
+
+common = gcp.CommonTemplates()
+
+# ----------------------------------------------------------------------------
+# Add templated files
+# ----------------------------------------------------------------------------
+templated_files = common.py_library(cov_level=99)
+s.move(templated_files, excludes=["noxfile.py"])
+
+s.shell.run(["nox", "-s", "blacken"], hide_output=False)
diff --git a/test_utils/credentials.json.enc b/test_utils/credentials.json.enc
new file mode 100644
index 000000000..f073c7e4f
--- /dev/null
+++ b/test_utils/credentials.json.enc
@@ -0,0 +1,49 @@
+U2FsdGVkX1/vVm/dOEg1DCACYbdOcL+ey6+64A+DZGZVgF8Z/3skK6rpPocu6GOA
+UZAqASsBH9QifDf8cKVXQXVYpYq6HSv2O0w7vOmVorZO9GYPo98s9/8XO+4ty/AU
+aB6TD68frBAYv4cT/l5m7aYdzfzMTy0EOXoleZT09JYP3B5FV3KCO114FzMXGwrj
+HXsR6E5SyUUlUnWPC3eD3aqmovay0gxOKYO3ZwjFK1nlbN/8q6/8nwBCf/Bg6SHV
+V93pNxdolRlJev9kgKz4RN1z4jGCy5PAndhSLE82NFIs9LoAiEOU5YeMlN+Ulqus
+J92nh+ptUe9a4pJGbAuveUWO7zdS1QyXvTMUcmmSfXCNm/eIQjNuu5+rHtIjWKh8
+Ilwj2w1aTfSptQEhk/kwRgFz/d11vfwJzvwTmCxO6zyOeL0VUWLqdCBGgG5As9He
+/RenF8PZ1O0WbTt7fns5oTlTk/MUo+0xJ1xqvu/y45LaqqcBAnEdrWKmtM3dJHWv
+ufQku+kD+83F/VwBnQdvgMHu6KZEs6LRrNo58r4QuK6fS7VCACdzxID1RM2cL7kT
+6BFRlyGj1aigmjne9g9M9Jx4R+mZDpPU1WDzzG71J4qCUwaX8Dfwutuv4uiFvzwq
+NUF0wLJJPtKWmtW+hnZ/fhHQGCRsOpZzFnqp6Zv7J7k6esqxMgIjfal7Djk5Acy8
+j3iVvm6CYmKMVqzL62JHYS9Ye83tzBCaR8hpnJQKgH3FSOFY8HSwrtQSIsl/hSeF
+41sgnz0Y+/gkzNeU18qFk+eCZmvljyu+JK0nPYUgpOCJYVBNQpNHz5PUyiAEKhtM
+IOSdjPRW1Y+Xf4RroJnLPoF24Ijwrow5LCm9hBRY6TPPMMmnIXCd23xcLJ1rMj6g
+x4ZikElans+cwuc9wtbb7w01DcpTwQ1+eIV1qV+KIgpnLjRGLhZD4etobBsrwYu/
+vnIwy2QHCKENPb8sbdgp7x2mF7VSX0/7tf+9+i70EBiMzpOKBkiZhtLzm6hOBkEy
+ODaWrx4lTTwbSw8Rmtf58APhPFMsjHoNsjiUoK249Y8Y2Ff4fMfqYsXu6VC1n/At
+CuWYHc3EfBwFcLJS+RQB9kFk/4FygFBWq4Kj0MqoRruLbKmoGeJKH9q35W0f0NCD
+j+iHt3014kMGiuyJe1UDQ6fvEihFFdHuDivFpPAXDt4PTY/WtpDhaGMx23kb54pK
+jkAuxpznAB1lK3u9bGRXDasGeHIrNtIlPvgkrWHXvoBVqM7zry8TGtoxp3E3I42Z
+cUfDWfB9GqVdrOwvrTzyZsl2uShRkAJaZFZj5aMyYxiptp4gM8CwWiNtOd2EwtRO
+LxZX4M02PQFIqXV3FSDA0q6EwglUrTZdAlYeOEkopaKCtG31dEPOSQG3NGJAEYso
+Cxm99H7970dp0OAgpNSgRbcWDbhVbQXnRzvFGqLeH6a9dQ/a8uD3s8Qm9Du/kB6d
+XxTRe2OGxzcD0AgI8GClE4rIZHCLbcwuJRp0EYcN+pgY80O4U98fZ5RYpU6OYbU/
+MEiaBYFKtZtGkV6AQD568V7hHJWqc5DDfVHUQ/aeQwnKi2vnU66u+nnV2rZxXxLP
++dqeLRpul+wKa5b/Z5SfQ14Ff8s7aVyxaogGpyggyPL1vyq4KWZ6Or/wEE5hgNO4
+kBh6ht0QT1Hti8XY2JK1M+Jgbjgcg4jkHBGVqegrG1Rvcc2A4TYKwx+QMSBhyxrU
+5qhROjS4lTcC42hQslMUkUwc4U/Y91XdFbOOnaAkwzI36NRYL0pmgZnYxGJZeRvr
+E5foOhnOEVSFGdOkLfFh+FkWZQf56Lmn8Gg2wHE3dZTxLHibiUYfkgOr1uEosq29
+D1NstvlJURPQ0Q+8QQNWcl9nEZHMAjOmnL1hbx+QfuC6seucp+sXGzdZByMLZbvT
+tG8KNL293CmyQowgf9MXToWYnwRkcvqfTaKyor2Ggze3JtoFW4t0j4DI1XPciZFX
+XmfApHrzdB/bZadzxyaZ2NE0CuH9zDelwI6rz38xsN5liYnp5qmNKVCZVOHccXa6
+J8x365m5/VaaA2RrtdPqKxn8VaKy7+T690QgMXVGM4PbzQzQxHuSleklocqlP+sB
+jSMXCZY+ng/i4UmRO9noiyW3UThYh0hIdMYs12EmmI9cnF/OuYZpl30fmqwV+VNM
+td5B2fYvAvvsjiX60SFCn3DATP1GrPMBlZSmhhP3GYS+xrWt3Xxta9qIX2BEF1Gg
+twnZZRjoULSRFUYPfJPEOfEH2UQwm84wxx/GezVE+S/RpBlatPOgCiLnNNaLfdTC
+mTG9qY9elJv3GGQO8Lqgf4i8blExs05lSPk1BDhzTB6H9TLz+Ge0/l1QxKf3gPXU
+aImK1azieXMXHECkdKxrzmehwu1dZ/oYOLc/OFQCETwSRoLPFOFpYUpizwmVVHR6
+uLSfRptte4ZOU3zHfpd/0+J4tkwHwEkGzsmMdqudlm7qME6upuIplyVBH8JiXzUK
+n1RIH/OPmVEluAnexWRLZNdk7MrakIO4XACVbICENiYQgAIErP568An6twWEGDbZ
+bEN64E3cVDTDRPRAunIhhsEaapcxpFEPWlHorxv36nMUt0R0h0bJlCu5QdzckfcX
+ZrRuu1kl76ZfbSE8T0G4/rBb9gsU4Gn3WyvLIO3MgFBuxR68ZwcR8LpEUd8qp38H
+NG4cxPmN1nGKo663Z+xI2Gt5up4gpl+fOt4mXqxY386rB7yHaOfElMG5TUYdrS9w
+1xbbCVgeJ6zxX+NFlndG33cSAPprhw+C18eUu6ZU63WZcYFo3GfK6rs3lvYtofvE
+8DxztdTidQedNVNE+63YCjhxd/cZUI5n/UpgYkr9owp7hNGJiR3tdoNLR2gcoGqL
+qWhH928k2aSgF2j97LZ2OqoPCp0tUB7ho4jD2u4Ik3GLVNlCc3dCvWRvpHtDTQDv
+tujESMfHUc9I2r4S/PD3bku/ABGwa977Yp1PjzJGr9RajA5is5n6GVpyynwjtKG4
+iyyITpdwpCgr8pueTBLwZnas3slmiMOog/E4PmPgctHzvC+vhQijhUtw5zSsmv0l
+bZlw/mVhp5Ta7dTcLBKR8DA3m3vTbaEGkz0xpfQr7GfiSMRbJyvIw88pDK0gyTMD
diff --git a/test_utils/scripts/circleci/get_tagged_package.py b/test_utils/scripts/circleci/get_tagged_package.py
new file mode 100644
index 000000000..c148b9dc2
--- /dev/null
+++ b/test_utils/scripts/circleci/get_tagged_package.py
@@ -0,0 +1,64 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helper to determine package from tag.
+Get the current package directory corresponding to the Circle Tag.
+"""
+
+from __future__ import print_function
+
+import os
+import re
+import sys
+
+
+TAG_RE = re.compile(r"""
+ ^
+ (?P
+ (([a-z]+)[_-])*) # pkg-name-with-hyphens-or-underscores (empty allowed)
+ ([0-9]+)\.([0-9]+)\.([0-9]+) # Version x.y.z (x, y, z all ints)
+ $
+""", re.VERBOSE)
+TAG_ENV = 'CIRCLE_TAG'
+ERROR_MSG = '%s env. var. not set' % (TAG_ENV,)
+BAD_TAG_MSG = 'Invalid tag name: %s. Expected pkg-name-x.y.z'
+CIRCLE_CI_SCRIPTS_DIR = os.path.dirname(__file__)
+ROOT_DIR = os.path.realpath(
+ os.path.join(CIRCLE_CI_SCRIPTS_DIR, '..', '..', '..'))
+
+
+def main():
+ """Get the current package directory.
+ Prints the package directory out so callers can consume it.
+ """
+ if TAG_ENV not in os.environ:
+ print(ERROR_MSG, file=sys.stderr)
+ sys.exit(1)
+
+ tag_name = os.environ[TAG_ENV]
+ match = TAG_RE.match(tag_name)
+ if match is None:
+ print(BAD_TAG_MSG % (tag_name,), file=sys.stderr)
+ sys.exit(1)
+
+ pkg_name = match.group('pkg')
+ if pkg_name is None:
+ print(ROOT_DIR)
+ else:
+ pkg_dir = pkg_name.rstrip('-').replace('-', '_')
+ print(os.path.join(ROOT_DIR, pkg_dir))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test_utils/scripts/circleci/twine_upload.sh b/test_utils/scripts/circleci/twine_upload.sh
new file mode 100755
index 000000000..23a4738e9
--- /dev/null
+++ b/test_utils/scripts/circleci/twine_upload.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -ev
+
+# If this is not a CircleCI tag, no-op.
+if [[ -z "$CIRCLE_TAG" ]]; then
+ echo "This is not a release tag. Doing nothing."
+ exit 0
+fi
+
+# H/T: http://stackoverflow.com/a/246128/1068170
+SCRIPT="$(dirname "${BASH_SOURCE[0]}")/get_tagged_package.py"
+# Determine the package directory being deployed on this tag.
+PKG_DIR="$(python ${SCRIPT})"
+
+# Ensure that we have the latest versions of Twine, Wheel, and Setuptools.
+python3 -m pip install --upgrade twine wheel setuptools
+
+# Move into the package, build the distribution and upload.
+cd ${PKG_DIR}
+python3 setup.py sdist bdist_wheel
+twine upload dist/*
diff --git a/test_utils/scripts/get_target_packages.py b/test_utils/scripts/get_target_packages.py
new file mode 100644
index 000000000..1d51830cc
--- /dev/null
+++ b/test_utils/scripts/get_target_packages.py
@@ -0,0 +1,268 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Print a list of packages which require testing."""
+
+import os
+import re
+import subprocess
+import warnings
+
+
+CURRENT_DIR = os.path.realpath(os.path.dirname(__file__))
+BASE_DIR = os.path.realpath(os.path.join(CURRENT_DIR, '..', '..'))
+GITHUB_REPO = os.environ.get('GITHUB_REPO', 'google-cloud-python')
+CI = os.environ.get('CI', '')
+CI_BRANCH = os.environ.get('CIRCLE_BRANCH')
+CI_PR = os.environ.get('CIRCLE_PR_NUMBER')
+CIRCLE_TAG = os.environ.get('CIRCLE_TAG')
+head_hash, head_name = subprocess.check_output(['git', 'show-ref', 'HEAD']
+).strip().decode('ascii').split()
+rev_parse = subprocess.check_output(
+ ['git', 'rev-parse', '--abbrev-ref', 'HEAD']
+).strip().decode('ascii')
+MAJOR_DIV = '#' * 78
+MINOR_DIV = '#' + '-' * 77
+
+# NOTE: This reg-ex is copied from ``get_tagged_packages``.
+TAG_RE = re.compile(r"""
+ ^
+ (?P
+ (([a-z]+)-)*) # pkg-name-with-hyphens- (empty allowed)
+ ([0-9]+)\.([0-9]+)\.([0-9]+) # Version x.y.z (x, y, z all ints)
+ $
+""", re.VERBOSE)
+
+# This is the current set of dependencies by package.
+# As of this writing, the only "real" dependency is that of error_reporting
+# (on logging), the rest are just system test dependencies.
+PKG_DEPENDENCIES = {
+ 'logging': {'pubsub'},
+}
+
+
+def get_baseline():
+ """Return the baseline commit.
+
+ On a pull request, or on a branch, return the common parent revision
+ with the master branch.
+
+ Locally, return a value pulled from environment variables, or None if
+ the environment variables are not set.
+
+ On a push to master, return None. This will effectively cause everything
+ to be considered to be affected.
+ """
+
+ # If this is a pull request or branch, return the tip for master.
+ # We will test only packages which have changed since that point.
+ ci_non_master = (CI == 'true') and any([CI_BRANCH != 'master', CI_PR])
+
+ if ci_non_master:
+
+ repo_url = 'git@github.com:GoogleCloudPlatform/{}'.format(GITHUB_REPO)
+ subprocess.run(['git', 'remote', 'add', 'baseline', repo_url],
+ stderr=subprocess.DEVNULL)
+ subprocess.run(['git', 'pull', 'baseline'], stderr=subprocess.DEVNULL)
+
+ if CI_PR is None and CI_BRANCH is not None:
+ output = subprocess.check_output([
+ 'git', 'merge-base', '--fork-point',
+ 'baseline/master', CI_BRANCH])
+ return output.strip().decode('ascii')
+
+ return 'baseline/master'
+
+ # If environment variables are set identifying what the master tip is,
+ # use that.
+ if os.environ.get('GOOGLE_CLOUD_TESTING_REMOTE', ''):
+ remote = os.environ['GOOGLE_CLOUD_TESTING_REMOTE']
+ branch = os.environ.get('GOOGLE_CLOUD_TESTING_BRANCH', 'master')
+ return '%s/%s' % (remote, branch)
+
+ # If we are not in CI and we got this far, issue a warning.
+ if not CI:
+ warnings.warn('No baseline could be determined; this means tests '
+ 'will run for every package. If this is local '
+ 'development, set the $GOOGLE_CLOUD_TESTING_REMOTE '
+ 'environment variable.')
+
+ # That is all we can do; return None.
+ return None
+
+
+def get_changed_files():
+ """Return a list of files that have been changed since the baseline.
+
+ If there is no base, return None.
+ """
+ # Get the baseline, and fail quickly if there is no baseline.
+ baseline = get_baseline()
+ print('# Baseline commit: {}'.format(baseline))
+ if not baseline:
+ return None
+
+ # Return a list of altered files.
+ try:
+ return subprocess.check_output([
+ 'git', 'diff', '--name-only', '{}..HEAD'.format(baseline),
+ ], stderr=subprocess.DEVNULL).decode('utf8').strip().split('\n')
+ except subprocess.CalledProcessError:
+ warnings.warn('Unable to perform git diff; falling back to assuming '
+ 'all packages have changed.')
+ return None
+
+
+def reverse_map(dict_of_sets):
+ """Reverse a map of one-to-many.
+
+ So the map::
+
+ {
+ 'A': {'B', 'C'},
+ 'B': {'C'},
+ }
+
+ becomes
+
+ {
+ 'B': {'A'},
+ 'C': {'A', 'B'},
+ }
+
+ Args:
+ dict_of_sets (dict[set]): A dictionary of sets, mapping
+ one value to many.
+
+ Returns:
+ dict[set]: The reversed map.
+ """
+ result = {}
+ for key, values in dict_of_sets.items():
+ for value in values:
+ result.setdefault(value, set()).add(key)
+
+ return result
+
+def get_changed_packages(file_list):
+ """Return a list of changed packages based on the provided file list.
+
+ If the file list is None, then all packages should be considered to be
+ altered.
+ """
+ # Determine a complete list of packages.
+ all_packages = set()
+ for file_ in os.listdir(BASE_DIR):
+ abs_file = os.path.realpath(os.path.join(BASE_DIR, file_))
+ nox_file = os.path.join(abs_file, 'nox.py')
+ if os.path.isdir(abs_file) and os.path.isfile(nox_file):
+ all_packages.add(file_)
+
+ # If there is no file list, send down the full package set.
+ if file_list is None:
+ return all_packages
+
+ # Create a set based on the list of changed files.
+ answer = set()
+ reverse_deps = reverse_map(PKG_DEPENDENCIES)
+ for file_ in file_list:
+ # Ignore root directory changes (setup.py, .gitignore, etc.).
+ if os.path.sep not in file_:
+ continue
+
+ # Ignore changes that are not in a package (usually this will be docs).
+ package = file_.split(os.path.sep, 1)[0]
+ if package not in all_packages:
+ continue
+
+ # If there is a change in core, short-circuit now and return
+ # everything.
+ if package in ('core',):
+ return all_packages
+
+ # Add the package, as well as any dependencies this package has.
+ # NOTE: For now, dependencies only go down one level.
+ answer.add(package)
+ answer = answer.union(reverse_deps.get(package, set()))
+
+ # We got this far without being short-circuited; return the final answer.
+ return answer
+
+
+def get_tagged_package():
+ """Return the package corresponding to the current tag.
+
+ If there is no tag, will return :data:`None`.
+ """
+ if CIRCLE_TAG is None:
+ return
+
+ match = TAG_RE.match(CIRCLE_TAG)
+ if match is None:
+ return
+
+ pkg_name = match.group('pkg')
+ if pkg_name == '':
+ # NOTE: This corresponds to the "umbrella" tag.
+ return
+
+ return pkg_name.rstrip('-').replace('-', '_')
+
+
+def get_target_packages():
+ """Return a list of target packages to be run in the current build.
+
+ If in a tag build, will run only the package(s) that are tagged, otherwise
+ will run the packages that have file changes in them (or packages that
+ depend on those).
+ """
+ tagged_package = get_tagged_package()
+ if tagged_package is None:
+ file_list = get_changed_files()
+ print(MAJOR_DIV)
+ print('# Changed files:')
+ print(MINOR_DIV)
+ for file_ in file_list or ():
+ print('# {}'.format(file_))
+ for package in sorted(get_changed_packages(file_list)):
+ yield package
+ else:
+ yield tagged_package
+
+
+def main():
+ print(MAJOR_DIV)
+ print('# Environment')
+ print(MINOR_DIV)
+ print('# CircleCI: {}'.format(CI))
+ print('# CircleCI branch: {}'.format(CI_BRANCH))
+ print('# CircleCI pr: {}'.format(CI_PR))
+ print('# CircleCI tag: {}'.format(CIRCLE_TAG))
+ print('# HEAD ref: {}'.format(head_hash))
+ print('# {}'.format(head_name))
+ print('# Git branch: {}'.format(rev_parse))
+ print(MAJOR_DIV)
+
+ packages = list(get_target_packages())
+
+ print(MAJOR_DIV)
+ print('# Target packages:')
+ print(MINOR_DIV)
+ for package in packages:
+ print(package)
+ print(MAJOR_DIV)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test_utils/scripts/get_target_packages_kokoro.py b/test_utils/scripts/get_target_packages_kokoro.py
new file mode 100644
index 000000000..27d3a0c94
--- /dev/null
+++ b/test_utils/scripts/get_target_packages_kokoro.py
@@ -0,0 +1,98 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Print a list of packages which require testing."""
+
+import pathlib
+import subprocess
+
+import ci_diff_helper
+import requests
+
+
+def print_environment(environment):
+ print("-> CI environment:")
+ print('Branch', environment.branch)
+ print('PR', environment.pr)
+ print('In PR', environment.in_pr)
+ print('Repo URL', environment.repo_url)
+ if environment.in_pr:
+ print('PR Base', environment.base)
+
+
+def get_base(environment):
+ if environment.in_pr:
+ return environment.base
+ else:
+ # If we're not in a PR, just calculate the changes between this commit
+ # and its parent.
+ return 'HEAD~1'
+
+
+def get_changed_files_from_base(base):
+ return subprocess.check_output([
+ 'git', 'diff', '--name-only', f'{base}..HEAD',
+ ], stderr=subprocess.DEVNULL).decode('utf8').strip().split('\n')
+
+
+_URL_TEMPLATE = (
+ 'https://api.github.com/repos/googleapis/google-cloud-python/pulls/'
+ '{}/files'
+)
+
+
+def get_changed_files_from_pr(pr):
+ url = _URL_TEMPLATE.format(pr)
+ while url is not None:
+ response = requests.get(url)
+ for info in response.json():
+ yield info['filename']
+ url = response.links.get('next', {}).get('url')
+
+
+def determine_changed_packages(changed_files):
+ packages = [
+ path.parent for path in pathlib.Path('.').glob('*/noxfile.py')
+ ]
+
+ changed_packages = set()
+ for file in changed_files:
+ file = pathlib.Path(file)
+ for package in packages:
+ if package in file.parents:
+ changed_packages.add(package)
+
+ return changed_packages
+
+
+def main():
+ environment = ci_diff_helper.get_config()
+ print_environment(environment)
+ base = get_base(environment)
+
+ if environment.in_pr:
+ changed_files = list(get_changed_files_from_pr(environment.pr))
+ else:
+ changed_files = get_changed_files_from_base(base)
+
+ packages = determine_changed_packages(changed_files)
+
+ print(f"Comparing against {base}.")
+ print("-> Changed packages:")
+
+ for package in packages:
+ print(package)
+
+
+main()
diff --git a/test_utils/scripts/run_emulator.py b/test_utils/scripts/run_emulator.py
new file mode 100644
index 000000000..287b08640
--- /dev/null
+++ b/test_utils/scripts/run_emulator.py
@@ -0,0 +1,199 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Run system tests locally with the emulator.
+
+First makes system calls to spawn the emulator and get the local environment
+variable needed for it. Then calls the system tests.
+"""
+
+
+import argparse
+import os
+import subprocess
+
+import psutil
+
+from google.cloud.environment_vars import BIGTABLE_EMULATOR
+from google.cloud.environment_vars import GCD_DATASET
+from google.cloud.environment_vars import GCD_HOST
+from google.cloud.environment_vars import PUBSUB_EMULATOR
+from run_system_test import run_module_tests
+
+
+BIGTABLE = 'bigtable'
+DATASTORE = 'datastore'
+PUBSUB = 'pubsub'
+PACKAGE_INFO = {
+ BIGTABLE: (BIGTABLE_EMULATOR,),
+ DATASTORE: (GCD_DATASET, GCD_HOST),
+ PUBSUB: (PUBSUB_EMULATOR,),
+}
+EXTRA = {
+ DATASTORE: ('--no-legacy',),
+}
+_DS_READY_LINE = '[datastore] Dev App Server is now running.\n'
+_PS_READY_LINE_PREFIX = '[pubsub] INFO: Server started, listening on '
+_BT_READY_LINE_PREFIX = '[bigtable] Cloud Bigtable emulator running on '
+
+
+def get_parser():
+ """Get simple ``argparse`` parser to determine package.
+
+ :rtype: :class:`argparse.ArgumentParser`
+ :returns: The parser for this script.
+ """
+ parser = argparse.ArgumentParser(
+ description='Run google-cloud system tests against local emulator.')
+ parser.add_argument('--package', dest='package',
+ choices=sorted(PACKAGE_INFO.keys()),
+ default=DATASTORE, help='Package to be tested.')
+ return parser
+
+
+def get_start_command(package):
+ """Get command line arguments for starting emulator.
+
+ :type package: str
+ :param package: The package to start an emulator for.
+
+ :rtype: tuple
+ :returns: The arguments to be used, in a tuple.
+ """
+ result = ('gcloud', 'beta', 'emulators', package, 'start')
+ extra = EXTRA.get(package, ())
+ return result + extra
+
+
+def get_env_init_command(package):
+ """Get command line arguments for getting emulator env. info.
+
+ :type package: str
+ :param package: The package to get environment info for.
+
+ :rtype: tuple
+ :returns: The arguments to be used, in a tuple.
+ """
+ result = ('gcloud', 'beta', 'emulators', package, 'env-init')
+ extra = EXTRA.get(package, ())
+ return result + extra
+
+
+def datastore_wait_ready(popen):
+ """Wait until the datastore emulator is ready to use.
+
+ :type popen: :class:`subprocess.Popen`
+ :param popen: An open subprocess to interact with.
+ """
+ emulator_ready = False
+ while not emulator_ready:
+ emulator_ready = popen.stderr.readline() == _DS_READY_LINE
+
+
+def wait_ready_prefix(popen, prefix):
+    """Wait until a process encounters a line with a matching prefix.
+
+ :type popen: :class:`subprocess.Popen`
+ :param popen: An open subprocess to interact with.
+
+ :type prefix: str
+    :param prefix: The prefix to match.
+ """
+ emulator_ready = False
+ while not emulator_ready:
+ emulator_ready = popen.stderr.readline().startswith(prefix)
+
+
+def wait_ready(package, popen):
+ """Wait until the emulator is ready to use.
+
+ :type package: str
+ :param package: The package to check if ready.
+
+ :type popen: :class:`subprocess.Popen`
+ :param popen: An open subprocess to interact with.
+
+ :raises: :class:`KeyError` if the ``package`` is not among
+ ``datastore``, ``pubsub`` or ``bigtable``.
+ """
+ if package == DATASTORE:
+ datastore_wait_ready(popen)
+ elif package == PUBSUB:
+ wait_ready_prefix(popen, _PS_READY_LINE_PREFIX)
+ elif package == BIGTABLE:
+ wait_ready_prefix(popen, _BT_READY_LINE_PREFIX)
+ else:
+ raise KeyError('Package not supported', package)
+
+
+def cleanup(pid):
+    """Clean up a process (including all of its children).
+
+ :type pid: int
+ :param pid: Process ID.
+ """
+ proc = psutil.Process(pid)
+ for child_proc in proc.children(recursive=True):
+ try:
+ child_proc.kill()
+ child_proc.terminate()
+ except psutil.NoSuchProcess:
+ pass
+ proc.terminate()
+ proc.kill()
+
+
+def run_tests_in_emulator(package):
+ """Spawn an emulator instance and run the system tests.
+
+ :type package: str
+ :param package: The package to run system tests against.
+ """
+ # Make sure this package has environment vars to replace.
+ env_vars = PACKAGE_INFO[package]
+
+ start_command = get_start_command(package)
+ # Ignore stdin and stdout, don't pollute the user's output with them.
+ proc_start = subprocess.Popen(start_command, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ try:
+ wait_ready(package, proc_start)
+ env_init_command = get_env_init_command(package)
+ proc_env = subprocess.Popen(env_init_command, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ env_status = proc_env.wait()
+ if env_status != 0:
+ raise RuntimeError(env_status, proc_env.stderr.read())
+ env_lines = proc_env.stdout.read().strip().split('\n')
+ # Set environment variables before running the system tests.
+ for env_var in env_vars:
+ line_prefix = 'export ' + env_var + '='
+ value, = [line.split(line_prefix, 1)[1] for line in env_lines
+ if line.startswith(line_prefix)]
+ os.environ[env_var] = value
+ run_module_tests(package,
+ ignore_requirements=True)
+ finally:
+ cleanup(proc_start.pid)
+
+
+def main():
+ """Main method to run this script."""
+ parser = get_parser()
+ args = parser.parse_args()
+ run_tests_in_emulator(args.package)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test_utils/scripts/update_docs.sh b/test_utils/scripts/update_docs.sh
new file mode 100755
index 000000000..8cbab9f0d
--- /dev/null
+++ b/test_utils/scripts/update_docs.sh
@@ -0,0 +1,93 @@
+#!/bin/bash
+
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -ev
+
+GH_OWNER='GoogleCloudPlatform'
+GH_PROJECT_NAME='google-cloud-python'
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+# Function to build the docs.
+function build_docs {
+ rm -rf docs/_build/
+ rm -f docs/bigquery/generated/*.rst
+ # -W -> warnings as errors
+ # -T -> show full traceback on exception
+ # -N -> no color
+ sphinx-build \
+ -W -T -N \
+ -b html \
+ -d docs/_build/doctrees \
+ docs/ \
+ docs/_build/html/
+ return $?
+}
+
+# Only update docs if we are on CircleCI.
+if [[ "${CIRCLE_BRANCH}" == "master" ]] && [[ -z "${CIRCLE_PR_NUMBER}" ]]; then
+ echo "Building new docs on a merged commit."
+elif [[ "$1" == "kokoro" ]]; then
+ echo "Building and publishing docs on Kokoro."
+elif [[ -n "${CIRCLE_TAG}" ]]; then
+ echo "Building new docs on a tag (but will not deploy)."
+ build_docs
+ exit $?
+else
+ echo "Not on master nor a release tag."
+ echo "Building new docs for testing purposes, but not deploying."
+ build_docs
+ exit $?
+fi
+
+# Adding GitHub pages branch. `git submodule add` checks it
+# out at HEAD.
+GH_PAGES_DIR='ghpages'
+git submodule add -q -b gh-pages \
+ "git@github.com:${GH_OWNER}/${GH_PROJECT_NAME}" ${GH_PAGES_DIR}
+
+# Determine if we are building a new tag or are building docs
+# for master. Then build new docs in docs/_build from master.
+if [[ -n "${CIRCLE_TAG}" ]]; then
+ # Sphinx will use the package version by default.
+ build_docs
+else
+ SPHINX_RELEASE=$(git log -1 --pretty=%h) build_docs
+fi
+
+# Update gh-pages with the created docs.
+cd ${GH_PAGES_DIR}
+git rm -fr latest/
+cp -R ../docs/_build/html/ latest/
+
+# Update the files push to gh-pages.
+git add .
+git status
+
+# If there are no changes, just exit cleanly.
+if [[ -z "$(git status --porcelain)" ]]; then
+ echo "Nothing to commit. Exiting without pushing changes."
+ exit
+fi
+
+# Commit to gh-pages branch to apply changes.
+git config --global user.email "dpebot@google.com"
+git config --global user.name "dpebot"
+git commit -m "Update docs after merge to master."
+
+# NOTE: This may fail if two docs updates (on merges to master)
+# happen in close proximity.
+git push -q origin HEAD:gh-pages
diff --git a/test_utils/setup.py b/test_utils/setup.py
new file mode 100644
index 000000000..8e9222a7f
--- /dev/null
+++ b/test_utils/setup.py
@@ -0,0 +1,64 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from setuptools import find_packages
+from setuptools import setup
+
+
+PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__))
+
+
+# NOTE: This is duplicated throughout and we should try to
+# consolidate.
+SETUP_BASE = {
+ 'author': 'Google Cloud Platform',
+ 'author_email': 'googleapis-publisher@google.com',
+ 'scripts': [],
+ 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python',
+ 'license': 'Apache 2.0',
+ 'platforms': 'Posix; MacOS X; Windows',
+ 'include_package_data': True,
+ 'zip_safe': False,
+ 'classifiers': [
+ 'Development Status :: 4 - Beta',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: Apache Software License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Topic :: Internet',
+ ],
+}
+
+
+REQUIREMENTS = [
+ 'google-auth >= 0.4.0',
+ 'six',
+]
+
+setup(
+ name='google-cloud-testutils',
+ version='0.24.0',
+ description='System test utilities for google-cloud-python',
+ packages=find_packages(),
+ install_requires=REQUIREMENTS,
+ python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*',
+ **SETUP_BASE
+)
diff --git a/test_utils/test_utils/__init__.py b/test_utils/test_utils/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/test_utils/test_utils/imports.py b/test_utils/test_utils/imports.py
new file mode 100644
index 000000000..5991af7fc
--- /dev/null
+++ b/test_utils/test_utils/imports.py
@@ -0,0 +1,38 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import mock
+import six
+
+
+def maybe_fail_import(predicate):
+ """Create and return a patcher that conditionally makes an import fail.
+
+ Args:
+ predicate (Callable[[...], bool]): A callable that, if it returns `True`,
+ triggers an `ImportError`. It must accept the same arguments as the
+ built-in `__import__` function.
+ https://docs.python.org/3/library/functions.html#__import__
+
+ Returns:
+ A mock patcher object that can be used to enable patched import behavior.
+ """
+ orig_import = six.moves.builtins.__import__
+
+ def custom_import(name, globals=None, locals=None, fromlist=(), level=0):
+ if predicate(name, globals, locals, fromlist, level):
+ raise ImportError
+ return orig_import(name, globals, locals, fromlist, level)
+
+ return mock.patch.object(six.moves.builtins, "__import__", new=custom_import)
diff --git a/test_utils/test_utils/retry.py b/test_utils/test_utils/retry.py
new file mode 100644
index 000000000..e61c001a0
--- /dev/null
+++ b/test_utils/test_utils/retry.py
@@ -0,0 +1,207 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import time
+from functools import wraps
+
+import six
+
+MAX_TRIES = 4
+DELAY = 1
+BACKOFF = 2
+
+
+def _retry_all(_):
+ """Retry all caught exceptions."""
+ return True
+
+
+class BackoffFailed(Exception):
+ """Retry w/ backoffs did not complete successfully."""
+
+
+class RetryBase(object):
+    """Base class for retrying calls to a decorated function w/ exponential backoff.
+
+ :type max_tries: int
+ :param max_tries: Number of times to try (not retry) before giving up.
+
+ :type delay: int
+ :param delay: Initial delay between retries in seconds.
+
+ :type backoff: int
+ :param backoff: Backoff multiplier e.g. value of 2 will double the
+ delay each retry.
+
+ :type logger: logging.Logger instance
+ :param logger: Logger to use. If None, print.
+ """
+ def __init__(self, max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF,
+ logger=None):
+ self.max_tries = max_tries
+ self.delay = delay
+ self.backoff = backoff
+ self.logger = logger.warning if logger else six.print_
+
+
+class RetryErrors(RetryBase):
+ """Decorator for retrying given exceptions in testing.
+
+ :type exception: Exception or tuple of Exceptions
+ :param exception: The exception to check or may be a tuple of
+ exceptions to check.
+
+ :type error_predicate: function, takes caught exception, returns bool
+ :param error_predicate: Predicate evaluating whether to retry after a
+ caught exception.
+
+ :type max_tries: int
+ :param max_tries: Number of times to try (not retry) before giving up.
+
+ :type delay: int
+ :param delay: Initial delay between retries in seconds.
+
+ :type backoff: int
+ :param backoff: Backoff multiplier e.g. value of 2 will double the
+ delay each retry.
+
+ :type logger: logging.Logger instance
+ :param logger: Logger to use. If None, print.
+ """
+ def __init__(self, exception, error_predicate=_retry_all,
+ max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF,
+ logger=None):
+ super(RetryErrors, self).__init__(max_tries, delay, backoff, logger)
+ self.exception = exception
+ self.error_predicate = error_predicate
+
+ def __call__(self, to_wrap):
+ @wraps(to_wrap)
+ def wrapped_function(*args, **kwargs):
+ tries = 0
+ while tries < self.max_tries:
+ try:
+ return to_wrap(*args, **kwargs)
+ except self.exception as caught_exception:
+
+ if not self.error_predicate(caught_exception):
+ raise
+
+ delay = self.delay * self.backoff**tries
+ msg = ("%s, Trying again in %d seconds..." %
+ (caught_exception, delay))
+ self.logger(msg)
+
+ time.sleep(delay)
+ tries += 1
+ return to_wrap(*args, **kwargs)
+
+ return wrapped_function
+
+
+class RetryResult(RetryBase):
+ """Decorator for retrying based on non-error result.
+
+ :type result_predicate: function, takes result, returns bool
+ :param result_predicate: Predicate evaluating whether to retry after a
+ result is returned.
+
+ :type max_tries: int
+ :param max_tries: Number of times to try (not retry) before giving up.
+
+ :type delay: int
+ :param delay: Initial delay between retries in seconds.
+
+ :type backoff: int
+ :param backoff: Backoff multiplier e.g. value of 2 will double the
+ delay each retry.
+
+ :type logger: logging.Logger instance
+ :param logger: Logger to use. If None, print.
+ """
+ def __init__(self, result_predicate,
+ max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF,
+ logger=None):
+ super(RetryResult, self).__init__(max_tries, delay, backoff, logger)
+ self.result_predicate = result_predicate
+
+ def __call__(self, to_wrap):
+ @wraps(to_wrap)
+ def wrapped_function(*args, **kwargs):
+ tries = 0
+ while tries < self.max_tries:
+ result = to_wrap(*args, **kwargs)
+ if self.result_predicate(result):
+ return result
+
+ delay = self.delay * self.backoff**tries
+ msg = "%s. Trying again in %d seconds..." % (
+ self.result_predicate.__name__, delay,)
+ self.logger(msg)
+
+ time.sleep(delay)
+ tries += 1
+ raise BackoffFailed()
+
+ return wrapped_function
+
+
+class RetryInstanceState(RetryBase):
+ """Decorator for retrying based on instance state.
+
+ :type instance_predicate: function, takes instance, returns bool
+ :param instance_predicate: Predicate evaluating whether to retry after an
+ API-invoking method is called.
+
+ :type max_tries: int
+ :param max_tries: Number of times to try (not retry) before giving up.
+
+ :type delay: int
+ :param delay: Initial delay between retries in seconds.
+
+ :type backoff: int
+ :param backoff: Backoff multiplier e.g. value of 2 will double the
+ delay each retry.
+
+ :type logger: logging.Logger instance
+ :param logger: Logger to use. If None, print.
+ """
+ def __init__(self, instance_predicate,
+ max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF,
+ logger=None):
+ super(RetryInstanceState, self).__init__(
+ max_tries, delay, backoff, logger)
+ self.instance_predicate = instance_predicate
+
+ def __call__(self, to_wrap):
+ instance = to_wrap.__self__ # only instance methods allowed
+
+ @wraps(to_wrap)
+ def wrapped_function(*args, **kwargs):
+ tries = 0
+ while tries < self.max_tries:
+ result = to_wrap(*args, **kwargs)
+ if self.instance_predicate(instance):
+ return result
+
+ delay = self.delay * self.backoff**tries
+ msg = "%s. Trying again in %d seconds..." % (
+ self.instance_predicate.__name__, delay,)
+ self.logger(msg)
+
+ time.sleep(delay)
+ tries += 1
+ raise BackoffFailed()
+
+ return wrapped_function
diff --git a/test_utils/test_utils/system.py b/test_utils/test_utils/system.py
new file mode 100644
index 000000000..590dc62a0
--- /dev/null
+++ b/test_utils/test_utils/system.py
@@ -0,0 +1,81 @@
+# Copyright 2014 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+import os
+import sys
+import time
+
+import google.auth.credentials
+from google.auth.environment_vars import CREDENTIALS as TEST_CREDENTIALS
+
+
+# From shell environ. May be None.
+CREDENTIALS = os.getenv(TEST_CREDENTIALS)
+
+ENVIRON_ERROR_MSG = """\
+To run the system tests, you need to set some environment variables.
+Please check the CONTRIBUTING guide for instructions.
+"""
+
+
+class EmulatorCreds(google.auth.credentials.Credentials):
+ """A mock credential object.
+
+ Used to avoid unnecessary token refreshing or reliance on the network
+ while an emulator is running.
+ """
+
+ def __init__(self): # pylint: disable=super-init-not-called
+ self.token = b'seekrit'
+ self.expiry = None
+
+ @property
+ def valid(self):
+ """Would-be validity check of the credentials.
+
+ Always is :data:`True`.
+ """
+ return True
+
+ def refresh(self, unused_request): # pylint: disable=unused-argument
+ """Off-limits implementation for abstract method."""
+ raise RuntimeError('Should never be refreshed.')
+
+
+def check_environ():
+ err_msg = None
+ if CREDENTIALS is None:
+ err_msg = '\nMissing variables: ' + TEST_CREDENTIALS
+ elif not os.path.isfile(CREDENTIALS):
+ err_msg = '\nThe %s path %r is not a file.' % (TEST_CREDENTIALS,
+ CREDENTIALS)
+
+ if err_msg is not None:
+ msg = ENVIRON_ERROR_MSG + err_msg
+ print(msg, file=sys.stderr)
+ sys.exit(1)
+
+
+def unique_resource_id(delimiter='_'):
+ """A unique identifier for a resource.
+
+ Intended to help locate resources created in particular
+ testing environments and at particular times.
+ """
+ build_id = os.getenv('CIRCLE_BUILD_NUM', '')
+ if build_id == '':
+ return '%s%d' % (delimiter, 1000 * time.time())
+ else:
+ return '%s%s%s%d' % (delimiter, build_id, delimiter, time.time())
diff --git a/test_utils/test_utils/vpcsc_config.py b/test_utils/test_utils/vpcsc_config.py
new file mode 100644
index 000000000..36b15d6be
--- /dev/null
+++ b/test_utils/test_utils/vpcsc_config.py
@@ -0,0 +1,118 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+import pytest
+
+
+INSIDE_VPCSC_ENVVAR = "GOOGLE_CLOUD_TESTS_IN_VPCSC"
+PROJECT_INSIDE_ENVVAR = "PROJECT_ID"
+PROJECT_OUTSIDE_ENVVAR = "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT"
+BUCKET_OUTSIDE_ENVVAR = "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_BUCKET"
+
+
+class VPCSCTestConfig(object):
+ """System test utility for VPCSC detection.
+
+ See: https://cloud.google.com/vpc-service-controls/docs/
+ """
+
+ @property
+ def inside_vpcsc(self):
+ """Test whether the test environment is configured to run inside VPCSC.
+
+ Returns:
+ bool:
+ true if the environment is configured to run inside VPCSC,
+ else false.
+ """
+ return INSIDE_VPCSC_ENVVAR in os.environ
+
+ @property
+ def project_inside(self):
+ """Project ID for testing outside access.
+
+ Returns:
+ str: project ID used for testing outside access; None if undefined.
+ """
+ return os.environ.get(PROJECT_INSIDE_ENVVAR, None)
+
+ @property
+ def project_outside(self):
+ """Project ID for testing inside access.
+
+ Returns:
+ str: project ID used for testing inside access; None if undefined.
+ """
+ return os.environ.get(PROJECT_OUTSIDE_ENVVAR, None)
+
+ @property
+ def bucket_outside(self):
+ """GCS bucket for testing inside access.
+
+ Returns:
+ str: bucket ID used for testing inside access; None if undefined.
+ """
+ return os.environ.get(BUCKET_OUTSIDE_ENVVAR, None)
+
+ def skip_if_inside_vpcsc(self, testcase):
+ """Test decorator: skip if running inside VPCSC."""
+ reason = (
+ "Running inside VPCSC. "
+ "Unset the {} environment variable to enable this test."
+ ).format(INSIDE_VPCSC_ENVVAR)
+ skip = pytest.mark.skipif(self.inside_vpcsc, reason=reason)
+ return skip(testcase)
+
+ def skip_unless_inside_vpcsc(self, testcase):
+ """Test decorator: skip if running outside VPCSC."""
+ reason = (
+ "Running outside VPCSC. "
+ "Set the {} environment variable to enable this test."
+ ).format(INSIDE_VPCSC_ENVVAR)
+ skip = pytest.mark.skipif(not self.inside_vpcsc, reason=reason)
+ return skip(testcase)
+
+ def skip_unless_inside_project(self, testcase):
+ """Test decorator: skip if inside project env var not set."""
+ reason = (
+ "Project ID for running inside VPCSC not set. "
+ "Set the {} environment variable to enable this test."
+ ).format(PROJECT_INSIDE_ENVVAR)
+ skip = pytest.mark.skipif(self.project_inside is None, reason=reason)
+ return skip(testcase)
+
+ def skip_unless_outside_project(self, testcase):
+ """Test decorator: skip if outside project env var not set."""
+ reason = (
+ "Project ID for running outside VPCSC not set. "
+ "Set the {} environment variable to enable this test."
+ ).format(PROJECT_OUTSIDE_ENVVAR)
+ skip = pytest.mark.skipif(self.project_outside is None, reason=reason)
+ return skip(testcase)
+
+ def skip_unless_outside_bucket(self, testcase):
+ """Test decorator: skip if outside bucket env var not set."""
+ reason = (
+ "Bucket ID for running outside VPCSC not set. "
+ "Set the {} environment variable to enable this test."
+ ).format(BUCKET_OUTSIDE_ENVVAR)
+ skip = pytest.mark.skipif(self.bucket_outside is None, reason=reason)
+ return skip(testcase)
+
+
+vpcsc_config = VPCSCTestConfig()
diff --git a/tests/system.py b/tests/system.py
index d689c2f2c..995b984ed 100644
--- a/tests/system.py
+++ b/tests/system.py
@@ -27,13 +27,13 @@
import six
from google.cloud import exceptions
+from google.cloud import iam_credentials_v1
from google.cloud import storage
from google.cloud.storage._helpers import _base64_md5hash
from google.cloud.storage.bucket import LifecycleRuleDelete
from google.cloud.storage.bucket import LifecycleRuleSetStorageClass
from google.cloud import kms
import google.oauth2
-
from test_utils.retry import RetryErrors
from test_utils.system import unique_resource_id
from test_utils.vpcsc_config import vpcsc_config
@@ -109,7 +109,6 @@ def tearDown(self):
def test_get_service_account_email(self):
domain = "gs-project-accounts.iam.gserviceaccount.com"
-
email = Config.CLIENT.get_service_account_email()
new_style = re.compile(r"service-(?P[^@]+)@" + domain)
@@ -310,15 +309,6 @@ def test_get_set_iam_policy(self):
self.assertEqual(returned_policy.version, 3)
self.assertEqual(returned_policy.bindings, policy.bindings)
- with pytest.raises(
- BadRequest, match="cannot be less than the existing policy version"
- ):
- bucket.get_iam_policy()
- with pytest.raises(
- BadRequest, match="cannot be less than the existing policy version"
- ):
- bucket.get_iam_policy(requested_policy_version=2)
-
fetched_policy = bucket.get_iam_policy(requested_policy_version=3)
self.assertEqual(fetched_policy.bindings, returned_policy.bindings)
@@ -962,6 +952,8 @@ def _create_signed_read_url_helper(
payload=None,
expiration=None,
encryption_key=None,
+ service_account_email=None,
+ access_token=None,
):
expiration = self._morph_expiration(version, expiration)
@@ -972,7 +964,12 @@ def _create_signed_read_url_helper(
blob = self.blob
signed_url = blob.generate_signed_url(
- expiration=expiration, method=method, client=Config.CLIENT, version=version
+ expiration=expiration,
+ method=method,
+ client=Config.CLIENT,
+ version=version,
+ service_account_email=service_account_email,
+ access_token=access_token,
)
headers = {}
@@ -1045,6 +1042,35 @@ def test_create_signed_read_url_v4_w_csek(self):
version="v4",
)
+ def test_create_signed_read_url_v2_w_access_token(self):
+ client = iam_credentials_v1.IAMCredentialsClient()
+ service_account_email = Config.CLIENT._credentials.service_account_email
+ name = client.service_account_path("-", service_account_email)
+ scope = [
+ "https://www.googleapis.com/auth/devstorage.read_write",
+ "https://www.googleapis.com/auth/iam",
+ ]
+ response = client.generate_access_token(name, scope)
+ self._create_signed_read_url_helper(
+ service_account_email=service_account_email,
+ access_token=response.access_token,
+ )
+
+ def test_create_signed_read_url_v4_w_access_token(self):
+ client = iam_credentials_v1.IAMCredentialsClient()
+ service_account_email = Config.CLIENT._credentials.service_account_email
+ name = client.service_account_path("-", service_account_email)
+ scope = [
+ "https://www.googleapis.com/auth/devstorage.read_write",
+ "https://www.googleapis.com/auth/iam",
+ ]
+ response = client.generate_access_token(name, scope)
+ self._create_signed_read_url_helper(
+ version="v4",
+ service_account_email=service_account_email,
+ access_token=response.access_token,
+ )
+
def _create_signed_delete_url_helper(self, version="v2", expiration=None):
expiration = self._morph_expiration(version, expiration)
diff --git a/tests/unit/test__helpers.py b/tests/unit/test__helpers.py
index 9b75b0e67..10b71b7bc 100644
--- a/tests/unit/test__helpers.py
+++ b/tests/unit/test__helpers.py
@@ -44,6 +44,12 @@ def test_w_env_var(self):
class Test_PropertyMixin(unittest.TestCase):
+ @staticmethod
+ def _get_default_timeout():
+ from google.cloud.storage.constants import _DEFAULT_TIMEOUT
+
+ return _DEFAULT_TIMEOUT
+
@staticmethod
def _get_target_class():
from google.cloud.storage._helpers import _PropertyMixin
@@ -103,7 +109,7 @@ def test_reload(self):
# Make sure changes is not a set instance before calling reload
# (which will clear / replace it with an empty set), checked below.
derived._changes = object()
- derived.reload(client=client)
+ derived.reload(client=client, timeout=42)
self.assertEqual(derived._properties, {"foo": "Foo"})
kw = connection._requested
self.assertEqual(len(kw), 1)
@@ -115,6 +121,7 @@ def test_reload(self):
"query_params": {"projection": "noAcl"},
"headers": {},
"_target_object": derived,
+ "timeout": 42,
},
)
self.assertEqual(derived._changes, set())
@@ -139,6 +146,7 @@ def test_reload_w_user_project(self):
"query_params": {"projection": "noAcl", "userProject": user_project},
"headers": {},
"_target_object": derived,
+ "timeout": self._get_default_timeout(),
},
)
self.assertEqual(derived._changes, set())
@@ -164,7 +172,7 @@ def test_patch(self):
BAZ = object()
derived._properties = {"bar": BAR, "baz": BAZ}
derived._changes = set(["bar"]) # Ignore baz.
- derived.patch(client=client)
+ derived.patch(client=client, timeout=42)
self.assertEqual(derived._properties, {"foo": "Foo"})
kw = connection._requested
self.assertEqual(len(kw), 1)
@@ -177,6 +185,7 @@ def test_patch(self):
# Since changes does not include `baz`, we don't see it sent.
"data": {"bar": BAR},
"_target_object": derived,
+ "timeout": 42,
},
)
# Make sure changes get reset by patch().
@@ -205,6 +214,7 @@ def test_patch_w_user_project(self):
# Since changes does not include `baz`, we don't see it sent.
"data": {"bar": BAR},
"_target_object": derived,
+ "timeout": self._get_default_timeout(),
},
)
# Make sure changes get reset by patch().
@@ -219,7 +229,7 @@ def test_update(self):
BAZ = object()
derived._properties = {"bar": BAR, "baz": BAZ}
derived._changes = set(["bar"]) # Update sends 'baz' anyway.
- derived.update(client=client)
+ derived.update(client=client, timeout=42)
self.assertEqual(derived._properties, {"foo": "Foo"})
kw = connection._requested
self.assertEqual(len(kw), 1)
@@ -227,6 +237,7 @@ def test_update(self):
self.assertEqual(kw[0]["path"], "/path")
self.assertEqual(kw[0]["query_params"], {"projection": "full"})
self.assertEqual(kw[0]["data"], {"bar": BAR, "baz": BAZ})
+ self.assertEqual(kw[0]["timeout"], 42)
# Make sure changes get reset by patch().
self.assertEqual(derived._changes, set())
@@ -250,6 +261,7 @@ def test_update_w_user_project(self):
kw[0]["query_params"], {"projection": "full", "userProject": user_project}
)
self.assertEqual(kw[0]["data"], {"bar": BAR, "baz": BAZ})
+ self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
# Make sure changes get reset by patch().
self.assertEqual(derived._changes, set())
diff --git a/tests/unit/test__http.py b/tests/unit/test__http.py
index ab8ab27eb..021698eb9 100644
--- a/tests/unit/test__http.py
+++ b/tests/unit/test__http.py
@@ -30,6 +30,7 @@ def _make_one(self, *args, **kw):
def test_extra_headers(self):
import requests
from google.cloud import _http as base_http
+ from google.cloud.storage.constants import _DEFAULT_TIMEOUT
http = mock.create_autospec(requests.Session, instance=True)
response = requests.Response()
@@ -55,7 +56,7 @@ def test_extra_headers(self):
headers=expected_headers,
method="GET",
url=expected_uri,
- timeout=None,
+ timeout=_DEFAULT_TIMEOUT,
)
def test_build_api_url_no_extra_query_params(self):
diff --git a/tests/unit/test__signing.py b/tests/unit/test__signing.py
index bce709201..ebd7f9c17 100644
--- a/tests/unit/test__signing.py
+++ b/tests/unit/test__signing.py
@@ -390,6 +390,8 @@ def _generate_helper(
generation=generation,
headers=headers,
query_parameters=query_parameters,
+ service_account_email=None,
+ access_token=None,
)
# Check the mock was called.
@@ -504,6 +506,22 @@ def test_with_google_credentials(self):
with self.assertRaises(AttributeError):
self._call_fut(credentials, resource=resource, expiration=expiration)
+ def test_with_access_token(self):
+ resource = "/name/path"
+ credentials = _make_credentials()
+ expiration = int(time.time() + 5)
+ email = mock.sentinel.service_account_email
+ with mock.patch(
+ "google.cloud.storage._signing._sign_message", return_value=b"DEADBEEF"
+ ):
+ self._call_fut(
+ credentials,
+ resource=resource,
+ expiration=expiration,
+ service_account_email=email,
+ access_token="token",
+ )
+
class Test_generate_signed_url_v4(unittest.TestCase):
DEFAULT_EXPIRATION = 1000
@@ -638,6 +656,51 @@ def test_w_custom_query_parameters_w_string_value(self):
def test_w_custom_query_parameters_w_none_value(self):
self._generate_helper(query_parameters={"qux": None})
+ def test_with_access_token(self):
+ resource = "/name/path"
+ signer_email = "service@example.com"
+ credentials = _make_credentials(signer_email=signer_email)
+ with mock.patch(
+ "google.cloud.storage._signing._sign_message", return_value=b"DEADBEEF"
+ ):
+ self._call_fut(
+ credentials,
+ resource=resource,
+ expiration=datetime.timedelta(days=5),
+ service_account_email=signer_email,
+ access_token="token",
+ )
+
+
+class Test_sign_message(unittest.TestCase):
+ @staticmethod
+ def _call_fut(*args, **kwargs):
+ from google.cloud.storage._signing import _sign_message
+
+ return _sign_message(*args, **kwargs)
+
+ def test_sign_bytes(self):
+ signature = "DEADBEEF"
+ data = {"signature": signature}
+ request = make_request(200, data)
+ with mock.patch("google.auth.transport.requests.Request", return_value=request):
+ returned_signature = self._call_fut(
+ "123", service_account_email="service@example.com", access_token="token"
+ )
+ assert returned_signature == signature
+
+ def test_sign_bytes_failure(self):
+ from google.auth import exceptions
+
+ request = make_request(401)
+ with mock.patch("google.auth.transport.requests.Request", return_value=request):
+ with pytest.raises(exceptions.TransportError):
+ self._call_fut(
+ "123",
+ service_account_email="service@example.com",
+ access_token="token",
+ )
+
_DUMMY_SERVICE_ACCOUNT = None
@@ -697,3 +760,16 @@ def _make_credentials(signer_email=None):
return credentials
else:
return mock.Mock(spec=google.auth.credentials.Credentials)
+
+
+def make_request(status, data=None):
+ from google.auth import transport
+
+ response = mock.create_autospec(transport.Response, instance=True)
+ response.status = status
+ if data is not None:
+ response.data = json.dumps(data).encode("utf-8")
+
+ request = mock.create_autospec(transport.Request)
+ request.return_value = response
+ return request
diff --git a/tests/unit/test_acl.py b/tests/unit/test_acl.py
index d66a9439c..47400f1ef 100644
--- a/tests/unit/test_acl.py
+++ b/tests/unit/test_acl.py
@@ -125,6 +125,18 @@ def test_revoke_owner(self):
self.assertEqual(entity.get_roles(), set())
+class FakeReload(object):
+ """A callable used for faking the reload() method of an ACL instance."""
+
+ def __init__(self, acl):
+ self.acl = acl
+ self.timeouts_used = []
+
+ def __call__(self, timeout=None):
+ self.acl.loaded = True
+ self.timeouts_used.append(timeout)
+
+
class Test_ACL(unittest.TestCase):
@staticmethod
def _get_target_class():
@@ -132,6 +144,12 @@ def _get_target_class():
return ACL
+ @staticmethod
+ def _get_default_timeout():
+ from google.cloud.storage.constants import _DEFAULT_TIMEOUT
+
+ return _DEFAULT_TIMEOUT
+
def _make_one(self, *args, **kw):
return self._get_target_class()(*args, **kw)
@@ -150,13 +168,14 @@ def test_ctor(self):
def test__ensure_loaded(self):
acl = self._make_one()
+ _reload = FakeReload(acl)
+ acl.reload = _reload
+ acl.loaded = False
- def _reload():
- acl._really_loaded = True
+ acl._ensure_loaded(timeout=42)
- acl.reload = _reload
- acl._ensure_loaded()
- self.assertTrue(acl._really_loaded)
+ self.assertTrue(acl.loaded)
+ self.assertEqual(_reload.timeouts_used[0], 42)
def test_client_is_abstract(self):
acl = self._make_one()
@@ -179,13 +198,13 @@ def test___iter___empty_eager(self):
def test___iter___empty_lazy(self):
acl = self._make_one()
-
- def _reload():
- acl.loaded = True
+ _reload = FakeReload(acl)
+ acl.loaded = False
acl.reload = _reload
self.assertEqual(list(acl), [])
self.assertTrue(acl.loaded)
+ self.assertEqual(_reload.timeouts_used[0], self._get_default_timeout())
def test___iter___non_empty_no_roles(self):
TYPE = "type"
@@ -263,13 +282,13 @@ def test_has_entity_miss_str_eager(self):
def test_has_entity_miss_str_lazy(self):
acl = self._make_one()
-
- def _reload():
- acl.loaded = True
-
+ _reload = FakeReload(acl)
acl.reload = _reload
+ acl.loaded = False
+
self.assertFalse(acl.has_entity("nonesuch"))
self.assertTrue(acl.loaded)
+ self.assertEqual(_reload.timeouts_used[0], self._get_default_timeout())
def test_has_entity_miss_entity(self):
from google.cloud.storage.acl import _ACLEntity
@@ -304,13 +323,13 @@ def test_get_entity_miss_str_no_default_eager(self):
def test_get_entity_miss_str_no_default_lazy(self):
acl = self._make_one()
-
- def _reload():
- acl.loaded = True
-
+ _reload = FakeReload(acl)
acl.reload = _reload
+ acl.loaded = False
+
self.assertIsNone(acl.get_entity("nonesuch"))
self.assertTrue(acl.loaded)
+ self.assertEqual(_reload.timeouts_used[0], self._get_default_timeout())
def test_get_entity_miss_entity_no_default(self):
from google.cloud.storage.acl import _ACLEntity
@@ -380,15 +399,16 @@ def test_add_entity_miss_lazy(self):
entity.grant(ROLE)
acl = self._make_one()
- def _reload():
- acl.loaded = True
-
+ _reload = FakeReload(acl)
acl.reload = _reload
+ acl.loaded = False
+
acl.add_entity(entity)
self.assertTrue(acl.loaded)
self.assertEqual(list(acl), [{"entity": "type-id", "role": ROLE}])
self.assertEqual(list(acl.get_entities()), [entity])
self.assertTrue(acl.loaded)
+ self.assertEqual(_reload.timeouts_used[0], self._get_default_timeout())
def test_add_entity_hit(self):
from google.cloud.storage.acl import _ACLEntity
@@ -494,13 +514,13 @@ def test_get_entities_empty_eager(self):
def test_get_entities_empty_lazy(self):
acl = self._make_one()
-
- def _reload():
- acl.loaded = True
-
+ _reload = FakeReload(acl)
acl.reload = _reload
+ acl.loaded = False
+
self.assertEqual(acl.get_entities(), [])
self.assertTrue(acl.loaded)
+ self.assertEqual(_reload.timeouts_used[0], self._get_default_timeout())
def test_get_entities_nonempty(self):
TYPE = "type"
@@ -519,12 +539,18 @@ def test_reload_missing(self):
acl.reload_path = "/testing/acl"
acl.loaded = True
acl.entity("allUsers", ROLE)
- acl.reload(client=client)
+ acl.reload(client=client, timeout=42)
self.assertEqual(list(acl), [])
kw = connection._requested
self.assertEqual(len(kw), 1)
self.assertEqual(
- kw[0], {"method": "GET", "path": "/testing/acl", "query_params": {}}
+ kw[0],
+ {
+ "method": "GET",
+ "path": "/testing/acl",
+ "query_params": {},
+ "timeout": 42,
+ },
)
def test_reload_empty_result_clears_local(self):
@@ -543,7 +569,13 @@ def test_reload_empty_result_clears_local(self):
kw = connection._requested
self.assertEqual(len(kw), 1)
self.assertEqual(
- kw[0], {"method": "GET", "path": "/testing/acl", "query_params": {}}
+ kw[0],
+ {
+ "method": "GET",
+ "path": "/testing/acl",
+ "query_params": {},
+ "timeout": self._get_default_timeout(),
+ },
)
def test_reload_nonempty_result_w_user_project(self):
@@ -568,6 +600,7 @@ def test_reload_nonempty_result_w_user_project(self):
"method": "GET",
"path": "/testing/acl",
"query_params": {"userProject": USER_PROJECT},
+ "timeout": self._get_default_timeout(),
},
)
@@ -586,7 +619,7 @@ def test_save_existing_missing_none_passed(self):
acl = self._make_one()
acl.save_path = "/testing"
acl.loaded = True
- acl.save(client=client)
+ acl.save(client=client, timeout=42)
self.assertEqual(list(acl), [])
kw = connection._requested
self.assertEqual(len(kw), 1)
@@ -594,6 +627,7 @@ def test_save_existing_missing_none_passed(self):
self.assertEqual(kw[0]["path"], "/testing")
self.assertEqual(kw[0]["data"], {"acl": []})
self.assertEqual(kw[0]["query_params"], {"projection": "full"})
+ self.assertEqual(kw[0]["timeout"], 42)
def test_save_no_acl(self):
ROLE = "role"
@@ -617,6 +651,7 @@ def test_save_no_acl(self):
"path": "/testing",
"query_params": {"projection": "full"},
"data": {"acl": AFTER},
+ "timeout": self._get_default_timeout(),
},
)
@@ -648,6 +683,7 @@ def test_save_w_acl_w_user_project(self):
"path": "/testing",
"query_params": {"projection": "full", "userProject": USER_PROJECT},
"data": {"acl": new_acl},
+ "timeout": self._get_default_timeout(),
},
)
@@ -667,7 +703,7 @@ def test_save_predefined_valid(self):
acl = self._make_one()
acl.save_path = "/testing"
acl.loaded = True
- acl.save_predefined(PREDEFINED, client=client)
+ acl.save_predefined(PREDEFINED, client=client, timeout=42)
entries = list(acl)
self.assertEqual(len(entries), 0)
kw = connection._requested
@@ -679,6 +715,7 @@ def test_save_predefined_valid(self):
"path": "/testing",
"query_params": {"projection": "full", "predefinedAcl": PREDEFINED},
"data": {"acl": []},
+ "timeout": 42,
},
)
@@ -705,6 +742,7 @@ def test_save_predefined_w_XML_alias(self):
"predefinedAcl": PREDEFINED_JSON,
},
"data": {"acl": []},
+ "timeout": self._get_default_timeout(),
},
)
@@ -729,6 +767,7 @@ def test_save_predefined_valid_w_alternate_query_param(self):
"path": "/testing",
"query_params": {"projection": "full", "alternate": PREDEFINED},
"data": {"acl": []},
+ "timeout": self._get_default_timeout(),
},
)
@@ -742,7 +781,7 @@ def test_clear(self):
acl.save_path = "/testing"
acl.loaded = True
acl.entity("allUsers", ROLE1)
- acl.clear(client=client)
+ acl.clear(client=client, timeout=42)
self.assertEqual(list(acl), [STICKY])
kw = connection._requested
self.assertEqual(len(kw), 1)
@@ -753,6 +792,7 @@ def test_clear(self):
"path": "/testing",
"query_params": {"projection": "full"},
"data": {"acl": []},
+ "timeout": 42,
},
)
diff --git a/tests/unit/test_batch.py b/tests/unit/test_batch.py
index e18b1b9fa..ec8fe75de 100644
--- a/tests/unit/test_batch.py
+++ b/tests/unit/test_batch.py
@@ -90,6 +90,12 @@ def test_ctor_body_dict(self):
class TestBatch(unittest.TestCase):
+ @staticmethod
+ def _get_default_timeout():
+ from google.cloud.storage.constants import _DEFAULT_TIMEOUT
+
+ return _DEFAULT_TIMEOUT
+
@staticmethod
def _get_target_class():
from google.cloud.storage.batch import Batch
@@ -344,7 +350,7 @@ def test_finish_nonempty(self):
url=expected_url,
headers=mock.ANY,
data=mock.ANY,
- timeout=mock.ANY,
+ timeout=self._get_default_timeout(),
)
request_info = self._get_mutlipart_request(http)
@@ -393,8 +399,8 @@ def test_finish_nonempty_with_status_failure(self):
target1 = _MockObject()
target2 = _MockObject()
- batch._do_request("GET", url, {}, None, target1)
- batch._do_request("GET", url, {}, None, target2)
+ batch._do_request("GET", url, {}, None, target1, timeout=42)
+ batch._do_request("GET", url, {}, None, target2, timeout=420)
# Make sure futures are not populated.
self.assertEqual(
@@ -414,7 +420,7 @@ def test_finish_nonempty_with_status_failure(self):
url=expected_url,
headers=mock.ANY,
data=mock.ANY,
- timeout=mock.ANY,
+ timeout=420, # the last request timeout prevails
)
_, request_body, _, boundary = self._get_mutlipart_request(http)
diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py
index aa3819545..1c2b1e90d 100644
--- a/tests/unit/test_blob.py
+++ b/tests/unit/test_blob.py
@@ -43,6 +43,12 @@ def _make_one(*args, **kw):
blob._properties.update(properties)
return blob
+ @staticmethod
+ def _get_default_timeout():
+ from google.cloud.storage.constants import _DEFAULT_TIMEOUT
+
+ return _DEFAULT_TIMEOUT
+
def test_ctor_wo_encryption_key(self):
BLOB_NAME = "blob-name"
bucket = _Bucket()
@@ -391,6 +397,8 @@ def _generate_signed_url_helper(
credentials=None,
expiration=None,
encryption_key=None,
+ access_token=None,
+ service_account_email=None,
):
from six.moves.urllib import parse
from google.cloud._helpers import UTC
@@ -432,6 +440,8 @@ def _generate_signed_url_helper(
headers=headers,
query_parameters=query_parameters,
version=version,
+ access_token=access_token,
+ service_account_email=service_account_email,
)
self.assertEqual(signed_uri, signer.return_value)
@@ -464,6 +474,8 @@ def _generate_signed_url_helper(
"generation": generation,
"headers": expected_headers,
"query_parameters": query_parameters,
+ "access_token": access_token,
+ "service_account_email": service_account_email,
}
signer.assert_called_once_with(expected_creds, **expected_kwargs)
@@ -603,7 +615,7 @@ def test_exists_miss(self):
client = _Client(connection)
bucket = _Bucket(client)
blob = self._make_one(NONESUCH, bucket=bucket)
- self.assertFalse(blob.exists())
+ self.assertFalse(blob.exists(timeout=42))
self.assertEqual(len(connection._requested), 1)
self.assertEqual(
connection._requested[0],
@@ -612,6 +624,7 @@ def test_exists_miss(self):
"path": "/b/name/o/{}".format(NONESUCH),
"query_params": {"fields": "name"},
"_target_object": None,
+ "timeout": 42,
},
)
@@ -633,6 +646,7 @@ def test_exists_hit_w_user_project(self):
"path": "/b/name/o/{}".format(BLOB_NAME),
"query_params": {"fields": "name", "userProject": USER_PROJECT},
"_target_object": None,
+ "timeout": self._get_default_timeout(),
},
)
@@ -654,6 +668,7 @@ def test_exists_hit_w_generation(self):
"path": "/b/name/o/{}".format(BLOB_NAME),
"query_params": {"fields": "name", "generation": GENERATION},
"_target_object": None,
+ "timeout": self._get_default_timeout(),
},
)
@@ -667,7 +682,9 @@ def test_delete_wo_generation(self):
bucket._blobs[BLOB_NAME] = 1
blob.delete()
self.assertFalse(blob.exists())
- self.assertEqual(bucket._deleted, [(BLOB_NAME, None, None)])
+ self.assertEqual(
+ bucket._deleted, [(BLOB_NAME, None, None, self._get_default_timeout())]
+ )
def test_delete_w_generation(self):
BLOB_NAME = "blob-name"
@@ -678,9 +695,9 @@ def test_delete_w_generation(self):
bucket = _Bucket(client)
blob = self._make_one(BLOB_NAME, bucket=bucket, generation=GENERATION)
bucket._blobs[BLOB_NAME] = 1
- blob.delete()
+ blob.delete(timeout=42)
self.assertFalse(blob.exists())
- self.assertEqual(bucket._deleted, [(BLOB_NAME, None, GENERATION)])
+ self.assertEqual(bucket._deleted, [(BLOB_NAME, None, GENERATION, 42)])
def test__get_transport(self):
client = mock.Mock(spec=[u"_credentials", "_http"])
@@ -1954,7 +1971,7 @@ def test_get_iam_policy(self):
bucket = _Bucket(client=client)
blob = self._make_one(BLOB_NAME, bucket=bucket)
- policy = blob.get_iam_policy()
+ policy = blob.get_iam_policy(timeout=42)
self.assertIsInstance(policy, Policy)
self.assertEqual(policy.etag, RETURNED["etag"])
@@ -1970,6 +1987,7 @@ def test_get_iam_policy(self):
"path": "%s/iam" % (PATH,),
"query_params": {},
"_target_object": None,
+ "timeout": 42,
},
)
@@ -2005,6 +2023,7 @@ def test_get_iam_policy_w_requested_policy_version(self):
"path": "%s/iam" % (PATH,),
"query_params": {"optionsRequestedPolicyVersion": 3},
"_target_object": None,
+ "timeout": self._get_default_timeout(),
},
)
@@ -2045,6 +2064,7 @@ def test_get_iam_policy_w_user_project(self):
"path": "%s/iam" % (PATH,),
"query_params": {"userProject": USER_PROJECT},
"_target_object": None,
+ "timeout": self._get_default_timeout(),
},
)
@@ -2081,7 +2101,7 @@ def test_set_iam_policy(self):
bucket = _Bucket(client=client)
blob = self._make_one(BLOB_NAME, bucket=bucket)
- returned = blob.set_iam_policy(policy)
+ returned = blob.set_iam_policy(policy, timeout=42)
self.assertEqual(returned.etag, ETAG)
self.assertEqual(returned.version, VERSION)
@@ -2092,6 +2112,7 @@ def test_set_iam_policy(self):
self.assertEqual(kw[0]["method"], "PUT")
self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,))
self.assertEqual(kw[0]["query_params"], {})
+ self.assertEqual(kw[0]["timeout"], 42)
sent = kw[0]["data"]
self.assertEqual(sent["resourceId"], PATH)
self.assertEqual(len(sent["bindings"]), len(BINDINGS))
@@ -2153,7 +2174,7 @@ def test_test_iam_permissions(self):
bucket = _Bucket(client=client)
blob = self._make_one(BLOB_NAME, bucket=bucket)
- allowed = blob.test_iam_permissions(PERMISSIONS)
+ allowed = blob.test_iam_permissions(PERMISSIONS, timeout=42)
self.assertEqual(allowed, ALLOWED)
@@ -2162,6 +2183,7 @@ def test_test_iam_permissions(self):
self.assertEqual(kw[0]["method"], "GET")
self.assertEqual(kw[0]["path"], "%s/iam/testPermissions" % (PATH,))
self.assertEqual(kw[0]["query_params"], {"permissions": PERMISSIONS})
+ self.assertEqual(kw[0]["timeout"], 42)
def test_test_iam_permissions_w_user_project(self):
from google.cloud.storage.iam import STORAGE_OBJECTS_LIST
@@ -2196,6 +2218,7 @@ def test_test_iam_permissions_w_user_project(self):
kw[0]["query_params"],
{"permissions": PERMISSIONS, "userProject": USER_PROJECT},
)
+ self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
def test_make_public(self):
from google.cloud.storage.acl import _ACLEntity
@@ -2266,6 +2289,7 @@ def test_compose_wo_content_type_set(self):
"destination": {},
},
"_target_object": destination,
+ "timeout": self._get_default_timeout(),
},
)
@@ -2284,7 +2308,7 @@ def test_compose_minimal_w_user_project(self):
destination = self._make_one(DESTINATION, bucket=bucket)
destination.content_type = "text/plain"
- destination.compose(sources=[source_1, source_2])
+ destination.compose(sources=[source_1, source_2], timeout=42)
self.assertEqual(destination.etag, "DEADBEEF")
@@ -2301,6 +2325,7 @@ def test_compose_minimal_w_user_project(self):
"destination": {"contentType": "text/plain"},
},
"_target_object": destination,
+ "timeout": 42,
},
)
@@ -2341,6 +2366,7 @@ def test_compose_w_additional_property_changes(self):
},
},
"_target_object": destination,
+ "timeout": self._get_default_timeout(),
},
)
@@ -2394,7 +2420,7 @@ def test_rewrite_w_generations(self):
DEST_BLOB, bucket=dest_bucket, generation=DEST_GENERATION
)
- token, rewritten, size = dest_blob.rewrite(source_blob)
+ token, rewritten, size = dest_blob.rewrite(source_blob, timeout=42)
self.assertEqual(token, TOKEN)
self.assertEqual(rewritten, 33)
@@ -2410,6 +2436,7 @@ def test_rewrite_w_generations(self):
),
)
self.assertEqual(kw["query_params"], {"sourceGeneration": SOURCE_GENERATION})
+ self.assertEqual(kw["timeout"], 42)
def test_rewrite_other_bucket_other_name_no_encryption_partial(self):
SOURCE_BLOB = "source"
@@ -2449,6 +2476,7 @@ def test_rewrite_other_bucket_other_name_no_encryption_partial(self):
self.assertEqual(kw[0]["query_params"], {})
SENT = {}
self.assertEqual(kw[0]["data"], SENT)
+ self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
headers = {key.title(): str(value) for key, value in kw[0]["headers"].items()}
self.assertNotIn("X-Goog-Copy-Source-Encryption-Algorithm", headers)
@@ -2492,6 +2520,7 @@ def test_rewrite_same_name_no_old_key_new_key_done_w_user_project(self):
self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT})
SENT = {}
self.assertEqual(kw[0]["data"], SENT)
+ self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
headers = {key.title(): str(value) for key, value in kw[0]["headers"].items()}
self.assertNotIn("X-Goog-Copy-Source-Encryption-Algorithm", headers)
@@ -2539,6 +2568,7 @@ def test_rewrite_same_name_no_key_new_key_w_token(self):
self.assertEqual(kw[0]["query_params"], {"rewriteToken": TOKEN})
SENT = {}
self.assertEqual(kw[0]["data"], SENT)
+ self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
headers = {key.title(): str(value) for key, value in kw[0]["headers"].items()}
self.assertEqual(headers["X-Goog-Copy-Source-Encryption-Algorithm"], "AES256")
@@ -2589,6 +2619,7 @@ def test_rewrite_same_name_w_old_key_new_kms_key(self):
self.assertEqual(
kw[0]["query_params"], {"destinationKmsKeyName": DEST_KMS_RESOURCE}
)
+ self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
SENT = {"kmsKeyName": DEST_KMS_RESOURCE}
self.assertEqual(kw[0]["data"], SENT)
@@ -3339,9 +3370,9 @@ def __init__(self, client=None, name="name", user_project=None):
self.path = "/b/" + name
self.user_project = user_project
- def delete_blob(self, blob_name, client=None, generation=None):
+ def delete_blob(self, blob_name, client=None, generation=None, timeout=None):
del self._blobs[blob_name]
- self._deleted.append((blob_name, client, generation))
+ self._deleted.append((blob_name, client, generation, timeout))
class _Client(object):
diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py
index 68399b3c8..312fc0f65 100644
--- a/tests/unit/test_bucket.py
+++ b/tests/unit/test_bucket.py
@@ -361,6 +361,12 @@ def _get_target_class():
return Bucket
+ @staticmethod
+ def _get_default_timeout():
+ from google.cloud.storage.constants import _DEFAULT_TIMEOUT
+
+ return _DEFAULT_TIMEOUT
+
def _make_one(self, client=None, name=None, properties=None, user_project=None):
if client is None:
connection = _Connection()
@@ -571,12 +577,13 @@ def api_request(cls, *args, **kwargs):
BUCKET_NAME = "bucket-name"
bucket = self._make_one(name=BUCKET_NAME)
client = _Client(_FakeConnection)
- self.assertFalse(bucket.exists(client=client))
+ self.assertFalse(bucket.exists(client=client, timeout=42))
expected_called_kwargs = {
"method": "GET",
"path": bucket.path,
"query_params": {"fields": "name"},
"_target_object": None,
+ "timeout": 42,
}
expected_cw = [((), expected_called_kwargs)]
self.assertEqual(_FakeConnection._called_with, expected_cw)
@@ -603,6 +610,7 @@ def api_request(cls, *args, **kwargs):
"path": bucket.path,
"query_params": {"fields": "name", "userProject": USER_PROJECT},
"_target_object": None,
+ "timeout": self._get_default_timeout(),
}
expected_cw = [((), expected_called_kwargs)]
self.assertEqual(_FakeConnection._called_with, expected_cw)
@@ -653,6 +661,7 @@ def test_create_w_explicit_project(self):
query_params={"project": OTHER_PROJECT},
data=DATA,
_target_object=bucket,
+ timeout=self._get_default_timeout(),
)
def test_create_w_explicit_location(self):
@@ -679,6 +688,7 @@ def test_create_w_explicit_location(self):
data=DATA,
_target_object=bucket,
query_params={"project": "PROJECT"},
+ timeout=self._get_default_timeout(),
)
self.assertEqual(bucket.location, LOCATION)
@@ -693,7 +703,7 @@ def test_create_hit(self):
client._base_connection = connection
bucket = self._make_one(client=client, name=BUCKET_NAME)
- bucket.create()
+ bucket.create(timeout=42)
connection.api_request.assert_called_once_with(
method="POST",
@@ -701,6 +711,7 @@ def test_create_hit(self):
query_params={"project": PROJECT},
data=DATA,
_target_object=bucket,
+ timeout=42,
)
def test_create_w_extra_properties(self):
@@ -750,6 +761,7 @@ def test_create_w_extra_properties(self):
query_params={"project": PROJECT},
data=DATA,
_target_object=bucket,
+ timeout=self._get_default_timeout(),
)
def test_create_w_predefined_acl_invalid(self):
@@ -784,6 +796,7 @@ def test_create_w_predefined_acl_valid(self):
expected_qp = {"project": PROJECT, "predefinedAcl": "publicRead"}
self.assertEqual(kw["query_params"], expected_qp)
self.assertEqual(kw["data"], DATA)
+ self.assertEqual(kw["timeout"], self._get_default_timeout())
def test_create_w_predefined_default_object_acl_invalid(self):
from google.cloud.storage.client import Client
@@ -817,6 +830,7 @@ def test_create_w_predefined_default_object_acl_valid(self):
expected_qp = {"project": PROJECT, "predefinedDefaultObjectAcl": "publicRead"}
self.assertEqual(kw["query_params"], expected_qp)
self.assertEqual(kw["data"], DATA)
+ self.assertEqual(kw["timeout"], self._get_default_timeout())
def test_acl_property(self):
from google.cloud.storage.acl import BucketACL
@@ -849,11 +863,12 @@ def test_get_blob_miss(self):
connection = _Connection()
client = _Client(connection)
bucket = self._make_one(name=NAME)
- result = bucket.get_blob(NONESUCH, client=client)
+ result = bucket.get_blob(NONESUCH, client=client, timeout=42)
self.assertIsNone(result)
kw, = connection._requested
self.assertEqual(kw["method"], "GET")
self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, NONESUCH))
+ self.assertEqual(kw["timeout"], 42)
def test_get_blob_hit_w_user_project(self):
NAME = "name"
@@ -870,6 +885,7 @@ def test_get_blob_hit_w_user_project(self):
self.assertEqual(kw["method"], "GET")
self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME))
self.assertEqual(kw["query_params"], expected_qp)
+ self.assertEqual(kw["timeout"], self._get_default_timeout())
def test_get_blob_hit_w_generation(self):
NAME = "name"
@@ -887,6 +903,7 @@ def test_get_blob_hit_w_generation(self):
self.assertEqual(kw["method"], "GET")
self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME))
self.assertEqual(kw["query_params"], expected_qp)
+ self.assertEqual(kw["timeout"], self._get_default_timeout())
def test_get_blob_hit_with_kwargs(self):
from google.cloud.storage.blob import _get_encryption_headers
@@ -908,6 +925,7 @@ def test_get_blob_hit_with_kwargs(self):
self.assertEqual(kw["method"], "GET")
self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME))
self.assertEqual(kw["headers"], _get_encryption_headers(KEY))
+ self.assertEqual(kw["timeout"], self._get_default_timeout())
self.assertEqual(blob.chunk_size, CHUNK_SIZE)
self.assertEqual(blob._encryption_key, KEY)
@@ -923,6 +941,7 @@ def test_list_blobs_defaults(self):
self.assertEqual(kw["method"], "GET")
self.assertEqual(kw["path"], "/b/%s/o" % NAME)
self.assertEqual(kw["query_params"], {"projection": "noAcl"})
+ self.assertEqual(kw["timeout"], self._get_default_timeout())
def test_list_blobs_w_all_arguments_and_user_project(self):
NAME = "name"
@@ -956,6 +975,7 @@ def test_list_blobs_w_all_arguments_and_user_project(self):
projection=PROJECTION,
fields=FIELDS,
client=client,
+ timeout=42,
)
blobs = list(iterator)
self.assertEqual(blobs, [])
@@ -963,6 +983,7 @@ def test_list_blobs_w_all_arguments_and_user_project(self):
self.assertEqual(kw["method"], "GET")
self.assertEqual(kw["path"], "/b/%s/o" % NAME)
self.assertEqual(kw["query_params"], EXPECTED)
+ self.assertEqual(kw["timeout"], 42)
def test_list_notifications(self):
from google.cloud.storage.notification import BucketNotification
@@ -996,7 +1017,10 @@ def test_list_notifications(self):
client = _Client(connection)
bucket = self._make_one(client=client, name=NAME)
- notifications = list(bucket.list_notifications())
+ notifications = list(bucket.list_notifications(timeout=42))
+
+ req_args = client._connection._requested[0]
+ self.assertEqual(req_args.get("timeout"), 42)
self.assertEqual(len(notifications), len(resources))
for notification, resource, topic_ref in zip(
@@ -1033,6 +1057,7 @@ def test_delete_miss(self):
"path": bucket.path,
"query_params": {},
"_target_object": None,
+ "timeout": self._get_default_timeout(),
}
]
self.assertEqual(connection._deleted_buckets, expected_cw)
@@ -1045,7 +1070,7 @@ def test_delete_hit_with_user_project(self):
connection._delete_bucket = True
client = _Client(connection)
bucket = self._make_one(client=client, name=NAME, user_project=USER_PROJECT)
- result = bucket.delete(force=True)
+ result = bucket.delete(force=True, timeout=42)
self.assertIsNone(result)
expected_cw = [
{
@@ -1053,6 +1078,7 @@ def test_delete_hit_with_user_project(self):
"path": bucket.path,
"_target_object": None,
"query_params": {"userProject": USER_PROJECT},
+ "timeout": 42,
}
]
self.assertEqual(connection._deleted_buckets, expected_cw)
@@ -1075,6 +1101,7 @@ def test_delete_force_delete_blobs(self):
"path": bucket.path,
"query_params": {},
"_target_object": None,
+ "timeout": self._get_default_timeout(),
}
]
self.assertEqual(connection._deleted_buckets, expected_cw)
@@ -1096,6 +1123,7 @@ def test_delete_force_miss_blobs(self):
"path": bucket.path,
"query_params": {},
"_target_object": None,
+ "timeout": self._get_default_timeout(),
}
]
self.assertEqual(connection._deleted_buckets, expected_cw)
@@ -1128,6 +1156,7 @@ def test_delete_blob_miss(self):
self.assertEqual(kw["method"], "DELETE")
self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, NONESUCH))
self.assertEqual(kw["query_params"], {})
+ self.assertEqual(kw["timeout"], self._get_default_timeout())
def test_delete_blob_hit_with_user_project(self):
NAME = "name"
@@ -1136,12 +1165,13 @@ def test_delete_blob_hit_with_user_project(self):
connection = _Connection({})
client = _Client(connection)
bucket = self._make_one(client=client, name=NAME, user_project=USER_PROJECT)
- result = bucket.delete_blob(BLOB_NAME)
+ result = bucket.delete_blob(BLOB_NAME, timeout=42)
self.assertIsNone(result)
kw, = connection._requested
self.assertEqual(kw["method"], "DELETE")
self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME))
self.assertEqual(kw["query_params"], {"userProject": USER_PROJECT})
+ self.assertEqual(kw["timeout"], 42)
def test_delete_blob_hit_with_generation(self):
NAME = "name"
@@ -1156,6 +1186,7 @@ def test_delete_blob_hit_with_generation(self):
self.assertEqual(kw["method"], "DELETE")
self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME))
self.assertEqual(kw["query_params"], {"generation": GENERATION})
+ self.assertEqual(kw["timeout"], self._get_default_timeout())
def test_delete_blobs_empty(self):
NAME = "name"
@@ -1172,12 +1203,13 @@ def test_delete_blobs_hit_w_user_project(self):
connection = _Connection({})
client = _Client(connection)
bucket = self._make_one(client=client, name=NAME, user_project=USER_PROJECT)
- bucket.delete_blobs([BLOB_NAME])
+ bucket.delete_blobs([BLOB_NAME], timeout=42)
kw = connection._requested
self.assertEqual(len(kw), 1)
self.assertEqual(kw[0]["method"], "DELETE")
self.assertEqual(kw[0]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME))
self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT})
+ self.assertEqual(kw[0]["timeout"], 42)
def test_delete_blobs_miss_no_on_error(self):
from google.cloud.exceptions import NotFound
@@ -1193,8 +1225,10 @@ def test_delete_blobs_miss_no_on_error(self):
self.assertEqual(len(kw), 2)
self.assertEqual(kw[0]["method"], "DELETE")
self.assertEqual(kw[0]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME))
+ self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
self.assertEqual(kw[1]["method"], "DELETE")
self.assertEqual(kw[1]["path"], "/b/%s/o/%s" % (NAME, NONESUCH))
+ self.assertEqual(kw[1]["timeout"], self._get_default_timeout())
def test_delete_blobs_miss_w_on_error(self):
NAME = "name"
@@ -1210,8 +1244,10 @@ def test_delete_blobs_miss_w_on_error(self):
self.assertEqual(len(kw), 2)
self.assertEqual(kw[0]["method"], "DELETE")
self.assertEqual(kw[0]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME))
+ self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
self.assertEqual(kw[1]["method"], "DELETE")
self.assertEqual(kw[1]["path"], "/b/%s/o/%s" % (NAME, NONESUCH))
+ self.assertEqual(kw[1]["timeout"], self._get_default_timeout())
@staticmethod
def _make_blob(bucket_name, blob_name):
@@ -1232,7 +1268,7 @@ def test_copy_blobs_wo_name(self):
dest = self._make_one(client=client, name=DEST)
blob = self._make_blob(SOURCE, BLOB_NAME)
- new_blob = source.copy_blob(blob, dest)
+ new_blob = source.copy_blob(blob, dest, timeout=42)
self.assertIs(new_blob.bucket, dest)
self.assertEqual(new_blob.name, BLOB_NAME)
@@ -1244,6 +1280,7 @@ def test_copy_blobs_wo_name(self):
self.assertEqual(kw["method"], "POST")
self.assertEqual(kw["path"], COPY_PATH)
self.assertEqual(kw["query_params"], {})
+ self.assertEqual(kw["timeout"], 42)
def test_copy_blobs_source_generation(self):
SOURCE = "source"
@@ -1269,6 +1306,7 @@ def test_copy_blobs_source_generation(self):
self.assertEqual(kw["method"], "POST")
self.assertEqual(kw["path"], COPY_PATH)
self.assertEqual(kw["query_params"], {"sourceGeneration": GENERATION})
+ self.assertEqual(kw["timeout"], self._get_default_timeout())
def test_copy_blobs_preserve_acl(self):
from google.cloud.storage.acl import ObjectACL
@@ -1301,10 +1339,12 @@ def test_copy_blobs_preserve_acl(self):
self.assertEqual(kw1["method"], "POST")
self.assertEqual(kw1["path"], COPY_PATH)
self.assertEqual(kw1["query_params"], {})
+ self.assertEqual(kw1["timeout"], self._get_default_timeout())
self.assertEqual(kw2["method"], "PATCH")
self.assertEqual(kw2["path"], NEW_BLOB_PATH)
self.assertEqual(kw2["query_params"], {"projection": "full"})
+ self.assertEqual(kw2["timeout"], self._get_default_timeout())
def test_copy_blobs_w_name_and_user_project(self):
SOURCE = "source"
@@ -1330,6 +1370,7 @@ def test_copy_blobs_w_name_and_user_project(self):
self.assertEqual(kw["method"], "POST")
self.assertEqual(kw["path"], COPY_PATH)
self.assertEqual(kw["query_params"], {"userProject": USER_PROJECT})
+ self.assertEqual(kw["timeout"], self._get_default_timeout())
def test_rename_blob(self):
BUCKET_NAME = "BUCKET_NAME"
@@ -1341,7 +1382,9 @@ def test_rename_blob(self):
bucket = self._make_one(client=client, name=BUCKET_NAME)
blob = self._make_blob(BUCKET_NAME, BLOB_NAME)
- renamed_blob = bucket.rename_blob(blob, NEW_BLOB_NAME, client=client)
+ renamed_blob = bucket.rename_blob(
+ blob, NEW_BLOB_NAME, client=client, timeout=42
+ )
self.assertIs(renamed_blob.bucket, bucket)
self.assertEqual(renamed_blob.name, NEW_BLOB_NAME)
@@ -1353,8 +1396,9 @@ def test_rename_blob(self):
self.assertEqual(kw["method"], "POST")
self.assertEqual(kw["path"], COPY_PATH)
self.assertEqual(kw["query_params"], {})
+ self.assertEqual(kw["timeout"], 42)
- blob.delete.assert_called_once_with(client)
+ blob.delete.assert_called_once_with(client=client, timeout=42)
def test_rename_blob_to_itself(self):
BUCKET_NAME = "BUCKET_NAME"
@@ -1377,6 +1421,7 @@ def test_rename_blob_to_itself(self):
self.assertEqual(kw["method"], "POST")
self.assertEqual(kw["path"], COPY_PATH)
self.assertEqual(kw["query_params"], {})
+ self.assertEqual(kw["timeout"], self._get_default_timeout())
blob.delete.assert_not_called()
@@ -1680,13 +1725,15 @@ def test_labels_setter_with_removal(self):
self.assertEqual(len(kwargs["data"]["labels"]), 2)
self.assertEqual(kwargs["data"]["labels"]["color"], "red")
self.assertIsNone(kwargs["data"]["labels"]["flavor"])
+ self.assertEqual(kwargs["timeout"], self._get_default_timeout())
# A second patch call should be a no-op for labels.
client._connection.api_request.reset_mock()
- bucket.patch(client=client)
+ bucket.patch(client=client, timeout=42)
client._connection.api_request.assert_called()
_, _, kwargs = client._connection.api_request.mock_calls[0]
self.assertNotIn("labels", kwargs["data"])
+ self.assertEqual(kwargs["timeout"], 42)
def test_location_type_getter_unset(self):
bucket = self._make_one()
@@ -2047,7 +2094,7 @@ def test_get_iam_policy(self):
client = _Client(connection, None)
bucket = self._make_one(client=client, name=NAME)
- policy = bucket.get_iam_policy()
+ policy = bucket.get_iam_policy(timeout=42)
self.assertIsInstance(policy, Policy)
self.assertEqual(policy.etag, RETURNED["etag"])
@@ -2059,6 +2106,7 @@ def test_get_iam_policy(self):
self.assertEqual(kw[0]["method"], "GET")
self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,))
self.assertEqual(kw[0]["query_params"], {})
+ self.assertEqual(kw[0]["timeout"], 42)
def test_get_iam_policy_w_user_project(self):
from google.api_core.iam import Policy
@@ -2091,6 +2139,7 @@ def test_get_iam_policy_w_user_project(self):
self.assertEqual(kw[0]["method"], "GET")
self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,))
self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT})
+ self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
def test_get_iam_policy_w_requested_policy_version(self):
from google.cloud.storage.iam import STORAGE_OWNER_ROLE
@@ -2120,6 +2169,7 @@ def test_get_iam_policy_w_requested_policy_version(self):
self.assertEqual(kw[0]["method"], "GET")
self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,))
self.assertEqual(kw[0]["query_params"], {"optionsRequestedPolicyVersion": 3})
+ self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
def test_set_iam_policy(self):
import operator
@@ -2152,7 +2202,7 @@ def test_set_iam_policy(self):
client = _Client(connection, None)
bucket = self._make_one(client=client, name=NAME)
- returned = bucket.set_iam_policy(policy)
+ returned = bucket.set_iam_policy(policy, timeout=42)
self.assertEqual(returned.etag, ETAG)
self.assertEqual(returned.version, VERSION)
@@ -2163,6 +2213,7 @@ def test_set_iam_policy(self):
self.assertEqual(kw[0]["method"], "PUT")
self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,))
self.assertEqual(kw[0]["query_params"], {})
+ self.assertEqual(kw[0]["timeout"], 42)
sent = kw[0]["data"]
self.assertEqual(sent["resourceId"], PATH)
self.assertEqual(len(sent["bindings"]), len(BINDINGS))
@@ -2216,6 +2267,7 @@ def test_set_iam_policy_w_user_project(self):
self.assertEqual(kw[0]["method"], "PUT")
self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,))
self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT})
+ self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
sent = kw[0]["data"]
self.assertEqual(sent["resourceId"], PATH)
self.assertEqual(len(sent["bindings"]), len(BINDINGS))
@@ -2244,7 +2296,7 @@ def test_test_iam_permissions(self):
client = _Client(connection, None)
bucket = self._make_one(client=client, name=NAME)
- allowed = bucket.test_iam_permissions(PERMISSIONS)
+ allowed = bucket.test_iam_permissions(PERMISSIONS, timeout=42)
self.assertEqual(allowed, ALLOWED)
@@ -2253,6 +2305,7 @@ def test_test_iam_permissions(self):
self.assertEqual(kw[0]["method"], "GET")
self.assertEqual(kw[0]["path"], "%s/iam/testPermissions" % (PATH,))
self.assertEqual(kw[0]["query_params"], {"permissions": PERMISSIONS})
+ self.assertEqual(kw[0]["timeout"], 42)
def test_test_iam_permissions_w_user_project(self):
from google.cloud.storage.iam import STORAGE_OBJECTS_LIST
@@ -2285,6 +2338,7 @@ def test_test_iam_permissions_w_user_project(self):
kw[0]["query_params"],
{"permissions": PERMISSIONS, "userProject": USER_PROJECT},
)
+ self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
def test_make_public_defaults(self):
from google.cloud.storage.acl import _ACLEntity
@@ -2306,6 +2360,7 @@ def test_make_public_defaults(self):
self.assertEqual(kw[0]["path"], "/b/%s" % NAME)
self.assertEqual(kw[0]["data"], {"acl": after["acl"]})
self.assertEqual(kw[0]["query_params"], {"projection": "full"})
+ self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
def _make_public_w_future_helper(self, default_object_acl_loaded=True):
from google.cloud.storage.acl import _ACLEntity
@@ -2335,6 +2390,7 @@ def _make_public_w_future_helper(self, default_object_acl_loaded=True):
self.assertEqual(kw[0]["path"], "/b/%s" % NAME)
self.assertEqual(kw[0]["data"], {"acl": permissive})
self.assertEqual(kw[0]["query_params"], {"projection": "full"})
+ self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
if not default_object_acl_loaded:
self.assertEqual(kw[1]["method"], "GET")
self.assertEqual(kw[1]["path"], "/b/%s/defaultObjectAcl" % NAME)
@@ -2343,6 +2399,7 @@ def _make_public_w_future_helper(self, default_object_acl_loaded=True):
self.assertEqual(kw[-1]["path"], "/b/%s" % NAME)
self.assertEqual(kw[-1]["data"], {"defaultObjectAcl": permissive})
self.assertEqual(kw[-1]["query_params"], {"projection": "full"})
+ self.assertEqual(kw[-1]["timeout"], self._get_default_timeout())
def test_make_public_w_future(self):
self._make_public_w_future_helper(default_object_acl_loaded=True)
@@ -2373,8 +2430,10 @@ def all(self):
def grant_read(self):
self._granted = True
- def save(self, client=None):
- _saved.append((self._bucket, self._name, self._granted, client))
+ def save(self, client=None, timeout=None):
+ _saved.append(
+ (self._bucket, self._name, self._granted, client, timeout)
+ )
def item_to_blob(self, item):
return _Blob(self.bucket, item["name"])
@@ -2390,22 +2449,24 @@ def item_to_blob(self, item):
bucket.default_object_acl.loaded = True
with mock.patch("google.cloud.storage.bucket._item_to_blob", new=item_to_blob):
- bucket.make_public(recursive=True)
+ bucket.make_public(recursive=True, timeout=42)
self.assertEqual(list(bucket.acl), permissive)
self.assertEqual(list(bucket.default_object_acl), [])
- self.assertEqual(_saved, [(bucket, BLOB_NAME, True, None)])
+ self.assertEqual(_saved, [(bucket, BLOB_NAME, True, None, 42)])
kw = connection._requested
self.assertEqual(len(kw), 2)
self.assertEqual(kw[0]["method"], "PATCH")
self.assertEqual(kw[0]["path"], "/b/%s" % NAME)
self.assertEqual(kw[0]["data"], {"acl": permissive})
self.assertEqual(kw[0]["query_params"], {"projection": "full"})
+ self.assertEqual(kw[0]["timeout"], 42)
self.assertEqual(kw[1]["method"], "GET")
self.assertEqual(kw[1]["path"], "/b/%s/o" % NAME)
max_results = bucket._MAX_OBJECTS_FOR_ITERATION + 1
self.assertEqual(
kw[1]["query_params"], {"maxResults": max_results, "projection": "full"}
)
+ self.assertEqual(kw[1]["timeout"], 42)
def test_make_public_recursive_too_many(self):
from google.cloud.storage.acl import _ACLEntity
@@ -2445,6 +2506,7 @@ def test_make_private_defaults(self):
self.assertEqual(kw[0]["path"], "/b/%s" % NAME)
self.assertEqual(kw[0]["data"], {"acl": after["acl"]})
self.assertEqual(kw[0]["query_params"], {"projection": "full"})
+ self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
def _make_private_w_future_helper(self, default_object_acl_loaded=True):
NAME = "name"
@@ -2472,6 +2534,7 @@ def _make_private_w_future_helper(self, default_object_acl_loaded=True):
self.assertEqual(kw[0]["path"], "/b/%s" % NAME)
self.assertEqual(kw[0]["data"], {"acl": no_permissions})
self.assertEqual(kw[0]["query_params"], {"projection": "full"})
+ self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
if not default_object_acl_loaded:
self.assertEqual(kw[1]["method"], "GET")
self.assertEqual(kw[1]["path"], "/b/%s/defaultObjectAcl" % NAME)
@@ -2480,6 +2543,7 @@ def _make_private_w_future_helper(self, default_object_acl_loaded=True):
self.assertEqual(kw[-1]["path"], "/b/%s" % NAME)
self.assertEqual(kw[-1]["data"], {"defaultObjectAcl": no_permissions})
self.assertEqual(kw[-1]["query_params"], {"projection": "full"})
+ self.assertEqual(kw[-1]["timeout"], self._get_default_timeout())
def test_make_private_w_future(self):
self._make_private_w_future_helper(default_object_acl_loaded=True)
@@ -2508,8 +2572,10 @@ def all(self):
def revoke_read(self):
self._granted = False
- def save(self, client=None):
- _saved.append((self._bucket, self._name, self._granted, client))
+ def save(self, client=None, timeout=None):
+ _saved.append(
+ (self._bucket, self._name, self._granted, client, timeout)
+ )
def item_to_blob(self, item):
return _Blob(self.bucket, item["name"])
@@ -2525,22 +2591,24 @@ def item_to_blob(self, item):
bucket.default_object_acl.loaded = True
with mock.patch("google.cloud.storage.bucket._item_to_blob", new=item_to_blob):
- bucket.make_private(recursive=True)
+ bucket.make_private(recursive=True, timeout=42)
self.assertEqual(list(bucket.acl), no_permissions)
self.assertEqual(list(bucket.default_object_acl), [])
- self.assertEqual(_saved, [(bucket, BLOB_NAME, False, None)])
+ self.assertEqual(_saved, [(bucket, BLOB_NAME, False, None, 42)])
kw = connection._requested
self.assertEqual(len(kw), 2)
self.assertEqual(kw[0]["method"], "PATCH")
self.assertEqual(kw[0]["path"], "/b/%s" % NAME)
self.assertEqual(kw[0]["data"], {"acl": no_permissions})
self.assertEqual(kw[0]["query_params"], {"projection": "full"})
+ self.assertEqual(kw[0]["timeout"], 42)
self.assertEqual(kw[1]["method"], "GET")
self.assertEqual(kw[1]["path"], "/b/%s/o" % NAME)
max_results = bucket._MAX_OBJECTS_FOR_ITERATION + 1
self.assertEqual(
kw[1]["query_params"], {"maxResults": max_results, "projection": "full"}
)
+ self.assertEqual(kw[1]["timeout"], 42)
def test_make_private_recursive_too_many(self):
NO_PERMISSIONS = []
@@ -2778,12 +2846,13 @@ def test_lock_retention_policy_ok(self):
"retentionPeriod": 86400 * 100, # 100 days
}
- bucket.lock_retention_policy()
+ bucket.lock_retention_policy(timeout=42)
kw, = connection._requested
self.assertEqual(kw["method"], "POST")
self.assertEqual(kw["path"], "/b/{}/lockRetentionPolicy".format(name))
self.assertEqual(kw["query_params"], {"ifMetagenerationMatch": 1234})
+ self.assertEqual(kw["timeout"], 42)
def test_lock_retention_policy_w_user_project(self):
name = "name"
@@ -2817,6 +2886,7 @@ def test_lock_retention_policy_w_user_project(self):
kw["query_params"],
{"ifMetagenerationMatch": 1234, "userProject": user_project},
)
+ self.assertEqual(kw["timeout"], self._get_default_timeout())
def test_generate_signed_url_w_invalid_version(self):
expiration = "2014-10-16T20:34:37.000Z"
diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py
index f3c090ebb..b3e5874ef 100644
--- a/tests/unit/test_client.py
+++ b/tests/unit/test_client.py
@@ -68,6 +68,12 @@ def _get_target_class():
return Client
+ @staticmethod
+ def _get_default_timeout():
+ from google.cloud.storage.constants import _DEFAULT_TIMEOUT
+
+ return _DEFAULT_TIMEOUT
+
def _make_one(self, *args, **kw):
return self._get_target_class()(*args, **kw)
@@ -259,7 +265,7 @@ def test_get_service_account_email_wo_project(self):
http = _make_requests_session([_make_json_response(RESOURCE)])
client._http_internal = http
- service_account_email = client.get_service_account_email()
+ service_account_email = client.get_service_account_email(timeout=42)
self.assertEqual(service_account_email, EMAIL)
URI = "/".join(
@@ -271,7 +277,7 @@ def test_get_service_account_email_wo_project(self):
]
)
http.request.assert_called_once_with(
- method="GET", url=URI, data=None, headers=mock.ANY, timeout=mock.ANY
+ method="GET", url=URI, data=None, headers=mock.ANY, timeout=42
)
def test_get_service_account_email_w_project(self):
@@ -297,7 +303,11 @@ def test_get_service_account_email_w_project(self):
]
)
http.request.assert_called_once_with(
- method="GET", url=URI, data=None, headers=mock.ANY, timeout=mock.ANY
+ method="GET",
+ url=URI,
+ data=None,
+ headers=mock.ANY,
+ timeout=self._get_default_timeout(),
)
def test_bucket(self):
@@ -363,10 +373,10 @@ def test_get_bucket_with_string_miss(self):
client._http_internal = http
with self.assertRaises(NotFound):
- client.get_bucket(NONESUCH)
+ client.get_bucket(NONESUCH, timeout=42)
http.request.assert_called_once_with(
- method="GET", url=URI, data=mock.ANY, headers=mock.ANY, timeout=mock.ANY
+ method="GET", url=URI, data=mock.ANY, headers=mock.ANY, timeout=42
)
def test_get_bucket_with_string_hit(self):
@@ -396,7 +406,11 @@ def test_get_bucket_with_string_hit(self):
self.assertIsInstance(bucket, Bucket)
self.assertEqual(bucket.name, BUCKET_NAME)
http.request.assert_called_once_with(
- method="GET", url=URI, data=mock.ANY, headers=mock.ANY, timeout=mock.ANY
+ method="GET",
+ url=URI,
+ data=mock.ANY,
+ headers=mock.ANY,
+ timeout=self._get_default_timeout(),
)
def test_get_bucket_with_object_miss(self):
@@ -427,7 +441,11 @@ def test_get_bucket_with_object_miss(self):
client.get_bucket(bucket_obj)
http.request.assert_called_once_with(
- method="GET", url=URI, data=mock.ANY, headers=mock.ANY, timeout=mock.ANY
+ method="GET",
+ url=URI,
+ data=mock.ANY,
+ headers=mock.ANY,
+ timeout=self._get_default_timeout(),
)
def test_get_bucket_with_object_hit(self):
@@ -458,7 +476,11 @@ def test_get_bucket_with_object_hit(self):
self.assertIsInstance(bucket, Bucket)
self.assertEqual(bucket.name, bucket_name)
http.request.assert_called_once_with(
- method="GET", url=URI, data=mock.ANY, headers=mock.ANY, timeout=mock.ANY
+ method="GET",
+ url=URI,
+ data=mock.ANY,
+ headers=mock.ANY,
+ timeout=self._get_default_timeout(),
)
def test_lookup_bucket_miss(self):
@@ -481,11 +503,11 @@ def test_lookup_bucket_miss(self):
)
client._http_internal = http
- bucket = client.lookup_bucket(NONESUCH)
+ bucket = client.lookup_bucket(NONESUCH, timeout=42)
self.assertIsNone(bucket)
http.request.assert_called_once_with(
- method="GET", url=URI, data=mock.ANY, headers=mock.ANY, timeout=mock.ANY
+ method="GET", url=URI, data=mock.ANY, headers=mock.ANY, timeout=42
)
def test_lookup_bucket_hit(self):
@@ -514,7 +536,11 @@ def test_lookup_bucket_hit(self):
self.assertIsInstance(bucket, Bucket)
self.assertEqual(bucket.name, BUCKET_NAME)
http.request.assert_called_once_with(
- method="GET", url=URI, data=mock.ANY, headers=mock.ANY, timeout=mock.ANY
+ method="GET",
+ url=URI,
+ data=mock.ANY,
+ headers=mock.ANY,
+ timeout=self._get_default_timeout(),
)
def test_create_bucket_w_missing_client_project(self):
@@ -550,6 +576,7 @@ def test_create_bucket_w_conflict(self):
query_params={"project": other_project, "userProject": user_project},
data=data,
_target_object=mock.ANY,
+ timeout=self._get_default_timeout(),
)
def test_create_bucket_w_predefined_acl_invalid(self):
@@ -570,7 +597,9 @@ def test_create_bucket_w_predefined_acl_valid(self):
client = self._make_one(project=project, credentials=credentials)
connection = _make_connection(data)
client._base_connection = connection
- bucket = client.create_bucket(bucket_name, predefined_acl="publicRead")
+ bucket = client.create_bucket(
+ bucket_name, predefined_acl="publicRead", timeout=42
+ )
connection.api_request.assert_called_once_with(
method="POST",
@@ -578,6 +607,7 @@ def test_create_bucket_w_predefined_acl_valid(self):
query_params={"project": project, "predefinedAcl": "publicRead"},
data=data,
_target_object=bucket,
+ timeout=42,
)
def test_create_bucket_w_predefined_default_object_acl_invalid(self):
@@ -612,6 +642,7 @@ def test_create_bucket_w_predefined_default_object_acl_valid(self):
},
data=data,
_target_object=bucket,
+ timeout=self._get_default_timeout(),
)
def test_create_bucket_w_explicit_location(self):
@@ -636,6 +667,7 @@ def test_create_bucket_w_explicit_location(self):
data=data,
_target_object=bucket,
query_params={"project": project},
+ timeout=self._get_default_timeout(),
)
self.assertEqual(bucket.location, location)
@@ -772,6 +804,7 @@ def test_list_blobs(self):
method="GET",
path="/b/%s/o" % BUCKET_NAME,
query_params={"projection": "noAcl"},
+ timeout=self._get_default_timeout(),
)
def test_list_blobs_w_all_arguments_and_user_project(self):
@@ -817,12 +850,16 @@ def test_list_blobs_w_all_arguments_and_user_project(self):
versions=VERSIONS,
projection=PROJECTION,
fields=FIELDS,
+ timeout=42,
)
blobs = list(iterator)
self.assertEqual(blobs, [])
connection.api_request.assert_called_once_with(
- method="GET", path="/b/%s/o" % BUCKET_NAME, query_params=EXPECTED
+ method="GET",
+ path="/b/%s/o" % BUCKET_NAME,
+ query_params=EXPECTED,
+ timeout=42,
)
def test_list_buckets_wo_project(self):
@@ -930,7 +967,7 @@ def test_list_buckets_non_empty(self):
url=mock.ANY,
data=mock.ANY,
headers=mock.ANY,
- timeout=mock.ANY,
+ timeout=self._get_default_timeout(),
)
def test_list_buckets_all_arguments(self):
@@ -956,15 +993,12 @@ def test_list_buckets_all_arguments(self):
prefix=PREFIX,
projection=PROJECTION,
fields=FIELDS,
+ timeout=42,
)
buckets = list(iterator)
self.assertEqual(buckets, [])
http.request.assert_called_once_with(
- method="GET",
- url=mock.ANY,
- data=mock.ANY,
- headers=mock.ANY,
- timeout=mock.ANY,
+ method="GET", url=mock.ANY, data=mock.ANY, headers=mock.ANY, timeout=42
)
requested_url = http.request.mock_calls[0][2]["url"]
@@ -1024,7 +1058,9 @@ def dummy_response():
self.assertIsInstance(bucket, Bucket)
self.assertEqual(bucket.name, blob_name)
- def _create_hmac_key_helper(self, explicit_project=None, user_project=None):
+ def _create_hmac_key_helper(
+ self, explicit_project=None, user_project=None, timeout=None
+ ):
import datetime
from pytz import UTC
from six.moves.urllib.parse import urlencode
@@ -1069,6 +1105,10 @@ def _create_hmac_key_helper(self, explicit_project=None, user_project=None):
if user_project is not None:
kwargs["user_project"] = user_project
+ if timeout is None:
+ timeout = self._get_default_timeout()
+ kwargs["timeout"] = timeout
+
metadata, secret = client.create_hmac_key(service_account_email=EMAIL, **kwargs)
self.assertIsInstance(metadata, HMACKeyMetadata)
@@ -1093,7 +1133,7 @@ def _create_hmac_key_helper(self, explicit_project=None, user_project=None):
FULL_URI = "{}?{}".format(URI, urlencode(qs_params))
http.request.assert_called_once_with(
- method="POST", url=FULL_URI, data=None, headers=mock.ANY, timeout=mock.ANY
+ method="POST", url=FULL_URI, data=None, headers=mock.ANY, timeout=timeout
)
def test_create_hmac_key_defaults(self):
@@ -1103,7 +1143,7 @@ def test_create_hmac_key_explicit_project(self):
self._create_hmac_key_helper(explicit_project="other-project-456")
def test_create_hmac_key_user_project(self):
- self._create_hmac_key_helper(user_project="billed-project")
+ self._create_hmac_key_helper(user_project="billed-project", timeout=42)
def test_list_hmac_keys_defaults_empty(self):
PROJECT = "PROJECT"
@@ -1128,7 +1168,11 @@ def test_list_hmac_keys_defaults_empty(self):
]
)
http.request.assert_called_once_with(
- method="GET", url=URI, data=None, headers=mock.ANY, timeout=mock.ANY
+ method="GET",
+ url=URI,
+ data=None,
+ headers=mock.ANY,
+ timeout=self._get_default_timeout(),
)
def test_list_hmac_keys_explicit_non_empty(self):
@@ -1165,6 +1209,7 @@ def test_list_hmac_keys_explicit_non_empty(self):
show_deleted_keys=True,
project_id=OTHER_PROJECT,
user_project=USER_PROJECT,
+ timeout=42,
)
)
@@ -1192,7 +1237,7 @@ def test_list_hmac_keys_explicit_non_empty(self):
"userProject": USER_PROJECT,
}
http.request.assert_called_once_with(
- method="GET", url=mock.ANY, data=None, headers=mock.ANY, timeout=mock.ANY
+ method="GET", url=mock.ANY, data=None, headers=mock.ANY, timeout=42
)
kwargs = http.request.mock_calls[0].kwargs
uri = kwargs["url"]
@@ -1220,7 +1265,7 @@ def test_get_hmac_key_metadata_wo_project(self):
http = _make_requests_session([_make_json_response(resource)])
client._http_internal = http
- metadata = client.get_hmac_key_metadata(ACCESS_ID)
+ metadata = client.get_hmac_key_metadata(ACCESS_ID, timeout=42)
self.assertIsInstance(metadata, HMACKeyMetadata)
self.assertIs(metadata._client, client)
@@ -1239,7 +1284,7 @@ def test_get_hmac_key_metadata_wo_project(self):
]
)
http.request.assert_called_once_with(
- method="GET", url=URI, data=None, headers=mock.ANY, timeout=mock.ANY
+ method="GET", url=URI, data=None, headers=mock.ANY, timeout=42
)
def test_get_hmac_key_metadata_w_project(self):
@@ -1289,5 +1334,9 @@ def test_get_hmac_key_metadata_w_project(self):
FULL_URI = "{}?{}".format(URI, urlencode(qs_params))
http.request.assert_called_once_with(
- method="GET", url=FULL_URI, data=None, headers=mock.ANY, timeout=mock.ANY
+ method="GET",
+ url=FULL_URI,
+ data=None,
+ headers=mock.ANY,
+ timeout=self._get_default_timeout(),
)
diff --git a/tests/unit/test_hmac_key.py b/tests/unit/test_hmac_key.py
index 138742d5b..a142939d5 100644
--- a/tests/unit/test_hmac_key.py
+++ b/tests/unit/test_hmac_key.py
@@ -18,6 +18,12 @@
class TestHMACKeyMetadata(unittest.TestCase):
+ @staticmethod
+ def _get_default_timeout():
+ from google.cloud.storage.constants import _DEFAULT_TIMEOUT
+
+ return _DEFAULT_TIMEOUT
+
@staticmethod
def _get_target_class():
from google.cloud.storage.hmac_key import HMACKeyMetadata
@@ -219,12 +225,17 @@ def test_exists_miss_no_project_set(self):
metadata = self._make_one(client)
metadata._properties["accessId"] = access_id
- self.assertFalse(metadata.exists())
+ self.assertFalse(metadata.exists(timeout=42))
expected_path = "/projects/{}/hmacKeys/{}".format(
client.DEFAULT_PROJECT, access_id
)
- expected_kwargs = {"method": "GET", "path": expected_path, "query_params": {}}
+ expected_kwargs = {
+ "method": "GET",
+ "path": expected_path,
+ "query_params": {},
+ "timeout": 42,
+ }
connection.api_request.assert_called_once_with(**expected_kwargs)
def test_exists_hit_w_project_set(self):
@@ -251,6 +262,7 @@ def test_exists_hit_w_project_set(self):
"method": "GET",
"path": expected_path,
"query_params": {"userProject": user_project},
+ "timeout": self._get_default_timeout(),
}
connection.api_request.assert_called_once_with(**expected_kwargs)
@@ -265,12 +277,17 @@ def test_reload_miss_no_project_set(self):
metadata._properties["accessId"] = access_id
with self.assertRaises(NotFound):
- metadata.reload()
+ metadata.reload(timeout=42)
expected_path = "/projects/{}/hmacKeys/{}".format(
client.DEFAULT_PROJECT, access_id
)
- expected_kwargs = {"method": "GET", "path": expected_path, "query_params": {}}
+ expected_kwargs = {
+ "method": "GET",
+ "path": expected_path,
+ "query_params": {},
+ "timeout": 42,
+ }
connection.api_request.assert_called_once_with(**expected_kwargs)
def test_reload_hit_w_project_set(self):
@@ -299,6 +316,7 @@ def test_reload_hit_w_project_set(self):
"method": "GET",
"path": expected_path,
"query_params": {"userProject": user_project},
+ "timeout": self._get_default_timeout(),
}
connection.api_request.assert_called_once_with(**expected_kwargs)
@@ -314,7 +332,7 @@ def test_update_miss_no_project_set(self):
metadata.state = "INACTIVE"
with self.assertRaises(NotFound):
- metadata.update()
+ metadata.update(timeout=42)
expected_path = "/projects/{}/hmacKeys/{}".format(
client.DEFAULT_PROJECT, access_id
@@ -324,6 +342,7 @@ def test_update_miss_no_project_set(self):
"path": expected_path,
"data": {"state": "INACTIVE"},
"query_params": {},
+ "timeout": 42,
}
connection.api_request.assert_called_once_with(**expected_kwargs)
@@ -356,6 +375,7 @@ def test_update_hit_w_project_set(self):
"path": expected_path,
"data": {"state": "ACTIVE"},
"query_params": {"userProject": user_project},
+ "timeout": self._get_default_timeout(),
}
connection.api_request.assert_called_once_with(**expected_kwargs)
@@ -379,7 +399,7 @@ def test_delete_miss_no_project_set(self):
metadata.state = "INACTIVE"
with self.assertRaises(NotFound):
- metadata.delete()
+ metadata.delete(timeout=42)
expected_path = "/projects/{}/hmacKeys/{}".format(
client.DEFAULT_PROJECT, access_id
@@ -388,6 +408,7 @@ def test_delete_miss_no_project_set(self):
"method": "DELETE",
"path": expected_path,
"query_params": {},
+ "timeout": 42,
}
connection.api_request.assert_called_once_with(**expected_kwargs)
@@ -410,6 +431,7 @@ def test_delete_hit_w_project_set(self):
"method": "DELETE",
"path": expected_path,
"query_params": {"userProject": user_project},
+ "timeout": self._get_default_timeout(),
}
connection.api_request.assert_called_once_with(**expected_kwargs)
diff --git a/tests/unit/test_notification.py b/tests/unit/test_notification.py
index 29b376b57..f056701e3 100644
--- a/tests/unit/test_notification.py
+++ b/tests/unit/test_notification.py
@@ -51,6 +51,12 @@ def payload_format():
return JSON_API_V1_PAYLOAD_FORMAT
+ @staticmethod
+ def _get_default_timeout():
+ from google.cloud.storage.constants import _DEFAULT_TIMEOUT
+
+ return _DEFAULT_TIMEOUT
+
@staticmethod
def _get_target_class():
from google.cloud.storage.notification import BucketNotification
@@ -258,7 +264,11 @@ def test_create_w_defaults(self):
data = {"topic": self.TOPIC_REF, "payload_format": NONE_PAYLOAD_FORMAT}
api_request.assert_called_once_with(
- method="POST", path=self.CREATE_PATH, query_params={}, data=data
+ method="POST",
+ path=self.CREATE_PATH,
+ query_params={},
+ data=data,
+ timeout=self._get_default_timeout(),
)
def test_create_w_explicit_client(self):
@@ -287,7 +297,7 @@ def test_create_w_explicit_client(self):
"selfLink": self.SELF_LINK,
}
- notification.create(client=alt_client)
+ notification.create(client=alt_client, timeout=42)
self.assertEqual(notification.custom_attributes, self.CUSTOM_ATTRIBUTES)
self.assertEqual(notification.event_types, self.event_types())
@@ -309,6 +319,7 @@ def test_create_w_explicit_client(self):
path=self.CREATE_PATH,
query_params={"userProject": USER_PROJECT},
data=data,
+ timeout=42,
)
def test_exists_wo_notification_id(self):
@@ -329,10 +340,10 @@ def test_exists_miss(self):
api_request = client._connection.api_request
api_request.side_effect = NotFound("testing")
- self.assertFalse(notification.exists())
+ self.assertFalse(notification.exists(timeout=42))
api_request.assert_called_once_with(
- method="GET", path=self.NOTIFICATION_PATH, query_params={}
+ method="GET", path=self.NOTIFICATION_PATH, query_params={}, timeout=42
)
def test_exists_hit(self):
@@ -355,6 +366,7 @@ def test_exists_hit(self):
method="GET",
path=self.NOTIFICATION_PATH,
query_params={"userProject": USER_PROJECT},
+ timeout=self._get_default_timeout(),
)
def test_reload_wo_notification_id(self):
@@ -376,10 +388,10 @@ def test_reload_miss(self):
api_request.side_effect = NotFound("testing")
with self.assertRaises(NotFound):
- notification.reload()
+ notification.reload(timeout=42)
api_request.assert_called_once_with(
- method="GET", path=self.NOTIFICATION_PATH, query_params={}
+ method="GET", path=self.NOTIFICATION_PATH, query_params={}, timeout=42
)
def test_reload_hit(self):
@@ -412,6 +424,7 @@ def test_reload_hit(self):
method="GET",
path=self.NOTIFICATION_PATH,
query_params={"userProject": USER_PROJECT},
+ timeout=self._get_default_timeout(),
)
def test_delete_wo_notification_id(self):
@@ -433,10 +446,10 @@ def test_delete_miss(self):
api_request.side_effect = NotFound("testing")
with self.assertRaises(NotFound):
- notification.delete()
+ notification.delete(timeout=42)
api_request.assert_called_once_with(
- method="DELETE", path=self.NOTIFICATION_PATH, query_params={}
+ method="DELETE", path=self.NOTIFICATION_PATH, query_params={}, timeout=42
)
def test_delete_hit(self):
@@ -454,6 +467,7 @@ def test_delete_hit(self):
method="DELETE",
path=self.NOTIFICATION_PATH,
query_params={"userProject": USER_PROJECT},
+ timeout=self._get_default_timeout(),
)