From 24c000fb7b9f576e6d6c6ec5733f3971fe133655 Mon Sep 17 00:00:00 2001
From: Andrew Gorcester
Date: Wed, 12 Feb 2025 15:02:24 -0800
Subject: [PATCH 1/7] feat: add Bucket.move_blob() for HNS-enabled buckets
(#1431)
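Bucket.move_blob() calls the objects "moveTo" API to rename an object in
place, instead of the copy-then-delete that Bucket.rename_blob() performs.
A minimal usage sketch (names are illustrative; assumes credentials and an
existing HNS-enabled bucket):

    from google.cloud import storage

    client = storage.Client()
    bucket = client.bucket("my-hns-bucket")
    blob = bucket.get_blob("folder/source.txt")

    # Optionally guard against concurrent writes to the source object.
    new_blob = bucket.move_blob(
        blob,
        "folder/dest.txt",
        if_source_generation_match=blob.generation,
    )
    print(new_blob.name)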
---
google/cloud/storage/bucket.py | 124 +++++++++++++++++++++++++++++++++
tests/system/test_bucket.py | 34 +++++++++
tests/unit/test_bucket.py | 63 +++++++++++++++++
3 files changed, 221 insertions(+)
diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py
index 10156c795..88356d316 100644
--- a/google/cloud/storage/bucket.py
+++ b/google/cloud/storage/bucket.py
@@ -2236,6 +2236,130 @@ def rename_blob(
)
return new_blob
+ @create_trace_span(name="Storage.Bucket.moveBlob")
+ def move_blob(
+ self,
+ blob,
+ new_name,
+ client=None,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ if_source_generation_match=None,
+ if_source_generation_not_match=None,
+ if_source_metageneration_match=None,
+ if_source_metageneration_not_match=None,
+ timeout=_DEFAULT_TIMEOUT,
+ retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+ ):
+ """Move a blob to a new name within a single HNS bucket.
+
+ *This feature is currently only supported for HNS (Hierarchical
+ Namespace) buckets.*
+
+ If :attr:`user_project` is set on the bucket, bills the API request to that project.
+
+ :type blob: :class:`google.cloud.storage.blob.Blob`
+ :param blob: The blob to be moved.
+
+ :type new_name: str
+ :param new_name: The new name for this blob.
+
+ :type client: :class:`~google.cloud.storage.client.Client` or
+ ``NoneType``
+ :param client: (Optional) The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+ :type if_generation_match: int
+ :param if_generation_match:
+ (Optional) See :ref:`using-if-generation-match`
+ Note that the generation to be matched is that of the
+ ``destination`` blob.
+
+ :type if_generation_not_match: int
+ :param if_generation_not_match:
+ (Optional) See :ref:`using-if-generation-not-match`
+ Note that the generation to be matched is that of the
+ ``destination`` blob.
+
+ :type if_metageneration_match: int
+ :param if_metageneration_match:
+ (Optional) See :ref:`using-if-metageneration-match`
+ Note that the metageneration to be matched is that of the
+ ``destination`` blob.
+
+ :type if_metageneration_not_match: int
+ :param if_metageneration_not_match:
+ (Optional) See :ref:`using-if-metageneration-not-match`
+ Note that the metageneration to be matched is that of the
+ ``destination`` blob.
+
+ :type if_source_generation_match: int
+ :param if_source_generation_match:
+ (Optional) Makes the operation conditional on whether the source
+ object's generation matches the given value.
+
+ :type if_source_generation_not_match: int
+ :param if_source_generation_not_match:
+ (Optional) Makes the operation conditional on whether the source
+ object's generation does not match the given value.
+
+ :type if_source_metageneration_match: int
+ :param if_source_metageneration_match:
+ (Optional) Makes the operation conditional on whether the source
+ object's current metageneration matches the given value.
+
+ :type if_source_metageneration_not_match: int
+ :param if_source_metageneration_not_match:
+ (Optional) Makes the operation conditional on whether the source
+ object's current metageneration does not match the given value.
+
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The amount of time, in seconds, to wait
+ for the server response. See: :ref:`configuring_timeouts`
+
+ :type retry: google.api_core.retry.Retry
+ :param retry:
+ (Optional) How to retry the RPC.
+ See [Configuring Retries](https://cloud.google.com/python/docs/reference/storage/latest/retry_timeout).
+
+ :rtype: :class:`Blob`
+ :returns: The newly-moved blob.
+ """
+ client = self._require_client(client)
+ query_params = {}
+
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
+
+ _add_generation_match_parameters(
+ query_params,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ if_source_generation_match=if_source_generation_match,
+ if_source_generation_not_match=if_source_generation_not_match,
+ if_source_metageneration_match=if_source_metageneration_match,
+ if_source_metageneration_not_match=if_source_metageneration_not_match,
+ )
+
+ new_blob = Blob(bucket=self, name=new_name)
+ api_path = blob.path + "/moveTo/o/" + new_blob.name
+ move_result = client._post_resource(
+ api_path,
+ None,
+ query_params=query_params,
+ timeout=timeout,
+ retry=retry,
+ _target_object=new_blob,
+ )
+
+ new_blob._set_properties(move_result)
+ return new_blob
+
@create_trace_span(name="Storage.Bucket.restore_blob")
def restore_blob(
self,
diff --git a/tests/system/test_bucket.py b/tests/system/test_bucket.py
index 270a77ad1..f06de8e8c 100644
--- a/tests/system/test_bucket.py
+++ b/tests/system/test_bucket.py
@@ -433,6 +433,40 @@ def test_bucket_copy_blob_w_metageneration_match(
assert new_blob.download_as_bytes() == payload
+def test_bucket_move_blob_hns(
+ storage_client,
+ buckets_to_delete,
+ blobs_to_delete,
+):
+ payload = b"move_blob_test"
+
+ # Feature currently only works on HNS buckets, so create one here
+ bucket_name = _helpers.unique_name("move-blob-hns-enabled")
+ bucket_obj = storage_client.bucket(bucket_name)
+ bucket_obj.hierarchical_namespace_enabled = True
+ bucket_obj.iam_configuration.uniform_bucket_level_access_enabled = True
+ created = _helpers.retry_429_503(storage_client.create_bucket)(bucket_obj)
+ buckets_to_delete.append(created)
+ assert created.hierarchical_namespace_enabled is True
+
+ source = created.blob("source")
+ source.upload_from_string(payload)
+ blobs_to_delete.append(source)
+ source_gen = source.generation
+
+ dest = created.move_blob(
+ source,
+ "dest",
+ if_source_generation_match=source.generation,
+ if_source_metageneration_match=source.metageneration,
+ )
+ blobs_to_delete.append(dest)
+
+ assert dest.download_as_bytes() == payload
+ assert dest.generation is not None
+ assert source_gen != dest.generation
+
+
def test_bucket_get_blob_with_user_project(
storage_client,
buckets_to_delete,
diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py
index 7129232a0..ac9a5ede6 100644
--- a/tests/unit/test_bucket.py
+++ b/tests/unit/test_bucket.py
@@ -2289,6 +2289,69 @@ def test_copy_blob_w_name_and_user_project(self):
_target_object=new_blob,
)
+ def test_move_blob_w_no_retry_timeout_and_generation_match(self):
+ source_name = "source"
+ blob_name = "blob-name"
+ new_name = "new_name"
+ api_response = {}
+ client = mock.Mock(spec=["_post_resource"])
+ client._post_resource.return_value = api_response
+ source = self._make_one(client=client, name=source_name)
+ blob = self._make_blob(source_name, blob_name)
+
+ new_blob = source.move_blob(
+ blob, new_name, if_generation_match=0, retry=None, timeout=30
+ )
+
+ self.assertIs(new_blob.bucket, source)
+ self.assertEqual(new_blob.name, new_name)
+
+ expected_path = "/b/{}/o/{}/moveTo/o/{}".format(
+ source_name, blob_name, new_name
+ )
+ expected_data = None
+ expected_query_params = {"ifGenerationMatch": 0}
+ client._post_resource.assert_called_once_with(
+ expected_path,
+ expected_data,
+ query_params=expected_query_params,
+ timeout=30,
+ retry=None,
+ _target_object=new_blob,
+ )
+
+ def test_move_blob_w_user_project(self):
+ source_name = "source"
+ blob_name = "blob-name"
+ new_name = "new_name"
+ user_project = "user-project-123"
+ api_response = {}
+ client = mock.Mock(spec=["_post_resource"])
+ client._post_resource.return_value = api_response
+ source = self._make_one(
+ client=client, name=source_name, user_project=user_project
+ )
+ blob = self._make_blob(source_name, blob_name)
+
+ new_blob = source.move_blob(blob, new_name)
+
+ self.assertIs(new_blob.bucket, source)
+ self.assertEqual(new_blob.name, new_name)
+
+ expected_path = "/b/{}/o/{}/moveTo/o/{}".format(
+ source_name, blob_name, new_name
+ )
+ expected_data = None
+ expected_query_params = {"userProject": user_project}
+ client._post_resource.assert_called_once_with(
+ expected_path,
+ expected_data,
+ query_params=expected_query_params,
+ timeout=self._get_default_timeout(),
+ retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+ _target_object=new_blob,
+ )
+
def _rename_blob_helper(self, explicit_client=False, same_name=False, **kw):
bucket_name = "BUCKET_NAME"
blob_name = "blob-name"
From b08aa0b1319d0762bcecfdcafb97089b5d29f1db Mon Sep 17 00:00:00 2001
From: Daniel B
Date: Tue, 25 Feb 2025 11:52:51 -0800
Subject: [PATCH 2/7] chore: set gcs-sdk-team as CODEOWNER (#1442)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
* chore: set gcs-sdk-team as CODEOWNER
Replace outdated cloud-storage-dpes group name with gcs-sdk-team
* Update .repo-metadata.json
* 🦉 Updates from OwlBot post-processor
See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md
* 🦉 Updates from OwlBot post-processor
See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md
---------
Co-authored-by: Owl Bot
---
.github/CODEOWNERS | 8 ++++----
.repo-metadata.json | 2 +-
2 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index b37686f76..6df17303f 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -5,8 +5,8 @@
# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json.
-# @googleapis/yoshi-python @googleapis/cloud-storage-dpe are the default owners for changes in this repo
-* @googleapis/yoshi-python @googleapis/cloud-storage-dpe
+# @googleapis/yoshi-python @googleapis/gcs-sdk-team are the default owners for changes in this repo
+* @googleapis/yoshi-python @googleapis/gcs-sdk-team
-# @googleapis/python-samples-reviewers @googleapis/cloud-storage-dpe are the default owners for samples changes
-/samples/ @googleapis/python-samples-reviewers @googleapis/cloud-storage-dpe
+# @googleapis/python-samples-reviewers @googleapis/gcs-sdk-team are the default owners for samples changes
+/samples/ @googleapis/python-samples-reviewers @googleapis/gcs-sdk-team
diff --git a/.repo-metadata.json b/.repo-metadata.json
index 9e537d52f..5d5e49c84 100644
--- a/.repo-metadata.json
+++ b/.repo-metadata.json
@@ -12,7 +12,7 @@
"api_id": "storage.googleapis.com",
"requires_billing": true,
"default_version": "",
- "codeowner_team": "@googleapis/cloud-storage-dpe",
+ "codeowner_team": "@googleapis/gcs-sdk-team",
"api_shortname": "storage",
"api_description": "is a durable and highly available object storage service. Google Cloud Storage is almost infinitely scalable and guarantees consistency: when a write succeeds, the latest copy of the object will be returned to any GET, globally."
}
From 511b6f5c2bbc8f781b2ec1c6df5b598ac5088b95 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Tue, 25 Feb 2025 16:21:25 -0800
Subject: [PATCH 3/7] chore(python): conditionally load credentials in
.kokoro/build.sh (#1440)
Source-Link: https://github.com/googleapis/synthtool/commit/aa69fb74717c8f4c58c60f8cc101d3f4b2c07b09
Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf
Co-authored-by: Owl Bot
Co-authored-by: cojenco
---
.github/.OwlBot.lock.yaml | 4 +-
.kokoro/build.sh | 20 ++-
.kokoro/docker/docs/requirements.in | 1 +
.kokoro/docker/docs/requirements.txt | 243 ++++++++++++++++++++++++++-
.kokoro/publish-docs.sh | 4 -
5 files changed, 251 insertions(+), 21 deletions(-)
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 10cf433a8..3f7634f25 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -13,5 +13,5 @@
# limitations under the License.
docker:
image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
- digest: sha256:8ff1efe878e18bd82a0fb7b70bb86f77e7ab6901fed394440b6135db0ba8d84a
-# created: 2025-01-09T12:01:16.422459506Z
+ digest: sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf
+# created: 2025-02-21T19:32:52.01306189Z
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index fdc6d0271..2e7461228 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -15,11 +15,13 @@
set -eo pipefail
+CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}")
+
if [[ -z "${PROJECT_ROOT:-}" ]]; then
- PROJECT_ROOT="github/python-storage"
+ PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..")
fi
-cd "${PROJECT_ROOT}"
+pushd "${PROJECT_ROOT}"
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
@@ -46,10 +48,16 @@ export TEST_UNIVERSE_LOCATION=$(gcloud secrets versions access latest --project
env | grep KOKORO
# Setup service account credentials.
-export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
+if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]]
+then
+ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
+fi
# Setup project id.
-export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
+if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]]
+then
+ export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
+fi
# If this is a continuous build, send the test log to the FlakyBot.
# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
@@ -64,7 +72,7 @@ fi
# If NOX_SESSION is set, it only runs the specified session,
# otherwise run all the sessions.
if [[ -n "${NOX_SESSION:-}" ]]; then
- python3 -m nox -s ${NOX_SESSION:-}
+ python3 -m nox -s ${NOX_SESSION:-}
else
- python3 -m nox
+ python3 -m nox
fi
diff --git a/.kokoro/docker/docs/requirements.in b/.kokoro/docker/docs/requirements.in
index 816817c67..586bd0703 100644
--- a/.kokoro/docker/docs/requirements.in
+++ b/.kokoro/docker/docs/requirements.in
@@ -1 +1,2 @@
nox
+gcp-docuploader
diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt
index f99a5c4aa..a9360a25b 100644
--- a/.kokoro/docker/docs/requirements.txt
+++ b/.kokoro/docker/docs/requirements.txt
@@ -2,16 +2,124 @@
# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
-# pip-compile --allow-unsafe --generate-hashes synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in
+# pip-compile --allow-unsafe --generate-hashes requirements.in
#
-argcomplete==3.5.2 \
- --hash=sha256:036d020d79048a5d525bc63880d7a4b8d1668566b8a76daf1144c0bbe0f63472 \
- --hash=sha256:23146ed7ac4403b70bd6026402468942ceba34a6732255b9edf5b7354f68a6bb
+argcomplete==3.5.3 \
+ --hash=sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61 \
+ --hash=sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392
# via nox
+cachetools==5.5.0 \
+ --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \
+ --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a
+ # via google-auth
+certifi==2024.12.14 \
+ --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \
+ --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db
+ # via requests
+charset-normalizer==3.4.1 \
+ --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \
+ --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \
+ --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \
+ --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \
+ --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \
+ --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \
+ --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \
+ --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \
+ --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \
+ --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \
+ --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \
+ --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \
+ --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \
+ --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \
+ --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \
+ --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \
+ --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \
+ --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \
+ --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \
+ --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \
+ --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \
+ --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \
+ --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \
+ --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \
+ --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \
+ --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \
+ --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \
+ --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \
+ --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \
+ --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \
+ --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \
+ --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \
+ --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \
+ --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \
+ --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \
+ --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \
+ --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \
+ --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \
+ --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \
+ --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \
+ --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \
+ --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \
+ --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \
+ --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \
+ --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \
+ --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \
+ --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \
+ --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \
+ --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \
+ --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \
+ --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \
+ --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \
+ --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \
+ --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \
+ --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \
+ --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \
+ --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \
+ --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \
+ --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \
+ --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \
+ --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \
+ --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \
+ --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \
+ --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \
+ --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \
+ --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \
+ --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \
+ --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \
+ --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \
+ --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \
+ --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \
+ --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \
+ --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \
+ --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \
+ --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \
+ --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \
+ --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \
+ --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \
+ --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \
+ --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \
+ --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \
+ --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \
+ --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \
+ --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \
+ --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \
+ --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \
+ --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \
+ --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \
+ --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \
+ --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \
+ --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \
+ --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616
+ # via requests
+click==8.1.8 \
+ --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \
+ --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a
+ # via gcp-docuploader
colorlog==6.9.0 \
--hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \
--hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2
- # via nox
+ # via
+ # gcp-docuploader
+ # nox
distlib==0.3.9 \
--hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \
--hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403
@@ -20,10 +128,78 @@ filelock==3.16.1 \
--hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \
--hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435
# via virtualenv
+gcp-docuploader==0.6.5 \
+ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \
+ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea
+ # via -r requirements.in
+google-api-core==2.24.0 \
+ --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \
+ --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf
+ # via
+ # google-cloud-core
+ # google-cloud-storage
+google-auth==2.37.0 \
+ --hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \
+ --hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0
+ # via
+ # google-api-core
+ # google-cloud-core
+ # google-cloud-storage
+google-cloud-core==2.4.1 \
+ --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \
+ --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61
+ # via google-cloud-storage
+google-cloud-storage==2.19.0 \
+ --hash=sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba \
+ --hash=sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2
+ # via gcp-docuploader
+google-crc32c==1.6.0 \
+ --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \
+ --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \
+ --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \
+ --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \
+ --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \
+ --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \
+ --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \
+ --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \
+ --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \
+ --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \
+ --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \
+ --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \
+ --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \
+ --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \
+ --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \
+ --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \
+ --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \
+ --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \
+ --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \
+ --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \
+ --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \
+ --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \
+ --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \
+ --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \
+ --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \
+ --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \
+ --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4
+ # via
+ # google-cloud-storage
+ # google-resumable-media
+google-resumable-media==2.7.2 \
+ --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \
+ --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0
+ # via google-cloud-storage
+googleapis-common-protos==1.66.0 \
+ --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \
+ --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed
+ # via google-api-core
+idna==3.10 \
+ --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \
+ --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3
+ # via requests
nox==2024.10.9 \
--hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \
--hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95
- # via -r synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in
+ # via -r requirements.in
packaging==24.2 \
--hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \
--hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f
@@ -32,6 +208,51 @@ platformdirs==4.3.6 \
--hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \
--hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb
# via virtualenv
+proto-plus==1.25.0 \
+ --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \
+ --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91
+ # via google-api-core
+protobuf==5.29.3 \
+ --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \
+ --hash=sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7 \
+ --hash=sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888 \
+ --hash=sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620 \
+ --hash=sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da \
+ --hash=sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252 \
+ --hash=sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a \
+ --hash=sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e \
+ --hash=sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107 \
+ --hash=sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f \
+ --hash=sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84
+ # via
+ # gcp-docuploader
+ # google-api-core
+ # googleapis-common-protos
+ # proto-plus
+pyasn1==0.6.1 \
+ --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \
+ --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034
+ # via
+ # pyasn1-modules
+ # rsa
+pyasn1-modules==0.4.1 \
+ --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \
+ --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c
+ # via google-auth
+requests==2.32.3 \
+ --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \
+ --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6
+ # via
+ # google-api-core
+ # google-cloud-storage
+rsa==4.9 \
+ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \
+ --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21
+ # via google-auth
+six==1.17.0 \
+ --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \
+ --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81
+ # via gcp-docuploader
tomli==2.2.1 \
--hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \
--hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \
@@ -66,7 +287,11 @@ tomli==2.2.1 \
--hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \
--hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7
# via nox
-virtualenv==20.28.0 \
- --hash=sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0 \
- --hash=sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa
+urllib3==2.3.0 \
+ --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \
+ --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d
+ # via requests
+virtualenv==20.28.1 \
+ --hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \
+ --hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329
# via nox
diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh
index 233205d58..4ed4aaf13 100755
--- a/.kokoro/publish-docs.sh
+++ b/.kokoro/publish-docs.sh
@@ -20,10 +20,6 @@ export PYTHONUNBUFFERED=1
export PATH="${HOME}/.local/bin:${PATH}"
-# Install nox
-python3.10 -m pip install --require-hashes -r .kokoro/requirements.txt
-python3.10 -m nox --version
-
# build docs
nox -s docs
From 0378b444002fcaa3832dd9117d56e3612bb72d2c Mon Sep 17 00:00:00 2001
From: cojenco
Date: Tue, 25 Feb 2025 16:58:53 -0800
Subject: [PATCH 4/7] chore: move create_trace_span context manager within
(#1443)
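create_trace_span is a @contextmanager, so it also worked as a decorator;
this change opens the span inside each method body instead. A toy sketch
of the before/after pattern (the stand-in below is simplified; the real
helper lives in google.cloud.storage._opentelemetry_tracing):

    from contextlib import contextmanager

    @contextmanager
    def create_trace_span(name):
        # Simplified stand-in: the real helper emits an OpenTelemetry span.
        print(f"span start: {name}")
        try:
            yield
        finally:
            print(f"span end: {name}")

    # Before: a @contextmanager object also works as a decorator.
    @create_trace_span(name="Storage.ACL.reload")
    def reload_old():
        print("reloading")

    # After: the span is opened explicitly within the method.
    def reload_new():
        with create_trace_span(name="Storage.ACL.reload"):
            print("reloading")

    reload_old()
    reload_new()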
---
google/cloud/storage/acl.py | 118 +--
google/cloud/storage/blob.py | 951 +++++++++++------------
google/cloud/storage/bucket.py | 1037 +++++++++++++-------------
google/cloud/storage/client.py | 427 +++++------
google/cloud/storage/hmac_key.py | 104 +--
google/cloud/storage/notification.py | 158 ++--
6 files changed, 1405 insertions(+), 1390 deletions(-)
diff --git a/google/cloud/storage/acl.py b/google/cloud/storage/acl.py
index 1384a5075..d70839e1b 100644
--- a/google/cloud/storage/acl.py
+++ b/google/cloud/storage/acl.py
@@ -360,7 +360,6 @@ def _require_client(self, client):
client = self.client
return client
- @create_trace_span(name="Storage.ACL.reload")
def reload(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY):
"""Reload the ACL data from Cloud Storage.
@@ -379,25 +378,26 @@ def reload(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY):
:param retry:
(Optional) How to retry the RPC. See: :ref:`configuring_retries`
"""
- path = self.reload_path
- client = self._require_client(client)
- query_params = {}
+ with create_trace_span(name="Storage.ACL.reload"):
+ path = self.reload_path
+ client = self._require_client(client)
+ query_params = {}
- if self.user_project is not None:
- query_params["userProject"] = self.user_project
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
- self.entities.clear()
+ self.entities.clear()
- found = client._get_resource(
- path,
- query_params=query_params,
- timeout=timeout,
- retry=retry,
- )
- self.loaded = True
+ found = client._get_resource(
+ path,
+ query_params=query_params,
+ timeout=timeout,
+ retry=retry,
+ )
+ self.loaded = True
- for entry in found.get("items", ()):
- self.add_entity(self.entity_from_dict(entry))
+ for entry in found.get("items", ()):
+ self.add_entity(self.entity_from_dict(entry))
def _save(
self,
@@ -486,7 +486,6 @@ def _save(
self.loaded = True
- @create_trace_span(name="Storage.ACL.save")
def save(
self,
acl=None,
@@ -536,26 +535,26 @@ def save(
:param retry:
(Optional) How to retry the RPC. See: :ref:`configuring_retries`
"""
- if acl is None:
- acl = self
- save_to_backend = acl.loaded
- else:
- save_to_backend = True
+ with create_trace_span(name="Storage.ACL.save"):
+ if acl is None:
+ acl = self
+ save_to_backend = acl.loaded
+ else:
+ save_to_backend = True
+
+ if save_to_backend:
+ self._save(
+ acl,
+ None,
+ client,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
+ retry=retry,
+ )
- if save_to_backend:
- self._save(
- acl,
- None,
- client,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- timeout=timeout,
- retry=retry,
- )
-
- @create_trace_span(name="Storage.ACL.savePredefined")
def save_predefined(
self,
predefined,
@@ -608,20 +607,20 @@ def save_predefined(
:param retry:
(Optional) How to retry the RPC. See: :ref:`configuring_retries`
"""
- predefined = self.validate_predefined(predefined)
- self._save(
- None,
- predefined,
- client,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- timeout=timeout,
- retry=retry,
- )
+ with create_trace_span(name="Storage.ACL.savePredefined"):
+ predefined = self.validate_predefined(predefined)
+ self._save(
+ None,
+ predefined,
+ client,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
+ retry=retry,
+ )
- @create_trace_span(name="Storage.ACL.clear")
def clear(
self,
client=None,
@@ -671,16 +670,17 @@ def clear(
:param retry:
(Optional) How to retry the RPC. See: :ref:`configuring_retries`
"""
- self.save(
- [],
- client=client,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- timeout=timeout,
- retry=retry,
- )
+ with create_trace_span(name="Storage.ACL.clear"):
+ self.save(
+ [],
+ client=client,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
+ retry=retry,
+ )
class BucketACL(ACL):
diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py
index 3cda582ca..0d0e8ee80 100644
--- a/google/cloud/storage/blob.py
+++ b/google/cloud/storage/blob.py
@@ -675,7 +675,6 @@ def generate_signed_url(
universe_domain=universe_domain,
)
- @create_trace_span(name="Storage.Blob.exists")
def exists(
self,
client=None,
@@ -742,46 +741,48 @@ def exists(
:rtype: bool
:returns: True if the blob exists in Cloud Storage.
"""
- client = self._require_client(client)
- # We only need the status code (200 or not) so we seek to
- # minimize the returned payload.
- query_params = self._query_params
- query_params["fields"] = "name"
- if soft_deleted is not None:
- query_params["softDeleted"] = soft_deleted
+ with create_trace_span(name="Storage.Blob.exists"):
+ client = self._require_client(client)
+ # We only need the status code (200 or not) so we seek to
+ # minimize the returned payload.
+ query_params = self._query_params
+ query_params["fields"] = "name"
+ if soft_deleted is not None:
+ query_params["softDeleted"] = soft_deleted
+
+ _add_generation_match_parameters(
+ query_params,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
- _add_generation_match_parameters(
- query_params,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- )
+ headers = {}
+ _add_etag_match_headers(
+ headers,
+ if_etag_match=if_etag_match,
+ if_etag_not_match=if_etag_not_match,
+ )
- headers = {}
- _add_etag_match_headers(
- headers, if_etag_match=if_etag_match, if_etag_not_match=if_etag_not_match
- )
+ try:
+ # We intentionally pass `_target_object=None` since fields=name
+ # would limit the local properties.
+ client._get_resource(
+ self.path,
+ query_params=query_params,
+ headers=headers,
+ timeout=timeout,
+ retry=retry,
+ _target_object=None,
+ )
+ except NotFound:
+ # NOTE: This will not fail immediately in a batch. However, when
+ # Batch.finish() is called, the resulting `NotFound` will be
+ # raised.
+ return False
+ return True
- try:
- # We intentionally pass `_target_object=None` since fields=name
- # would limit the local properties.
- client._get_resource(
- self.path,
- query_params=query_params,
- headers=headers,
- timeout=timeout,
- retry=retry,
- _target_object=None,
- )
- except NotFound:
- # NOTE: This will not fail immediately in a batch. However, when
- # Batch.finish() is called, the resulting `NotFound` will be
- # raised.
- return False
- return True
-
- @create_trace_span(name="Storage.Blob.delete")
def delete(
self,
client=None,
@@ -844,17 +845,18 @@ def delete(
(propagated from
:meth:`google.cloud.storage.bucket.Bucket.delete_blob`).
"""
- self.bucket.delete_blob(
- self.name,
- client=client,
- generation=self.generation,
- timeout=timeout,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- retry=retry,
- )
+ with create_trace_span(name="Storage.Blob.delete"):
+ self.bucket.delete_blob(
+ self.name,
+ client=client,
+ generation=self.generation,
+ timeout=timeout,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ retry=retry,
+ )
def _get_transport(self, client):
"""Return the client's transport.
@@ -1109,7 +1111,6 @@ def _do_download(
while not download.finished:
download.consume_next_chunk(transport, timeout=timeout)
- @create_trace_span(name="Storage.Blob.downloadToFile")
def download_to_file(
self,
file_obj,
@@ -1223,23 +1224,23 @@ def download_to_file(
:raises: :class:`google.cloud.exceptions.NotFound`
"""
-
- self._prep_and_do_download(
- file_obj,
- client=client,
- start=start,
- end=end,
- raw_download=raw_download,
- if_etag_match=if_etag_match,
- if_etag_not_match=if_etag_not_match,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- timeout=timeout,
- checksum=checksum,
- retry=retry,
- )
+ with create_trace_span(name="Storage.Blob.downloadToFile"):
+ self._prep_and_do_download(
+ file_obj,
+ client=client,
+ start=start,
+ end=end,
+ raw_download=raw_download,
+ if_etag_match=if_etag_match,
+ if_etag_not_match=if_etag_not_match,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
+ checksum=checksum,
+ retry=retry,
+ )
def _handle_filename_and_download(self, filename, *args, **kwargs):
"""Download the contents of this blob into a named file.
@@ -1268,7 +1269,6 @@ def _handle_filename_and_download(self, filename, *args, **kwargs):
mtime = updated.timestamp()
os.utime(file_obj.name, (mtime, mtime))
- @create_trace_span(name="Storage.Blob.downloadToFilename")
def download_to_filename(
self,
filename,
@@ -1372,25 +1372,24 @@ def download_to_filename(
:raises: :class:`google.cloud.exceptions.NotFound`
"""
+ with create_trace_span(name="Storage.Blob.downloadToFilename"):
+ self._handle_filename_and_download(
+ filename,
+ client=client,
+ start=start,
+ end=end,
+ raw_download=raw_download,
+ if_etag_match=if_etag_match,
+ if_etag_not_match=if_etag_not_match,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
+ checksum=checksum,
+ retry=retry,
+ )
- self._handle_filename_and_download(
- filename,
- client=client,
- start=start,
- end=end,
- raw_download=raw_download,
- if_etag_match=if_etag_match,
- if_etag_not_match=if_etag_not_match,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- timeout=timeout,
- checksum=checksum,
- retry=retry,
- )
-
- @create_trace_span(name="Storage.Blob.downloadAsBytes")
def download_as_bytes(
self,
client=None,
@@ -1490,28 +1489,27 @@ def download_as_bytes(
:raises: :class:`google.cloud.exceptions.NotFound`
"""
+ with create_trace_span(name="Storage.Blob.downloadAsBytes"):
+ string_buffer = BytesIO()
- string_buffer = BytesIO()
-
- self._prep_and_do_download(
- string_buffer,
- client=client,
- start=start,
- end=end,
- raw_download=raw_download,
- if_etag_match=if_etag_match,
- if_etag_not_match=if_etag_not_match,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- timeout=timeout,
- checksum=checksum,
- retry=retry,
- )
- return string_buffer.getvalue()
+ self._prep_and_do_download(
+ string_buffer,
+ client=client,
+ start=start,
+ end=end,
+ raw_download=raw_download,
+ if_etag_match=if_etag_match,
+ if_etag_not_match=if_etag_not_match,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
+ checksum=checksum,
+ retry=retry,
+ )
+ return string_buffer.getvalue()
- @create_trace_span(name="Storage.Blob.downloadAsString")
def download_as_string(
self,
client=None,
@@ -1604,22 +1602,22 @@ def download_as_string(
warnings.warn(
_DOWNLOAD_AS_STRING_DEPRECATED, PendingDeprecationWarning, stacklevel=2
)
- return self.download_as_bytes(
- client=client,
- start=start,
- end=end,
- raw_download=raw_download,
- if_etag_match=if_etag_match,
- if_etag_not_match=if_etag_not_match,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- timeout=timeout,
- retry=retry,
- )
+ with create_trace_span(name="Storage.Blob.downloadAsString"):
+ return self.download_as_bytes(
+ client=client,
+ start=start,
+ end=end,
+ raw_download=raw_download,
+ if_etag_match=if_etag_match,
+ if_etag_not_match=if_etag_not_match,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
+ retry=retry,
+ )
- @create_trace_span(name="Storage.Blob.downloadAsText")
def download_as_text(
self,
client=None,
@@ -1710,31 +1708,32 @@ def download_as_text(
:rtype: text
:returns: The data stored in this blob, decoded to text.
"""
- data = self.download_as_bytes(
- client=client,
- start=start,
- end=end,
- raw_download=raw_download,
- if_etag_match=if_etag_match,
- if_etag_not_match=if_etag_not_match,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- timeout=timeout,
- retry=retry,
- )
+ with create_trace_span(name="Storage.Blob.downloadAsText"):
+ data = self.download_as_bytes(
+ client=client,
+ start=start,
+ end=end,
+ raw_download=raw_download,
+ if_etag_match=if_etag_match,
+ if_etag_not_match=if_etag_not_match,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
+ retry=retry,
+ )
- if encoding is not None:
- return data.decode(encoding)
+ if encoding is not None:
+ return data.decode(encoding)
- if self.content_type is not None:
- msg = HeaderParser().parsestr("Content-Type: " + self.content_type)
- params = dict(msg.get_params()[1:])
- if "charset" in params:
- return data.decode(params["charset"])
+ if self.content_type is not None:
+ msg = HeaderParser().parsestr("Content-Type: " + self.content_type)
+ params = dict(msg.get_params()[1:])
+ if "charset" in params:
+ return data.decode(params["charset"])
- return data.decode("utf-8")
+ return data.decode("utf-8")
def _get_content_type(self, content_type, filename=None):
"""Determine the content type from the current object.
@@ -2655,7 +2654,6 @@ def _prep_and_do_upload(
except InvalidResponse as exc:
_raise_from_invalid_response(exc)
- @create_trace_span(name="Storage.Blob.uploadFromFile")
def upload_from_file(
self,
file_obj,
@@ -2782,21 +2780,22 @@ def upload_from_file(
:raises: :class:`~google.cloud.exceptions.GoogleCloudError`
if the upload response returns an error status.
"""
- self._prep_and_do_upload(
- file_obj,
- rewind=rewind,
- size=size,
- content_type=content_type,
- client=client,
- predefined_acl=predefined_acl,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- timeout=timeout,
- checksum=checksum,
- retry=retry,
- )
+ with create_trace_span(name="Storage.Blob.uploadFromFile"):
+ self._prep_and_do_upload(
+ file_obj,
+ rewind=rewind,
+ size=size,
+ content_type=content_type,
+ client=client,
+ predefined_acl=predefined_acl,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
+ checksum=checksum,
+ retry=retry,
+ )
def _handle_filename_and_upload(self, filename, content_type=None, *args, **kwargs):
"""Upload this blob's contents from the content of a named file.
@@ -2822,7 +2821,6 @@ def _handle_filename_and_upload(self, filename, content_type=None, *args, **kwar
**kwargs,
)
- @create_trace_span(name="Storage.Blob.uploadFromFilename")
def upload_from_filename(
self,
filename,
@@ -2931,22 +2929,21 @@ def upload_from_filename(
(google.cloud.storage.retry) for information on retry types and how
to configure them.
"""
+ with create_trace_span(name="Storage.Blob.uploadFromFilename"):
+ self._handle_filename_and_upload(
+ filename,
+ content_type=content_type,
+ client=client,
+ predefined_acl=predefined_acl,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
+ checksum=checksum,
+ retry=retry,
+ )
- self._handle_filename_and_upload(
- filename,
- content_type=content_type,
- client=client,
- predefined_acl=predefined_acl,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- timeout=timeout,
- checksum=checksum,
- retry=retry,
- )
-
- @create_trace_span(name="Storage.Blob.uploadFromString")
def upload_from_string(
self,
data,
@@ -3047,24 +3044,24 @@ def upload_from_string(
(google.cloud.storage.retry) for information on retry types and how
to configure them.
"""
- data = _to_bytes(data, encoding="utf-8")
- string_buffer = BytesIO(data)
- self.upload_from_file(
- file_obj=string_buffer,
- size=len(data),
- content_type=content_type,
- client=client,
- predefined_acl=predefined_acl,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- timeout=timeout,
- checksum=checksum,
- retry=retry,
- )
+ with create_trace_span(name="Storage.Blob.uploadFromString"):
+ data = _to_bytes(data, encoding="utf-8")
+ string_buffer = BytesIO(data)
+ self.upload_from_file(
+ file_obj=string_buffer,
+ size=len(data),
+ content_type=content_type,
+ client=client,
+ predefined_acl=predefined_acl,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
+ checksum=checksum,
+ retry=retry,
+ )
- @create_trace_span(name="Storage.Blob.createResumableUploadSession")
def create_resumable_upload_session(
self,
content_type=None,
@@ -3198,52 +3195,53 @@ def create_resumable_upload_session(
:raises: :class:`google.cloud.exceptions.GoogleCloudError`
if the session creation response returns an error status.
"""
+ with create_trace_span(name="Storage.Blob.createResumableUploadSession"):
+ # Handle ConditionalRetryPolicy.
+ if isinstance(retry, ConditionalRetryPolicy):
+ # Conditional retries are designed for non-media calls, which change
+ # arguments into query_params dictionaries. Media operations work
+ # differently, so here we make a "fake" query_params to feed to the
+ # ConditionalRetryPolicy.
+ query_params = {
+ "ifGenerationMatch": if_generation_match,
+ "ifMetagenerationMatch": if_metageneration_match,
+ }
+ retry = retry.get_retry_policy_if_conditions_met(
+ query_params=query_params
+ )
- # Handle ConditionalRetryPolicy.
- if isinstance(retry, ConditionalRetryPolicy):
- # Conditional retries are designed for non-media calls, which change
- # arguments into query_params dictionaries. Media operations work
- # differently, so here we make a "fake" query_params to feed to the
- # ConditionalRetryPolicy.
- query_params = {
- "ifGenerationMatch": if_generation_match,
- "ifMetagenerationMatch": if_metageneration_match,
- }
- retry = retry.get_retry_policy_if_conditions_met(query_params=query_params)
-
- extra_headers = {}
- if origin is not None:
- # This header is specifically for client-side uploads, it
- # determines the origins allowed for CORS.
- extra_headers["Origin"] = origin
-
- try:
- fake_stream = BytesIO(b"")
- # Send a fake the chunk size which we **know** will be acceptable
- # to the `ResumableUpload` constructor. The chunk size only
- # matters when **sending** bytes to an upload.
- upload, _ = self._initiate_resumable_upload(
- client,
- fake_stream,
- content_type,
- size,
- predefined_acl=predefined_acl,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- extra_headers=extra_headers,
- chunk_size=self._CHUNK_SIZE_MULTIPLE,
- timeout=timeout,
- checksum=checksum,
- retry=retry,
- )
+ extra_headers = {}
+ if origin is not None:
+ # This header is specifically for client-side uploads, it
+ # determines the origins allowed for CORS.
+ extra_headers["Origin"] = origin
+
+ try:
+ fake_stream = BytesIO(b"")
+ # Send a fake chunk size which we **know** will be acceptable
+ # to the `ResumableUpload` constructor. The chunk size only
+ # matters when **sending** bytes to an upload.
+ upload, _ = self._initiate_resumable_upload(
+ client,
+ fake_stream,
+ content_type,
+ size,
+ predefined_acl=predefined_acl,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ extra_headers=extra_headers,
+ chunk_size=self._CHUNK_SIZE_MULTIPLE,
+ timeout=timeout,
+ checksum=checksum,
+ retry=retry,
+ )
- return upload.resumable_url
- except InvalidResponse as exc:
- _raise_from_invalid_response(exc)
+ return upload.resumable_url
+ except InvalidResponse as exc:
+ _raise_from_invalid_response(exc)
- @create_trace_span(name="Storage.Blob.getIamPolicy")
def get_iam_policy(
self,
client=None,
@@ -3293,26 +3291,26 @@ def get_iam_policy(
:returns: the policy instance, based on the resource returned from
the ``getIamPolicy`` API request.
"""
- client = self._require_client(client)
+ with create_trace_span(name="Storage.Blob.getIamPolicy"):
+ client = self._require_client(client)
- query_params = {}
+ query_params = {}
- if self.user_project is not None:
- query_params["userProject"] = self.user_project
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
- if requested_policy_version is not None:
- query_params["optionsRequestedPolicyVersion"] = requested_policy_version
+ if requested_policy_version is not None:
+ query_params["optionsRequestedPolicyVersion"] = requested_policy_version
- info = client._get_resource(
- f"{self.path}/iam",
- query_params=query_params,
- timeout=timeout,
- retry=retry,
- _target_object=None,
- )
- return Policy.from_api_repr(info)
+ info = client._get_resource(
+ f"{self.path}/iam",
+ query_params=query_params,
+ timeout=timeout,
+ retry=retry,
+ _target_object=None,
+ )
+ return Policy.from_api_repr(info)
- @create_trace_span(name="Storage.Blob.setIamPolicy")
def set_iam_policy(
self,
policy,
@@ -3354,27 +3352,27 @@ def set_iam_policy(
:returns: the policy instance, based on the resource returned from
the ``setIamPolicy`` API request.
"""
- client = self._require_client(client)
+ with create_trace_span(name="Storage.Blob.setIamPolicy"):
+ client = self._require_client(client)
- query_params = {}
+ query_params = {}
- if self.user_project is not None:
- query_params["userProject"] = self.user_project
-
- path = f"{self.path}/iam"
- resource = policy.to_api_repr()
- resource["resourceId"] = self.path
- info = client._put_resource(
- path,
- resource,
- query_params=query_params,
- timeout=timeout,
- retry=retry,
- _target_object=None,
- )
- return Policy.from_api_repr(info)
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
+
+ path = f"{self.path}/iam"
+ resource = policy.to_api_repr()
+ resource["resourceId"] = self.path
+ info = client._put_resource(
+ path,
+ resource,
+ query_params=query_params,
+ timeout=timeout,
+ retry=retry,
+ _target_object=None,
+ )
+ return Policy.from_api_repr(info)
- @create_trace_span(name="Storage.Blob.testIamPermissions")
def test_iam_permissions(
self, permissions, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY
):
@@ -3412,24 +3410,24 @@ def test_iam_permissions(
:returns: the permissions returned by the ``testIamPermissions`` API
request.
"""
- client = self._require_client(client)
- query_params = {"permissions": permissions}
+ with create_trace_span(name="Storage.Blob.testIamPermissions"):
+ client = self._require_client(client)
+ query_params = {"permissions": permissions}
- if self.user_project is not None:
- query_params["userProject"] = self.user_project
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
- path = f"{self.path}/iam/testPermissions"
- resp = client._get_resource(
- path,
- query_params=query_params,
- timeout=timeout,
- retry=retry,
- _target_object=None,
- )
+ path = f"{self.path}/iam/testPermissions"
+ resp = client._get_resource(
+ path,
+ query_params=query_params,
+ timeout=timeout,
+ retry=retry,
+ _target_object=None,
+ )
- return resp.get("permissions", [])
+ return resp.get("permissions", [])
- @create_trace_span(name="Storage.Blob.makePublic")
def make_public(
self,
client=None,
@@ -3472,18 +3470,18 @@ def make_public(
:param retry:
(Optional) How to retry the RPC. See: :ref:`configuring_retries`
"""
- self.acl.all().grant_read()
- self.acl.save(
- client=client,
- timeout=timeout,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- retry=retry,
- )
+ with create_trace_span(name="Storage.Blob.makePublic"):
+ self.acl.all().grant_read()
+ self.acl.save(
+ client=client,
+ timeout=timeout,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ retry=retry,
+ )
- @create_trace_span(name="Storage.Blob.makePrivate")
def make_private(
self,
client=None,
@@ -3526,18 +3524,18 @@ def make_private(
:param retry:
(Optional) How to retry the RPC. See: :ref:`configuring_retries`
"""
- self.acl.all().revoke_read()
- self.acl.save(
- client=client,
- timeout=timeout,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- retry=retry,
- )
+ with create_trace_span(name="Storage.Blob.makePrivate"):
+ self.acl.all().revoke_read()
+ self.acl.save(
+ client=client,
+ timeout=timeout,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ retry=retry,
+ )
- @create_trace_span(name="Storage.Blob.compose")
def compose(
self,
sources,
@@ -3607,77 +3605,77 @@ def compose(
to enable retries regardless of generation precondition setting.
See [Configuring Retries](https://cloud.google.com/python/docs/reference/storage/latest/retry_timeout).
"""
- sources_len = len(sources)
- client = self._require_client(client)
- query_params = {}
-
- if isinstance(if_generation_match, list):
- warnings.warn(
- _COMPOSE_IF_GENERATION_LIST_DEPRECATED,
- DeprecationWarning,
- stacklevel=2,
- )
+ with create_trace_span(name="Storage.Blob.compose"):
+ sources_len = len(sources)
+ client = self._require_client(client)
+ query_params = {}
- if if_source_generation_match is not None:
- raise ValueError(
- _COMPOSE_IF_GENERATION_LIST_AND_IF_SOURCE_GENERATION_ERROR
+ if isinstance(if_generation_match, list):
+ warnings.warn(
+ _COMPOSE_IF_GENERATION_LIST_DEPRECATED,
+ DeprecationWarning,
+ stacklevel=2,
)
- if_source_generation_match = if_generation_match
- if_generation_match = None
+ if if_source_generation_match is not None:
+ raise ValueError(
+ _COMPOSE_IF_GENERATION_LIST_AND_IF_SOURCE_GENERATION_ERROR
+ )
- if isinstance(if_metageneration_match, list):
- warnings.warn(
- _COMPOSE_IF_METAGENERATION_LIST_DEPRECATED,
- DeprecationWarning,
- stacklevel=2,
- )
+ if_source_generation_match = if_generation_match
+ if_generation_match = None
- if_metageneration_match = None
+ if isinstance(if_metageneration_match, list):
+ warnings.warn(
+ _COMPOSE_IF_METAGENERATION_LIST_DEPRECATED,
+ DeprecationWarning,
+ stacklevel=2,
+ )
- if if_source_generation_match is None:
- if_source_generation_match = [None] * sources_len
- if len(if_source_generation_match) != sources_len:
- raise ValueError(_COMPOSE_IF_SOURCE_GENERATION_MISMATCH_ERROR)
+ if_metageneration_match = None
- source_objects = []
- for source, source_generation in zip(sources, if_source_generation_match):
- source_object = {"name": source.name, "generation": source.generation}
+ if if_source_generation_match is None:
+ if_source_generation_match = [None] * sources_len
+ if len(if_source_generation_match) != sources_len:
+ raise ValueError(_COMPOSE_IF_SOURCE_GENERATION_MISMATCH_ERROR)
- preconditions = {}
- if source_generation is not None:
- preconditions["ifGenerationMatch"] = source_generation
+ source_objects = []
+ for source, source_generation in zip(sources, if_source_generation_match):
+ source_object = {"name": source.name, "generation": source.generation}
- if preconditions:
- source_object["objectPreconditions"] = preconditions
+ preconditions = {}
+ if source_generation is not None:
+ preconditions["ifGenerationMatch"] = source_generation
- source_objects.append(source_object)
+ if preconditions:
+ source_object["objectPreconditions"] = preconditions
- request = {
- "sourceObjects": source_objects,
- "destination": self._properties.copy(),
- }
+ source_objects.append(source_object)
- if self.user_project is not None:
- query_params["userProject"] = self.user_project
+ request = {
+ "sourceObjects": source_objects,
+ "destination": self._properties.copy(),
+ }
- _add_generation_match_parameters(
- query_params,
- if_generation_match=if_generation_match,
- if_metageneration_match=if_metageneration_match,
- )
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
- api_response = client._post_resource(
- f"{self.path}/compose",
- request,
- query_params=query_params,
- timeout=timeout,
- retry=retry,
- _target_object=self,
- )
- self._set_properties(api_response)
+ _add_generation_match_parameters(
+ query_params,
+ if_generation_match=if_generation_match,
+ if_metageneration_match=if_metageneration_match,
+ )
+
+ api_response = client._post_resource(
+ f"{self.path}/compose",
+ request,
+ query_params=query_params,
+ timeout=timeout,
+ retry=retry,
+ _target_object=self,
+ )
+ self._set_properties(api_response)
- @create_trace_span(name="Storage.Blob.rewrite")
def rewrite(
self,
source,
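For the compose hunk above: a list passed as ``if_generation_match`` historically encoded per-source generation preconditions; the code now warns and reroutes such a list into ``if_source_generation_match``, which must contain one entry per source. A hedged sketch of the request shape that results (object names and generations are invented):

# Sketch: per-source generation preconditions in a compose request,
# assuming three source objects.
sources = ["part-1", "part-2", "part-3"]        # stand-ins for Blob.name
if_source_generation_match = [111, None, 333]   # one entry per source

assert len(if_source_generation_match) == len(sources)

source_objects = []
for name, gen_match in zip(sources, if_source_generation_match):
    entry = {"name": name, "generation": None}
    if gen_match is not None:
        entry["objectPreconditions"] = {"ifGenerationMatch": gen_match}
    source_objects.append(entry)

request = {"sourceObjects": source_objects, "destination": {}}
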
@@ -3783,66 +3781,66 @@ def rewrite(
and ``total_bytes`` is the total number of bytes to be
rewritten.
"""
- client = self._require_client(client)
- headers = _get_encryption_headers(self._encryption_key)
- headers.update(_get_encryption_headers(source._encryption_key, source=True))
-
- query_params = self._query_params
- if "generation" in query_params:
- del query_params["generation"]
-
- if token:
- query_params["rewriteToken"] = token
-
- if source.generation:
- query_params["sourceGeneration"] = source.generation
-
- # When a Customer Managed Encryption Key is used to encrypt Cloud Storage object
- # at rest, object resource metadata will store the version of the Key Management
- # Service cryptographic material. If a Blob instance with KMS Key metadata set is
- # used to rewrite the object, then the existing kmsKeyName version
- # value can't be used in the rewrite request and the client instead ignores it.
- if (
- self.kms_key_name is not None
- and "cryptoKeyVersions" not in self.kms_key_name
- ):
- query_params["destinationKmsKeyName"] = self.kms_key_name
+ with create_trace_span(name="Storage.Blob.rewrite"):
+ client = self._require_client(client)
+ headers = _get_encryption_headers(self._encryption_key)
+ headers.update(_get_encryption_headers(source._encryption_key, source=True))
+
+ query_params = self._query_params
+ if "generation" in query_params:
+ del query_params["generation"]
+
+ if token:
+ query_params["rewriteToken"] = token
+
+ if source.generation:
+ query_params["sourceGeneration"] = source.generation
+
+ # When a Customer Managed Encryption Key is used to encrypt Cloud Storage object
+ # at rest, object resource metadata will store the version of the Key Management
+ # Service cryptographic material. If a Blob instance with KMS Key metadata set is
+ # used to rewrite the object, then the existing kmsKeyName version
+            # value can't be used in the rewrite request, so the client ignores it.
+ if (
+ self.kms_key_name is not None
+ and "cryptoKeyVersions" not in self.kms_key_name
+ ):
+ query_params["destinationKmsKeyName"] = self.kms_key_name
- _add_generation_match_parameters(
- query_params,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- if_source_generation_match=if_source_generation_match,
- if_source_generation_not_match=if_source_generation_not_match,
- if_source_metageneration_match=if_source_metageneration_match,
- if_source_metageneration_not_match=if_source_metageneration_not_match,
- )
+ _add_generation_match_parameters(
+ query_params,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ if_source_generation_match=if_source_generation_match,
+ if_source_generation_not_match=if_source_generation_not_match,
+ if_source_metageneration_match=if_source_metageneration_match,
+ if_source_metageneration_not_match=if_source_metageneration_not_match,
+ )
- path = f"{source.path}/rewriteTo{self.path}"
- api_response = client._post_resource(
- path,
- self._properties,
- query_params=query_params,
- headers=headers,
- timeout=timeout,
- retry=retry,
- _target_object=self,
- )
- rewritten = int(api_response["totalBytesRewritten"])
- size = int(api_response["objectSize"])
+ path = f"{source.path}/rewriteTo{self.path}"
+ api_response = client._post_resource(
+ path,
+ self._properties,
+ query_params=query_params,
+ headers=headers,
+ timeout=timeout,
+ retry=retry,
+ _target_object=self,
+ )
+ rewritten = int(api_response["totalBytesRewritten"])
+ size = int(api_response["objectSize"])
- # The resource key is set if and only if the API response is
- # completely done. Additionally, there is no rewrite token to return
- # in this case.
- if api_response["done"]:
- self._set_properties(api_response["resource"])
- return None, rewritten, size
+ # The resource key is set if and only if the API response is
+ # completely done. Additionally, there is no rewrite token to return
+ # in this case.
+ if api_response["done"]:
+ self._set_properties(api_response["resource"])
+ return None, rewritten, size
- return api_response["rewriteToken"], rewritten, size
+ return api_response["rewriteToken"], rewritten, size
- @create_trace_span(name="Storage.Blob.updateStorageClass")
def update_storage_class(
self,
new_class,
@@ -3943,27 +3941,13 @@ def update_storage_class(
to enable retries regardless of generation precondition setting.
See [Configuring Retries](https://cloud.google.com/python/docs/reference/storage/latest/retry_timeout).
"""
- # Update current blob's storage class prior to rewrite
- self._patch_property("storageClass", new_class)
+ with create_trace_span(name="Storage.Blob.updateStorageClass"):
+ # Update current blob's storage class prior to rewrite
+ self._patch_property("storageClass", new_class)
- # Execute consecutive rewrite operations until operation is done
- token, _, _ = self.rewrite(
- self,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- if_source_generation_match=if_source_generation_match,
- if_source_generation_not_match=if_source_generation_not_match,
- if_source_metageneration_match=if_source_metageneration_match,
- if_source_metageneration_not_match=if_source_metageneration_not_match,
- timeout=timeout,
- retry=retry,
- )
- while token is not None:
+ # Execute consecutive rewrite operations until operation is done
token, _, _ = self.rewrite(
self,
- token=token,
if_generation_match=if_generation_match,
if_generation_not_match=if_generation_not_match,
if_metageneration_match=if_metageneration_match,
@@ -3975,8 +3959,22 @@ def update_storage_class(
timeout=timeout,
retry=retry,
)
+ while token is not None:
+ token, _, _ = self.rewrite(
+ self,
+ token=token,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ if_source_generation_match=if_source_generation_match,
+ if_source_generation_not_match=if_source_generation_not_match,
+ if_source_metageneration_match=if_source_metageneration_match,
+ if_source_metageneration_not_match=if_source_metageneration_not_match,
+ timeout=timeout,
+ retry=retry,
+ )
- @create_trace_span(name="Storage.Blob.open")
def open(
self,
mode="r",
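The ``update_storage_class`` hunk above is itself a consumer of ``rewrite``'s return contract: ``(token, bytes_rewritten, total_bytes)``, with ``token is None`` once the server reports ``done``. A standalone sketch of that token-driven loop, assuming ``dest`` and ``src`` are Blob-like objects:

# Sketch of the caller-side loop implied by rewrite()'s return value.
def rewrite_until_done(dest, src):
    token, rewritten, size = dest.rewrite(src)
    while token is not None:
        # Resume the server-side rewrite with the returned token.
        token, rewritten, size = dest.rewrite(src, token=token)
    return rewritten, size
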
@@ -4086,51 +4084,54 @@ def open(
'google.cloud.storage.fileio', or an 'io.TextIOWrapper' around one
of those classes, depending on the 'mode' argument.
"""
- if mode == "rb":
- if encoding or errors or newline:
- raise ValueError(
- "encoding, errors and newline arguments are for text mode only"
- )
- if ignore_flush:
- raise ValueError(
- "ignore_flush argument is for non-text write mode only"
+ with create_trace_span(name="Storage.Blob.open"):
+ if mode == "rb":
+ if encoding or errors or newline:
+ raise ValueError(
+ "encoding, errors and newline arguments are for text mode only"
+ )
+ if ignore_flush:
+ raise ValueError(
+ "ignore_flush argument is for non-text write mode only"
+ )
+ return BlobReader(self, chunk_size=chunk_size, **kwargs)
+ elif mode == "wb":
+ if encoding or errors or newline:
+ raise ValueError(
+ "encoding, errors and newline arguments are for text mode only"
+ )
+ return BlobWriter(
+ self, chunk_size=chunk_size, ignore_flush=ignore_flush, **kwargs
)
- return BlobReader(self, chunk_size=chunk_size, **kwargs)
- elif mode == "wb":
- if encoding or errors or newline:
- raise ValueError(
- "encoding, errors and newline arguments are for text mode only"
+ elif mode in ("r", "rt"):
+ if ignore_flush:
+ raise ValueError(
+ "ignore_flush argument is for non-text write mode only"
+ )
+ return TextIOWrapper(
+ BlobReader(self, chunk_size=chunk_size, **kwargs),
+ encoding=encoding,
+ errors=errors,
+ newline=newline,
)
- return BlobWriter(
- self, chunk_size=chunk_size, ignore_flush=ignore_flush, **kwargs
- )
- elif mode in ("r", "rt"):
- if ignore_flush:
- raise ValueError(
- "ignore_flush argument is for non-text write mode only"
+ elif mode in ("w", "wt"):
+ if ignore_flush is False:
+ raise ValueError(
+ "ignore_flush is required for text mode writing and "
+ "cannot be set to False"
+ )
+ return TextIOWrapper(
+ BlobWriter(
+ self, chunk_size=chunk_size, ignore_flush=True, **kwargs
+ ),
+ encoding=encoding,
+ errors=errors,
+ newline=newline,
)
- return TextIOWrapper(
- BlobReader(self, chunk_size=chunk_size, **kwargs),
- encoding=encoding,
- errors=errors,
- newline=newline,
- )
- elif mode in ("w", "wt"):
- if ignore_flush is False:
- raise ValueError(
- "ignore_flush is required for text mode writing and "
- "cannot be set to False"
+ else:
+ raise NotImplementedError(
+                    "Supported mode strings are 'r', 'rb', 'rt', 'w', 'wb', and 'wt' only."
)
- return TextIOWrapper(
- BlobWriter(self, chunk_size=chunk_size, ignore_flush=True, **kwargs),
- encoding=encoding,
- errors=errors,
- newline=newline,
- )
- else:
- raise NotImplementedError(
- "Supported modes strings are 'r', 'rb', 'rt', 'w', 'wb', and 'wt' only."
- )
cache_control = _scalar_property("cacheControl")
"""HTTP 'Cache-Control' header for this object.
diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py
index 88356d316..fc5733bd0 100644
--- a/google/cloud/storage/bucket.py
+++ b/google/cloud/storage/bucket.py
@@ -910,7 +910,6 @@ def notification(
notification_id=notification_id,
)
- @create_trace_span(name="Storage.Bucket.exists")
def exists(
self,
client=None,
@@ -958,44 +957,46 @@ def exists(
:rtype: bool
:returns: True if the bucket exists in Cloud Storage.
"""
- client = self._require_client(client)
- # We only need the status code (200 or not) so we seek to
- # minimize the returned payload.
- query_params = {"fields": "name"}
-
- if self.user_project is not None:
- query_params["userProject"] = self.user_project
+ with create_trace_span(name="Storage.Bucket.exists"):
+ client = self._require_client(client)
+ # We only need the status code (200 or not) so we seek to
+ # minimize the returned payload.
+ query_params = {"fields": "name"}
- _add_generation_match_parameters(
- query_params,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- )
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
- headers = {}
- _add_etag_match_headers(
- headers, if_etag_match=if_etag_match, if_etag_not_match=if_etag_not_match
- )
+ _add_generation_match_parameters(
+ query_params,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
- try:
- # We intentionally pass `_target_object=None` since fields=name
- # would limit the local properties.
- client._get_resource(
- self.path,
- query_params=query_params,
- headers=headers,
- timeout=timeout,
- retry=retry,
- _target_object=None,
+ headers = {}
+ _add_etag_match_headers(
+ headers,
+ if_etag_match=if_etag_match,
+ if_etag_not_match=if_etag_not_match,
)
- except NotFound:
- # NOTE: This will not fail immediately in a batch. However, when
- # Batch.finish() is called, the resulting `NotFound` will be
- # raised.
- return False
- return True
-
- @create_trace_span(name="Storage.Bucket.create")
+
+ try:
+ # We intentionally pass `_target_object=None` since fields=name
+ # would limit the local properties.
+ client._get_resource(
+ self.path,
+ query_params=query_params,
+ headers=headers,
+ timeout=timeout,
+ retry=retry,
+ _target_object=None,
+ )
+ except NotFound:
+ # NOTE: This will not fail immediately in a batch. However, when
+ # Batch.finish() is called, the resulting `NotFound` will be
+ # raised.
+ return False
+ return True
+
def create(
self,
client=None,
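The ``exists`` hunk above requests only ``fields=name`` to keep the payload minimal and converts ``NotFound`` into ``False``; a short usage sketch (``client`` is assumed to be a ``google.cloud.storage.Client``, and the bucket name is a placeholder):

# Usage sketch: cheap existence check, then create on demand.
bucket = client.bucket("maybe-missing-bucket")
if not bucket.exists():
    bucket.create(location="US")
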
@@ -1057,21 +1058,20 @@ def create(
:param retry:
(Optional) How to retry the RPC. See: :ref:`configuring_retries`
"""
+ with create_trace_span(name="Storage.Bucket.create"):
+ client = self._require_client(client)
+ client.create_bucket(
+ bucket_or_name=self,
+ project=project,
+ user_project=self.user_project,
+ location=location,
+ predefined_acl=predefined_acl,
+ predefined_default_object_acl=predefined_default_object_acl,
+ enable_object_retention=enable_object_retention,
+ timeout=timeout,
+ retry=retry,
+ )
- client = self._require_client(client)
- client.create_bucket(
- bucket_or_name=self,
- project=project,
- user_project=self.user_project,
- location=location,
- predefined_acl=predefined_acl,
- predefined_default_object_acl=predefined_default_object_acl,
- enable_object_retention=enable_object_retention,
- timeout=timeout,
- retry=retry,
- )
-
- @create_trace_span(name="Storage.Bucket.update")
def update(
self,
client=None,
@@ -1108,15 +1108,15 @@ def update(
:param retry:
(Optional) How to retry the RPC. See: :ref:`configuring_retries`
"""
- super(Bucket, self).update(
- client=client,
- timeout=timeout,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- retry=retry,
- )
+ with create_trace_span(name="Storage.Bucket.update"):
+ super(Bucket, self).update(
+ client=client,
+ timeout=timeout,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ retry=retry,
+ )
- @create_trace_span(name="Storage.Bucket.reload")
def reload(
self,
client=None,
@@ -1175,19 +1175,19 @@ def reload(
set if ``soft_deleted`` is set to True.
See: https://cloud.google.com/storage/docs/soft-delete
"""
- super(Bucket, self).reload(
- client=client,
- projection=projection,
- timeout=timeout,
- if_etag_match=if_etag_match,
- if_etag_not_match=if_etag_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- retry=retry,
- soft_deleted=soft_deleted,
- )
+ with create_trace_span(name="Storage.Bucket.reload"):
+ super(Bucket, self).reload(
+ client=client,
+ projection=projection,
+ timeout=timeout,
+ if_etag_match=if_etag_match,
+ if_etag_not_match=if_etag_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ retry=retry,
+ soft_deleted=soft_deleted,
+ )
- @create_trace_span(name="Storage.Bucket.patch")
def patch(
self,
client=None,
@@ -1224,22 +1224,23 @@ def patch(
:param retry:
(Optional) How to retry the RPC. See: :ref:`configuring_retries`
"""
- # Special case: For buckets, it is possible that labels are being
- # removed; this requires special handling.
- if self._label_removals:
- self._changes.add("labels")
- self._properties.setdefault("labels", {})
- for removed_label in self._label_removals:
- self._properties["labels"][removed_label] = None
-
- # Call the superclass method.
- super(Bucket, self).patch(
- client=client,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- timeout=timeout,
- retry=retry,
- )
+ with create_trace_span(name="Storage.Bucket.patch"):
+ # Special case: For buckets, it is possible that labels are being
+ # removed; this requires special handling.
+ if self._label_removals:
+ self._changes.add("labels")
+ self._properties.setdefault("labels", {})
+ for removed_label in self._label_removals:
+ self._properties["labels"][removed_label] = None
+
+ # Call the superclass method.
+ super(Bucket, self).patch(
+ client=client,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
+ retry=retry,
+ )
@property
def acl(self):
@@ -1271,7 +1272,6 @@ def path(self):
return self.path_helper(self.name)
- @create_trace_span(name="Storage.Bucket.getBlob")
def get_blob(
self,
blob_name,
@@ -1360,35 +1360,35 @@ def get_blob(
:rtype: :class:`google.cloud.storage.blob.Blob` or None
:returns: The blob object if it exists, otherwise None.
"""
- blob = Blob(
- bucket=self,
- name=blob_name,
- encryption_key=encryption_key,
- generation=generation,
- **kwargs,
- )
- try:
- # NOTE: This will not fail immediately in a batch. However, when
- # Batch.finish() is called, the resulting `NotFound` will be
- # raised.
- blob.reload(
- client=client,
- timeout=timeout,
- if_etag_match=if_etag_match,
- if_etag_not_match=if_etag_not_match,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- retry=retry,
- soft_deleted=soft_deleted,
+ with create_trace_span(name="Storage.Bucket.getBlob"):
+ blob = Blob(
+ bucket=self,
+ name=blob_name,
+ encryption_key=encryption_key,
+ generation=generation,
+ **kwargs,
)
- except NotFound:
- return None
- else:
- return blob
+ try:
+ # NOTE: This will not fail immediately in a batch. However, when
+ # Batch.finish() is called, the resulting `NotFound` will be
+ # raised.
+ blob.reload(
+ client=client,
+ timeout=timeout,
+ if_etag_match=if_etag_match,
+ if_etag_not_match=if_etag_not_match,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ retry=retry,
+ soft_deleted=soft_deleted,
+ )
+ except NotFound:
+ return None
+ else:
+ return blob
- @create_trace_span(name="Storage.Bucket.listBlobs")
def list_blobs(
self,
max_results=None,
@@ -1510,28 +1510,28 @@ def list_blobs(
:returns: Iterator of all :class:`~google.cloud.storage.blob.Blob`
in this bucket matching the arguments.
"""
- client = self._require_client(client)
- return client.list_blobs(
- self,
- max_results=max_results,
- page_token=page_token,
- prefix=prefix,
- delimiter=delimiter,
- start_offset=start_offset,
- end_offset=end_offset,
- include_trailing_delimiter=include_trailing_delimiter,
- versions=versions,
- projection=projection,
- fields=fields,
- page_size=page_size,
- timeout=timeout,
- retry=retry,
- match_glob=match_glob,
- include_folders_as_prefixes=include_folders_as_prefixes,
- soft_deleted=soft_deleted,
- )
+ with create_trace_span(name="Storage.Bucket.listBlobs"):
+ client = self._require_client(client)
+ return client.list_blobs(
+ self,
+ max_results=max_results,
+ page_token=page_token,
+ prefix=prefix,
+ delimiter=delimiter,
+ start_offset=start_offset,
+ end_offset=end_offset,
+ include_trailing_delimiter=include_trailing_delimiter,
+ versions=versions,
+ projection=projection,
+ fields=fields,
+ page_size=page_size,
+ timeout=timeout,
+ retry=retry,
+ match_glob=match_glob,
+ include_folders_as_prefixes=include_folders_as_prefixes,
+ soft_deleted=soft_deleted,
+ )
- @create_trace_span(name="Storage.Bucket.listNotifications")
def list_notifications(
self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY
):
@@ -1558,18 +1558,18 @@ def list_notifications(
:rtype: list of :class:`.BucketNotification`
:returns: notification instances
"""
- client = self._require_client(client)
- path = self.path + "/notificationConfigs"
- iterator = client._list_resource(
- path,
- _item_to_notification,
- timeout=timeout,
- retry=retry,
- )
- iterator.bucket = self
- return iterator
+ with create_trace_span(name="Storage.Bucket.listNotifications"):
+ client = self._require_client(client)
+ path = self.path + "/notificationConfigs"
+ iterator = client._list_resource(
+ path,
+ _item_to_notification,
+ timeout=timeout,
+ retry=retry,
+ )
+ iterator.bucket = self
+ return iterator
- @create_trace_span(name="Storage.Bucket.getNotification")
def get_notification(
self,
notification_id,
@@ -1603,11 +1603,11 @@ def get_notification(
:rtype: :class:`.BucketNotification`
:returns: notification instance.
"""
- notification = self.notification(notification_id=notification_id)
- notification.reload(client=client, timeout=timeout, retry=retry)
- return notification
+ with create_trace_span(name="Storage.Bucket.getNotification"):
+ notification = self.notification(notification_id=notification_id)
+ notification.reload(client=client, timeout=timeout, retry=retry)
+ return notification
- @create_trace_span(name="Storage.Bucket.delete")
def delete(
self,
force=False,
@@ -1663,58 +1663,58 @@ def delete(
:raises: :class:`ValueError` if ``force`` is ``True`` and the bucket
contains more than 256 objects / blobs.
"""
- client = self._require_client(client)
- query_params = {}
+ with create_trace_span(name="Storage.Bucket.delete"):
+ client = self._require_client(client)
+ query_params = {}
- if self.user_project is not None:
- query_params["userProject"] = self.user_project
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
- _add_generation_match_parameters(
- query_params,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- )
- if force:
- blobs = list(
- self.list_blobs(
- max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
+ _add_generation_match_parameters(
+ query_params,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
+ if force:
+ blobs = list(
+ self.list_blobs(
+ max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
+ client=client,
+ timeout=timeout,
+ retry=retry,
+ versions=True,
+ )
+ )
+ if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
+ message = (
+ "Refusing to delete bucket with more than "
+ "%d objects. If you actually want to delete "
+ "this bucket, please delete the objects "
+ "yourself before calling Bucket.delete()."
+ ) % (self._MAX_OBJECTS_FOR_ITERATION,)
+ raise ValueError(message)
+
+ # Ignore 404 errors on delete.
+ self.delete_blobs(
+ blobs,
+ on_error=lambda blob: None,
client=client,
timeout=timeout,
retry=retry,
- versions=True,
+ preserve_generation=True,
)
- )
- if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
- message = (
- "Refusing to delete bucket with more than "
- "%d objects. If you actually want to delete "
- "this bucket, please delete the objects "
- "yourself before calling Bucket.delete()."
- ) % (self._MAX_OBJECTS_FOR_ITERATION,)
- raise ValueError(message)
-
- # Ignore 404 errors on delete.
- self.delete_blobs(
- blobs,
- on_error=lambda blob: None,
- client=client,
+
+ # We intentionally pass `_target_object=None` since a DELETE
+ # request has no response value (whether in a standard request or
+ # in a batch request).
+ client._delete_resource(
+ self.path,
+ query_params=query_params,
timeout=timeout,
retry=retry,
- preserve_generation=True,
+ _target_object=None,
)
- # We intentionally pass `_target_object=None` since a DELETE
- # request has no response value (whether in a standard request or
- # in a batch request).
- client._delete_resource(
- self.path,
- query_params=query_params,
- timeout=timeout,
- retry=retry,
- _target_object=None,
- )
-
- @create_trace_span(name="Storage.Bucket.deleteBlob")
def delete_blob(
self,
blob_name,
@@ -1786,29 +1786,29 @@ def delete_blob(
the exception, use :meth:`delete_blobs` by passing a no-op
``on_error`` callback.
"""
- client = self._require_client(client)
- blob = Blob(blob_name, bucket=self, generation=generation)
-
- query_params = copy.deepcopy(blob._query_params)
- _add_generation_match_parameters(
- query_params,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- )
- # We intentionally pass `_target_object=None` since a DELETE
- # request has no response value (whether in a standard request or
- # in a batch request).
- client._delete_resource(
- blob.path,
- query_params=query_params,
- timeout=timeout,
- retry=retry,
- _target_object=None,
- )
+ with create_trace_span(name="Storage.Bucket.deleteBlob"):
+ client = self._require_client(client)
+ blob = Blob(blob_name, bucket=self, generation=generation)
+
+ query_params = copy.deepcopy(blob._query_params)
+ _add_generation_match_parameters(
+ query_params,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
+ # We intentionally pass `_target_object=None` since a DELETE
+ # request has no response value (whether in a standard request or
+ # in a batch request).
+ client._delete_resource(
+ blob.path,
+ query_params=query_params,
+ timeout=timeout,
+ retry=retry,
+ _target_object=None,
+ )
- @create_trace_span(name="Storage.Bucket.deleteBlobs")
def delete_blobs(
self,
blobs,
@@ -1899,44 +1899,46 @@ def delete_blobs(
:raises: :class:`~google.cloud.exceptions.NotFound` (if
`on_error` is not passed).
"""
- _raise_if_len_differs(
- len(blobs),
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- )
- if_generation_match = iter(if_generation_match or [])
- if_generation_not_match = iter(if_generation_not_match or [])
- if_metageneration_match = iter(if_metageneration_match or [])
- if_metageneration_not_match = iter(if_metageneration_not_match or [])
+ with create_trace_span(name="Storage.Bucket.deleteBlobs"):
+ _raise_if_len_differs(
+ len(blobs),
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
+ if_generation_match = iter(if_generation_match or [])
+ if_generation_not_match = iter(if_generation_not_match or [])
+ if_metageneration_match = iter(if_metageneration_match or [])
+ if_metageneration_not_match = iter(if_metageneration_not_match or [])
- for blob in blobs:
- try:
- blob_name = blob
- generation = None
- if not isinstance(blob_name, str):
- blob_name = blob.name
- generation = blob.generation if preserve_generation else None
-
- self.delete_blob(
- blob_name,
- client=client,
- generation=generation,
- if_generation_match=next(if_generation_match, None),
- if_generation_not_match=next(if_generation_not_match, None),
- if_metageneration_match=next(if_metageneration_match, None),
- if_metageneration_not_match=next(if_metageneration_not_match, None),
- timeout=timeout,
- retry=retry,
- )
- except NotFound:
- if on_error is not None:
- on_error(blob)
- else:
- raise
+ for blob in blobs:
+ try:
+ blob_name = blob
+ generation = None
+ if not isinstance(blob_name, str):
+ blob_name = blob.name
+ generation = blob.generation if preserve_generation else None
+
+ self.delete_blob(
+ blob_name,
+ client=client,
+ generation=generation,
+ if_generation_match=next(if_generation_match, None),
+ if_generation_not_match=next(if_generation_not_match, None),
+ if_metageneration_match=next(if_metageneration_match, None),
+ if_metageneration_not_match=next(
+ if_metageneration_not_match, None
+ ),
+ timeout=timeout,
+ retry=retry,
+ )
+ except NotFound:
+ if on_error is not None:
+ on_error(blob)
+ else:
+ raise
- @create_trace_span(name="Storage.Bucket.copyBlob")
def copy_blob(
self,
blob,
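As the ``delete_blobs`` hunk above shows, a per-blob ``NotFound`` is passed to ``on_error`` when one is supplied instead of being raised; a usage sketch that collects already-deleted names (``bucket`` and the object names are placeholders):

# Usage sketch: delete a batch, tolerating objects that are already gone.
missing = []
bucket.delete_blobs(
    ["logs/2025-01-01.txt", "logs/2025-01-02.txt"],
    on_error=missing.append,  # called with the blob (here, its name)
)
print("already deleted:", missing)
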
@@ -2051,48 +2053,48 @@ def copy_blob(
:rtype: :class:`google.cloud.storage.blob.Blob`
:returns: The new Blob.
"""
- client = self._require_client(client)
- query_params = {}
-
- if self.user_project is not None:
- query_params["userProject"] = self.user_project
-
- if source_generation is not None:
- query_params["sourceGeneration"] = source_generation
-
- _add_generation_match_parameters(
- query_params,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- if_source_generation_match=if_source_generation_match,
- if_source_generation_not_match=if_source_generation_not_match,
- if_source_metageneration_match=if_source_metageneration_match,
- if_source_metageneration_not_match=if_source_metageneration_not_match,
- )
+ with create_trace_span(name="Storage.Bucket.copyBlob"):
+ client = self._require_client(client)
+ query_params = {}
- if new_name is None:
- new_name = blob.name
-
- new_blob = Blob(bucket=destination_bucket, name=new_name)
- api_path = blob.path + "/copyTo" + new_blob.path
- copy_result = client._post_resource(
- api_path,
- None,
- query_params=query_params,
- timeout=timeout,
- retry=retry,
- _target_object=new_blob,
- )
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
- if not preserve_acl:
- new_blob.acl.save(acl={}, client=client, timeout=timeout)
+ if source_generation is not None:
+ query_params["sourceGeneration"] = source_generation
- new_blob._set_properties(copy_result)
- return new_blob
+ _add_generation_match_parameters(
+ query_params,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ if_source_generation_match=if_source_generation_match,
+ if_source_generation_not_match=if_source_generation_not_match,
+ if_source_metageneration_match=if_source_metageneration_match,
+ if_source_metageneration_not_match=if_source_metageneration_not_match,
+ )
+
+ if new_name is None:
+ new_name = blob.name
+
+ new_blob = Blob(bucket=destination_bucket, name=new_name)
+ api_path = blob.path + "/copyTo" + new_blob.path
+ copy_result = client._post_resource(
+ api_path,
+ None,
+ query_params=query_params,
+ timeout=timeout,
+ retry=retry,
+ _target_object=new_blob,
+ )
+
+ if not preserve_acl:
+ new_blob.acl.save(acl={}, client=client, timeout=timeout)
+
+ new_blob._set_properties(copy_result)
+ return new_blob
- @create_trace_span(name="Storage.Bucket.renameBlob")
def rename_blob(
self,
blob,
@@ -2205,38 +2207,38 @@ def rename_blob(
:rtype: :class:`Blob`
:returns: The newly-renamed blob.
"""
- same_name = blob.name == new_name
-
- new_blob = self.copy_blob(
- blob,
- self,
- new_name,
- client=client,
- timeout=timeout,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- if_source_generation_match=if_source_generation_match,
- if_source_generation_not_match=if_source_generation_not_match,
- if_source_metageneration_match=if_source_metageneration_match,
- if_source_metageneration_not_match=if_source_metageneration_not_match,
- retry=retry,
- )
+ with create_trace_span(name="Storage.Bucket.renameBlob"):
+ same_name = blob.name == new_name
- if not same_name:
- blob.delete(
+ new_blob = self.copy_blob(
+ blob,
+ self,
+ new_name,
client=client,
timeout=timeout,
- if_generation_match=if_source_generation_match,
- if_generation_not_match=if_source_generation_not_match,
- if_metageneration_match=if_source_metageneration_match,
- if_metageneration_not_match=if_source_metageneration_not_match,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ if_source_generation_match=if_source_generation_match,
+ if_source_generation_not_match=if_source_generation_not_match,
+ if_source_metageneration_match=if_source_metageneration_match,
+ if_source_metageneration_not_match=if_source_metageneration_not_match,
retry=retry,
)
- return new_blob
- @create_trace_span(name="Storage.Bucket.moveBlob")
+ if not same_name:
+ blob.delete(
+ client=client,
+ timeout=timeout,
+ if_generation_match=if_source_generation_match,
+ if_generation_not_match=if_source_generation_not_match,
+ if_metageneration_match=if_source_metageneration_match,
+ if_metageneration_not_match=if_source_metageneration_not_match,
+ retry=retry,
+ )
+ return new_blob
+
def move_blob(
self,
blob,
@@ -2328,39 +2330,39 @@ def move_blob(
:rtype: :class:`Blob`
:returns: The newly-moved blob.
"""
- client = self._require_client(client)
- query_params = {}
-
- if self.user_project is not None:
- query_params["userProject"] = self.user_project
-
- _add_generation_match_parameters(
- query_params,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- if_source_generation_match=if_source_generation_match,
- if_source_generation_not_match=if_source_generation_not_match,
- if_source_metageneration_match=if_source_metageneration_match,
- if_source_metageneration_not_match=if_source_metageneration_not_match,
- )
+ with create_trace_span(name="Storage.Bucket.moveBlob"):
+ client = self._require_client(client)
+ query_params = {}
- new_blob = Blob(bucket=self, name=new_name)
- api_path = blob.path + "/moveTo/o/" + new_blob.name
- move_result = client._post_resource(
- api_path,
- None,
- query_params=query_params,
- timeout=timeout,
- retry=retry,
- _target_object=new_blob,
- )
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
- new_blob._set_properties(move_result)
- return new_blob
+ _add_generation_match_parameters(
+ query_params,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ if_source_generation_match=if_source_generation_match,
+ if_source_generation_not_match=if_source_generation_not_match,
+ if_source_metageneration_match=if_source_metageneration_match,
+ if_source_metageneration_not_match=if_source_metageneration_not_match,
+ )
+
+ new_blob = Blob(bucket=self, name=new_name)
+ api_path = blob.path + "/moveTo/o/" + new_blob.name
+ move_result = client._post_resource(
+ api_path,
+ None,
+ query_params=query_params,
+ timeout=timeout,
+ retry=retry,
+ _target_object=new_blob,
+ )
+
+ new_blob._set_properties(move_result)
+ return new_blob
- @create_trace_span(name="Storage.Bucket.restore_blob")
def restore_blob(
self,
blob_name,
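The ``move_blob`` hunk above posts to ``<blob.path>/moveTo/o/<new_name>`` and returns the new Blob; a hedged usage sketch (names are placeholders, and the bucket is assumed to support the server-side move operation):

# Usage sketch: rename an object in place via the moveTo endpoint.
bucket = client.bucket("my-bucket")
blob = bucket.blob("staging/report.csv")
moved = bucket.move_blob(blob, "published/report.csv")
print(moved.name)  # "published/report.csv"
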
@@ -2434,36 +2436,37 @@ def restore_blob(
:rtype: :class:`google.cloud.storage.blob.Blob`
:returns: The restored Blob.
"""
- client = self._require_client(client)
- query_params = {}
-
- if self.user_project is not None:
- query_params["userProject"] = self.user_project
- if generation is not None:
- query_params["generation"] = generation
- if copy_source_acl is not None:
- query_params["copySourceAcl"] = copy_source_acl
- if projection is not None:
- query_params["projection"] = projection
-
- _add_generation_match_parameters(
- query_params,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- )
+ with create_trace_span(name="Storage.Bucket.restore_blob"):
+ client = self._require_client(client)
+ query_params = {}
+
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
+ if generation is not None:
+ query_params["generation"] = generation
+ if copy_source_acl is not None:
+ query_params["copySourceAcl"] = copy_source_acl
+ if projection is not None:
+ query_params["projection"] = projection
+
+ _add_generation_match_parameters(
+ query_params,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
- blob = Blob(bucket=self, name=blob_name)
- api_response = client._post_resource(
- f"{blob.path}/restore",
- None,
- query_params=query_params,
- timeout=timeout,
- retry=retry,
- )
- blob._set_properties(api_response)
- return blob
+ blob = Blob(bucket=self, name=blob_name)
+ api_response = client._post_resource(
+ f"{blob.path}/restore",
+ None,
+ query_params=query_params,
+ timeout=timeout,
+ retry=retry,
+ )
+ blob._set_properties(api_response)
+ return blob
@property
def cors(self):
@@ -3262,7 +3265,6 @@ def disable_website(self):
"""
return self.configure_website(None, None)
- @create_trace_span(name="Storage.Bucket.getIamPolicy")
def get_iam_policy(
self,
client=None,
@@ -3307,25 +3309,25 @@ def get_iam_policy(
:returns: the policy instance, based on the resource returned from
the ``getIamPolicy`` API request.
"""
- client = self._require_client(client)
- query_params = {}
+ with create_trace_span(name="Storage.Bucket.getIamPolicy"):
+ client = self._require_client(client)
+ query_params = {}
- if self.user_project is not None:
- query_params["userProject"] = self.user_project
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
- if requested_policy_version is not None:
- query_params["optionsRequestedPolicyVersion"] = requested_policy_version
+ if requested_policy_version is not None:
+ query_params["optionsRequestedPolicyVersion"] = requested_policy_version
- info = client._get_resource(
- f"{self.path}/iam",
- query_params=query_params,
- timeout=timeout,
- retry=retry,
- _target_object=None,
- )
- return Policy.from_api_repr(info)
+ info = client._get_resource(
+ f"{self.path}/iam",
+ query_params=query_params,
+ timeout=timeout,
+ retry=retry,
+ _target_object=None,
+ )
+ return Policy.from_api_repr(info)
- @create_trace_span(name="Storage.Bucket.setIamPolicy")
def set_iam_policy(
self,
policy,
@@ -3361,28 +3363,28 @@ def set_iam_policy(
:returns: the policy instance, based on the resource returned from
the ``setIamPolicy`` API request.
"""
- client = self._require_client(client)
- query_params = {}
-
- if self.user_project is not None:
- query_params["userProject"] = self.user_project
-
- path = f"{self.path}/iam"
- resource = policy.to_api_repr()
- resource["resourceId"] = self.path
-
- info = client._put_resource(
- path,
- resource,
- query_params=query_params,
- timeout=timeout,
- retry=retry,
- _target_object=None,
- )
+ with create_trace_span(name="Storage.Bucket.setIamPolicy"):
+ client = self._require_client(client)
+ query_params = {}
- return Policy.from_api_repr(info)
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
+
+ path = f"{self.path}/iam"
+ resource = policy.to_api_repr()
+ resource["resourceId"] = self.path
+
+ info = client._put_resource(
+ path,
+ resource,
+ query_params=query_params,
+ timeout=timeout,
+ retry=retry,
+ _target_object=None,
+ )
+
+ return Policy.from_api_repr(info)
- @create_trace_span(name="Storage.Bucket.testIamPermissions")
def test_iam_permissions(
self, permissions, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY
):
@@ -3414,23 +3416,23 @@ def test_iam_permissions(
:returns: the permissions returned by the ``testIamPermissions`` API
request.
"""
- client = self._require_client(client)
- query_params = {"permissions": permissions}
-
- if self.user_project is not None:
- query_params["userProject"] = self.user_project
-
- path = f"{self.path}/iam/testPermissions"
- resp = client._get_resource(
- path,
- query_params=query_params,
- timeout=timeout,
- retry=retry,
- _target_object=None,
- )
- return resp.get("permissions", [])
+ with create_trace_span(name="Storage.Bucket.testIamPermissions"):
+ client = self._require_client(client)
+ query_params = {"permissions": permissions}
+
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
+
+ path = f"{self.path}/iam/testPermissions"
+ resp = client._get_resource(
+ path,
+ query_params=query_params,
+ timeout=timeout,
+ retry=retry,
+ _target_object=None,
+ )
+ return resp.get("permissions", [])
- @create_trace_span(name="Storage.Bucket.makePublic")
def make_public(
self,
recursive=False,
@@ -3480,21 +3482,9 @@ def make_public(
:meth:`~google.cloud.storage.blob.Blob.make_public`
for each blob.
"""
- self.acl.all().grant_read()
- self.acl.save(
- client=client,
- timeout=timeout,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- retry=retry,
- )
-
- if future:
- doa = self.default_object_acl
- if not doa.loaded:
- doa.reload(client=client, timeout=timeout)
- doa.all().grant_read()
- doa.save(
+ with create_trace_span(name="Storage.Bucket.makePublic"):
+ self.acl.all().grant_read()
+ self.acl.save(
client=client,
timeout=timeout,
if_metageneration_match=if_metageneration_match,
@@ -3502,33 +3492,45 @@ def make_public(
retry=retry,
)
- if recursive:
- blobs = list(
- self.list_blobs(
- projection="full",
- max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
+ if future:
+ doa = self.default_object_acl
+ if not doa.loaded:
+ doa.reload(client=client, timeout=timeout)
+ doa.all().grant_read()
+ doa.save(
client=client,
timeout=timeout,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ retry=retry,
)
- )
- if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
- message = (
- "Refusing to make public recursively with more than "
- "%d objects. If you actually want to make every object "
- "in this bucket public, iterate through the blobs "
- "returned by 'Bucket.list_blobs()' and call "
- "'make_public' on each one."
- ) % (self._MAX_OBJECTS_FOR_ITERATION,)
- raise ValueError(message)
- for blob in blobs:
- blob.acl.all().grant_read()
- blob.acl.save(
- client=client,
- timeout=timeout,
+ if recursive:
+ blobs = list(
+ self.list_blobs(
+ projection="full",
+ max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
+ client=client,
+ timeout=timeout,
+ )
)
+ if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
+ message = (
+ "Refusing to make public recursively with more than "
+ "%d objects. If you actually want to make every object "
+ "in this bucket public, iterate through the blobs "
+ "returned by 'Bucket.list_blobs()' and call "
+ "'make_public' on each one."
+ ) % (self._MAX_OBJECTS_FOR_ITERATION,)
+ raise ValueError(message)
+
+ for blob in blobs:
+ blob.acl.all().grant_read()
+ blob.acl.save(
+ client=client,
+ timeout=timeout,
+ )
- @create_trace_span(name="Storage.Bucket.makePrivate")
def make_private(
self,
recursive=False,
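When a bucket holds more than ``_MAX_OBJECTS_FOR_ITERATION`` objects, the ``make_public`` hunk above refuses ``recursive=True`` and directs callers to iterate themselves; a sketch of that manual fallback (``bucket`` is a placeholder):

# Manual fallback suggested by the ValueError above: grant read on
# each object individually rather than using recursive=True.
for blob in bucket.list_blobs(projection="full"):
    blob.acl.all().grant_read()
    blob.acl.save()
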
@@ -3577,21 +3579,9 @@ def make_private(
:meth:`~google.cloud.storage.blob.Blob.make_private`
for each blob.
"""
- self.acl.all().revoke_read()
- self.acl.save(
- client=client,
- timeout=timeout,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- retry=retry,
- )
-
- if future:
- doa = self.default_object_acl
- if not doa.loaded:
- doa.reload(client=client, timeout=timeout)
- doa.all().revoke_read()
- doa.save(
+ with create_trace_span(name="Storage.Bucket.makePrivate"):
+ self.acl.all().revoke_read()
+ self.acl.save(
client=client,
timeout=timeout,
if_metageneration_match=if_metageneration_match,
@@ -3599,28 +3589,41 @@ def make_private(
retry=retry,
)
- if recursive:
- blobs = list(
- self.list_blobs(
- projection="full",
- max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
+ if future:
+ doa = self.default_object_acl
+ if not doa.loaded:
+ doa.reload(client=client, timeout=timeout)
+ doa.all().revoke_read()
+ doa.save(
client=client,
timeout=timeout,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ retry=retry,
)
- )
- if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
- message = (
- "Refusing to make private recursively with more than "
- "%d objects. If you actually want to make every object "
- "in this bucket private, iterate through the blobs "
- "returned by 'Bucket.list_blobs()' and call "
- "'make_private' on each one."
- ) % (self._MAX_OBJECTS_FOR_ITERATION,)
- raise ValueError(message)
- for blob in blobs:
- blob.acl.all().revoke_read()
- blob.acl.save(client=client, timeout=timeout)
+ if recursive:
+ blobs = list(
+ self.list_blobs(
+ projection="full",
+ max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
+ client=client,
+ timeout=timeout,
+ )
+ )
+ if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
+ message = (
+ "Refusing to make private recursively with more than "
+ "%d objects. If you actually want to make every object "
+ "in this bucket private, iterate through the blobs "
+ "returned by 'Bucket.list_blobs()' and call "
+ "'make_private' on each one."
+ ) % (self._MAX_OBJECTS_FOR_ITERATION,)
+ raise ValueError(message)
+
+ for blob in blobs:
+ blob.acl.all().revoke_read()
+ blob.acl.save(client=client, timeout=timeout)
def generate_upload_policy(self, conditions, expiration=None, client=None):
"""Create a signed upload policy for uploading objects.
@@ -3676,7 +3679,6 @@ def generate_upload_policy(self, conditions, expiration=None, client=None):
return fields
- @create_trace_span(name="Storage.Bucket.lockRetentionPolicy")
def lock_retention_policy(
self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY
):
@@ -3701,34 +3703,39 @@ def lock_retention_policy(
if the bucket has no retention policy assigned;
if the bucket's retention policy is already locked.
"""
- if "metageneration" not in self._properties:
- raise ValueError("Bucket has no retention policy assigned: try 'reload'?")
+ with create_trace_span(name="Storage.Bucket.lockRetentionPolicy"):
+ if "metageneration" not in self._properties:
+ raise ValueError(
+ "Bucket has no retention policy assigned: try 'reload'?"
+ )
- policy = self._properties.get("retentionPolicy")
+ policy = self._properties.get("retentionPolicy")
- if policy is None:
- raise ValueError("Bucket has no retention policy assigned: try 'reload'?")
+ if policy is None:
+ raise ValueError(
+ "Bucket has no retention policy assigned: try 'reload'?"
+ )
- if policy.get("isLocked"):
- raise ValueError("Bucket's retention policy is already locked.")
+ if policy.get("isLocked"):
+ raise ValueError("Bucket's retention policy is already locked.")
- client = self._require_client(client)
+ client = self._require_client(client)
- query_params = {"ifMetagenerationMatch": self.metageneration}
+ query_params = {"ifMetagenerationMatch": self.metageneration}
- if self.user_project is not None:
- query_params["userProject"] = self.user_project
+ if self.user_project is not None:
+ query_params["userProject"] = self.user_project
- path = f"/b/{self.name}/lockRetentionPolicy"
- api_response = client._post_resource(
- path,
- None,
- query_params=query_params,
- timeout=timeout,
- retry=retry,
- _target_object=self,
- )
- self._set_properties(api_response)
+ path = f"/b/{self.name}/lockRetentionPolicy"
+ api_response = client._post_resource(
+ path,
+ None,
+ query_params=query_params,
+ timeout=timeout,
+ retry=retry,
+ _target_object=self,
+ )
+ self._set_properties(api_response)
def generate_signed_url(
self,
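The ``lock_retention_policy`` hunk above checks local metadata before issuing the POST; a condensed sketch of those preconditions (error strings abbreviated):

# Sketch of the local preconditions enforced before POSTing to
# /b/<name>/lockRetentionPolicy.
def check_lock_preconditions(properties):
    if "metageneration" not in properties:
        raise ValueError("no retention policy assigned: try 'reload'?")
    policy = properties.get("retentionPolicy")
    if policy is None:
        raise ValueError("no retention policy assigned: try 'reload'?")
    if policy.get("isLocked"):
        raise ValueError("retention policy is already locked.")
    return {"ifMetagenerationMatch": properties["metageneration"]}
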
diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py
index 57fa7043b..4a9d606a0 100644
--- a/google/cloud/storage/client.py
+++ b/google/cloud/storage/client.py
@@ -339,7 +339,6 @@ def current_batch(self):
"""
return self._batch_stack.top
- @create_trace_span(name="Storage.Client.getServiceAccountEmail")
def get_service_account_email(
self, project=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY
):
@@ -361,12 +360,13 @@ def get_service_account_email(
:rtype: str
:returns: service account email address
"""
- if project is None:
- project = self.project
+ with create_trace_span(name="Storage.Client.getServiceAccountEmail"):
+ if project is None:
+ project = self.project
- path = f"/projects/{project}/serviceAccount"
- api_response = self._get_resource(path, timeout=timeout, retry=retry)
- return api_response["email_address"]
+ path = f"/projects/{project}/serviceAccount"
+ api_response = self._get_resource(path, timeout=timeout, retry=retry)
+ return api_response["email_address"]
def bucket(self, bucket_name, user_project=None, generation=None):
"""Factory constructor for bucket object.
@@ -831,7 +831,6 @@ def _bucket_arg_to_bucket(self, bucket_or_name, generation=None):
bucket = Bucket(self, name=bucket_or_name, generation=generation)
return bucket
- @create_trace_span(name="Storage.Client.getBucket")
def get_bucket(
self,
bucket_or_name,
@@ -903,18 +902,18 @@ def get_bucket(
google.cloud.exceptions.NotFound
If the bucket is not found.
"""
- bucket = self._bucket_arg_to_bucket(bucket_or_name, generation=generation)
- bucket.reload(
- client=self,
- timeout=timeout,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- retry=retry,
- soft_deleted=soft_deleted,
- )
- return bucket
+ with create_trace_span(name="Storage.Client.getBucket"):
+ bucket = self._bucket_arg_to_bucket(bucket_or_name, generation=generation)
+ bucket.reload(
+ client=self,
+ timeout=timeout,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ retry=retry,
+ soft_deleted=soft_deleted,
+ )
+ return bucket
- @create_trace_span(name="Storage.Client.lookupBucket")
def lookup_bucket(
self,
bucket_name,
@@ -951,18 +950,18 @@ def lookup_bucket(
:rtype: :class:`google.cloud.storage.bucket.Bucket` or ``NoneType``
:returns: The bucket matching the name provided or None if not found.
"""
- try:
- return self.get_bucket(
- bucket_name,
- timeout=timeout,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- retry=retry,
- )
- except NotFound:
- return None
+ with create_trace_span(name="Storage.Client.lookupBucket"):
+ try:
+ return self.get_bucket(
+ bucket_name,
+ timeout=timeout,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ retry=retry,
+ )
+ except NotFound:
+ return None
- @create_trace_span(name="Storage.Client.createBucket")
def create_bucket(
self,
bucket_or_name,
@@ -1043,70 +1042,72 @@ def create_bucket(
google.cloud.exceptions.Conflict
If the bucket already exists.
"""
- bucket = self._bucket_arg_to_bucket(bucket_or_name)
- query_params = {}
+ with create_trace_span(name="Storage.Client.createBucket"):
+ bucket = self._bucket_arg_to_bucket(bucket_or_name)
+ query_params = {}
- if project is None:
- project = self.project
-
- # Use no project if STORAGE_EMULATOR_HOST is set
- if self._is_emulator_set:
- if project is None:
- project = _get_environ_project()
if project is None:
- project = ""
-
- # Only include the project parameter if a project is set.
- # If a project is not set, falls back to API validation (BadRequest).
- if project is not None:
- query_params = {"project": project}
-
- if requester_pays is not None:
- warnings.warn(
- "requester_pays arg is deprecated. Use Bucket().requester_pays instead.",
- PendingDeprecationWarning,
- stacklevel=1,
- )
- bucket.requester_pays = requester_pays
+ project = self.project
- if predefined_acl is not None:
- predefined_acl = BucketACL.validate_predefined(predefined_acl)
- query_params["predefinedAcl"] = predefined_acl
+ # Use no project if STORAGE_EMULATOR_HOST is set
+ if self._is_emulator_set:
+ if project is None:
+ project = _get_environ_project()
+ if project is None:
+ project = ""
- if predefined_default_object_acl is not None:
- predefined_default_object_acl = DefaultObjectACL.validate_predefined(
- predefined_default_object_acl
- )
- query_params["predefinedDefaultObjectAcl"] = predefined_default_object_acl
+ # Only include the project parameter if a project is set.
+ # If a project is not set, falls back to API validation (BadRequest).
+ if project is not None:
+ query_params = {"project": project}
- if user_project is not None:
- query_params["userProject"] = user_project
+ if requester_pays is not None:
+ warnings.warn(
+ "requester_pays arg is deprecated. Use Bucket().requester_pays instead.",
+ PendingDeprecationWarning,
+ stacklevel=1,
+ )
+ bucket.requester_pays = requester_pays
- if enable_object_retention:
- query_params["enableObjectRetention"] = enable_object_retention
+ if predefined_acl is not None:
+ predefined_acl = BucketACL.validate_predefined(predefined_acl)
+ query_params["predefinedAcl"] = predefined_acl
- properties = {key: bucket._properties[key] for key in bucket._changes}
- properties["name"] = bucket.name
+ if predefined_default_object_acl is not None:
+ predefined_default_object_acl = DefaultObjectACL.validate_predefined(
+ predefined_default_object_acl
+ )
+ query_params[
+ "predefinedDefaultObjectAcl"
+ ] = predefined_default_object_acl
- if location is not None:
- properties["location"] = location
+ if user_project is not None:
+ query_params["userProject"] = user_project
- if data_locations is not None:
- properties["customPlacementConfig"] = {"dataLocations": data_locations}
+ if enable_object_retention:
+ query_params["enableObjectRetention"] = enable_object_retention
- api_response = self._post_resource(
- "/b",
- properties,
- query_params=query_params,
- timeout=timeout,
- retry=retry,
- _target_object=bucket,
- )
+ properties = {key: bucket._properties[key] for key in bucket._changes}
+ properties["name"] = bucket.name
- bucket._set_properties(api_response)
- return bucket
+ if location is not None:
+ properties["location"] = location
+
+ if data_locations is not None:
+ properties["customPlacementConfig"] = {"dataLocations": data_locations}
+
+ api_response = self._post_resource(
+ "/b",
+ properties,
+ query_params=query_params,
+ timeout=timeout,
+ retry=retry,
+ _target_object=bucket,
+ )
+
+ bucket._set_properties(api_response)
+ return bucket
- @create_trace_span(name="Storage.Client.downloadBlobToFile")
def download_blob_to_file(
self,
blob_or_uri,
@@ -1197,28 +1198,27 @@ def download_blob_to_file(
(google.cloud.storage.retry) for information on retry types and how
to configure them.
"""
+ with create_trace_span(name="Storage.Client.downloadBlobToFile"):
+ if not isinstance(blob_or_uri, Blob):
+ blob_or_uri = Blob.from_uri(blob_or_uri)
+
+ blob_or_uri._prep_and_do_download(
+ file_obj,
+ client=self,
+ start=start,
+ end=end,
+ raw_download=raw_download,
+ if_etag_match=if_etag_match,
+ if_etag_not_match=if_etag_not_match,
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
+ checksum=checksum,
+ retry=retry,
+ )
- if not isinstance(blob_or_uri, Blob):
- blob_or_uri = Blob.from_uri(blob_or_uri)
-
- blob_or_uri._prep_and_do_download(
- file_obj,
- client=self,
- start=start,
- end=end,
- raw_download=raw_download,
- if_etag_match=if_etag_match,
- if_etag_not_match=if_etag_not_match,
- if_generation_match=if_generation_match,
- if_generation_not_match=if_generation_not_match,
- if_metageneration_match=if_metageneration_match,
- if_metageneration_not_match=if_metageneration_not_match,
- timeout=timeout,
- checksum=checksum,
- retry=retry,
- )
-
- @create_trace_span(name="Storage.Client.listBlobs")
def list_blobs(
self,
bucket_or_name,
@@ -1355,60 +1355,60 @@ def list_blobs(
As part of the response, you'll also get back an iterator.prefixes entity that lists object names
up to and including the requested delimiter. Duplicate entries are omitted from this list.
"""
- bucket = self._bucket_arg_to_bucket(bucket_or_name)
+ with create_trace_span(name="Storage.Client.listBlobs"):
+ bucket = self._bucket_arg_to_bucket(bucket_or_name)
- extra_params = {"projection": projection}
+ extra_params = {"projection": projection}
- if prefix is not None:
- extra_params["prefix"] = prefix
+ if prefix is not None:
+ extra_params["prefix"] = prefix
- if delimiter is not None:
- extra_params["delimiter"] = delimiter
+ if delimiter is not None:
+ extra_params["delimiter"] = delimiter
- if match_glob is not None:
- extra_params["matchGlob"] = match_glob
+ if match_glob is not None:
+ extra_params["matchGlob"] = match_glob
- if start_offset is not None:
- extra_params["startOffset"] = start_offset
+ if start_offset is not None:
+ extra_params["startOffset"] = start_offset
- if end_offset is not None:
- extra_params["endOffset"] = end_offset
+ if end_offset is not None:
+ extra_params["endOffset"] = end_offset
- if include_trailing_delimiter is not None:
- extra_params["includeTrailingDelimiter"] = include_trailing_delimiter
+ if include_trailing_delimiter is not None:
+ extra_params["includeTrailingDelimiter"] = include_trailing_delimiter
- if versions is not None:
- extra_params["versions"] = versions
+ if versions is not None:
+ extra_params["versions"] = versions
- if fields is not None:
- extra_params["fields"] = fields
+ if fields is not None:
+ extra_params["fields"] = fields
- if include_folders_as_prefixes is not None:
- extra_params["includeFoldersAsPrefixes"] = include_folders_as_prefixes
+ if include_folders_as_prefixes is not None:
+ extra_params["includeFoldersAsPrefixes"] = include_folders_as_prefixes
- if soft_deleted is not None:
- extra_params["softDeleted"] = soft_deleted
+ if soft_deleted is not None:
+ extra_params["softDeleted"] = soft_deleted
- if bucket.user_project is not None:
- extra_params["userProject"] = bucket.user_project
+ if bucket.user_project is not None:
+ extra_params["userProject"] = bucket.user_project
- path = bucket.path + "/o"
- iterator = self._list_resource(
- path,
- _item_to_blob,
- page_token=page_token,
- max_results=max_results,
- extra_params=extra_params,
- page_start=_blobs_page_start,
- page_size=page_size,
- timeout=timeout,
- retry=retry,
- )
- iterator.bucket = bucket
- iterator.prefixes = set()
- return iterator
+ path = bucket.path + "/o"
+ iterator = self._list_resource(
+ path,
+ _item_to_blob,
+ page_token=page_token,
+ max_results=max_results,
+ extra_params=extra_params,
+ page_start=_blobs_page_start,
+ page_size=page_size,
+ timeout=timeout,
+ retry=retry,
+ )
+ iterator.bucket = bucket
+ iterator.prefixes = set()
+ return iterator
- @create_trace_span(name="Storage.Client.listBuckets")
def list_buckets(
self,
max_results=None,
@@ -1486,44 +1486,45 @@ def list_buckets(
:returns: Iterator of all :class:`~google.cloud.storage.bucket.Bucket`
belonging to this project.
"""
- extra_params = {}
+ with create_trace_span(name="Storage.Client.listBuckets"):
+ extra_params = {}
- if project is None:
- project = self.project
-
- # Use no project if STORAGE_EMULATOR_HOST is set
- if self._is_emulator_set:
- if project is None:
- project = _get_environ_project()
if project is None:
- project = ""
+ project = self.project
- # Only include the project parameter if a project is set.
- # If a project is not set, falls back to API validation (BadRequest).
- if project is not None:
- extra_params = {"project": project}
+ # Use no project if STORAGE_EMULATOR_HOST is set
+ if self._is_emulator_set:
+ if project is None:
+ project = _get_environ_project()
+ if project is None:
+ project = ""
- if prefix is not None:
- extra_params["prefix"] = prefix
+ # Only include the project parameter if a project is set.
+ # If a project is not set, falls back to API validation (BadRequest).
+ if project is not None:
+ extra_params = {"project": project}
- extra_params["projection"] = projection
+ if prefix is not None:
+ extra_params["prefix"] = prefix
- if fields is not None:
- extra_params["fields"] = fields
+ extra_params["projection"] = projection
- if soft_deleted is not None:
- extra_params["softDeleted"] = soft_deleted
+ if fields is not None:
+ extra_params["fields"] = fields
- return self._list_resource(
- "/b",
- _item_to_bucket,
- page_token=page_token,
- max_results=max_results,
- extra_params=extra_params,
- page_size=page_size,
- timeout=timeout,
- retry=retry,
- )
+ if soft_deleted is not None:
+ extra_params["softDeleted"] = soft_deleted
+
+ return self._list_resource(
+ "/b",
+ _item_to_bucket,
+ page_token=page_token,
+ max_results=max_results,
+ extra_params=extra_params,
+ page_size=page_size,
+ timeout=timeout,
+ retry=retry,
+ )
def restore_bucket(
self,
@@ -1590,7 +1591,6 @@ def restore_bucket(
bucket._set_properties(api_response)
return bucket
- @create_trace_span(name="Storage.Client.createHmacKey")
def create_hmac_key(
self,
service_account_email,
@@ -1634,28 +1634,28 @@ def create_hmac_key(
Tuple[:class:`~google.cloud.storage.hmac_key.HMACKeyMetadata`, str]
:returns: metadata for the created key, plus the key's secret, which is a 40-character base64-encoded string.
"""
- if project_id is None:
- project_id = self.project
+ with create_trace_span(name="Storage.Client.createHmacKey"):
+ if project_id is None:
+ project_id = self.project
- path = f"/projects/{project_id}/hmacKeys"
- qs_params = {"serviceAccountEmail": service_account_email}
+ path = f"/projects/{project_id}/hmacKeys"
+ qs_params = {"serviceAccountEmail": service_account_email}
- if user_project is not None:
- qs_params["userProject"] = user_project
+ if user_project is not None:
+ qs_params["userProject"] = user_project
- api_response = self._post_resource(
- path,
- None,
- query_params=qs_params,
- timeout=timeout,
- retry=retry,
- )
- metadata = HMACKeyMetadata(self)
- metadata._properties = api_response["metadata"]
- secret = api_response["secret"]
- return metadata, secret
+ api_response = self._post_resource(
+ path,
+ None,
+ query_params=qs_params,
+ timeout=timeout,
+ retry=retry,
+ )
+ metadata = HMACKeyMetadata(self)
+ metadata._properties = api_response["metadata"]
+ secret = api_response["secret"]
+ return metadata, secret
- @create_trace_span(name="Storage.Client.listHmacKeys")
def list_hmac_keys(
self,
max_results=None,
@@ -1701,31 +1701,31 @@ def list_hmac_keys(
Iterable of :class:`~google.cloud.storage.hmac_key.HMACKeyMetadata`
:returns: metadata for each HMAC key associated with the project. Key secrets are returned only at creation time and are not included in the listing.
"""
- if project_id is None:
- project_id = self.project
+ with create_trace_span(name="Storage.Client.listHmacKeys"):
+ if project_id is None:
+ project_id = self.project
- path = f"/projects/{project_id}/hmacKeys"
- extra_params = {}
+ path = f"/projects/{project_id}/hmacKeys"
+ extra_params = {}
- if service_account_email is not None:
- extra_params["serviceAccountEmail"] = service_account_email
+ if service_account_email is not None:
+ extra_params["serviceAccountEmail"] = service_account_email
- if show_deleted_keys is not None:
- extra_params["showDeletedKeys"] = show_deleted_keys
+ if show_deleted_keys is not None:
+ extra_params["showDeletedKeys"] = show_deleted_keys
- if user_project is not None:
- extra_params["userProject"] = user_project
+ if user_project is not None:
+ extra_params["userProject"] = user_project
- return self._list_resource(
- path,
- _item_to_hmac_key_metadata,
- max_results=max_results,
- extra_params=extra_params,
- timeout=timeout,
- retry=retry,
- )
+ return self._list_resource(
+ path,
+ _item_to_hmac_key_metadata,
+ max_results=max_results,
+ extra_params=extra_params,
+ timeout=timeout,
+ retry=retry,
+ )
- @create_trace_span(name="Storage.Client.getHmacKeyMetadata")
def get_hmac_key_metadata(
self, access_id, project_id=None, user_project=None, timeout=_DEFAULT_TIMEOUT
):
@@ -1746,9 +1746,10 @@ def get_hmac_key_metadata(
:type user_project: str
:param user_project: (Optional) This parameter is currently ignored.
"""
- metadata = HMACKeyMetadata(self, access_id, project_id, user_project)
- metadata.reload(timeout=timeout) # raises NotFound for missing key
- return metadata
+ with create_trace_span(name="Storage.Client.getHmacKeyMetadata"):
+ metadata = HMACKeyMetadata(self, access_id, project_id, user_project)
+ metadata.reload(timeout=timeout) # raises NotFound for missing key
+ return metadata
def generate_signed_post_policy_v4(
self,
diff --git a/google/cloud/storage/hmac_key.py b/google/cloud/storage/hmac_key.py
index d37bc071b..547650366 100644
--- a/google/cloud/storage/hmac_key.py
+++ b/google/cloud/storage/hmac_key.py
@@ -188,7 +188,6 @@ def user_project(self):
"""
return self._user_project
- @create_trace_span(name="Storage.HmacKey.exists")
def exists(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY):
"""Determine whether or not the key for this metadata exists.
@@ -204,24 +203,24 @@ def exists(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY):
:rtype: bool
:returns: True if the key exists in Cloud Storage.
"""
- try:
- qs_params = {}
-
- if self.user_project is not None:
- qs_params["userProject"] = self.user_project
-
- self._client._get_resource(
- self.path,
- query_params=qs_params,
- timeout=timeout,
- retry=retry,
- )
- except NotFound:
- return False
- else:
- return True
+ with create_trace_span(name="Storage.HmacKey.exists"):
+ try:
+ qs_params = {}
+
+ if self.user_project is not None:
+ qs_params["userProject"] = self.user_project
+
+ self._client._get_resource(
+ self.path,
+ query_params=qs_params,
+ timeout=timeout,
+ retry=retry,
+ )
+ except NotFound:
+ return False
+ else:
+ return True
- @create_trace_span(name="Storage.HmacKey.reload")
def reload(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY):
"""Reload properties from Cloud Storage.
@@ -237,19 +236,19 @@ def reload(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY):
:raises :class:`~google.api_core.exceptions.NotFound`:
if the key does not exist on the back-end.
"""
- qs_params = {}
+ with create_trace_span(name="Storage.HmacKey.reload"):
+ qs_params = {}
- if self.user_project is not None:
- qs_params["userProject"] = self.user_project
+ if self.user_project is not None:
+ qs_params["userProject"] = self.user_project
- self._properties = self._client._get_resource(
- self.path,
- query_params=qs_params,
- timeout=timeout,
- retry=retry,
- )
+ self._properties = self._client._get_resource(
+ self.path,
+ query_params=qs_params,
+ timeout=timeout,
+ retry=retry,
+ )
- @create_trace_span(name="Storage.HmacKey.update")
def update(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY_IF_ETAG_IN_JSON):
"""Save writable properties to Cloud Storage.
@@ -265,20 +264,20 @@ def update(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY_IF_ETAG_IN_JSON):
:raises :class:`~google.api_core.exceptions.NotFound`:
if the key does not exist on the back-end.
"""
- qs_params = {}
- if self.user_project is not None:
- qs_params["userProject"] = self.user_project
-
- payload = {"state": self.state}
- self._properties = self._client._put_resource(
- self.path,
- payload,
- query_params=qs_params,
- timeout=timeout,
- retry=retry,
- )
-
- @create_trace_span(name="Storage.HmacKey.delete")
+ with create_trace_span(name="Storage.HmacKey.update"):
+ qs_params = {}
+ if self.user_project is not None:
+ qs_params["userProject"] = self.user_project
+
+ payload = {"state": self.state}
+ self._properties = self._client._put_resource(
+ self.path,
+ payload,
+ query_params=qs_params,
+ timeout=timeout,
+ retry=retry,
+ )
+
def delete(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY):
"""Delete the key from Cloud Storage.
@@ -294,13 +293,14 @@ def delete(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY):
:raises :class:`~google.api_core.exceptions.NotFound`:
if the key does not exist on the back-end.
"""
- qs_params = {}
- if self.user_project is not None:
- qs_params["userProject"] = self.user_project
-
- self._client._delete_resource(
- self.path,
- query_params=qs_params,
- timeout=timeout,
- retry=retry,
- )
+ with create_trace_span(name="Storage.HmacKey.delete"):
+ qs_params = {}
+ if self.user_project is not None:
+ qs_params["userProject"] = self.user_project
+
+ self._client._delete_resource(
+ self.path,
+ query_params=qs_params,
+ timeout=timeout,
+ retry=retry,
+ )
diff --git a/google/cloud/storage/notification.py b/google/cloud/storage/notification.py
index d9d49fc4b..d13b80fc4 100644
--- a/google/cloud/storage/notification.py
+++ b/google/cloud/storage/notification.py
@@ -231,7 +231,6 @@ def _set_properties(self, response):
self._properties.clear()
self._properties.update(response)
- @create_trace_span(name="Storage.BucketNotification.create")
def create(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=None):
"""API wrapper: create the notification.
@@ -255,36 +254,36 @@ def create(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=None):
:raises ValueError: if the notification already exists.
"""
- if self.notification_id is not None:
- raise ValueError(
- f"notification_id already set to {self.notification_id}; must be None to create a Notification."
- )
-
- client = self._require_client(client)
-
- query_params = {}
- if self.bucket.user_project is not None:
- query_params["userProject"] = self.bucket.user_project
-
- path = f"/b/{self.bucket.name}/notificationConfigs"
- properties = self._properties.copy()
-
- if self.topic_name is None:
- properties["topic"] = _TOPIC_REF_FMT.format(self.topic_project, "")
- else:
- properties["topic"] = _TOPIC_REF_FMT.format(
- self.topic_project, self.topic_name
+ with create_trace_span(name="Storage.BucketNotification.create"):
+ if self.notification_id is not None:
+ raise ValueError(
+ f"notification_id already set to {self.notification_id}; must be None to create a Notification."
+ )
+
+ client = self._require_client(client)
+
+ query_params = {}
+ if self.bucket.user_project is not None:
+ query_params["userProject"] = self.bucket.user_project
+
+ path = f"/b/{self.bucket.name}/notificationConfigs"
+ properties = self._properties.copy()
+
+ if self.topic_name is None:
+ properties["topic"] = _TOPIC_REF_FMT.format(self.topic_project, "")
+ else:
+ properties["topic"] = _TOPIC_REF_FMT.format(
+ self.topic_project, self.topic_name
+ )
+
+ self._properties = client._post_resource(
+ path,
+ properties,
+ query_params=query_params,
+ timeout=timeout,
+ retry=retry,
)
- self._properties = client._post_resource(
- path,
- properties,
- query_params=query_params,
- timeout=timeout,
- retry=retry,
- )
-
- @create_trace_span(name="Storage.BucketNotification.exists")
def exists(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY):
"""Test whether this notification exists.
@@ -311,28 +310,30 @@ def exists(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY):
:returns: True, if the notification exists, else False.
:raises ValueError: if the notification has no ID.
"""
- if self.notification_id is None:
- raise ValueError("Notification ID not set: set an explicit notification_id")
-
- client = self._require_client(client)
-
- query_params = {}
- if self.bucket.user_project is not None:
- query_params["userProject"] = self.bucket.user_project
+ with create_trace_span(name="Storage.BucketNotification.exists"):
+ if self.notification_id is None:
+ raise ValueError(
+ "Notification ID not set: set an explicit notification_id"
+ )
+
+ client = self._require_client(client)
+
+ query_params = {}
+ if self.bucket.user_project is not None:
+ query_params["userProject"] = self.bucket.user_project
+
+ try:
+ client._get_resource(
+ self.path,
+ query_params=query_params,
+ timeout=timeout,
+ retry=retry,
+ )
+ except NotFound:
+ return False
+ else:
+ return True
- try:
- client._get_resource(
- self.path,
- query_params=query_params,
- timeout=timeout,
- retry=retry,
- )
- except NotFound:
- return False
- else:
- return True
-
- @create_trace_span(name="Storage.BucketNotification.reload")
def reload(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY):
"""Update this notification from the server configuration.
@@ -358,24 +359,26 @@ def reload(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY):
:raises ValueError: if the notification has no ID.
"""
- if self.notification_id is None:
- raise ValueError("Notification ID not set: set an explicit notification_id")
+ with create_trace_span(name="Storage.BucketNotification.reload"):
+ if self.notification_id is None:
+ raise ValueError(
+ "Notification ID not set: set an explicit notification_id"
+ )
- client = self._require_client(client)
+ client = self._require_client(client)
- query_params = {}
- if self.bucket.user_project is not None:
- query_params["userProject"] = self.bucket.user_project
+ query_params = {}
+ if self.bucket.user_project is not None:
+ query_params["userProject"] = self.bucket.user_project
- response = client._get_resource(
- self.path,
- query_params=query_params,
- timeout=timeout,
- retry=retry,
- )
- self._set_properties(response)
+ response = client._get_resource(
+ self.path,
+ query_params=query_params,
+ timeout=timeout,
+ retry=retry,
+ )
+ self._set_properties(response)
- @create_trace_span(name="Storage.BucketNotification.delete")
def delete(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY):
"""Delete this notification.
@@ -402,21 +405,24 @@ def delete(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY):
if the notification does not exist.
:raises ValueError: if the notification has no ID.
"""
- if self.notification_id is None:
- raise ValueError("Notification ID not set: set an explicit notification_id")
+ with create_trace_span(name="Storage.BucketNotification.delete"):
+ if self.notification_id is None:
+ raise ValueError(
+ "Notification ID not set: set an explicit notification_id"
+ )
- client = self._require_client(client)
+ client = self._require_client(client)
- query_params = {}
- if self.bucket.user_project is not None:
- query_params["userProject"] = self.bucket.user_project
+ query_params = {}
+ if self.bucket.user_project is not None:
+ query_params["userProject"] = self.bucket.user_project
- client._delete_resource(
- self.path,
- query_params=query_params,
- timeout=timeout,
- retry=retry,
- )
+ client._delete_resource(
+ self.path,
+ query_params=query_params,
+ timeout=timeout,
+ retry=retry,
+ )
def _parse_topic_path(topic_path):
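The hunks above all apply one refactor: each `@create_trace_span(...)` decorator is removed and the method body is wrapped in a `with create_trace_span(...):` block instead, so the traced region is explicit at the call site. A minimal sketch of the pattern, assuming `create_trace_span` is built on OpenTelemetry via `contextlib.contextmanager`; the tracer setup and the sample function below are illustrative, not the library's actual module layout:

    from contextlib import contextmanager

    from opentelemetry import trace

    _tracer = trace.get_tracer("google.cloud.storage")

    @contextmanager
    def create_trace_span(name):
        # Opens a span on entry and ends it on exit; start_as_current_span
        # also records exceptions raised inside the block.
        with _tracer.start_as_current_span(name) as span:
            yield span

    # Before: @create_trace_span(name="Storage.Client.getBucket") wrapped
    # the whole method. After: the body opens the span itself.
    def get_bucket_sketch(fetch):
        with create_trace_span(name="Storage.Client.getBucket"):
            return fetch()

Because the objects returned by a `contextlib.contextmanager` factory also work as decorators, the same helper supports both the old and the new spelling.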
From b58d3190c9eaadaf4d216f1c7f128e6f7480394a Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 27 Feb 2025 18:54:25 +0000
Subject: [PATCH 5/7] chore(deps): bump virtualenv from 20.26.3 to 20.26.6 in
/.kokoro (#1412)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Bumps [virtualenv](https://togithub.com/pypa/virtualenv) from 20.26.3 to 20.26.6.
Release notes (sourced from virtualenv's releases):
- 20.26.6: https://togithub.com/pypa/virtualenv/compare/20.26.5...20.26.6
- 20.26.5: https://togithub.com/pypa/virtualenv/compare/20.26.4...20.26.5
- 20.26.4: https://togithub.com/pypa/virtualenv/compare/20.26.3...20.26.4
Changelog (sourced from virtualenv's changelog):
v20.26.6 (2024-09-27), bugfixes:
- Properly quote string placeholders in activation script templates to mitigate potential command injection - by :user:`y5c4l3`. (:issue:`2768`)
v20.26.5 (2024-09-17), bugfixes:
- Upgrade embedded wheels: setuptools to 75.1.0 from 74.1.2 - by :user:`gaborbernat`. (:issue:`2765`)
v20.26.4 (2024-09-07), bugfixes:
- No longer create `()` output in console during activation of a virtualenv by .bat file. (:issue:`2728`)
- Upgrade embedded wheels: wheel to 0.44.0 from 0.43.0, pip to 24.2 from 24.1, setuptools to 74.1.2 from 70.1.0. (:issue:`2760`)
---
.kokoro/requirements.txt | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt
index 9622baf0b..14466eb39 100644
--- a/.kokoro/requirements.txt
+++ b/.kokoro/requirements.txt
@@ -517,9 +517,9 @@ urllib3==2.2.2 \
# via
# requests
# twine
-virtualenv==20.26.3 \
- --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \
- --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589
+virtualenv==20.26.6 \
+ --hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \
+ --hash=sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2
# via nox
wheel==0.43.0 \
--hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \
From c869e15ec535a0aa50029d30b6a3ce64ff119b5f Mon Sep 17 00:00:00 2001
From: Andrew Gorcester
Date: Thu, 27 Feb 2025 15:56:22 -0800
Subject: [PATCH 6/7] Feat: Add api_key argument to Client constructor (#1441)
---
google/cloud/storage/client.py | 19 ++++++++++++++
setup.py | 2 +-
tests/unit/test_client.py | 47 ++++++++++++++++++++++++++++++++++
3 files changed, 67 insertions(+), 1 deletion(-)
diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py
index 4a9d606a0..ba94b26fc 100644
--- a/google/cloud/storage/client.py
+++ b/google/cloud/storage/client.py
@@ -108,6 +108,12 @@ class Client(ClientWithProject):
:param extra_headers:
(Optional) Custom headers to be sent with the requests attached to the client.
For example, you can add custom audit logging headers.
+
+ :type api_key: string
+ :param api_key:
+ (Optional) An API key. Mutually exclusive with any other credentials.
+ This parameter is an alias for setting `client_options.api_key` and
+ will supersede any API key set in the `client_options` parameter.
"""
SCOPE = (
@@ -126,6 +132,8 @@ def __init__(
client_options=None,
use_auth_w_custom_endpoint=True,
extra_headers={},
+ *,
+ api_key=None,
):
self._base_connection = None
@@ -146,6 +154,17 @@ def __init__(
connection_kw_args = {"client_info": client_info}
+ # api_key should set client_options.api_key. Set it here whether
+ # client_options was specified as a dict, as a ClientOptions object, or
+ # None.
+ if api_key:
+ if client_options and not isinstance(client_options, dict):
+ client_options.api_key = api_key
+ else:
+ if not client_options:
+ client_options = {}
+ client_options["api_key"] = api_key
+
if client_options:
if isinstance(client_options, dict):
client_options = google.api_core.client_options.from_dict(
diff --git a/setup.py b/setup.py
index 84eedd4f2..31c360f40 100644
--- a/setup.py
+++ b/setup.py
@@ -30,7 +30,7 @@
dependencies = [
"google-auth >= 2.26.1, < 3.0dev",
"google-api-core >= 2.15.0, <3.0.0dev",
- "google-cloud-core >= 2.3.0, < 3.0dev",
+ "google-cloud-core >= 2.4.2, < 3.0dev",
# The dependency "google-resumable-media" is no longer used. However, the
# dependency is still included here to accommodate users who may be
# importing exception classes from the google-resumable-media without
diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py
index 0bef1ea91..b671cc092 100644
--- a/tests/unit/test_client.py
+++ b/tests/unit/test_client.py
@@ -191,6 +191,53 @@ def test_ctor_w_client_options_object(self):
self.assertEqual(client._connection.API_BASE_URL, api_endpoint)
self.assertEqual(client.api_endpoint, api_endpoint)
+ def test_ctor_w_api_key(self):
+ from google.auth.api_key import Credentials
+
+ PROJECT = "PROJECT"
+ api_key = "my_api_key"
+
+ client = self._make_one(project=PROJECT, api_key=api_key)
+
+ self.assertEqual(
+ client._connection.API_BASE_URL, client._connection.DEFAULT_API_ENDPOINT
+ )
+ self.assertIsInstance(client._credentials, Credentials)
+ self.assertEqual(client._credentials.token, api_key)
+
+ def test_ctor_w_api_key_and_client_options(self):
+ from google.auth.api_key import Credentials
+ from google.api_core.client_options import ClientOptions
+
+ PROJECT = "PROJECT"
+ api_key = "my_api_key"
+ api_endpoint = "https://www.foo-googleapis.com"
+ client_options = ClientOptions(api_endpoint=api_endpoint)
+
+ client = self._make_one(
+ project=PROJECT, client_options=client_options, api_key=api_key
+ )
+
+ self.assertEqual(client._connection.API_BASE_URL, api_endpoint)
+ self.assertIsInstance(client._credentials, Credentials)
+ self.assertEqual(client._credentials.token, api_key)
+
+ def test_ctor_w_api_key_and_client_dict(self):
+ from google.auth.api_key import Credentials
+
+ PROJECT = "PROJECT"
+ api_key = "my_api_key"
+ api_endpoint = "https://www.foo-googleapis.com"
+ client_options = {"api_endpoint": api_endpoint}
+
+ client = self._make_one(
+ project=PROJECT, client_options=client_options, api_key=api_key
+ )
+
+ self.assertEqual(client._connection.API_BASE_URL, api_endpoint)
+ self.assertIsInstance(client._credentials, Credentials)
+ self.assertEqual(client._credentials.token, api_key)
+
def test_ctor_w_universe_domain_and_matched_credentials(self):
PROJECT = "PROJECT"
universe_domain = "example.com"
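As the new tests above show, `api_key` can be passed on its own or alongside `client_options`. A short usage sketch (project, endpoint, and key values are placeholders):

    from google.cloud import storage

    # api_key is an alias for client_options.api_key; the client then
    # builds google.auth.api_key.Credentials from the key.
    client = storage.Client(project="my-project", api_key="my_api_key")

    # Combined with a dict of client options holding a custom endpoint,
    # mirroring test_ctor_w_api_key_and_client_dict above:
    client2 = storage.Client(
        project="my-project",
        client_options={"api_endpoint": "https://www.foo-googleapis.com"},
        api_key="my_api_key",
    )

The same patch raises google-cloud-core to >= 2.4.2 in setup.py, presumably because the api_key-to-credentials handling lives in that base client dependency.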
From aa7afdff7eb92ee0c460e508b65f3b2917288268 Mon Sep 17 00:00:00 2001
From: "release-please[bot]"
<55107282+release-please[bot]@users.noreply.github.com>
Date: Fri, 28 Feb 2025 00:12:25 +0000
Subject: [PATCH 7/7] chore(main): release 3.1.0 (#1435)
:robot: I have created a release *beep* *boop*
---
## [3.1.0](https://togithub.com/googleapis/python-storage/compare/v3.0.0...v3.1.0) (2025-02-27)
### Features
* Add api_key argument to Client constructor ([#1441](https://togithub.com/googleapis/python-storage/issues/1441)) ([c869e15](https://togithub.com/googleapis/python-storage/commit/c869e15ec535a0aa50029d30b6a3ce64ff119b5f))
* Add Bucket.move_blob() for HNS-enabled buckets ([#1431](https://togithub.com/googleapis/python-storage/issues/1431)) ([24c000f](https://togithub.com/googleapis/python-storage/commit/24c000fb7b9f576e6d6c6ec5733f3971fe133655))
---
This PR was generated with [Release Please](https://togithub.com/googleapis/release-please). See [documentation](https://togithub.com/googleapis/release-please#release-please).
---
CHANGELOG.md | 8 ++++++++
google/cloud/storage/version.py | 2 +-
2 files changed, 9 insertions(+), 1 deletion(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index dcf58ac2a..fefb84f50 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,14 @@
[1]: https://pypi.org/project/google-cloud-storage/#history
+## [3.1.0](https://github.com/googleapis/python-storage/compare/v3.0.0...v3.1.0) (2025-02-27)
+
+
+### Features
+
+* Add api_key argument to Client constructor ([#1441](https://github.com/googleapis/python-storage/issues/1441)) ([c869e15](https://github.com/googleapis/python-storage/commit/c869e15ec535a0aa50029d30b6a3ce64ff119b5f))
+* Add Bucket.move_blob() for HNS-enabled buckets ([#1431](https://github.com/googleapis/python-storage/issues/1431)) ([24c000f](https://github.com/googleapis/python-storage/commit/24c000fb7b9f576e6d6c6ec5733f3971fe133655))
+
## [3.0.0](https://github.com/googleapis/python-storage/compare/v2.19.0...v3.0.0) (2025-01-28)
diff --git a/google/cloud/storage/version.py b/google/cloud/storage/version.py
index d6f7def8c..6ce498ba5 100644
--- a/google/cloud/storage/version.py
+++ b/google/cloud/storage/version.py
@@ -12,4 +12,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-__version__ = "3.0.0"
+__version__ = "3.1.0"
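The two features in this release compose naturally; a closing sketch (all names are placeholders, `move_blob` requires a bucket with hierarchical namespace enabled, and the return value is assumed to be the blob at its new name, mirroring `rename_blob`):

    from google.cloud import storage

    client = storage.Client(project="my-project", api_key="my_api_key")
    bucket = client.bucket("my-hns-bucket")

    blob = bucket.blob("logs/2025-02/raw.txt")
    moved = bucket.move_blob(blob, "logs/2025-02/processed.txt")
    print(moved.name)  # logs/2025-02/processed.txt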