From b772454381f2a8309a9260a37673c27433291bd7 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Fri, 30 Apr 2021 12:49:52 -0700 Subject: [PATCH 01/45] initial tests for retry strategy conformance tests implementation --- tests/unit/retry_strategy_test_data.json | 63 +++++++++++ tests/unit/test_retry.py | 136 +++++++++++++++++++++++ 2 files changed, 199 insertions(+) create mode 100644 tests/unit/retry_strategy_test_data.json diff --git a/tests/unit/retry_strategy_test_data.json b/tests/unit/retry_strategy_test_data.json new file mode 100644 index 000000000..93d18163e --- /dev/null +++ b/tests/unit/retry_strategy_test_data.json @@ -0,0 +1,63 @@ +{ + "retryStrategyTests": [ + { + "id": 1, + "description": "always idempotent", + "cases": [ + { + "instructions": [ + "return-503", + "return-503", + "return-503" + ] + }, + { + "instructions": [ + "return-503", + "return-503", + "return-503" + ] + } + ], + "methods": [ + { + "name": "storage.buckets.list", + "resources": [ + "BUCKET" + ] + }, + { + "name": "storage.buckets.list", + "resources": [ + "BUCKET", + "OBJECT" + ] + } + ], + "preconditionProvided": false, + "expectSuccess": true + }, + { + "id": 4, + "description": "non idempotent", + "cases": [ + { + "instructions": [ + "return-503" + ] + } + ], + "methods": [ + { + "name": "storage.buckets.list", + "resources": [ + "BUCKET", + "NOTIFICATION" + ] + } + ], + "preconditionProvided": false, + "expectSuccess": false + } + ] + } \ No newline at end of file diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 582fa8097..00aa26208 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -16,7 +16,11 @@ from google.cloud.storage import _helpers +from . import _read_local_json + import mock +import pytest +import requests class Test_should_retry(unittest.TestCase): @@ -259,3 +263,135 @@ def test_is_meta_or_etag_in_json_invalid(self): query_params={"ifGenerationMatch": 1}, data="I am invalid JSON!" ) self.assertEqual(policy, None) + + +# ToDo: Confirm what are the credentials required. Can we use the same service account created for url_signer_v4_test_account? +_FAKE_SERVICE_ACCOUNT = None + + +def fake_service_account(): + global _FAKE_SERVICE_ACCOUNT + # validate and set fake service account + +# ToDo: Confirm what are the credentials required. Can we use the same service account created for url_signer_v4_test_account? ) +# _SERVICE_ACCOUNT_JSON = _read_local_json("") +_CONFORMANCE_TESTS = _read_local_json("retry_strategy_test_data.json")["retryStrategyTests"] +# ToDo: Confirm the correct access endpoint. +_API_ACCESS_ENDPOINT = "http://127.0.0.1:9000" + +# retry tests +def list_buckets(): + from google.cloud import storage + client = storage.Client() + bucket = client.list_buckets() + +def get_blob(client, resource): + from google.cloud import storage + client = storage.Client() + bucket = client.bucket(resource["bucket"]["name"]) + bucket.get_blob(resource["object"]["name"]) + +def download_blob_to_file(client, resource): + client.download_blob_to_file(resource["object"]["name"], resource["file_handle"]) #file handle in resource? + +def reload_bucket(client, resource): + bucket = Bucket(client, resource["bucket"]["name"]) + bucket.reload() + +# Method invocation mapping. Methods to retry. This is a map whose keys are a string describing a standard +# API call (e.g. storage.objects.get) and values are a list of functions which +# wrap library methods that implement these calls. 
There may be multiple values +# because multiple library methods may use the same call (e.g. get could be a +# read or just a metadata get). +method_mapping = { + "storage.buckets.list": [ + list_buckets, + list_buckets + ], + "storage.objects.get": [ + # get_blob(client, resource), + # download_blob_to_file(client, resource) + list_buckets, + list_buckets + ], + "storage.buckets.get": [ + # reload_bucket(client, resource), + # (lambda client, resource: client.get_bucket(resource["bucket"]["name"])) + list_buckets + ], + "storage.notification.create": [ + list_buckets + ] +} + + +def _preflight_send_instructions(method_name, instructions): + import json + + preflight_post_uri = _API_ACCESS_ENDPOINT + "/retry_test" + headers = { + 'Content-Type': 'application/json', + } + data_dict = { + 'test_instructions': { + method_name: instructions + } + } + data = json.dumps(data_dict) + r = requests.post(preflight_post_uri, headers=headers, data=data) + print(r.text) + return r.json() + + +def _get_status_check(id): + status_get_uri = "{base}{retry}/{id}".format(base=_API_ACCESS_ENDPOINT, retry="/retry_test", id=id) + r = requests.get(status_get_uri) + print(r.text) + return r.json() + + +def _run_single_test(id): + test_run_uri = _API_ACCESS_ENDPOINT + "/storage/v1/b" + headers = { + 'x-retry-test-id': id, + } + params = ( + ('project', 'test'), + ) + r = requests.get(test_run_uri, headers=headers, params=params) + print(r.text) + return r.json() + + +def _run_conformance_test( + resource, test_data, api_access_endpoint=_API_ACCESS_ENDPOINT +): + # retry_success = make request to emulator (need a helper method make_request?) + assert retry_success == test_data["expectSuccess"] + + +@pytest.mark.parametrize("test_data", _CONFORMANCE_TESTS) +def test_conformance_retry_strategy(test_data): + methods = test_data["methods"] + cases = test_data["cases"] + for m, c in zip(methods, cases): + # extract method name and instructions for preflight request to send instructions + method_name = m["name"] + instructions = c["instructions"] + + if method_name not in method_mapping: + print("No tests for operation {}".format(method_name)) + continue + + for function in method_mapping[method_name]: + # send instructions with preflight + r = _preflight_send_instructions(method_name, instructions) + id = r["id"] + + # get status with unique identifier + status_response = _get_status_check(id) + + # run each single test for retry + test_response = _run_single_test(id) + stat_complete = status_response["completed"] + assert stat_complete == False \ No newline at end of file From c78011e9fc3dc42f2d53a1d0500b2f353c27a781 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Mon, 3 May 2021 16:29:34 -0700 Subject: [PATCH 02/45] revise helper methods to interact with Retry Test API --- tests/unit/retry_strategy_test_data.json | 1 - tests/unit/test_retry.py | 57 ++++++++++++------------ 2 files changed, 29 insertions(+), 29 deletions(-) diff --git a/tests/unit/retry_strategy_test_data.json b/tests/unit/retry_strategy_test_data.json index 93d18163e..e55c17d4e 100644 --- a/tests/unit/retry_strategy_test_data.json +++ b/tests/unit/retry_strategy_test_data.json @@ -13,7 +13,6 @@ }, { "instructions": [ - "return-503", "return-503", "return-503" ] diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 00aa26208..99e4b7939 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -14,6 +14,7 @@ import unittest +from google.cloud import storage from google.cloud.storage import _helpers from . 
import _read_local_json @@ -279,7 +280,7 @@ def fake_service_account(): # ToDo: Confirm the correct access endpoint. _API_ACCESS_ENDPOINT = "http://127.0.0.1:9000" -# retry tests +# Library methods for mapping def list_buckets(): from google.cloud import storage client = storage.Client() @@ -298,6 +299,10 @@ def reload_bucket(client, resource): bucket = Bucket(client, resource["bucket"]["name"]) bucket.reload() +def get_bucket(client, resource): + bucket_name = "bucket" #resource["bucket"]["name"] + bucket = client.get_bucket(bucket_name) + # Method invocation mapping. Methods to retry. This is a map whose keys are a string describing a standard # API call (e.g. storage.objects.get) and values are a list of functions which # wrap library methods that implement these calls. There may be multiple values @@ -305,22 +310,22 @@ def reload_bucket(client, resource): # read or just a metadata get). method_mapping = { "storage.buckets.list": [ - list_buckets, - list_buckets + get_bucket, + get_bucket ], "storage.objects.get": [ # get_blob(client, resource), # download_blob_to_file(client, resource) - list_buckets, - list_buckets + get_bucket, + get_bucket ], "storage.buckets.get": [ # reload_bucket(client, resource), # (lambda client, resource: client.get_bucket(resource["bucket"]["name"])) - list_buckets + get_bucket ], "storage.notification.create": [ - list_buckets + get_bucket ] } @@ -350,26 +355,14 @@ def _get_status_check(id): return r.json() -def _run_single_test(id): - test_run_uri = _API_ACCESS_ENDPOINT + "/storage/v1/b" - headers = { - 'x-retry-test-id': id, - } - params = ( - ('project', 'test'), - ) - r = requests.get(test_run_uri, headers=headers, params=params) - print(r.text) +def _run_single_test(id, func, resource=None): + test_run_uri = _API_ACCESS_ENDPOINT + "/storage/v1/b?project=test" + client = storage.Client(client_options={"api_endpoint": test_run_uri}) + client._http.headers.update({"x-retry-test-id": id}) + r = func(client=client, resource=resource) return r.json() -def _run_conformance_test( - resource, test_data, api_access_endpoint=_API_ACCESS_ENDPOINT -): - # retry_success = make request to emulator (need a helper method make_request?) 
- assert retry_success == test_data["expectSuccess"] - - @pytest.mark.parametrize("test_data", _CONFORMANCE_TESTS) def test_conformance_retry_strategy(test_data): methods = test_data["methods"] @@ -389,9 +382,17 @@ def test_conformance_retry_strategy(test_data): id = r["id"] # get status with unique identifier - status_response = _get_status_check(id) + # status_response = _get_status_check(id) # run each single test for retry - test_response = _run_single_test(id) - stat_complete = status_response["completed"] - assert stat_complete == False \ No newline at end of file + test_complete = False + while not test_complete: + try: + _run_single_test(id, func=function) + status_response = _get_status_check(id) + test_complete = status_response["completed"] + except Exception as e: + status_response = _get_status_check(id) + test_complete = status_response["completed"] + if test_complete: # also need to check with expected_success + assert test_complete == True From 09b8903c8b3cacdfee0bd1dd293550f3da5c5540 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Mon, 3 May 2021 17:36:44 -0700 Subject: [PATCH 03/45] add helper to delete retry test --- tests/unit/test_retry.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 99e4b7939..d2c010313 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -363,6 +363,11 @@ def _run_single_test(id, func, resource=None): return r.json() +def _delete_retry_test(id): + status_get_uri = "{base}{retry}/{id}".format(base=_API_ACCESS_ENDPOINT, retry="/retry_test", id=id) + r = requests.delete(status_get_uri) + + @pytest.mark.parametrize("test_data", _CONFORMANCE_TESTS) def test_conformance_retry_strategy(test_data): methods = test_data["methods"] @@ -395,4 +400,5 @@ def test_conformance_retry_strategy(test_data): status_response = _get_status_check(id) test_complete = status_response["completed"] if test_complete: # also need to check with expected_success + _delete_retry_test(id) assert test_complete == True From 2845731ba6224b9d233c1b4407fe09224003ff02 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Mon, 3 May 2021 22:41:30 -0700 Subject: [PATCH 04/45] handle exceptions in helper methods --- tests/unit/test_retry.py | 74 +++++++++++++++++++++++++--------------- 1 file changed, 47 insertions(+), 27 deletions(-) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index d2c010313..4db208c2c 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -278,7 +278,8 @@ def fake_service_account(): # _SERVICE_ACCOUNT_JSON = _read_local_json("") _CONFORMANCE_TESTS = _read_local_json("retry_strategy_test_data.json")["retryStrategyTests"] # ToDo: Confirm the correct access endpoint. -_API_ACCESS_ENDPOINT = "http://127.0.0.1:9000" +_API_ACCESS_ENDPOINT = _helpers._get_storage_host() +_DEFAULT_STORAGE_HOST = u"https://storage.googleapis.com" # Library methods for mapping def list_buckets(): @@ -301,7 +302,7 @@ def reload_bucket(client, resource): def get_bucket(client, resource): bucket_name = "bucket" #resource["bucket"]["name"] - bucket = client.get_bucket(bucket_name) + client.get_bucket(bucket_name) # Method invocation mapping. Methods to retry. This is a map whose keys are a string describing a standard # API call (e.g. 
storage.objects.get) and values are a list of functions which @@ -343,35 +344,50 @@ def _preflight_send_instructions(method_name, instructions): } } data = json.dumps(data_dict) - r = requests.post(preflight_post_uri, headers=headers, data=data) - print(r.text) - return r.json() + try: + r = requests.post(preflight_post_uri, headers=headers, data=data) + return r.json() + except Exception as e: + print(e.args) + # do something + return None def _get_status_check(id): status_get_uri = "{base}{retry}/{id}".format(base=_API_ACCESS_ENDPOINT, retry="/retry_test", id=id) - r = requests.get(status_get_uri) - print(r.text) - return r.json() - + try: + r = requests.get(status_get_uri) + return r.json() + except Exception as e: + print(e.args) + # do something + return None def _run_single_test(id, func, resource=None): test_run_uri = _API_ACCESS_ENDPOINT + "/storage/v1/b?project=test" client = storage.Client(client_options={"api_endpoint": test_run_uri}) client._http.headers.update({"x-retry-test-id": id}) r = func(client=client, resource=resource) - return r.json() + return r def _delete_retry_test(id): status_get_uri = "{base}{retry}/{id}".format(base=_API_ACCESS_ENDPOINT, retry="/retry_test", id=id) - r = requests.delete(status_get_uri) + try: + r = requests.delete(status_get_uri) + except Exception as e: + print(e.args) + # do something @pytest.mark.parametrize("test_data", _CONFORMANCE_TESTS) def test_conformance_retry_strategy(test_data): + if _API_ACCESS_ENDPOINT == _DEFAULT_STORAGE_HOST: + pytest.skip("This test must use the testbench emulator; set STORAGE_EMULATOR_HOST to run.") + methods = test_data["methods"] cases = test_data["cases"] + expect_success = test_data["expectSuccess"] for m, c in zip(methods, cases): # extract method name and instructions for preflight request to send instructions method_name = m["name"] @@ -384,21 +400,25 @@ def test_conformance_retry_strategy(test_data): for function in method_mapping[method_name]: # send instructions with preflight r = _preflight_send_instructions(method_name, instructions) - id = r["id"] - - # get status with unique identifier - # status_response = _get_status_check(id) + if r: + id = r["id"] + else: + print("Error creating retry test") + continue # run each single test for retry - test_complete = False - while not test_complete: - try: - _run_single_test(id, func=function) - status_response = _get_status_check(id) - test_complete = status_response["completed"] - except Exception as e: - status_response = _get_status_check(id) - test_complete = status_response["completed"] - if test_complete: # also need to check with expected_success - _delete_retry_test(id) - assert test_complete == True + try: + _run_single_test(id, func=function) + except Exception as e: + pass + + # check if all instructions are dequed + status_response = _get_status_check(id) + if status_response: + test_complete = status_response["completed"] + # assert test_complete == True + else: + print("Error getting retry test") + + # clean up and delete retry test + _delete_retry_test(id) From e615dc9e591c89e0a241af125928aae11f844dc3 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Wed, 5 May 2021 11:46:12 -0700 Subject: [PATCH 05/45] remove try except --- tests/unit/test_retry.py | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 4db208c2c..358a9ccbb 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -23,6 +23,8 @@ import pytest import requests +import http 
+http.client.HTTPConnection.debuglevel=5 class Test_should_retry(unittest.TestCase): def _call_fut(self, exc): @@ -315,14 +317,10 @@ def get_bucket(client, resource): get_bucket ], "storage.objects.get": [ - # get_blob(client, resource), - # download_blob_to_file(client, resource) get_bucket, get_bucket ], "storage.buckets.get": [ - # reload_bucket(client, resource), - # (lambda client, resource: client.get_bucket(resource["bucket"]["name"])) get_bucket ], "storage.notification.create": [ @@ -407,10 +405,7 @@ def test_conformance_retry_strategy(test_data): continue # run each single test for retry - try: - _run_single_test(id, func=function) - except Exception as e: - pass + _run_single_test(id, func=function) # check if all instructions are dequed status_response = _get_status_check(id) From b7c6b4aa0012c2ba011d4e06b191d3a050eec9f9 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Wed, 19 May 2021 12:05:19 -0700 Subject: [PATCH 06/45] change payload key to instructions to align emulator change --- tests/unit/test_retry.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 358a9ccbb..58f3b05aa 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -337,7 +337,7 @@ def _preflight_send_instructions(method_name, instructions): 'Content-Type': 'application/json', } data_dict = { - 'test_instructions': { + 'instructions': { method_name: instructions } } From 489643fd3c42818d9a539608140204af9ece2662 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Wed, 19 May 2021 13:00:36 -0700 Subject: [PATCH 07/45] rename and revise to loop through each case and method --- tests/unit/test_retry.py | 71 +++++++++++++++++++++------------------- 1 file changed, 37 insertions(+), 34 deletions(-) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 58f3b05aa..d2c1596c9 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -329,7 +329,7 @@ def get_bucket(client, resource): } -def _preflight_send_instructions(method_name, instructions): +def _create_retry_test(method_name, instructions): import json preflight_post_uri = _API_ACCESS_ENDPOINT + "/retry_test" @@ -351,7 +351,7 @@ def _preflight_send_instructions(method_name, instructions): return None -def _get_status_check(id): +def _check_retry_test(id): status_get_uri = "{base}{retry}/{id}".format(base=_API_ACCESS_ENDPOINT, retry="/retry_test", id=id) try: r = requests.get(status_get_uri) @@ -361,12 +361,11 @@ def _get_status_check(id): # do something return None -def _run_single_test(id, func, resource=None): +def _run_retry_test(id, func, resource=None): test_run_uri = _API_ACCESS_ENDPOINT + "/storage/v1/b?project=test" client = storage.Client(client_options={"api_endpoint": test_run_uri}) client._http.headers.update({"x-retry-test-id": id}) - r = func(client=client, resource=resource) - return r + func(client=client, resource=resource) def _delete_retry_test(id): @@ -386,34 +385,38 @@ def test_conformance_retry_strategy(test_data): methods = test_data["methods"] cases = test_data["cases"] expect_success = test_data["expectSuccess"] - for m, c in zip(methods, cases): - # extract method name and instructions for preflight request to send instructions - method_name = m["name"] - instructions = c["instructions"] - - if method_name not in method_mapping: - print("No tests for operation {}".format(method_name)) - continue - - for function in method_mapping[method_name]: - # send instructions with preflight - r = 
_preflight_send_instructions(method_name, instructions) - if r: - id = r["id"] - else: - print("Error creating retry test") + for c in cases: + for m in methods: + # Extract method name and instructions to create retry test. + method_name = m["name"] + instructions = c["instructions"] + + if method_name not in method_mapping: + # TODO(cathyo@): change to log warning + print("No tests for operation {}".format(method_name)) continue - # run each single test for retry - _run_single_test(id, func=function) - - # check if all instructions are dequed - status_response = _get_status_check(id) - if status_response: - test_complete = status_response["completed"] - # assert test_complete == True - else: - print("Error getting retry test") - - # clean up and delete retry test - _delete_retry_test(id) + for function in method_mapping[method_name]: + # Create the retry test in the emulator to handle instructions. + r = _create_retry_test(method_name, instructions) + if r: + id = r["id"] + else: + # TODO(cathyo@): change to log warning + print("Error creating retry test") + continue + + # Run retry tests on library methods + _run_retry_test(id, func=function) + + # Verify that all instructions were used up during the test + # (indicates that the client sent the correct requests). + status_response = _check_retry_test(id) + if status_response: + test_complete = status_response["completed"] + # assert test_complete == True + else: + print("do something") + + # Clean up and close out test in emulator. + _delete_retry_test(id) From c6c3fff1af3521cd7be2dfb011115a4d8cb2c5de Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Wed, 19 May 2021 13:46:20 -0700 Subject: [PATCH 08/45] wip populate resources to conf tests --- tests/unit/test_retry.py | 35 +++++++++++++++++++++++++++++++---- 1 file changed, 31 insertions(+), 4 deletions(-) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index d2c1596c9..528cd5e55 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -13,6 +13,7 @@ # limitations under the License. 
import unittest +import uuid from google.cloud import storage from google.cloud.storage import _helpers @@ -329,6 +330,29 @@ def get_bucket(client, resource): } +def _populate_resource_bucket(client, resources): + bucket = client.bucket(uuid.uuid4().hex) + client.create_bucket(bucket) + return bucket + + +resource_mapping = { + "BUCKET": _populate_resource_bucket, +} + + +def _populate_resource(client, json_resource): + resources = {} + for r in json_resource: + try: + func = resource_mapping[r] + res = func(client, resources) + resources[r] = res + except Exception as e: + print("log warning here: {}".format(e)) + + + def _create_retry_test(method_name, instructions): import json @@ -361,9 +385,9 @@ def _check_retry_test(id): # do something return None -def _run_retry_test(id, func, resource=None): - test_run_uri = _API_ACCESS_ENDPOINT + "/storage/v1/b?project=test" - client = storage.Client(client_options={"api_endpoint": test_run_uri}) +def _run_retry_test(client, id, func, resource=None): + # test_run_uri = _API_ACCESS_ENDPOINT + "/storage/v1/b?project=test" + # client = storage.Client(client_options={"api_endpoint": test_run_uri}) client._http.headers.update({"x-retry-test-id": id}) func(client=client, resource=resource) @@ -382,6 +406,9 @@ def test_conformance_retry_strategy(test_data): if _API_ACCESS_ENDPOINT == _DEFAULT_STORAGE_HOST: pytest.skip("This test must use the testbench emulator; set STORAGE_EMULATOR_HOST to run.") + test_run_uri = _API_ACCESS_ENDPOINT + "/storage/v1/b?project=test" + client = storage.Client(client_options={"api_endpoint": test_run_uri}) + methods = test_data["methods"] cases = test_data["cases"] expect_success = test_data["expectSuccess"] @@ -407,7 +434,7 @@ def test_conformance_retry_strategy(test_data): continue # Run retry tests on library methods - _run_retry_test(id, func=function) + _run_retry_test(client, id, func=function) # Verify that all instructions were used up during the test # (indicates that the client sent the correct requests). From 6281e70cfbfb62fc4b1c94f068905a54389ad81c Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Wed, 19 May 2021 19:33:18 -0700 Subject: [PATCH 09/45] add logic to populate fixture resources --- tests/unit/test_retry.py | 60 ++++++++++++++++++++++++++++------------ 1 file changed, 43 insertions(+), 17 deletions(-) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 528cd5e55..5195f94d0 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -25,7 +25,7 @@ import requests import http -http.client.HTTPConnection.debuglevel=5 +# http.client.HTTPConnection.debuglevel=5 class Test_should_retry(unittest.TestCase): def _call_fut(self, exc): @@ -303,8 +303,8 @@ def reload_bucket(client, resource): bucket = Bucket(client, resource["bucket"]["name"]) bucket.reload() -def get_bucket(client, resource): - bucket_name = "bucket" #resource["bucket"]["name"] +def get_bucket(client, resources): + bucket_name = resources["bucket"].name client.get_bucket(bucket_name) # Method invocation mapping. Methods to retry. 
This is a map whose keys are a string describing a standard @@ -333,24 +333,48 @@ def get_bucket(client, resource): def _populate_resource_bucket(client, resources): bucket = client.bucket(uuid.uuid4().hex) client.create_bucket(bucket) - return bucket + resources["bucket"] = bucket + +def _populate_resource_object(client, resources): + bucket_name = resources["bucket"].name + bucket = client.get_bucket(bucket_name) + blob = bucket.blob(uuid.uuid4().hex) + blob.upload_from_string("hello world") + blob.reload() + resources["object"] = blob + +def _populate_resource_notification(client, resources): + bucket_name = resources["bucket"].name + bucket = client.get_bucket(bucket_name) + notification = bucket.notification() + notification.create() + notification.reload() + resources["notification"] = notification resource_mapping = { "BUCKET": _populate_resource_bucket, + "OBJECT": _populate_resource_object, + "NOTIFICATION": _populate_resource_notification, } -def _populate_resource(client, json_resource): - resources = {} +def _populate_resources(client, json_resource): + resources = { + "bucket": None, + "object": None, + "notification": None, + "hmac_key": None, + } + for r in json_resource: try: func = resource_mapping[r] - res = func(client, resources) - resources[r] = res + func(client, resources) except Exception as e: print("log warning here: {}".format(e)) + return resources def _create_retry_test(method_name, instructions): @@ -385,11 +409,11 @@ def _check_retry_test(id): # do something return None -def _run_retry_test(client, id, func, resource=None): - # test_run_uri = _API_ACCESS_ENDPOINT + "/storage/v1/b?project=test" - # client = storage.Client(client_options={"api_endpoint": test_run_uri}) +def _run_retry_test(client, id, func, resources): + test_run_uri = _API_ACCESS_ENDPOINT + "/storage/v1/b?project=test" + client = storage.Client(client_options={"api_endpoint": test_run_uri}) client._http.headers.update({"x-retry-test-id": id}) - func(client=client, resource=resource) + func(client=client, resources=resources) def _delete_retry_test(id): @@ -406,9 +430,7 @@ def test_conformance_retry_strategy(test_data): if _API_ACCESS_ENDPOINT == _DEFAULT_STORAGE_HOST: pytest.skip("This test must use the testbench emulator; set STORAGE_EMULATOR_HOST to run.") - test_run_uri = _API_ACCESS_ENDPOINT + "/storage/v1/b?project=test" - client = storage.Client(client_options={"api_endpoint": test_run_uri}) - + client = storage.Client() methods = test_data["methods"] cases = test_data["cases"] expect_success = test_data["expectSuccess"] @@ -417,6 +439,7 @@ def test_conformance_retry_strategy(test_data): # Extract method name and instructions to create retry test. method_name = m["name"] instructions = c["instructions"] + json_resources = m["resources"] if method_name not in method_mapping: # TODO(cathyo@): change to log warning @@ -433,8 +456,11 @@ def test_conformance_retry_strategy(test_data): print("Error creating retry test") continue - # Run retry tests on library methods - _run_retry_test(client, id, func=function) + # Populate resources. + resources = _populate_resources(client, json_resources) + + # Run retry tests on library methods. + _run_retry_test(client, id, func=function, resources=resources) # Verify that all instructions were used up during the test # (indicates that the client sent the correct requests). 
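The helper functions being built up across these patches wrap the storage testbench's Retry Test API: a POST to /retry_test registers fault-injection instructions for an API method, requests sent with an x-retry-test-id header consume those instructions one by one, a GET on /retry_test/{id} reports whether every instruction has been used up ("completed"), and a DELETE on /retry_test/{id} removes the test. The following is a minimal illustrative sketch of that flow driven directly with requests; it is not part of any patch, and the emulator address, the bucket name, and the single return-503 instruction are assumptions chosen for the example.

import json
import requests

HOST = "http://localhost:9000"  # assumed local testbench emulator address

# 1. Register a retry test seeded with fault-injection instructions for one method.
create = requests.post(
    HOST + "/retry_test",
    headers={"Content-Type": "application/json"},
    data=json.dumps({"instructions": {"storage.buckets.get": ["return-503"]}}),
)
test_id = create.json()["id"]

# 2. A request carrying the x-retry-test-id header consumes one instruction;
#    this GET is therefore answered with a 503, which a retrying client would retry.
requests.get(
    HOST + "/storage/v1/b/my-bucket",  # hypothetical bucket name
    headers={"x-retry-test-id": test_id},
)

# 3. Check whether all instructions were consumed, i.e. the client sent the expected requests.
status = requests.get(HOST + "/retry_test/" + test_id).json()
print(status["completed"])

# 4. Clean up the retry test resource in the emulator.
requests.delete(HOST + "/retry_test/" + test_id)
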
From c595aa5132b773c113a4ecb2fddbdec30f4753f9 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Thu, 20 May 2021 13:55:07 -0700 Subject: [PATCH 10/45] add helper method to populate fixture hmacy key --- tests/unit/test_retry.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 5195f94d0..9e63c7335 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -283,6 +283,8 @@ def fake_service_account(): # ToDo: Confirm the correct access endpoint. _API_ACCESS_ENDPOINT = _helpers._get_storage_host() _DEFAULT_STORAGE_HOST = u"https://storage.googleapis.com" +_CONF_TEST_PROJECT_ID = "my-project-id" +_CONF_TEST_SERVICE_ACCOUNT_EMAIL = "my-service-account@my-project-id.iam.gserviceaccount.com" # Library methods for mapping def list_buckets(): @@ -350,12 +352,20 @@ def _populate_resource_notification(client, resources): notification.create() notification.reload() resources["notification"] = notification + +def _populate_resource_hmackey(client, resources): + hmac_key, secret = client.create_hmac_key( + service_account_email=_CONF_TEST_SERVICE_ACCOUNT_EMAIL, + project_id=_CONF_TEST_PROJECT_ID + ) + resources["hmac_key"] = hmac_key resource_mapping = { "BUCKET": _populate_resource_bucket, "OBJECT": _populate_resource_object, "NOTIFICATION": _populate_resource_notification, + "HMAC_KEY": _populate_resource_hmackey, } From 294b0841a8eb7cc89d15cebe233232da83859172 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Thu, 20 May 2021 14:37:43 -0700 Subject: [PATCH 11/45] revise endpoints and 2 clients --- tests/unit/retry_strategy_test_data.json | 6 ++--- tests/unit/test_retry.py | 30 +++++++++++++----------- 2 files changed, 19 insertions(+), 17 deletions(-) diff --git a/tests/unit/retry_strategy_test_data.json b/tests/unit/retry_strategy_test_data.json index e55c17d4e..300a23b9f 100644 --- a/tests/unit/retry_strategy_test_data.json +++ b/tests/unit/retry_strategy_test_data.json @@ -20,13 +20,13 @@ ], "methods": [ { - "name": "storage.buckets.list", + "name": "storage.buckets.get", "resources": [ "BUCKET" ] }, { - "name": "storage.buckets.list", + "name": "storage.buckets.get", "resources": [ "BUCKET", "OBJECT" @@ -48,7 +48,7 @@ ], "methods": [ { - "name": "storage.buckets.list", + "name": "storage.buckets.get", "resources": [ "BUCKET", "NOTIFICATION" diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 9e63c7335..6e174ef6c 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -387,10 +387,10 @@ def _populate_resources(client, json_resource): return resources -def _create_retry_test(method_name, instructions): +def _create_retry_test(host, method_name, instructions): import json - preflight_post_uri = _API_ACCESS_ENDPOINT + "/retry_test" + preflight_post_uri = host + "/retry_test" headers = { 'Content-Type': 'application/json', } @@ -409,8 +409,8 @@ def _create_retry_test(method_name, instructions): return None -def _check_retry_test(id): - status_get_uri = "{base}{retry}/{id}".format(base=_API_ACCESS_ENDPOINT, retry="/retry_test", id=id) +def _check_retry_test(host, id): + status_get_uri = "{base}{retry}/{id}".format(base=host, retry="/retry_test", id=id) try: r = requests.get(status_get_uri) return r.json() @@ -419,15 +419,15 @@ def _check_retry_test(id): # do something return None -def _run_retry_test(client, id, func, resources): - test_run_uri = _API_ACCESS_ENDPOINT + "/storage/v1/b?project=test" - client = storage.Client(client_options={"api_endpoint": test_run_uri}) +def 
_run_retry_test(host, id, func, resources): + # Create client using x-retry-test-id header. + client = storage.Client(client_options={"api_endpoint": host}) client._http.headers.update({"x-retry-test-id": id}) func(client=client, resources=resources) -def _delete_retry_test(id): - status_get_uri = "{base}{retry}/{id}".format(base=_API_ACCESS_ENDPOINT, retry="/retry_test", id=id) +def _delete_retry_test(host, id): + status_get_uri = "{base}{retry}/{id}".format(base=host, retry="/retry_test", id=id) try: r = requests.delete(status_get_uri) except Exception as e: @@ -437,9 +437,11 @@ def _delete_retry_test(id): @pytest.mark.parametrize("test_data", _CONFORMANCE_TESTS) def test_conformance_retry_strategy(test_data): - if _API_ACCESS_ENDPOINT == _DEFAULT_STORAGE_HOST: + host = _API_ACCESS_ENDPOINT + if host == _DEFAULT_STORAGE_HOST: pytest.skip("This test must use the testbench emulator; set STORAGE_EMULATOR_HOST to run.") + # Create client to use for setup steps. client = storage.Client() methods = test_data["methods"] cases = test_data["cases"] @@ -458,7 +460,7 @@ def test_conformance_retry_strategy(test_data): for function in method_mapping[method_name]: # Create the retry test in the emulator to handle instructions. - r = _create_retry_test(method_name, instructions) + r = _create_retry_test(host, method_name, instructions) if r: id = r["id"] else: @@ -470,11 +472,11 @@ def test_conformance_retry_strategy(test_data): resources = _populate_resources(client, json_resources) # Run retry tests on library methods. - _run_retry_test(client, id, func=function, resources=resources) + _run_retry_test(host, id, func=function, resources=resources) # Verify that all instructions were used up during the test # (indicates that the client sent the correct requests). - status_response = _check_retry_test(id) + status_response = _check_retry_test(host, id) if status_response: test_complete = status_response["completed"] # assert test_complete == True @@ -482,4 +484,4 @@ def test_conformance_retry_strategy(test_data): print("do something") # Clean up and close out test in emulator. - _delete_retry_test(id) + _delete_retry_test(host, id) From 7e8bd0694d5ff626dafc8530d8a547ed81b949ba Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Thu, 20 May 2021 14:58:52 -0700 Subject: [PATCH 12/45] add assertions to testdata scenarios --- tests/unit/test_retry.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 6e174ef6c..4a5d080a4 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -472,14 +472,22 @@ def test_conformance_retry_strategy(test_data): resources = _populate_resources(client, json_resources) # Run retry tests on library methods. - _run_retry_test(host, id, func=function, resources=resources) + try: + _run_retry_test(host, id, func=function, resources=resources) + except Exception as e: + success_results = False + else: + success_results = True + # Assert expected success for each scenario. + assert expect_success == success_results + # Verify that all instructions were used up during the test # (indicates that the client sent the correct requests). 
status_response = _check_retry_test(host, id) if status_response: test_complete = status_response["completed"] - # assert test_complete == True + assert test_complete == True else: print("do something") From d4ed7641def7992ee9751a07f07d2576647d6fde Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Thu, 20 May 2021 16:55:03 -0700 Subject: [PATCH 13/45] add logic for preconditions wip --- tests/unit/retry_strategy_test_data.json | 24 +++++++++++++++++++++ tests/unit/test_retry.py | 27 +++++++++++++++++++----- 2 files changed, 46 insertions(+), 5 deletions(-) diff --git a/tests/unit/retry_strategy_test_data.json b/tests/unit/retry_strategy_test_data.json index 300a23b9f..27b44070e 100644 --- a/tests/unit/retry_strategy_test_data.json +++ b/tests/unit/retry_strategy_test_data.json @@ -36,12 +36,36 @@ "preconditionProvided": false, "expectSuccess": true }, + { + "id": 2, + "description": "conditionally idempotent", + "cases": [ + { + "instructions": [ + "return-503", + "return-503" + ] + } + ], + "methods": [ + { + "name": "storage.objects.patch", + "resources": [ + "BUCKET", + "OBJECT" + ] + } + ], + "preconditionProvided": true, + "expectSuccess": true + }, { "id": 4, "description": "non idempotent", "cases": [ { "instructions": [ + "return-503", "return-503" ] } diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 4a5d080a4..3eed8d50d 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -305,10 +305,23 @@ def reload_bucket(client, resource): bucket = Bucket(client, resource["bucket"]["name"]) bucket.reload() -def get_bucket(client, resources): +def get_bucket(client, resources, preconditions): bucket_name = resources["bucket"].name client.get_bucket(bucket_name) +def update_blob(client, resources, preconditions): + bucket_name = resources["bucket"].name + resource_blob = resources["object"] + bucket = client.bucket(bucket_name) + blob = bucket.blob(resource_blob.name) + metadata = {"foo": "bar"} + blob.metadata = metadata + if preconditions: + metageneration = resource_blob.metageneration + blob.patch(if_metageneration_match=metageneration) + else: + blob.patch() + # Method invocation mapping. Methods to retry. This is a map whose keys are a string describing a standard # API call (e.g. storage.objects.get) and values are a list of functions which # wrap library methods that implement these calls. There may be multiple values @@ -328,7 +341,10 @@ def get_bucket(client, resources): ], "storage.notification.create": [ get_bucket - ] + ], + "storage.objects.patch": [ + update_blob + ], } @@ -419,11 +435,11 @@ def _check_retry_test(host, id): # do something return None -def _run_retry_test(host, id, func, resources): +def _run_retry_test(host, id, func, resources, preconditions): # Create client using x-retry-test-id header. client = storage.Client(client_options={"api_endpoint": host}) client._http.headers.update({"x-retry-test-id": id}) - func(client=client, resources=resources) + func(client=client, resources=resources, preconditions=preconditions) def _delete_retry_test(host, id): @@ -446,6 +462,7 @@ def test_conformance_retry_strategy(test_data): methods = test_data["methods"] cases = test_data["cases"] expect_success = test_data["expectSuccess"] + precondition_provided = test_data["preconditionProvided"] for c in cases: for m in methods: # Extract method name and instructions to create retry test. @@ -473,7 +490,7 @@ def test_conformance_retry_strategy(test_data): # Run retry tests on library methods. 
try: - _run_retry_test(host, id, func=function, resources=resources) + _run_retry_test(host, id, func=function, resources=resources, preconditions=precondition_provided) except Exception as e: success_results = False else: From b59c39d06033ad35353ea9691af2ed6475cc831c Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Fri, 21 May 2021 11:30:20 -0700 Subject: [PATCH 14/45] add mapping scenarios and formatting for readability --- tests/unit/test_retry.py | 135 ++++++++++++++++++++++++++++----------- 1 file changed, 98 insertions(+), 37 deletions(-) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 3eed8d50d..0dbbbc13c 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -25,8 +25,10 @@ import requests import http + # http.client.HTTPConnection.debuglevel=5 + class Test_should_retry(unittest.TestCase): def _call_fut(self, exc): from google.cloud.storage import retry @@ -277,38 +279,56 @@ def fake_service_account(): global _FAKE_SERVICE_ACCOUNT # validate and set fake service account + # ToDo: Confirm what are the credentials required. Can we use the same service account created for url_signer_v4_test_account? ) # _SERVICE_ACCOUNT_JSON = _read_local_json("") -_CONFORMANCE_TESTS = _read_local_json("retry_strategy_test_data.json")["retryStrategyTests"] +_CONFORMANCE_TESTS = _read_local_json("retry_strategy_test_data.json")[ + "retryStrategyTests" +] # ToDo: Confirm the correct access endpoint. _API_ACCESS_ENDPOINT = _helpers._get_storage_host() _DEFAULT_STORAGE_HOST = u"https://storage.googleapis.com" _CONF_TEST_PROJECT_ID = "my-project-id" -_CONF_TEST_SERVICE_ACCOUNT_EMAIL = "my-service-account@my-project-id.iam.gserviceaccount.com" +_CONF_TEST_SERVICE_ACCOUNT_EMAIL = ( + "my-service-account@my-project-id.iam.gserviceaccount.com" +) + +######################################################################################################################################## +### Library methods for mapping ######################################################################################################## +######################################################################################################################################## + -# Library methods for mapping def list_buckets(): from google.cloud import storage + client = storage.Client() bucket = client.list_buckets() + def get_blob(client, resource): from google.cloud import storage + client = storage.Client() bucket = client.bucket(resource["bucket"]["name"]) bucket.get_blob(resource["object"]["name"]) + def download_blob_to_file(client, resource): - client.download_blob_to_file(resource["object"]["name"], resource["file_handle"]) #file handle in resource? + client.download_blob_to_file( + resource["object"]["name"], resource["file_handle"] + ) # file handle in resource? + def reload_bucket(client, resource): - bucket = Bucket(client, resource["bucket"]["name"]) + bucket = storage.Bucket(client, resource["bucket"]["name"]) bucket.reload() + def get_bucket(client, resources, preconditions): bucket_name = resources["bucket"].name client.get_bucket(bucket_name) + def update_blob(client, resources, preconditions): bucket_name = resources["bucket"].name resource_blob = resources["object"] @@ -322,37 +342,60 @@ def update_blob(client, resources, preconditions): else: blob.patch() + # Method invocation mapping. Methods to retry. This is a map whose keys are a string describing a standard # API call (e.g. 
storage.objects.get) and values are a list of functions which # wrap library methods that implement these calls. There may be multiple values # because multiple library methods may use the same call (e.g. get could be a # read or just a metadata get). method_mapping = { - "storage.buckets.list": [ - get_bucket, - get_bucket - ], - "storage.objects.get": [ - get_bucket, - get_bucket - ], - "storage.buckets.get": [ - get_bucket - ], - "storage.notification.create": [ - get_bucket - ], - "storage.objects.patch": [ - update_blob - ], + "storage.bucket_acl.get": [], # S1 start + "storage.bucket_acl.list": [], + "storage.buckets.delete": [], + "storage.buckets.get": [get_bucket], + "storage.buckets.getIamPolicy": [], + "storage.buckets.insert": [], + "storage.buckets.list": [get_bucket, get_bucket], + "storage.buckets.lockRententionPolicy": [], + "storage.buckets.testIamPermission": [], + "storage.default_object_acl.get": [], + "storage.default_object_acl.list": [], + "storage.hmacKey.delete": [], + "storage.hmacKey.list": [], + "storage.hmacKey.get": [], + "storage.notification.delete": [], + "storage.notification.get": [], + "storage.notification.list": [], + "storage.object_acl.get": [], + "storage.object_acl.list": [], + "storage.objects.get": [get_bucket, get_bucket], + "storage.objects.list": [], + "storage.serviceaccount.get": [], # S1 end + "storage.buckets.patch": [], # S2 start + "storage.buckets.setIamPolicy": [], + "storage.buckets.update": [], + "storage.hmacKey.update": [], + "storage.objects.compose": [], + "storage.objects.copy": [], + "storage.objects.delete": [], + "storage.objects.insert": [], + "storage.objects.patch": [update_blob], + "storage.objects.rewrite": [], + "storage.objects.update": [], # S2 end + "storage.notification.create": [get_bucket], } +######################################################################################################################################## +### Helper Methods for Populating Resources ############################################################################################ +######################################################################################################################################## + def _populate_resource_bucket(client, resources): bucket = client.bucket(uuid.uuid4().hex) client.create_bucket(bucket) resources["bucket"] = bucket + def _populate_resource_object(client, resources): bucket_name = resources["bucket"].name bucket = client.get_bucket(bucket_name) @@ -361,6 +404,7 @@ def _populate_resource_object(client, resources): blob.reload() resources["object"] = blob + def _populate_resource_notification(client, resources): bucket_name = resources["bucket"].name bucket = client.get_bucket(bucket_name) @@ -369,13 +413,14 @@ def _populate_resource_notification(client, resources): notification.reload() resources["notification"] = notification + def _populate_resource_hmackey(client, resources): hmac_key, secret = client.create_hmac_key( - service_account_email=_CONF_TEST_SERVICE_ACCOUNT_EMAIL, - project_id=_CONF_TEST_PROJECT_ID + service_account_email=_CONF_TEST_SERVICE_ACCOUNT_EMAIL, + project_id=_CONF_TEST_PROJECT_ID, ) resources["hmac_key"] = hmac_key - + resource_mapping = { "BUCKET": _populate_resource_bucket, @@ -394,7 +439,7 @@ def _populate_resources(client, json_resource): } for r in json_resource: - try: + try: func = resource_mapping[r] func(client, resources) except Exception as e: @@ -403,18 +448,19 @@ def _populate_resources(client, json_resource): return resources 
+######################################################################################################################################## +### Helper Methods for Emulator Retry API ############################################################################################## +######################################################################################################################################## + + def _create_retry_test(host, method_name, instructions): import json preflight_post_uri = host + "/retry_test" headers = { - 'Content-Type': 'application/json', - } - data_dict = { - 'instructions': { - method_name: instructions - } + "Content-Type": "application/json", } + data_dict = {"instructions": {method_name: instructions}} data = json.dumps(data_dict) try: r = requests.post(preflight_post_uri, headers=headers, data=data) @@ -435,6 +481,7 @@ def _check_retry_test(host, id): # do something return None + def _run_retry_test(host, id, func, resources, preconditions): # Create client using x-retry-test-id header. client = storage.Client(client_options={"api_endpoint": host}) @@ -445,17 +492,24 @@ def _run_retry_test(host, id, func, resources, preconditions): def _delete_retry_test(host, id): status_get_uri = "{base}{retry}/{id}".format(base=host, retry="/retry_test", id=id) try: - r = requests.delete(status_get_uri) + requests.delete(status_get_uri) except Exception as e: print(e.args) # do something +######################################################################################################################################## +### Run Conformance Tests for Retry Strategy ########################################################################################### +######################################################################################################################################## + + @pytest.mark.parametrize("test_data", _CONFORMANCE_TESTS) def test_conformance_retry_strategy(test_data): host = _API_ACCESS_ENDPOINT if host == _DEFAULT_STORAGE_HOST: - pytest.skip("This test must use the testbench emulator; set STORAGE_EMULATOR_HOST to run.") + pytest.skip( + "This test must use the testbench emulator; set STORAGE_EMULATOR_HOST to run." + ) # Create client to use for setup steps. client = storage.Client() @@ -490,17 +544,24 @@ def test_conformance_retry_strategy(test_data): # Run retry tests on library methods. try: - _run_retry_test(host, id, func=function, resources=resources, preconditions=precondition_provided) + _run_retry_test( + host, + id, + func=function, + resources=resources, + preconditions=precondition_provided, + ) except Exception as e: + print(e) success_results = False else: success_results = True # Assert expected success for each scenario. assert expect_success == success_results - + # Verify that all instructions were used up during the test - # (indicates that the client sent the correct requests). + # (indicates that the client sent the correct requests). 
status_response = _check_retry_test(host, id) if status_response: test_complete = status_response["completed"] From cca0c65d17372109dc8bc93a4ecb6edf27af8276 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Fri, 21 May 2021 13:59:56 -0700 Subject: [PATCH 15/45] add library methods to method invocation mapping and json --- tests/unit/retry_strategy_test_data.json | 51 +++++++++- tests/unit/test_retry.py | 118 ++++++++++++++++------- 2 files changed, 130 insertions(+), 39 deletions(-) diff --git a/tests/unit/retry_strategy_test_data.json b/tests/unit/retry_strategy_test_data.json index 27b44070e..2390b3d5f 100644 --- a/tests/unit/retry_strategy_test_data.json +++ b/tests/unit/retry_strategy_test_data.json @@ -26,7 +26,53 @@ ] }, { - "name": "storage.buckets.get", + "name": "storage.buckets.list", + "resources": [ + "BUCKET" + ] + }, + { + "name": "storage.buckets.delete", + "resources": [ + "BUCKET" + ] + }, + { + "name": "storage.buckets.getIamPolicy", + "resources": [ + "BUCKET" + ] + }, + { + "name": "storage.notifications.list", + "resources": [ + "BUCKET", + "NOTIFICATION" + ] + }, + { + "name": "storage.notifications.get", + "resources": [ + "BUCKET", + "NOTIFICATION" + ] + }, + { + "name": "storage.notifications.delete", + "resources": [ + "BUCKET", + "NOTIFICATION" + ] + }, + { + "name": "storage.objects.get", + "resources": [ + "BUCKET", + "OBJECT" + ] + }, + { + "name": "storage.objects.list", "resources": [ "BUCKET", "OBJECT" @@ -65,14 +111,13 @@ "cases": [ { "instructions": [ - "return-503", "return-503" ] } ], "methods": [ { - "name": "storage.buckets.get", + "name": "storage.notifications.insert", "resources": [ "BUCKET", "NOTIFICATION" diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 0dbbbc13c..08e20dd21 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -298,37 +298,32 @@ def fake_service_account(): ######################################################################################################################################## -def list_buckets(): - from google.cloud import storage +def list_buckets(client, resources, preconditions): + buckets = client.list_buckets() + for b in buckets: + break - client = storage.Client() - bucket = client.list_buckets() - - -def get_blob(client, resource): - from google.cloud import storage - - client = storage.Client() - bucket = client.bucket(resource["bucket"]["name"]) - bucket.get_blob(resource["object"]["name"]) - - -def download_blob_to_file(client, resource): - client.download_blob_to_file( - resource["object"]["name"], resource["file_handle"] - ) # file handle in resource? 
+def list_blobs(client, resources, preconditions): + bucket_name = resources["bucket"].name + blobs = client.list_blobs(bucket_name) + for b in blobs: + break +def get_blob(client, resources, preconditions): + bucket_name = resources["bucket"].name + blob_name = resources["object"].name + bucket = client.bucket(bucket_name) + bucket.get_blob(blob_name) -def reload_bucket(client, resource): - bucket = storage.Bucket(client, resource["bucket"]["name"]) +def reload_bucket(client, resources, preconditions): + bucket_name = resources["bucket"].name + bucket = client.bucket(bucket_name) bucket.reload() - def get_bucket(client, resources, preconditions): bucket_name = resources["bucket"].name client.get_bucket(bucket_name) - def update_blob(client, resources, preconditions): bucket_name = resources["bucket"].name resource_blob = resources["object"] @@ -342,6 +337,57 @@ def update_blob(client, resources, preconditions): else: blob.patch() +def create_bucket(client, resources, preconditions): + bucket = client.bucket(uuid.uuid4().hex) + client.create_bucket(bucket) + +# Q!!! upload_from_string did not retry. +def upload_from_string(client, resources, preconditions): + bucket_name = resources["bucket"].name + bucket = client.get_bucket(bucket_name) + blob = bucket.blob(uuid.uuid4().hex) + blob.upload_from_string("upload from string") + +def create_notification(client, resources, preconditions): + bucket_name = resources["bucket"].name + bucket = client.get_bucket(bucket_name) + notification = bucket.notification() + notification.create() + +def list_notifications(client, resources, preconditions): + bucket_name = resources["bucket"].name + bucket = client.get_bucket(bucket_name) + notifications = bucket.list_notifications() + for n in notifications: + break + +def get_notification(client, resources, preconditions): + bucket_name = resources["bucket"].name + notification_id = resources["notification"].notification_id + client.bucket(bucket_name).get_notification(notification_id) + +def delete_notification(client, resources, preconditions): + bucket_name = resources["bucket"].name + notification_id = resources["notification"].notification_id + notification = client.bucket(bucket_name).get_notification(notification_id) + notification.delete() + +# Q!!! are there hmacKeys retryable endpoints in the emulator? +def list_hmac_keys(client, resources, preconditions): + hmac_keys = client.list_hmac_keys() + for k in hmac_keys: + break + +def delete_bucket(client, resources, preconditions): + bucket_name = resources["bucket"].name + bucket = client.bucket(bucket_name) + bucket.delete() + +def get_iam_policy(client, resources, preconditions): + bucket_name = resources["bucket"].name + bucket = client.bucket(bucket_name) + bucket.get_iam_policy() + # Method invocation mapping. Methods to retry. This is a map whose keys are a string describing a standard # API call (e.g. storage.objects.get) and values are a list of functions which @@ -349,13 +395,13 @@ def update_blob(client, resources, preconditions): # because multiple library methods may use the same call (e.g. get could be a # read or just a metadata get). 
method_mapping = { - "storage.bucket_acl.get": [], # S1 start + "storage.bucket_acl.get": [], # S1 start "storage.bucket_acl.list": [], - "storage.buckets.delete": [], - "storage.buckets.get": [get_bucket], - "storage.buckets.getIamPolicy": [], - "storage.buckets.insert": [], - "storage.buckets.list": [get_bucket, get_bucket], + "storage.buckets.delete": [delete_bucket], + "storage.buckets.get": [get_bucket, reload_bucket], + "storage.buckets.getIamPolicy": [get_iam_policy], + "storage.buckets.insert": [create_bucket], + "storage.buckets.list": [list_buckets], "storage.buckets.lockRententionPolicy": [], "storage.buckets.testIamPermission": [], "storage.default_object_acl.get": [], @@ -363,15 +409,15 @@ def update_blob(client, resources, preconditions): "storage.hmacKey.delete": [], "storage.hmacKey.list": [], "storage.hmacKey.get": [], - "storage.notification.delete": [], - "storage.notification.get": [], - "storage.notification.list": [], + "storage.notifications.delete": [delete_notification], + "storage.notifications.get": [get_notification], + "storage.notifications.list": [list_notifications], "storage.object_acl.get": [], "storage.object_acl.list": [], - "storage.objects.get": [get_bucket, get_bucket], - "storage.objects.list": [], + "storage.objects.get": [get_blob], + "storage.objects.list": [list_blobs], "storage.serviceaccount.get": [], # S1 end - "storage.buckets.patch": [], # S2 start + "storage.buckets.patch": [], # S2 start "storage.buckets.setIamPolicy": [], "storage.buckets.update": [], "storage.hmacKey.update": [], @@ -381,8 +427,8 @@ def update_blob(client, resources, preconditions): "storage.objects.insert": [], "storage.objects.patch": [update_blob], "storage.objects.rewrite": [], - "storage.objects.update": [], # S2 end - "storage.notification.create": [get_bucket], + "storage.objects.update": [], # S2 end + "storage.notifications.insert": [create_notification], # S4 } ######################################################################################################################################## From dcb2f0920d82f67a69347619fc41ab52a4152292 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Fri, 21 May 2021 14:01:07 -0700 Subject: [PATCH 16/45] lint --- tests/unit/test_retry.py | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 08e20dd21..04e592ddd 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -303,27 +303,32 @@ def list_buckets(client, resources, preconditions): for b in buckets: break + def list_blobs(client, resources, preconditions): bucket_name = resources["bucket"].name blobs = client.list_blobs(bucket_name) for b in blobs: break + def get_blob(client, resources, preconditions): bucket_name = resources["bucket"].name blob_name = resources["object"].name bucket = client.bucket(bucket_name) bucket.get_blob(blob_name) + def reload_bucket(client, resources, preconditions): bucket_name = resources["bucket"].name bucket = client.bucket(bucket_name) bucket.reload() + def get_bucket(client, resources, preconditions): bucket_name = resources["bucket"].name client.get_bucket(bucket_name) + def update_blob(client, resources, preconditions): bucket_name = resources["bucket"].name resource_blob = resources["object"] @@ -337,23 +342,27 @@ def update_blob(client, resources, preconditions): else: blob.patch() + def create_bucket(client, resources, preconditions): bucket = client.bucket(uuid.uuid4().hex) client.create_bucket(bucket) -# Q!!! 
upload_from_string did not retry. + +# Q!!! upload_from_string did not retry. def upload_from_string(client, resources, preconditions): bucket_name = resources["bucket"].name bucket = client.get_bucket(bucket_name) blob = bucket.blob(uuid.uuid4().hex) blob.upload_from_string("upload from string") + def create_notification(client, resources, preconditions): bucket_name = resources["bucket"].name bucket = client.get_bucket(bucket_name) notification = bucket.notification() notification.create() + def list_notifications(client, resources, preconditions): bucket_name = resources["bucket"].name bucket = client.get_bucket(bucket_name) @@ -361,28 +370,33 @@ def list_notifications(client, resources, preconditions): for n in notifications: break + def get_notification(client, resources, preconditions): bucket_name = resources["bucket"].name notification_id = resources["notification"].notification_id client.bucket(bucket_name).get_notification(notification_id) + def delete_notification(client, resources, preconditions): bucket_name = resources["bucket"].name notification_id = resources["notification"].notification_id notification = client.bucket(bucket_name).get_notification(notification_id) notification.delete() + # Q!!! are there hmacKeys retryable endpoints in the emulator? def list_hmac_keys(client, resources, preconditions): hmac_keys = client.list_hmac_keys() for k in hmac_keys: break + def delete_bucket(client, resources, preconditions): bucket_name = resources["bucket"].name bucket = client.bucket(bucket_name) bucket.delete() + def get_iam_policy(client, resources, preconditions): bucket_name = resources["bucket"].name bucket = client.bucket(bucket_name) @@ -395,7 +409,7 @@ def get_iam_policy(client, resources, preconditions): # because multiple library methods may use the same call (e.g. get could be a # read or just a metadata get). 
method_mapping = { - "storage.bucket_acl.get": [], # S1 start + "storage.bucket_acl.get": [], # S1 start "storage.bucket_acl.list": [], "storage.buckets.delete": [delete_bucket], "storage.buckets.get": [get_bucket, reload_bucket], @@ -417,7 +431,7 @@ def get_iam_policy(client, resources, preconditions): "storage.objects.get": [get_blob], "storage.objects.list": [list_blobs], "storage.serviceaccount.get": [], # S1 end - "storage.buckets.patch": [], # S2 start + "storage.buckets.patch": [], # S2 start "storage.buckets.setIamPolicy": [], "storage.buckets.update": [], "storage.hmacKey.update": [], @@ -427,7 +441,7 @@ def get_iam_policy(client, resources, preconditions): "storage.objects.insert": [], "storage.objects.patch": [update_blob], "storage.objects.rewrite": [], - "storage.objects.update": [], # S2 end + "storage.objects.update": [], # S2 end "storage.notifications.insert": [create_notification], # S4 } From 5b33dc251fe8afd79425ac85de58e7ab7a6e1fe1 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Mon, 24 May 2021 18:14:10 -0700 Subject: [PATCH 17/45] refactor using **kwargs --- tests/unit/retry_strategy_test_data.json | 7 +- tests/unit/test_retry.py | 93 +++++++++--------------- 2 files changed, 41 insertions(+), 59 deletions(-) diff --git a/tests/unit/retry_strategy_test_data.json b/tests/unit/retry_strategy_test_data.json index 2390b3d5f..a2ee07806 100644 --- a/tests/unit/retry_strategy_test_data.json +++ b/tests/unit/retry_strategy_test_data.json @@ -37,6 +37,10 @@ "BUCKET" ] }, + { + "name": "storage.buckets.insert", + "resources": [] + }, { "name": "storage.buckets.getIamPolicy", "resources": [ @@ -119,8 +123,7 @@ { "name": "storage.notifications.insert", "resources": [ - "BUCKET", - "NOTIFICATION" + "BUCKET" ] } ], diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 04e592ddd..e0b587cba 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -298,108 +298,92 @@ def fake_service_account(): ######################################################################################################################################## -def list_buckets(client, resources, preconditions): +def list_buckets(client, _, _bucket): buckets = client.list_buckets() for b in buckets: break -def list_blobs(client, resources, preconditions): - bucket_name = resources["bucket"].name - blobs = client.list_blobs(bucket_name) +def list_blobs(client, _, bucket, _blob): + blobs = client.list_blobs(bucket.name) for b in blobs: break -def get_blob(client, resources, preconditions): - bucket_name = resources["bucket"].name - blob_name = resources["object"].name - bucket = client.bucket(bucket_name) - bucket.get_blob(blob_name) +def get_blob(client, _, bucket, object): + bucket = client.bucket(bucket.name) + bucket.get_blob(object.name) -def reload_bucket(client, resources, preconditions): - bucket_name = resources["bucket"].name - bucket = client.bucket(bucket_name) +def reload_bucket(client, _, bucket): + bucket = client.bucket(bucket.name) bucket.reload() -def get_bucket(client, resources, preconditions): - bucket_name = resources["bucket"].name - client.get_bucket(bucket_name) +def get_bucket(client, _, bucket): + client.get_bucket(bucket.name) -def update_blob(client, resources, preconditions): - bucket_name = resources["bucket"].name - resource_blob = resources["object"] - bucket = client.bucket(bucket_name) - blob = bucket.blob(resource_blob.name) +def update_blob(client, preconditions, bucket, object): + bucket = client.bucket(bucket.name) + blob = 
bucket.blob(object.name) metadata = {"foo": "bar"} blob.metadata = metadata if preconditions: - metageneration = resource_blob.metageneration + metageneration = object.metageneration blob.patch(if_metageneration_match=metageneration) else: blob.patch() -def create_bucket(client, resources, preconditions): +def create_bucket(client, _): bucket = client.bucket(uuid.uuid4().hex) client.create_bucket(bucket) # Q!!! upload_from_string did not retry. -def upload_from_string(client, resources, preconditions): - bucket_name = resources["bucket"].name - bucket = client.get_bucket(bucket_name) +def upload_from_string(client, _, bucket): + bucket = client.get_bucket(bucket.name) blob = bucket.blob(uuid.uuid4().hex) blob.upload_from_string("upload from string") -def create_notification(client, resources, preconditions): - bucket_name = resources["bucket"].name - bucket = client.get_bucket(bucket_name) +def create_notification(client, _, bucket): + bucket = client.get_bucket(bucket.name) notification = bucket.notification() notification.create() -def list_notifications(client, resources, preconditions): - bucket_name = resources["bucket"].name - bucket = client.get_bucket(bucket_name) +def list_notifications(client, _, bucket, _notification): + bucket = client.get_bucket(bucket.name) notifications = bucket.list_notifications() for n in notifications: break -def get_notification(client, resources, preconditions): - bucket_name = resources["bucket"].name - notification_id = resources["notification"].notification_id - client.bucket(bucket_name).get_notification(notification_id) +def get_notification(client, _, bucket, notification): + client.bucket(bucket.name).get_notification(notification.notification_id) -def delete_notification(client, resources, preconditions): - bucket_name = resources["bucket"].name - notification_id = resources["notification"].notification_id - notification = client.bucket(bucket_name).get_notification(notification_id) +def delete_notification(client, _, bucket, notification): + notification = client.bucket(bucket.name).get_notification(notification.notification_id) notification.delete() # Q!!! are there hmacKeys retryable endpoints in the emulator? -def list_hmac_keys(client, resources, preconditions): +def list_hmac_keys(client, _, _hmac_key): hmac_keys = client.list_hmac_keys() for k in hmac_keys: break -def delete_bucket(client, resources, preconditions): - bucket_name = resources["bucket"].name - bucket = client.bucket(bucket_name) +def delete_bucket(client, _, bucket): + bucket = client.bucket(bucket.name) bucket.delete() -def get_iam_policy(client, resources, preconditions): - bucket_name = resources["bucket"].name - bucket = client.bucket(bucket_name) +def get_iam_policy(client, _, bucket): + bucket = client.bucket(bucket.name) bucket.get_iam_policy() @@ -491,12 +475,7 @@ def _populate_resource_hmackey(client, resources): def _populate_resources(client, json_resource): - resources = { - "bucket": None, - "object": None, - "notification": None, - "hmac_key": None, - } + resources = {} for r in json_resource: try: @@ -542,11 +521,11 @@ def _check_retry_test(host, id): return None -def _run_retry_test(host, id, func, resources, preconditions): +def _run_retry_test(host, id, func, preconditions, **resources): # Create client using x-retry-test-id header. 
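+    # The emulator matches this header against the instructions registered for
+    # the test id and injects the requested failures (e.g. return-503) into the
+    # matching requests issued by the wrapped library method.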
client = storage.Client(client_options={"api_endpoint": host}) client._http.headers.update({"x-retry-test-id": id}) - func(client=client, resources=resources, preconditions=preconditions) + func(client, preconditions, **resources) def _delete_retry_test(host, id): @@ -607,9 +586,9 @@ def test_conformance_retry_strategy(test_data): _run_retry_test( host, id, - func=function, - resources=resources, - preconditions=precondition_provided, + function, + precondition_provided, + **resources ) except Exception as e: print(e) From 11d9ee1f4cb8b7990b72c9bf39a2c7084aab5320 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Tue, 25 May 2021 10:28:45 -0700 Subject: [PATCH 18/45] remove unused module and fix lint --- tests/unit/test_retry.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index e0b587cba..3ef622665 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -24,8 +24,6 @@ import pytest import requests -import http - # http.client.HTTPConnection.debuglevel=5 @@ -366,7 +364,9 @@ def get_notification(client, _, bucket, notification): def delete_notification(client, _, bucket, notification): - notification = client.bucket(bucket.name).get_notification(notification.notification_id) + notification = client.bucket(bucket.name).get_notification( + notification.notification_id + ) notification.delete() @@ -584,11 +584,7 @@ def test_conformance_retry_strategy(test_data): # Run retry tests on library methods. try: _run_retry_test( - host, - id, - function, - precondition_provided, - **resources + host, id, function, precondition_provided, **resources ) except Exception as e: print(e) @@ -604,7 +600,7 @@ def test_conformance_retry_strategy(test_data): status_response = _check_retry_test(host, id) if status_response: test_complete = status_response["completed"] - assert test_complete == True + assert test_complete is True else: print("do something") From ee61aab2cdd9da0f01398a81545ccb6840a0a6b2 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Tue, 25 May 2021 15:02:50 -0700 Subject: [PATCH 19/45] handle misused arguments following style guide --- tests/unit/retry_strategy_test_data.json | 4 +-- tests/unit/test_retry.py | 42 ++++++++++++++++-------- 2 files changed, 30 insertions(+), 16 deletions(-) diff --git a/tests/unit/retry_strategy_test_data.json b/tests/unit/retry_strategy_test_data.json index a2ee07806..a4fbb0f47 100644 --- a/tests/unit/retry_strategy_test_data.json +++ b/tests/unit/retry_strategy_test_data.json @@ -20,13 +20,13 @@ ], "methods": [ { - "name": "storage.buckets.get", + "name": "storage.buckets.list", "resources": [ "BUCKET" ] }, { - "name": "storage.buckets.list", + "name": "storage.buckets.get", "resources": [ "BUCKET" ] diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 5aec2276c..71dd2b322 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -317,29 +317,34 @@ def fake_service_account(): ######################################################################################################################################## -def list_buckets(client, _, _bucket): +def list_buckets(client, preconditions, bucket): + del preconditions, bucket # Unused by api call. buckets = client.list_buckets() for b in buckets: break -def list_blobs(client, _, bucket, _blob): +def list_blobs(client, preconditions, bucket, object): + del preconditions, object # Unused by api call. 
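+    # Deleting deliberately unused parameters follows the style guide and keeps
+    # every wrapper on the same signature that _run_retry_test invokes via
+    # func(client, preconditions, **resources).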
blobs = client.list_blobs(bucket.name) for b in blobs: break -def get_blob(client, _, bucket, object): +def get_blob(client, preconditions, bucket, object): + del preconditions # Unused by api call. bucket = client.bucket(bucket.name) bucket.get_blob(object.name) -def reload_bucket(client, _, bucket): +def reload_bucket(client, preconditions, bucket): + del preconditions # Unused by api call. bucket = client.bucket(bucket.name) bucket.reload() -def get_bucket(client, _, bucket): +def get_bucket(client, preconditions, bucket): + del preconditions # Unused by api call. client.get_bucket(bucket.name) @@ -355,36 +360,42 @@ def update_blob(client, preconditions, bucket, object): blob.patch() -def create_bucket(client, _): +def create_bucket(client, preconditions): + del preconditions # Unused by api call. bucket = client.bucket(uuid.uuid4().hex) client.create_bucket(bucket) # Q!!! upload_from_string did not retry. -def upload_from_string(client, _, bucket): +def upload_from_string(client, preconditions, bucket): + del preconditions # Unused by api call. bucket = client.get_bucket(bucket.name) blob = bucket.blob(uuid.uuid4().hex) blob.upload_from_string("upload from string") -def create_notification(client, _, bucket): +def create_notification(client, preconditions, bucket): + del preconditions # Unused by api call. bucket = client.get_bucket(bucket.name) notification = bucket.notification() notification.create() -def list_notifications(client, _, bucket, _notification): +def list_notifications(client, preconditions, bucket, notification): + del preconditions, notification # Unused by api call. bucket = client.get_bucket(bucket.name) notifications = bucket.list_notifications() for n in notifications: break -def get_notification(client, _, bucket, notification): +def get_notification(client, preconditions, bucket, notification): + del preconditions # Unused by api call. client.bucket(bucket.name).get_notification(notification.notification_id) -def delete_notification(client, _, bucket, notification): +def delete_notification(client, preconditions, bucket, notification): + del preconditions # Unused by api call. notification = client.bucket(bucket.name).get_notification( notification.notification_id ) @@ -392,18 +403,21 @@ def delete_notification(client, _, bucket, notification): # Q!!! are there hmacKeys retryable endpoints in the emulator? -def list_hmac_keys(client, _, _hmac_key): +def list_hmac_keys(client, preconditions, hmac_key): + del preconditions, hmac_key # Unused by api call. hmac_keys = client.list_hmac_keys() for k in hmac_keys: break -def delete_bucket(client, _, bucket): +def delete_bucket(client, preconditions, bucket): + del preconditions # Unused by api call. bucket = client.bucket(bucket.name) bucket.delete() -def get_iam_policy(client, _, bucket): +def get_iam_policy(client, preconditions, bucket): + del preconditions # Unused by api call. 
bucket = client.bucket(bucket.name) bucket.get_iam_policy() From 694ae3378ef404b83a8e6edbb732e7033f69735c Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Tue, 25 May 2021 15:09:55 -0700 Subject: [PATCH 20/45] handle unused arguments --- tests/unit/test_retry.py | 50 +++++++++++++++------------------------- 1 file changed, 18 insertions(+), 32 deletions(-) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 71dd2b322..a989969a7 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -317,85 +317,74 @@ def fake_service_account(): ######################################################################################################################################## -def list_buckets(client, preconditions, bucket): - del preconditions, bucket # Unused by api call. +def list_buckets(client, _preconditions, **_): buckets = client.list_buckets() for b in buckets: break -def list_blobs(client, preconditions, bucket, object): - del preconditions, object # Unused by api call. +def list_blobs(client, _preconditions, bucket, **_): blobs = client.list_blobs(bucket.name) for b in blobs: break -def get_blob(client, preconditions, bucket, object): - del preconditions # Unused by api call. +def get_blob(client, _preconditions, bucket, object): bucket = client.bucket(bucket.name) bucket.get_blob(object.name) -def reload_bucket(client, preconditions, bucket): - del preconditions # Unused by api call. +def reload_bucket(client, _preconditions, bucket): bucket = client.bucket(bucket.name) bucket.reload() -def get_bucket(client, preconditions, bucket): - del preconditions # Unused by api call. +def get_bucket(client, _preconditions, bucket): client.get_bucket(bucket.name) -def update_blob(client, preconditions, bucket, object): +def update_blob(client, _preconditions, bucket, object): bucket = client.bucket(bucket.name) blob = bucket.blob(object.name) metadata = {"foo": "bar"} blob.metadata = metadata - if preconditions: + if _preconditions: metageneration = object.metageneration blob.patch(if_metageneration_match=metageneration) else: blob.patch() -def create_bucket(client, preconditions): - del preconditions # Unused by api call. +def create_bucket(client, _preconditions): bucket = client.bucket(uuid.uuid4().hex) client.create_bucket(bucket) # Q!!! upload_from_string did not retry. -def upload_from_string(client, preconditions, bucket): - del preconditions # Unused by api call. +def upload_from_string(client, _preconditions, bucket): bucket = client.get_bucket(bucket.name) blob = bucket.blob(uuid.uuid4().hex) blob.upload_from_string("upload from string") -def create_notification(client, preconditions, bucket): - del preconditions # Unused by api call. +def create_notification(client, _preconditions, bucket): bucket = client.get_bucket(bucket.name) notification = bucket.notification() notification.create() -def list_notifications(client, preconditions, bucket, notification): - del preconditions, notification # Unused by api call. +def list_notifications(client, _preconditions, bucket, **_): bucket = client.get_bucket(bucket.name) notifications = bucket.list_notifications() for n in notifications: break -def get_notification(client, preconditions, bucket, notification): - del preconditions # Unused by api call. +def get_notification(client, _preconditions, bucket, notification): client.bucket(bucket.name).get_notification(notification.notification_id) -def delete_notification(client, preconditions, bucket, notification): - del preconditions # Unused by api call. 
+def delete_notification(client, _preconditions, bucket, notification): notification = client.bucket(bucket.name).get_notification( notification.notification_id ) @@ -403,21 +392,18 @@ def delete_notification(client, preconditions, bucket, notification): # Q!!! are there hmacKeys retryable endpoints in the emulator? -def list_hmac_keys(client, preconditions, hmac_key): - del preconditions, hmac_key # Unused by api call. +def list_hmac_keys(client, _preconditions, **_): hmac_keys = client.list_hmac_keys() for k in hmac_keys: break -def delete_bucket(client, preconditions, bucket): - del preconditions # Unused by api call. +def delete_bucket(client, _preconditions, bucket): bucket = client.bucket(bucket.name) bucket.delete() -def get_iam_policy(client, preconditions, bucket): - del preconditions # Unused by api call. +def get_iam_policy(client, _preconditions, bucket): bucket = client.bucket(bucket.name) bucket.get_iam_policy() @@ -556,11 +542,11 @@ def _check_retry_test(host, id): return None -def _run_retry_test(host, id, func, preconditions, **resources): +def _run_retry_test(host, id, func, _preconditions, **resources): # Create client using x-retry-test-id header. client = storage.Client(client_options={"api_endpoint": host}) client._http.headers.update({"x-retry-test-id": id}) - func(client, preconditions, **resources) + func(client, _preconditions, **resources) def _delete_retry_test(host, id): From e9cf5b1df59bcc134aabde76336feea2052eeb56 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Wed, 26 May 2021 12:41:56 -0700 Subject: [PATCH 21/45] wip: add library methods to mapping and json --- tests/unit/retry_strategy_test_data.json | 13 ++++++++++ tests/unit/test_retry.py | 30 ++++++++++++++++++++++-- 2 files changed, 41 insertions(+), 2 deletions(-) diff --git a/tests/unit/retry_strategy_test_data.json b/tests/unit/retry_strategy_test_data.json index a4fbb0f47..ad3c1439d 100644 --- a/tests/unit/retry_strategy_test_data.json +++ b/tests/unit/retry_strategy_test_data.json @@ -19,6 +19,12 @@ } ], "methods": [ + { + "name": "storage.buckets.testIamPermission", + "resources": [ + "BUCKET" + ] + }, { "name": "storage.buckets.list", "resources": [ @@ -98,6 +104,13 @@ } ], "methods": [ + { + "name": "storage.objects.delete", + "resources": [ + "BUCKET", + "OBJECT" + ] + }, { "name": "storage.objects.patch", "resources": [ diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index a989969a7..5c9529640 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -408,6 +408,32 @@ def get_iam_policy(client, _preconditions, bucket): bucket.get_iam_policy() +def test_iam_permissions(client, _preconditions, bucket): + bucket = client.bucket(bucket.name) + permissions = ["storage.buckets.get", "storage.buckets.create"] + bucket.test_iam_permissions(permissions) + + +# Q: cannot find the corresponding endpoint in the Retry API +def get_service_account_email(client, _preconditions): + client.get_service_account_email() + + +# Q: not hitting the errors from the instructions +def make_bucket_public(client, _preconditions, bucket): + bucket = client.bucket(bucket.name) + bucket.make_public() + + +def delete_blob(client, _preconditions, bucket, object): + bucket = client.bucket(bucket.name) + if _preconditions: + generation = object.generation + bucket.delete_blob(object.name, if_generation_match=generation) + else: + bucket.delete_blob(object.name) + + # Method invocation mapping. Methods to retry. This is a map whose keys are a string describing a standard # API call (e.g. 
storage.objects.get) and values are a list of functions which # wrap library methods that implement these calls. There may be multiple values @@ -422,7 +448,7 @@ def get_iam_policy(client, _preconditions, bucket): "storage.buckets.insert": [create_bucket], "storage.buckets.list": [list_buckets], "storage.buckets.lockRententionPolicy": [], - "storage.buckets.testIamPermission": [], + "storage.buckets.testIamPermission": [test_iam_permissions], "storage.default_object_acl.get": [], "storage.default_object_acl.list": [], "storage.hmacKey.delete": [], @@ -442,7 +468,7 @@ def get_iam_policy(client, _preconditions, bucket): "storage.hmacKey.update": [], "storage.objects.compose": [], "storage.objects.copy": [], - "storage.objects.delete": [], + "storage.objects.delete": [delete_blob], "storage.objects.insert": [], "storage.objects.patch": [update_blob], "storage.objects.rewrite": [], From ffe718366e585b2379fafab560b0574afcfdc1ce Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Thu, 27 May 2021 13:57:19 -0700 Subject: [PATCH 22/45] add client_options to resource populating client --- tests/unit/test_retry.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 5c9529640..89ec8fd9f 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -598,7 +598,7 @@ def test_conformance_retry_strategy(test_data): ) # Create client to use for setup steps. - client = storage.Client() + client = storage.Client(client_options={"api_endpoint": host}) methods = test_data["methods"] cases = test_data["cases"] expect_success = test_data["expectSuccess"] From cba9d13c437fa89a354ba9527bffd2256ff36ebf Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Thu, 27 May 2021 15:56:41 -0700 Subject: [PATCH 23/45] log warnings and revise try except blocks --- tests/unit/test_retry.py | 91 +++++++++++++++++++++++----------------- 1 file changed, 52 insertions(+), 39 deletions(-) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 89ec8fd9f..f9f35fcbc 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -23,6 +23,7 @@ import mock import pytest import requests +import warnings # http.client.HTTPConnection.debuglevel=5 @@ -525,11 +526,8 @@ def _populate_resources(client, json_resource): resources = {} for r in json_resource: - try: - func = resource_mapping[r] - func(client, resources) - except Exception as e: - print("log warning here: {}".format(e)) + func = resource_mapping[r] + func(client, resources) return resources @@ -548,24 +546,14 @@ def _create_retry_test(host, method_name, instructions): } data_dict = {"instructions": {method_name: instructions}} data = json.dumps(data_dict) - try: - r = requests.post(preflight_post_uri, headers=headers, data=data) - return r.json() - except Exception as e: - print(e.args) - # do something - return None + r = requests.post(preflight_post_uri, headers=headers, data=data) + return r.json() def _check_retry_test(host, id): status_get_uri = "{base}{retry}/{id}".format(base=host, retry="/retry_test", id=id) - try: - r = requests.get(status_get_uri) - return r.json() - except Exception as e: - print(e.args) - # do something - return None + r = requests.get(status_get_uri) + return r.json() def _run_retry_test(host, id, func, _preconditions, **resources): @@ -577,11 +565,7 @@ def _run_retry_test(host, id, func, _preconditions, **resources): def _delete_retry_test(host, id): status_get_uri = "{base}{retry}/{id}".format(base=host, retry="/retry_test", id=id) - try: - 
requests.delete(status_get_uri) - except Exception as e: - print(e.args) - # do something + requests.delete(status_get_uri) ######################################################################################################################################## @@ -611,22 +595,38 @@ def test_conformance_retry_strategy(test_data): json_resources = m["resources"] if method_name not in method_mapping: - # TODO(cathyo@): change to log warning - print("No tests for operation {}".format(method_name)) + warnings.warn( + "No tests for operation {}".format(method_name), + UserWarning, + stacklevel=1 + ) continue for function in method_mapping[method_name]: # Create the retry test in the emulator to handle instructions. - r = _create_retry_test(host, method_name, instructions) - if r: + try: + r = _create_retry_test(host, method_name, instructions) id = r["id"] - else: - # TODO(cathyo@): change to log warning - print("Error creating retry test") + except Exception as e: + warnings.warn( + "Error creating retry test for {}: {}".format(method_name, e), + UserWarning, + stacklevel=1 + ) continue + # Populate resources. - resources = _populate_resources(client, json_resources) + try: + resources = _populate_resources(client, json_resources) + except Exception as e: + warnings.warn( + "Error populating resources for {}: {}".format(method_name, e), + UserWarning, + stacklevel=1 + ) + continue + # Run retry tests on library methods. try: @@ -634,22 +634,35 @@ def test_conformance_retry_strategy(test_data): host, id, function, precondition_provided, **resources ) except Exception as e: + # Should we be catching specific exceptions print(e) success_results = False else: success_results = True + # Assert expected success for each scenario. assert expect_success == success_results # Verify that all instructions were used up during the test # (indicates that the client sent the correct requests). - status_response = _check_retry_test(host, id) - if status_response: - test_complete = status_response["completed"] - assert test_complete is True - else: - print("do something") + try: + status_response = _check_retry_test(host, id) + assert status_response["completed"] is True + except Exception as e: + warnings.warn( + "Error checking retry test status for {}: {}".format(method_name, e), + UserWarning, + stacklevel=1 + ) + # Clean up and close out test in emulator. 
- _delete_retry_test(host, id) + try: + _delete_retry_test(host, id) + except Exception as e: + warnings.warn( + "Error deleting retry test for {}: {}".format(method_name, e), + UserWarning, + stacklevel=1 + ) From 15567c2dcb36bdbd256e0de632554ba112e042ee Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Tue, 1 Jun 2021 11:05:08 -0700 Subject: [PATCH 24/45] update schema for S1 and S2 --- tests/unit/retry_strategy_test_data.json | 159 ++++++++++++++++++++++- tests/unit/test_retry.py | 20 ++- 2 files changed, 165 insertions(+), 14 deletions(-) diff --git a/tests/unit/retry_strategy_test_data.json b/tests/unit/retry_strategy_test_data.json index ad3c1439d..9f1a9cc28 100644 --- a/tests/unit/retry_strategy_test_data.json +++ b/tests/unit/retry_strategy_test_data.json @@ -20,13 +20,19 @@ ], "methods": [ { - "name": "storage.buckets.testIamPermission", + "name": "storage.bucket_acl.get", "resources": [ "BUCKET" ] }, { - "name": "storage.buckets.list", + "name": "storage.bucket_acl.list", + "resources": [ + "BUCKET" + ] + }, + { + "name": "storage.buckets.delete", "resources": [ "BUCKET" ] @@ -38,7 +44,7 @@ ] }, { - "name": "storage.buckets.delete", + "name": "storage.buckets.getIamPolicy", "resources": [ "BUCKET" ] @@ -48,13 +54,51 @@ "resources": [] }, { - "name": "storage.buckets.getIamPolicy", + "name": "storage.buckets.list", "resources": [ "BUCKET" ] }, { - "name": "storage.notifications.list", + "name": "storage.buckets.lockRententionPolicy", + "resources": [ + "BUCKET" + ] + }, + { + "name": "storage.buckets.testIamPermission", + "resources": [ + "BUCKET" + ] + }, + { + "name": "storage.default_object_acl.get", + "resources": [ + "BUCKET", + "OBJECT" + ] + }, + { + "name": "storage.default_object_acl.list", + "resources": [ + "BUCKET", + "OBJECT" + ] + }, + { + "name": "storage.hmacKey.delete", + "resources": [] + }, + { + "name": "storage.hmacKey.get", + "resources": [] + }, + { + "name": "storage.hmacKey.list", + "resources": [] + }, + { + "name": "storage.notifications.delete", "resources": [ "BUCKET", "NOTIFICATION" @@ -68,12 +112,26 @@ ] }, { - "name": "storage.notifications.delete", + "name": "storage.notifications.list", "resources": [ "BUCKET", "NOTIFICATION" ] }, + { + "name": "storage.object_acl.get", + "resources": [ + "BUCKET", + "OBJECT" + ] + }, + { + "name": "storage.object_acl.list", + "resources": [ + "BUCKET", + "OBJECT" + ] + }, { "name": "storage.objects.get", "resources": [ @@ -87,6 +145,10 @@ "BUCKET", "OBJECT" ] + }, + { + "name": "storage.serviceaccount.get", + "resources": [] } ], "preconditionProvided": false, @@ -94,7 +156,7 @@ }, { "id": 2, - "description": "conditionally idempotent", + "description": "conditionally idempotent retries when precondition is present", "cases": [ { "instructions": [ @@ -104,6 +166,42 @@ } ], "methods": [ + { + "name": "storage.buckets.patch", + "resources": [ + "BUCKET" + ] + }, + { + "name": "storage.buckets.setIamPolicy", + "resources": [ + "BUCKET" + ] + }, + { + "name": "storage.buckets.update", + "resources": [ + "BUCKET" + ] + }, + { + "name": "storage.hmacKey.update", + "resources": [] + }, + { + "name": "storage.objects.compose", + "resources": [ + "BUCKET", + "OBJECT" + ] + }, + { + "name": "storage.objects.copy", + "resources": [ + "BUCKET", + "OBJECT" + ] + }, { "name": "storage.objects.delete", "resources": [ @@ -111,17 +209,58 @@ "OBJECT" ] }, + { + "name": "storage.objects.insert", + "resources": [ + "BUCKET" + ] + }, { "name": "storage.objects.patch", "resources": [ "BUCKET", "OBJECT" ] + }, + { + "name": 
"storage.objects.rewrite", + "resources": [ + "BUCKET", + "OBJECT" + ] + }, + { + "name": "storage.objects.update", + "resources": [ + "BUCKET", + "OBJECT" + ] } ], "preconditionProvided": true, "expectSuccess": true }, + { + "id": 3, + "description": "conditionally idempotent no retries when precondition is absent", + "cases": [ + { + "instructions": [ + "return-503" + ] + } + ], + "methods": [ + { + "name": "storage.buckets.patch", + "resources": [ + "BUCKET" + ] + } + ], + "preconditionProvided": false, + "expectSuccess": false + }, { "id": 4, "description": "non idempotent", @@ -138,6 +277,12 @@ "resources": [ "BUCKET" ] + }, + { + "name": "storage.bucket_acl.patch", + "resources": [ + "BUCKET" + ] } ], "preconditionProvided": false, diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index f9f35fcbc..7a127d47a 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -434,6 +434,12 @@ def delete_blob(client, _preconditions, bucket, object): else: bucket.delete_blob(object.name) +#Q: 1) cannot lock a locked bucket 2) currently using default "bucket" with metageneration +def lock_retention_policy(client, _preconditions, bucket): + bucket2 = client.bucket(bucket.name) + bucket2.retention_period = 60 + bucket2.patch() + bucket2.lock_retention_policy() # Method invocation mapping. Methods to retry. This is a map whose keys are a string describing a standard # API call (e.g. storage.objects.get) and values are a list of functions which @@ -441,20 +447,20 @@ def delete_blob(client, _preconditions, bucket, object): # because multiple library methods may use the same call (e.g. get could be a # read or just a metadata get). method_mapping = { - "storage.bucket_acl.get": [], # S1 start - "storage.bucket_acl.list": [], + # "storage.bucket_acl.get": [], # S1 start # no library method mapped + # "storage.bucket_acl.list": [], # no library method mapped "storage.buckets.delete": [delete_bucket], "storage.buckets.get": [get_bucket, reload_bucket], "storage.buckets.getIamPolicy": [get_iam_policy], "storage.buckets.insert": [create_bucket], "storage.buckets.list": [list_buckets], - "storage.buckets.lockRententionPolicy": [], + # "storage.buckets.lockRententionPolicy": [], # lock_retention_policy "storage.buckets.testIamPermission": [test_iam_permissions], "storage.default_object_acl.get": [], "storage.default_object_acl.list": [], - "storage.hmacKey.delete": [], - "storage.hmacKey.list": [], - "storage.hmacKey.get": [], + # "storage.hmacKey.delete": [], # emulator project related endpoints wip + # "storage.hmacKey.list": [], # emulator project related endpoints wip + # "storage.hmacKey.get": [], # emulator project related endpoints wip "storage.notifications.delete": [delete_notification], "storage.notifications.get": [get_notification], "storage.notifications.list": [list_notifications], @@ -462,7 +468,7 @@ def delete_blob(client, _preconditions, bucket, object): "storage.object_acl.list": [], "storage.objects.get": [get_blob], "storage.objects.list": [list_blobs], - "storage.serviceaccount.get": [], # S1 end + # "storage.serviceaccount.get": [], # S1 end # emulator project related endpoints wip "storage.buckets.patch": [], # S2 start "storage.buckets.setIamPolicy": [], "storage.buckets.update": [], From 952769b3f9766b5d1720a1050dc048d43a2ca31c Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Tue, 1 Jun 2021 13:02:40 -0700 Subject: [PATCH 25/45] fix lint and mark error --- tests/unit/test_retry.py | 33 +++++++++++++++++---------------- 1 file changed, 17 insertions(+), 16 
deletions(-) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 7a127d47a..085d715dc 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -409,10 +409,11 @@ def get_iam_policy(client, _preconditions, bucket): bucket.get_iam_policy() -def test_iam_permissions(client, _preconditions, bucket): - bucket = client.bucket(bucket.name) - permissions = ["storage.buckets.get", "storage.buckets.create"] - bucket.test_iam_permissions(permissions) +# Q: error - fixture 'client' not found +# def test_iam_permissions(client, _preconditions, bucket): +# bucket = client.bucket(bucket.name) +# permissions = ["storage.buckets.get", "storage.buckets.create"] +# bucket.test_iam_permissions(permissions) # Q: cannot find the corresponding endpoint in the Retry API @@ -434,13 +435,15 @@ def delete_blob(client, _preconditions, bucket, object): else: bucket.delete_blob(object.name) -#Q: 1) cannot lock a locked bucket 2) currently using default "bucket" with metageneration + +# Q: 1) cannot lock a locked bucket 2) currently using default "bucket" with metageneration def lock_retention_policy(client, _preconditions, bucket): bucket2 = client.bucket(bucket.name) bucket2.retention_period = 60 bucket2.patch() bucket2.lock_retention_policy() + # Method invocation mapping. Methods to retry. This is a map whose keys are a string describing a standard # API call (e.g. storage.objects.get) and values are a list of functions which # wrap library methods that implement these calls. There may be multiple values @@ -455,7 +458,7 @@ def lock_retention_policy(client, _preconditions, bucket): "storage.buckets.insert": [create_bucket], "storage.buckets.list": [list_buckets], # "storage.buckets.lockRententionPolicy": [], # lock_retention_policy - "storage.buckets.testIamPermission": [test_iam_permissions], + # "storage.buckets.testIamPermission": [], # test_iam_permissions "storage.default_object_acl.get": [], "storage.default_object_acl.list": [], # "storage.hmacKey.delete": [], # emulator project related endpoints wip @@ -604,7 +607,7 @@ def test_conformance_retry_strategy(test_data): warnings.warn( "No tests for operation {}".format(method_name), UserWarning, - stacklevel=1 + stacklevel=1, ) continue @@ -617,11 +620,10 @@ def test_conformance_retry_strategy(test_data): warnings.warn( "Error creating retry test for {}: {}".format(method_name, e), UserWarning, - stacklevel=1 + stacklevel=1, ) continue - # Populate resources. try: resources = _populate_resources(client, json_resources) @@ -629,11 +631,10 @@ def test_conformance_retry_strategy(test_data): warnings.warn( "Error populating resources for {}: {}".format(method_name, e), UserWarning, - stacklevel=1 + stacklevel=1, ) continue - # Run retry tests on library methods. try: _run_retry_test( @@ -646,7 +647,6 @@ def test_conformance_retry_strategy(test_data): else: success_results = True - # Assert expected success for each scenario. assert expect_success == success_results @@ -657,12 +657,13 @@ def test_conformance_retry_strategy(test_data): assert status_response["completed"] is True except Exception as e: warnings.warn( - "Error checking retry test status for {}: {}".format(method_name, e), + "Error checking retry test status for {}: {}".format( + method_name, e + ), UserWarning, - stacklevel=1 + stacklevel=1, ) - # Clean up and close out test in emulator. 
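            # The delete is attempted even when the case itself failed, so stale
            # retry-test ids do not accumulate in the emulator between runs.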
try: _delete_retry_test(host, id) @@ -670,5 +671,5 @@ def test_conformance_retry_strategy(test_data): warnings.warn( "Error deleting retry test for {}: {}".format(method_name, e), UserWarning, - stacklevel=1 + stacklevel=1, ) From 6c31f01180e524398b8e8029e0e5ec773de69dd0 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Tue, 1 Jun 2021 13:12:03 -0700 Subject: [PATCH 26/45] move retry conformance tests to separate folder --- .../conformance/retry_strategy_test_data.json | 292 ++++++++ tests/conformance/test_conformance.py | 411 +++++++++++ tests/unit/test_retry.py | 676 +++++++++--------- 3 files changed, 1041 insertions(+), 338 deletions(-) create mode 100644 tests/conformance/retry_strategy_test_data.json create mode 100644 tests/conformance/test_conformance.py diff --git a/tests/conformance/retry_strategy_test_data.json b/tests/conformance/retry_strategy_test_data.json new file mode 100644 index 000000000..9f1a9cc28 --- /dev/null +++ b/tests/conformance/retry_strategy_test_data.json @@ -0,0 +1,292 @@ +{ + "retryStrategyTests": [ + { + "id": 1, + "description": "always idempotent", + "cases": [ + { + "instructions": [ + "return-503", + "return-503", + "return-503" + ] + }, + { + "instructions": [ + "return-503", + "return-503" + ] + } + ], + "methods": [ + { + "name": "storage.bucket_acl.get", + "resources": [ + "BUCKET" + ] + }, + { + "name": "storage.bucket_acl.list", + "resources": [ + "BUCKET" + ] + }, + { + "name": "storage.buckets.delete", + "resources": [ + "BUCKET" + ] + }, + { + "name": "storage.buckets.get", + "resources": [ + "BUCKET" + ] + }, + { + "name": "storage.buckets.getIamPolicy", + "resources": [ + "BUCKET" + ] + }, + { + "name": "storage.buckets.insert", + "resources": [] + }, + { + "name": "storage.buckets.list", + "resources": [ + "BUCKET" + ] + }, + { + "name": "storage.buckets.lockRententionPolicy", + "resources": [ + "BUCKET" + ] + }, + { + "name": "storage.buckets.testIamPermission", + "resources": [ + "BUCKET" + ] + }, + { + "name": "storage.default_object_acl.get", + "resources": [ + "BUCKET", + "OBJECT" + ] + }, + { + "name": "storage.default_object_acl.list", + "resources": [ + "BUCKET", + "OBJECT" + ] + }, + { + "name": "storage.hmacKey.delete", + "resources": [] + }, + { + "name": "storage.hmacKey.get", + "resources": [] + }, + { + "name": "storage.hmacKey.list", + "resources": [] + }, + { + "name": "storage.notifications.delete", + "resources": [ + "BUCKET", + "NOTIFICATION" + ] + }, + { + "name": "storage.notifications.get", + "resources": [ + "BUCKET", + "NOTIFICATION" + ] + }, + { + "name": "storage.notifications.list", + "resources": [ + "BUCKET", + "NOTIFICATION" + ] + }, + { + "name": "storage.object_acl.get", + "resources": [ + "BUCKET", + "OBJECT" + ] + }, + { + "name": "storage.object_acl.list", + "resources": [ + "BUCKET", + "OBJECT" + ] + }, + { + "name": "storage.objects.get", + "resources": [ + "BUCKET", + "OBJECT" + ] + }, + { + "name": "storage.objects.list", + "resources": [ + "BUCKET", + "OBJECT" + ] + }, + { + "name": "storage.serviceaccount.get", + "resources": [] + } + ], + "preconditionProvided": false, + "expectSuccess": true + }, + { + "id": 2, + "description": "conditionally idempotent retries when precondition is present", + "cases": [ + { + "instructions": [ + "return-503", + "return-503" + ] + } + ], + "methods": [ + { + "name": "storage.buckets.patch", + "resources": [ + "BUCKET" + ] + }, + { + "name": "storage.buckets.setIamPolicy", + "resources": [ + "BUCKET" + ] + }, + { + "name": "storage.buckets.update", + "resources": [ + 
"BUCKET" + ] + }, + { + "name": "storage.hmacKey.update", + "resources": [] + }, + { + "name": "storage.objects.compose", + "resources": [ + "BUCKET", + "OBJECT" + ] + }, + { + "name": "storage.objects.copy", + "resources": [ + "BUCKET", + "OBJECT" + ] + }, + { + "name": "storage.objects.delete", + "resources": [ + "BUCKET", + "OBJECT" + ] + }, + { + "name": "storage.objects.insert", + "resources": [ + "BUCKET" + ] + }, + { + "name": "storage.objects.patch", + "resources": [ + "BUCKET", + "OBJECT" + ] + }, + { + "name": "storage.objects.rewrite", + "resources": [ + "BUCKET", + "OBJECT" + ] + }, + { + "name": "storage.objects.update", + "resources": [ + "BUCKET", + "OBJECT" + ] + } + ], + "preconditionProvided": true, + "expectSuccess": true + }, + { + "id": 3, + "description": "conditionally idempotent no retries when precondition is absent", + "cases": [ + { + "instructions": [ + "return-503" + ] + } + ], + "methods": [ + { + "name": "storage.buckets.patch", + "resources": [ + "BUCKET" + ] + } + ], + "preconditionProvided": false, + "expectSuccess": false + }, + { + "id": 4, + "description": "non idempotent", + "cases": [ + { + "instructions": [ + "return-503" + ] + } + ], + "methods": [ + { + "name": "storage.notifications.insert", + "resources": [ + "BUCKET" + ] + }, + { + "name": "storage.bucket_acl.patch", + "resources": [ + "BUCKET" + ] + } + ], + "preconditionProvided": false, + "expectSuccess": false + } + ] + } \ No newline at end of file diff --git a/tests/conformance/test_conformance.py b/tests/conformance/test_conformance.py new file mode 100644 index 000000000..317a3c9fb --- /dev/null +++ b/tests/conformance/test_conformance.py @@ -0,0 +1,411 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import uuid + +from google.cloud import storage +from google.cloud.storage import _helpers + +from . import _read_local_json + +import pytest +import requests +import warnings + +# http.client.HTTPConnection.debuglevel=5 + + + +# ToDo: Confirm what are the credentials required. Can we use the same service account created for url_signer_v4_test_account? +_FAKE_SERVICE_ACCOUNT = None + + +def fake_service_account(): + global _FAKE_SERVICE_ACCOUNT + # validate and set fake service account + + +# ToDo: Confirm what are the credentials required. Can we use the same service account created for url_signer_v4_test_account? ) +# _SERVICE_ACCOUNT_JSON = _read_local_json("") +_CONFORMANCE_TESTS = _read_local_json("retry_strategy_test_data.json")[ + "retryStrategyTests" +] +# ToDo: Confirm the correct access endpoint. 
+_API_ACCESS_ENDPOINT = _helpers._get_storage_host() +_DEFAULT_STORAGE_HOST = u"https://storage.googleapis.com" +_CONF_TEST_PROJECT_ID = "my-project-id" +_CONF_TEST_SERVICE_ACCOUNT_EMAIL = ( + "my-service-account@my-project-id.iam.gserviceaccount.com" +) + +######################################################################################################################################## +### Library methods for mapping ######################################################################################################## +######################################################################################################################################## + + +def list_buckets(client, _preconditions, **_): + buckets = client.list_buckets() + for b in buckets: + break + + +def list_blobs(client, _preconditions, bucket, **_): + blobs = client.list_blobs(bucket.name) + for b in blobs: + break + + +def get_blob(client, _preconditions, bucket, object): + bucket = client.bucket(bucket.name) + bucket.get_blob(object.name) + + +def reload_bucket(client, _preconditions, bucket): + bucket = client.bucket(bucket.name) + bucket.reload() + + +def get_bucket(client, _preconditions, bucket): + client.get_bucket(bucket.name) + + +def update_blob(client, _preconditions, bucket, object): + bucket = client.bucket(bucket.name) + blob = bucket.blob(object.name) + metadata = {"foo": "bar"} + blob.metadata = metadata + if _preconditions: + metageneration = object.metageneration + blob.patch(if_metageneration_match=metageneration) + else: + blob.patch() + + +def create_bucket(client, _preconditions): + bucket = client.bucket(uuid.uuid4().hex) + client.create_bucket(bucket) + + +# Q!!! upload_from_string did not retry. +def upload_from_string(client, _preconditions, bucket): + bucket = client.get_bucket(bucket.name) + blob = bucket.blob(uuid.uuid4().hex) + blob.upload_from_string("upload from string") + + +def create_notification(client, _preconditions, bucket): + bucket = client.get_bucket(bucket.name) + notification = bucket.notification() + notification.create() + + +def list_notifications(client, _preconditions, bucket, **_): + bucket = client.get_bucket(bucket.name) + notifications = bucket.list_notifications() + for n in notifications: + break + + +def get_notification(client, _preconditions, bucket, notification): + client.bucket(bucket.name).get_notification(notification.notification_id) + + +def delete_notification(client, _preconditions, bucket, notification): + notification = client.bucket(bucket.name).get_notification( + notification.notification_id + ) + notification.delete() + + +# Q!!! are there hmacKeys retryable endpoints in the emulator? 
+def list_hmac_keys(client, _preconditions, **_): + hmac_keys = client.list_hmac_keys() + for k in hmac_keys: + break + + +def delete_bucket(client, _preconditions, bucket): + bucket = client.bucket(bucket.name) + bucket.delete() + + +def get_iam_policy(client, _preconditions, bucket): + bucket = client.bucket(bucket.name) + bucket.get_iam_policy() + + +# Q: error - fixture 'client' not found +# def test_iam_permissions(client, _preconditions, bucket): +# bucket = client.bucket(bucket.name) +# permissions = ["storage.buckets.get", "storage.buckets.create"] +# bucket.test_iam_permissions(permissions) + + +# Q: cannot find the corresponding endpoint in the Retry API +def get_service_account_email(client, _preconditions): + client.get_service_account_email() + + +# Q: not hitting the errors from the instructions +def make_bucket_public(client, _preconditions, bucket): + bucket = client.bucket(bucket.name) + bucket.make_public() + + +def delete_blob(client, _preconditions, bucket, object): + bucket = client.bucket(bucket.name) + if _preconditions: + generation = object.generation + bucket.delete_blob(object.name, if_generation_match=generation) + else: + bucket.delete_blob(object.name) + + +# Q: 1) cannot lock a locked bucket 2) currently using default "bucket" with metageneration +def lock_retention_policy(client, _preconditions, bucket): + bucket2 = client.bucket(bucket.name) + bucket2.retention_period = 60 + bucket2.patch() + bucket2.lock_retention_policy() + + +# Method invocation mapping. Methods to retry. This is a map whose keys are a string describing a standard +# API call (e.g. storage.objects.get) and values are a list of functions which +# wrap library methods that implement these calls. There may be multiple values +# because multiple library methods may use the same call (e.g. get could be a +# read or just a metadata get). 
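+# Each wrapper above accepts (client, _preconditions, <resource kwargs>), so the
+# runner below can call func(client, _preconditions, **resources) for any entry;
+# keys with more than one wrapper (e.g. "storage.buckets.get": [get_bucket,
+# reload_bucket]) exercise every wrapper against the same emulator instructions.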
+method_mapping = { + # "storage.bucket_acl.get": [], # S1 start # no library method mapped + # "storage.bucket_acl.list": [], # no library method mapped + "storage.buckets.delete": [delete_bucket], + "storage.buckets.get": [get_bucket, reload_bucket], + "storage.buckets.getIamPolicy": [get_iam_policy], + "storage.buckets.insert": [create_bucket], + "storage.buckets.list": [list_buckets], + # "storage.buckets.lockRententionPolicy": [], # lock_retention_policy + # "storage.buckets.testIamPermission": [], # test_iam_permissions + "storage.default_object_acl.get": [], + "storage.default_object_acl.list": [], + # "storage.hmacKey.delete": [], # emulator project related endpoints wip + # "storage.hmacKey.list": [], # emulator project related endpoints wip + # "storage.hmacKey.get": [], # emulator project related endpoints wip + "storage.notifications.delete": [delete_notification], + "storage.notifications.get": [get_notification], + "storage.notifications.list": [list_notifications], + "storage.object_acl.get": [], + "storage.object_acl.list": [], + "storage.objects.get": [get_blob], + "storage.objects.list": [list_blobs], + # "storage.serviceaccount.get": [], # S1 end # emulator project related endpoints wip + "storage.buckets.patch": [], # S2 start + "storage.buckets.setIamPolicy": [], + "storage.buckets.update": [], + "storage.hmacKey.update": [], + "storage.objects.compose": [], + "storage.objects.copy": [], + "storage.objects.delete": [delete_blob], + "storage.objects.insert": [], + "storage.objects.patch": [update_blob], + "storage.objects.rewrite": [], + "storage.objects.update": [], # S2 end + "storage.notifications.insert": [create_notification], # S4 +} + +######################################################################################################################################## +### Helper Methods for Populating Resources ############################################################################################ +######################################################################################################################################## + + +def _populate_resource_bucket(client, resources): + bucket = client.bucket(uuid.uuid4().hex) + client.create_bucket(bucket) + resources["bucket"] = bucket + + +def _populate_resource_object(client, resources): + bucket_name = resources["bucket"].name + bucket = client.get_bucket(bucket_name) + blob = bucket.blob(uuid.uuid4().hex) + blob.upload_from_string("hello world") + blob.reload() + resources["object"] = blob + + +def _populate_resource_notification(client, resources): + bucket_name = resources["bucket"].name + bucket = client.get_bucket(bucket_name) + notification = bucket.notification() + notification.create() + notification.reload() + resources["notification"] = notification + + +def _populate_resource_hmackey(client, resources): + hmac_key, secret = client.create_hmac_key( + service_account_email=_CONF_TEST_SERVICE_ACCOUNT_EMAIL, + project_id=_CONF_TEST_PROJECT_ID, + ) + resources["hmac_key"] = hmac_key + + +resource_mapping = { + "BUCKET": _populate_resource_bucket, + "OBJECT": _populate_resource_object, + "NOTIFICATION": _populate_resource_notification, + "HMAC_KEY": _populate_resource_hmackey, +} + + +def _populate_resources(client, json_resource): + resources = {} + + for r in json_resource: + func = resource_mapping[r] + func(client, resources) + + return resources + + +######################################################################################################################################## 
+### Helper Methods for Emulator Retry API ############################################################################################## +######################################################################################################################################## + + +def _create_retry_test(host, method_name, instructions): + import json + + preflight_post_uri = host + "/retry_test" + headers = { + "Content-Type": "application/json", + } + data_dict = {"instructions": {method_name: instructions}} + data = json.dumps(data_dict) + r = requests.post(preflight_post_uri, headers=headers, data=data) + return r.json() + + +def _check_retry_test(host, id): + status_get_uri = "{base}{retry}/{id}".format(base=host, retry="/retry_test", id=id) + r = requests.get(status_get_uri) + return r.json() + + +def _run_retry_test(host, id, func, _preconditions, **resources): + # Create client using x-retry-test-id header. + client = storage.Client(client_options={"api_endpoint": host}) + client._http.headers.update({"x-retry-test-id": id}) + func(client, _preconditions, **resources) + + +def _delete_retry_test(host, id): + status_get_uri = "{base}{retry}/{id}".format(base=host, retry="/retry_test", id=id) + requests.delete(status_get_uri) + + +######################################################################################################################################## +### Run Conformance Tests for Retry Strategy ########################################################################################### +######################################################################################################################################## + + +@pytest.mark.parametrize("test_data", _CONFORMANCE_TESTS) +def test_conformance_retry_strategy(test_data): + host = _API_ACCESS_ENDPOINT + if host == _DEFAULT_STORAGE_HOST: + pytest.skip( + "This test must use the testbench emulator; set STORAGE_EMULATOR_HOST to run." + ) + + # Create client to use for setup steps. + client = storage.Client(client_options={"api_endpoint": host}) + methods = test_data["methods"] + cases = test_data["cases"] + expect_success = test_data["expectSuccess"] + precondition_provided = test_data["preconditionProvided"] + for c in cases: + for m in methods: + # Extract method name and instructions to create retry test. + method_name = m["name"] + instructions = c["instructions"] + json_resources = m["resources"] + + if method_name not in method_mapping: + warnings.warn( + "No tests for operation {}".format(method_name), + UserWarning, + stacklevel=1, + ) + continue + + for function in method_mapping[method_name]: + # Create the retry test in the emulator to handle instructions. + try: + r = _create_retry_test(host, method_name, instructions) + id = r["id"] + except Exception as e: + warnings.warn( + "Error creating retry test for {}: {}".format(method_name, e), + UserWarning, + stacklevel=1, + ) + continue + + # Populate resources. + try: + resources = _populate_resources(client, json_resources) + except Exception as e: + warnings.warn( + "Error populating resources for {}: {}".format(method_name, e), + UserWarning, + stacklevel=1, + ) + continue + + # Run retry tests on library methods. + try: + _run_retry_test( + host, id, function, precondition_provided, **resources + ) + except Exception as e: + # Should we be catching specific exceptions + print(e) + success_results = False + else: + success_results = True + + # Assert expected success for each scenario. 
+ assert expect_success == success_results + + # Verify that all instructions were used up during the test + # (indicates that the client sent the correct requests). + try: + status_response = _check_retry_test(host, id) + assert status_response["completed"] is True + except Exception as e: + warnings.warn( + "Error checking retry test status for {}: {}".format( + method_name, e + ), + UserWarning, + stacklevel=1, + ) + + # Clean up and close out test in emulator. + try: + _delete_retry_test(host, id) + except Exception as e: + warnings.warn( + "Error deleting retry test for {}: {}".format(method_name, e), + UserWarning, + stacklevel=1, + ) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 085d715dc..b963c0c45 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -291,385 +291,385 @@ def test_is_meta_or_etag_in_json_invalid(self): self.assertEqual(policy, None) -# ToDo: Confirm what are the credentials required. Can we use the same service account created for url_signer_v4_test_account? -_FAKE_SERVICE_ACCOUNT = None +# # ToDo: Confirm what are the credentials required. Can we use the same service account created for url_signer_v4_test_account? +# _FAKE_SERVICE_ACCOUNT = None -def fake_service_account(): - global _FAKE_SERVICE_ACCOUNT - # validate and set fake service account +# def fake_service_account(): +# global _FAKE_SERVICE_ACCOUNT +# # validate and set fake service account -# ToDo: Confirm what are the credentials required. Can we use the same service account created for url_signer_v4_test_account? ) -# _SERVICE_ACCOUNT_JSON = _read_local_json("") -_CONFORMANCE_TESTS = _read_local_json("retry_strategy_test_data.json")[ - "retryStrategyTests" -] -# ToDo: Confirm the correct access endpoint. -_API_ACCESS_ENDPOINT = _helpers._get_storage_host() -_DEFAULT_STORAGE_HOST = u"https://storage.googleapis.com" -_CONF_TEST_PROJECT_ID = "my-project-id" -_CONF_TEST_SERVICE_ACCOUNT_EMAIL = ( - "my-service-account@my-project-id.iam.gserviceaccount.com" -) +# # ToDo: Confirm what are the credentials required. Can we use the same service account created for url_signer_v4_test_account? ) +# # _SERVICE_ACCOUNT_JSON = _read_local_json("") +# _CONFORMANCE_TESTS = _read_local_json("retry_strategy_test_data.json")[ +# "retryStrategyTests" +# ] +# # ToDo: Confirm the correct access endpoint. 
+# _API_ACCESS_ENDPOINT = _helpers._get_storage_host() +# _DEFAULT_STORAGE_HOST = u"https://storage.googleapis.com" +# _CONF_TEST_PROJECT_ID = "my-project-id" +# _CONF_TEST_SERVICE_ACCOUNT_EMAIL = ( +# "my-service-account@my-project-id.iam.gserviceaccount.com" +# ) -######################################################################################################################################## -### Library methods for mapping ######################################################################################################## -######################################################################################################################################## +# ######################################################################################################################################## +# ### Library methods for mapping ######################################################################################################## +# ######################################################################################################################################## -def list_buckets(client, _preconditions, **_): - buckets = client.list_buckets() - for b in buckets: - break +# def list_buckets(client, _preconditions, **_): +# buckets = client.list_buckets() +# for b in buckets: +# break -def list_blobs(client, _preconditions, bucket, **_): - blobs = client.list_blobs(bucket.name) - for b in blobs: - break +# def list_blobs(client, _preconditions, bucket, **_): +# blobs = client.list_blobs(bucket.name) +# for b in blobs: +# break -def get_blob(client, _preconditions, bucket, object): - bucket = client.bucket(bucket.name) - bucket.get_blob(object.name) - - -def reload_bucket(client, _preconditions, bucket): - bucket = client.bucket(bucket.name) - bucket.reload() - +# def get_blob(client, _preconditions, bucket, object): +# bucket = client.bucket(bucket.name) +# bucket.get_blob(object.name) -def get_bucket(client, _preconditions, bucket): - client.get_bucket(bucket.name) +# def reload_bucket(client, _preconditions, bucket): +# bucket = client.bucket(bucket.name) +# bucket.reload() -def update_blob(client, _preconditions, bucket, object): - bucket = client.bucket(bucket.name) - blob = bucket.blob(object.name) - metadata = {"foo": "bar"} - blob.metadata = metadata - if _preconditions: - metageneration = object.metageneration - blob.patch(if_metageneration_match=metageneration) - else: - blob.patch() +# def get_bucket(client, _preconditions, bucket): +# client.get_bucket(bucket.name) -def create_bucket(client, _preconditions): - bucket = client.bucket(uuid.uuid4().hex) - client.create_bucket(bucket) +# def update_blob(client, _preconditions, bucket, object): +# bucket = client.bucket(bucket.name) +# blob = bucket.blob(object.name) +# metadata = {"foo": "bar"} +# blob.metadata = metadata +# if _preconditions: +# metageneration = object.metageneration +# blob.patch(if_metageneration_match=metageneration) +# else: +# blob.patch() -# Q!!! upload_from_string did not retry. -def upload_from_string(client, _preconditions, bucket): - bucket = client.get_bucket(bucket.name) - blob = bucket.blob(uuid.uuid4().hex) - blob.upload_from_string("upload from string") +# def create_bucket(client, _preconditions): +# bucket = client.bucket(uuid.uuid4().hex) +# client.create_bucket(bucket) -def create_notification(client, _preconditions, bucket): - bucket = client.get_bucket(bucket.name) - notification = bucket.notification() - notification.create() +# # Q!!! upload_from_string did not retry. 
+# def upload_from_string(client, _preconditions, bucket): +# bucket = client.get_bucket(bucket.name) +# blob = bucket.blob(uuid.uuid4().hex) +# blob.upload_from_string("upload from string") -def list_notifications(client, _preconditions, bucket, **_): - bucket = client.get_bucket(bucket.name) - notifications = bucket.list_notifications() - for n in notifications: - break +# def create_notification(client, _preconditions, bucket): +# bucket = client.get_bucket(bucket.name) +# notification = bucket.notification() +# notification.create() -def get_notification(client, _preconditions, bucket, notification): - client.bucket(bucket.name).get_notification(notification.notification_id) +# def list_notifications(client, _preconditions, bucket, **_): +# bucket = client.get_bucket(bucket.name) +# notifications = bucket.list_notifications() +# for n in notifications: +# break -def delete_notification(client, _preconditions, bucket, notification): - notification = client.bucket(bucket.name).get_notification( - notification.notification_id - ) - notification.delete() +# def get_notification(client, _preconditions, bucket, notification): +# client.bucket(bucket.name).get_notification(notification.notification_id) -# Q!!! are there hmacKeys retryable endpoints in the emulator? -def list_hmac_keys(client, _preconditions, **_): - hmac_keys = client.list_hmac_keys() - for k in hmac_keys: - break +# def delete_notification(client, _preconditions, bucket, notification): +# notification = client.bucket(bucket.name).get_notification( +# notification.notification_id +# ) +# notification.delete() -def delete_bucket(client, _preconditions, bucket): - bucket = client.bucket(bucket.name) - bucket.delete() +# # Q!!! are there hmacKeys retryable endpoints in the emulator? +# def list_hmac_keys(client, _preconditions, **_): +# hmac_keys = client.list_hmac_keys() +# for k in hmac_keys: +# break -def get_iam_policy(client, _preconditions, bucket): - bucket = client.bucket(bucket.name) - bucket.get_iam_policy() - -# Q: error - fixture 'client' not found -# def test_iam_permissions(client, _preconditions, bucket): +# def delete_bucket(client, _preconditions, bucket): # bucket = client.bucket(bucket.name) -# permissions = ["storage.buckets.get", "storage.buckets.create"] -# bucket.test_iam_permissions(permissions) - - -# Q: cannot find the corresponding endpoint in the Retry API -def get_service_account_email(client, _preconditions): - client.get_service_account_email() - - -# Q: not hitting the errors from the instructions -def make_bucket_public(client, _preconditions, bucket): - bucket = client.bucket(bucket.name) - bucket.make_public() - - -def delete_blob(client, _preconditions, bucket, object): - bucket = client.bucket(bucket.name) - if _preconditions: - generation = object.generation - bucket.delete_blob(object.name, if_generation_match=generation) - else: - bucket.delete_blob(object.name) - - -# Q: 1) cannot lock a locked bucket 2) currently using default "bucket" with metageneration -def lock_retention_policy(client, _preconditions, bucket): - bucket2 = client.bucket(bucket.name) - bucket2.retention_period = 60 - bucket2.patch() - bucket2.lock_retention_policy() - - -# Method invocation mapping. Methods to retry. This is a map whose keys are a string describing a standard -# API call (e.g. storage.objects.get) and values are a list of functions which -# wrap library methods that implement these calls. There may be multiple values -# because multiple library methods may use the same call (e.g. 
get could be a -# read or just a metadata get). -method_mapping = { - # "storage.bucket_acl.get": [], # S1 start # no library method mapped - # "storage.bucket_acl.list": [], # no library method mapped - "storage.buckets.delete": [delete_bucket], - "storage.buckets.get": [get_bucket, reload_bucket], - "storage.buckets.getIamPolicy": [get_iam_policy], - "storage.buckets.insert": [create_bucket], - "storage.buckets.list": [list_buckets], - # "storage.buckets.lockRententionPolicy": [], # lock_retention_policy - # "storage.buckets.testIamPermission": [], # test_iam_permissions - "storage.default_object_acl.get": [], - "storage.default_object_acl.list": [], - # "storage.hmacKey.delete": [], # emulator project related endpoints wip - # "storage.hmacKey.list": [], # emulator project related endpoints wip - # "storage.hmacKey.get": [], # emulator project related endpoints wip - "storage.notifications.delete": [delete_notification], - "storage.notifications.get": [get_notification], - "storage.notifications.list": [list_notifications], - "storage.object_acl.get": [], - "storage.object_acl.list": [], - "storage.objects.get": [get_blob], - "storage.objects.list": [list_blobs], - # "storage.serviceaccount.get": [], # S1 end # emulator project related endpoints wip - "storage.buckets.patch": [], # S2 start - "storage.buckets.setIamPolicy": [], - "storage.buckets.update": [], - "storage.hmacKey.update": [], - "storage.objects.compose": [], - "storage.objects.copy": [], - "storage.objects.delete": [delete_blob], - "storage.objects.insert": [], - "storage.objects.patch": [update_blob], - "storage.objects.rewrite": [], - "storage.objects.update": [], # S2 end - "storage.notifications.insert": [create_notification], # S4 -} - -######################################################################################################################################## -### Helper Methods for Populating Resources ############################################################################################ -######################################################################################################################################## - - -def _populate_resource_bucket(client, resources): - bucket = client.bucket(uuid.uuid4().hex) - client.create_bucket(bucket) - resources["bucket"] = bucket - - -def _populate_resource_object(client, resources): - bucket_name = resources["bucket"].name - bucket = client.get_bucket(bucket_name) - blob = bucket.blob(uuid.uuid4().hex) - blob.upload_from_string("hello world") - blob.reload() - resources["object"] = blob - - -def _populate_resource_notification(client, resources): - bucket_name = resources["bucket"].name - bucket = client.get_bucket(bucket_name) - notification = bucket.notification() - notification.create() - notification.reload() - resources["notification"] = notification - - -def _populate_resource_hmackey(client, resources): - hmac_key, secret = client.create_hmac_key( - service_account_email=_CONF_TEST_SERVICE_ACCOUNT_EMAIL, - project_id=_CONF_TEST_PROJECT_ID, - ) - resources["hmac_key"] = hmac_key - - -resource_mapping = { - "BUCKET": _populate_resource_bucket, - "OBJECT": _populate_resource_object, - "NOTIFICATION": _populate_resource_notification, - "HMAC_KEY": _populate_resource_hmackey, -} - - -def _populate_resources(client, json_resource): - resources = {} +# bucket.delete() - for r in json_resource: - func = resource_mapping[r] - func(client, resources) - - return resources - - 
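The resource-population helpers above are driven by each test case's "resources" list: every entry (for example "BUCKET" or "OBJECT") is dispatched through resource_mapping, and the populated objects are later splatted into the wrapped library calls as keyword arguments. A minimal, illustrative sketch of that flow follows; the emulator endpoint and the anonymous credentials are assumptions for local testing and are not part of the patch itself:

    from google.auth.credentials import AnonymousCredentials
    from google.cloud import storage

    # Point a client at a locally running storage testbench (assumed address).
    client = storage.Client(
        project="my-project-id",
        credentials=AnonymousCredentials(),
        client_options={"api_endpoint": "http://localhost:9000"},
    )

    # Create a bucket and an object the same way the conformance runner would.
    resources = _populate_resources(client, ["BUCKET", "OBJECT"])

    # The dict keys ("bucket", "object") line up with the wrapper signatures,
    # so the dict can be passed straight through to a mapped method.
    get_blob(client, False, **resources)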
-######################################################################################################################################## -### Helper Methods for Emulator Retry API ############################################################################################## -######################################################################################################################################## - - -def _create_retry_test(host, method_name, instructions): - import json - - preflight_post_uri = host + "/retry_test" - headers = { - "Content-Type": "application/json", - } - data_dict = {"instructions": {method_name: instructions}} - data = json.dumps(data_dict) - r = requests.post(preflight_post_uri, headers=headers, data=data) - return r.json() - - -def _check_retry_test(host, id): - status_get_uri = "{base}{retry}/{id}".format(base=host, retry="/retry_test", id=id) - r = requests.get(status_get_uri) - return r.json() +# def get_iam_policy(client, _preconditions, bucket): +# bucket = client.bucket(bucket.name) +# bucket.get_iam_policy() -def _run_retry_test(host, id, func, _preconditions, **resources): - # Create client using x-retry-test-id header. - client = storage.Client(client_options={"api_endpoint": host}) - client._http.headers.update({"x-retry-test-id": id}) - func(client, _preconditions, **resources) +# # Q: error - fixture 'client' not found +# # def test_iam_permissions(client, _preconditions, bucket): +# # bucket = client.bucket(bucket.name) +# # permissions = ["storage.buckets.get", "storage.buckets.create"] +# # bucket.test_iam_permissions(permissions) -def _delete_retry_test(host, id): - status_get_uri = "{base}{retry}/{id}".format(base=host, retry="/retry_test", id=id) - requests.delete(status_get_uri) +# # Q: cannot find the corresponding endpoint in the Retry API +# def get_service_account_email(client, _preconditions): +# client.get_service_account_email() -######################################################################################################################################## -### Run Conformance Tests for Retry Strategy ########################################################################################### -######################################################################################################################################## +# # Q: not hitting the errors from the instructions +# def make_bucket_public(client, _preconditions, bucket): +# bucket = client.bucket(bucket.name) +# bucket.make_public() -@pytest.mark.parametrize("test_data", _CONFORMANCE_TESTS) -def test_conformance_retry_strategy(test_data): - host = _API_ACCESS_ENDPOINT - if host == _DEFAULT_STORAGE_HOST: - pytest.skip( - "This test must use the testbench emulator; set STORAGE_EMULATOR_HOST to run." - ) - # Create client to use for setup steps. - client = storage.Client(client_options={"api_endpoint": host}) - methods = test_data["methods"] - cases = test_data["cases"] - expect_success = test_data["expectSuccess"] - precondition_provided = test_data["preconditionProvided"] - for c in cases: - for m in methods: - # Extract method name and instructions to create retry test. - method_name = m["name"] - instructions = c["instructions"] - json_resources = m["resources"] - - if method_name not in method_mapping: - warnings.warn( - "No tests for operation {}".format(method_name), - UserWarning, - stacklevel=1, - ) - continue - - for function in method_mapping[method_name]: - # Create the retry test in the emulator to handle instructions. 
- try: - r = _create_retry_test(host, method_name, instructions) - id = r["id"] - except Exception as e: - warnings.warn( - "Error creating retry test for {}: {}".format(method_name, e), - UserWarning, - stacklevel=1, - ) - continue - - # Populate resources. - try: - resources = _populate_resources(client, json_resources) - except Exception as e: - warnings.warn( - "Error populating resources for {}: {}".format(method_name, e), - UserWarning, - stacklevel=1, - ) - continue - - # Run retry tests on library methods. - try: - _run_retry_test( - host, id, function, precondition_provided, **resources - ) - except Exception as e: - # Should we be catching specific exceptions - print(e) - success_results = False - else: - success_results = True - - # Assert expected success for each scenario. - assert expect_success == success_results - - # Verify that all instructions were used up during the test - # (indicates that the client sent the correct requests). - try: - status_response = _check_retry_test(host, id) - assert status_response["completed"] is True - except Exception as e: - warnings.warn( - "Error checking retry test status for {}: {}".format( - method_name, e - ), - UserWarning, - stacklevel=1, - ) - - # Clean up and close out test in emulator. - try: - _delete_retry_test(host, id) - except Exception as e: - warnings.warn( - "Error deleting retry test for {}: {}".format(method_name, e), - UserWarning, - stacklevel=1, - ) +# def delete_blob(client, _preconditions, bucket, object): +# bucket = client.bucket(bucket.name) +# if _preconditions: +# generation = object.generation +# bucket.delete_blob(object.name, if_generation_match=generation) +# else: +# bucket.delete_blob(object.name) + + +# # Q: 1) cannot lock a locked bucket 2) currently using default "bucket" with metageneration +# def lock_retention_policy(client, _preconditions, bucket): +# bucket2 = client.bucket(bucket.name) +# bucket2.retention_period = 60 +# bucket2.patch() +# bucket2.lock_retention_policy() + + +# # Method invocation mapping. Methods to retry. This is a map whose keys are a string describing a standard +# # API call (e.g. storage.objects.get) and values are a list of functions which +# # wrap library methods that implement these calls. There may be multiple values +# # because multiple library methods may use the same call (e.g. get could be a +# # read or just a metadata get). 
+# method_mapping = { +# # "storage.bucket_acl.get": [], # S1 start # no library method mapped +# # "storage.bucket_acl.list": [], # no library method mapped +# "storage.buckets.delete": [delete_bucket], +# "storage.buckets.get": [get_bucket, reload_bucket], +# "storage.buckets.getIamPolicy": [get_iam_policy], +# "storage.buckets.insert": [create_bucket], +# "storage.buckets.list": [list_buckets], +# # "storage.buckets.lockRententionPolicy": [], # lock_retention_policy +# # "storage.buckets.testIamPermission": [], # test_iam_permissions +# "storage.default_object_acl.get": [], +# "storage.default_object_acl.list": [], +# # "storage.hmacKey.delete": [], # emulator project related endpoints wip +# # "storage.hmacKey.list": [], # emulator project related endpoints wip +# # "storage.hmacKey.get": [], # emulator project related endpoints wip +# "storage.notifications.delete": [delete_notification], +# "storage.notifications.get": [get_notification], +# "storage.notifications.list": [list_notifications], +# "storage.object_acl.get": [], +# "storage.object_acl.list": [], +# "storage.objects.get": [get_blob], +# "storage.objects.list": [list_blobs], +# # "storage.serviceaccount.get": [], # S1 end # emulator project related endpoints wip +# "storage.buckets.patch": [], # S2 start +# "storage.buckets.setIamPolicy": [], +# "storage.buckets.update": [], +# "storage.hmacKey.update": [], +# "storage.objects.compose": [], +# "storage.objects.copy": [], +# "storage.objects.delete": [delete_blob], +# "storage.objects.insert": [], +# "storage.objects.patch": [update_blob], +# "storage.objects.rewrite": [], +# "storage.objects.update": [], # S2 end +# "storage.notifications.insert": [create_notification], # S4 +# } + +# ######################################################################################################################################## +# ### Helper Methods for Populating Resources ############################################################################################ +# ######################################################################################################################################## + + +# def _populate_resource_bucket(client, resources): +# bucket = client.bucket(uuid.uuid4().hex) +# client.create_bucket(bucket) +# resources["bucket"] = bucket + + +# def _populate_resource_object(client, resources): +# bucket_name = resources["bucket"].name +# bucket = client.get_bucket(bucket_name) +# blob = bucket.blob(uuid.uuid4().hex) +# blob.upload_from_string("hello world") +# blob.reload() +# resources["object"] = blob + + +# def _populate_resource_notification(client, resources): +# bucket_name = resources["bucket"].name +# bucket = client.get_bucket(bucket_name) +# notification = bucket.notification() +# notification.create() +# notification.reload() +# resources["notification"] = notification + + +# def _populate_resource_hmackey(client, resources): +# hmac_key, secret = client.create_hmac_key( +# service_account_email=_CONF_TEST_SERVICE_ACCOUNT_EMAIL, +# project_id=_CONF_TEST_PROJECT_ID, +# ) +# resources["hmac_key"] = hmac_key + + +# resource_mapping = { +# "BUCKET": _populate_resource_bucket, +# "OBJECT": _populate_resource_object, +# "NOTIFICATION": _populate_resource_notification, +# "HMAC_KEY": _populate_resource_hmackey, +# } + + +# def _populate_resources(client, json_resource): +# resources = {} + +# for r in json_resource: +# func = resource_mapping[r] +# func(client, resources) + +# return resources + + +# 
######################################################################################################################################## +# ### Helper Methods for Emulator Retry API ############################################################################################## +# ######################################################################################################################################## + + +# def _create_retry_test(host, method_name, instructions): +# import json + +# preflight_post_uri = host + "/retry_test" +# headers = { +# "Content-Type": "application/json", +# } +# data_dict = {"instructions": {method_name: instructions}} +# data = json.dumps(data_dict) +# r = requests.post(preflight_post_uri, headers=headers, data=data) +# return r.json() + + +# def _check_retry_test(host, id): +# status_get_uri = "{base}{retry}/{id}".format(base=host, retry="/retry_test", id=id) +# r = requests.get(status_get_uri) +# return r.json() + + +# def _run_retry_test(host, id, func, _preconditions, **resources): +# # Create client using x-retry-test-id header. +# client = storage.Client(client_options={"api_endpoint": host}) +# client._http.headers.update({"x-retry-test-id": id}) +# func(client, _preconditions, **resources) + + +# def _delete_retry_test(host, id): +# status_get_uri = "{base}{retry}/{id}".format(base=host, retry="/retry_test", id=id) +# requests.delete(status_get_uri) + + +# ######################################################################################################################################## +# ### Run Conformance Tests for Retry Strategy ########################################################################################### +# ######################################################################################################################################## + + +# @pytest.mark.parametrize("test_data", _CONFORMANCE_TESTS) +# def test_conformance_retry_strategy(test_data): +# host = _API_ACCESS_ENDPOINT +# if host == _DEFAULT_STORAGE_HOST: +# pytest.skip( +# "This test must use the testbench emulator; set STORAGE_EMULATOR_HOST to run." +# ) + +# # Create client to use for setup steps. +# client = storage.Client(client_options={"api_endpoint": host}) +# methods = test_data["methods"] +# cases = test_data["cases"] +# expect_success = test_data["expectSuccess"] +# precondition_provided = test_data["preconditionProvided"] +# for c in cases: +# for m in methods: +# # Extract method name and instructions to create retry test. +# method_name = m["name"] +# instructions = c["instructions"] +# json_resources = m["resources"] + +# if method_name not in method_mapping: +# warnings.warn( +# "No tests for operation {}".format(method_name), +# UserWarning, +# stacklevel=1, +# ) +# continue + +# for function in method_mapping[method_name]: +# # Create the retry test in the emulator to handle instructions. +# try: +# r = _create_retry_test(host, method_name, instructions) +# id = r["id"] +# except Exception as e: +# warnings.warn( +# "Error creating retry test for {}: {}".format(method_name, e), +# UserWarning, +# stacklevel=1, +# ) +# continue + +# # Populate resources. +# try: +# resources = _populate_resources(client, json_resources) +# except Exception as e: +# warnings.warn( +# "Error populating resources for {}: {}".format(method_name, e), +# UserWarning, +# stacklevel=1, +# ) +# continue + +# # Run retry tests on library methods. 
+# try: +# _run_retry_test( +# host, id, function, precondition_provided, **resources +# ) +# except Exception as e: +# # Should we be catching specific exceptions +# print(e) +# success_results = False +# else: +# success_results = True + +# # Assert expected success for each scenario. +# assert expect_success == success_results + +# # Verify that all instructions were used up during the test +# # (indicates that the client sent the correct requests). +# try: +# status_response = _check_retry_test(host, id) +# assert status_response["completed"] is True +# except Exception as e: +# warnings.warn( +# "Error checking retry test status for {}: {}".format( +# method_name, e +# ), +# UserWarning, +# stacklevel=1, +# ) + +# # Clean up and close out test in emulator. +# try: +# _delete_retry_test(host, id) +# except Exception as e: +# warnings.warn( +# "Error deleting retry test for {}: {}".format(method_name, e), +# UserWarning, +# stacklevel=1, +# ) From cf092fc13f1eaf0179199189c29f6c725919b630 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Tue, 1 Jun 2021 14:19:18 -0700 Subject: [PATCH 27/45] update noxfile --- noxfile.py | 48 +++ tests/conformance/__init__.py | 24 ++ tests/unit/test_retry.py | 676 +++++++++++++++++----------------- 3 files changed, 410 insertions(+), 338 deletions(-) create mode 100644 tests/conformance/__init__.py diff --git a/noxfile.py b/noxfile.py index 0b85dc8b0..fd625ac2d 100644 --- a/noxfile.py +++ b/noxfile.py @@ -29,6 +29,9 @@ DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"] UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] +CONFORMANCE_TEST_PYTHON_VERSIONS = ["2.7", "3.8"] + +_DEFAULT_STORAGE_HOST = "https://storage.googleapis.com" @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -139,6 +142,51 @@ def system(session): session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) +@nox.session(python=CONFORMANCE_TEST_PYTHON_VERSIONS) +def conformance(session): + """Run the conformance test suite.""" + conformance_test_path = os.path.join("tests", "conformance.py") + conformance_test_folder_path = os.path.join("tests", "conformance") + + # Environment check: Only run tests if the STORAGE_EMULATOR_HOST is set. + if ( + os.environ.get("STORAGE_EMULATOR_HOST", _DEFAULT_STORAGE_HOST) + == _DEFAULT_STORAGE_HOST + ): + session.skip("Set STORAGE_EMULATOR_HOST to run, skipping") + # Environment check: Only run tests if the environment variable is set. + if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): + session.skip("Credentials must be set via environment variable") + + conformance_test_exists = os.path.exists(conformance_test_path) + conformance_test_folder_exists = os.path.exists(conformance_test_folder_path) + # Environment check: only run tests if found. + if not conformance_test_exists and not conformance_test_folder_exists: + session.skip("Conformance tests were not found") + + # Install all test dependencies, then install this package into the + # virtualenv's dist-packages. + # 2021-05-06: defer installing 'google-cloud-*' to after this package, + # in order to work around Python 2.7 googolapis-common-protos + # issue. + session.install("pytest",) + session.install("-e", ".") + session.install( + "google-cloud-testutils", + "google-cloud-iam", + "google-cloud-pubsub < 2.0.0", + "google-cloud-kms < 2.0dev", + ) + + # Run py.test against the conformance tests. 
+ if conformance_test_exists: + session.run("py.test", "--quiet", conformance_test_path, *session.posargs) + if conformance_test_folder_exists: + session.run( + "py.test", "--quiet", conformance_test_folder_path, *session.posargs + ) + + @nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. diff --git a/tests/conformance/__init__.py b/tests/conformance/__init__.py new file mode 100644 index 000000000..a864e9eae --- /dev/null +++ b/tests/conformance/__init__.py @@ -0,0 +1,24 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import io +import json +import os + + +def _read_local_json(json_file): + here = os.path.dirname(__file__) + json_path = os.path.abspath(os.path.join(here, json_file)) + with io.open(json_path, "r", encoding="utf-8-sig") as fileobj: + return json.load(fileobj) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index b963c0c45..085d715dc 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -291,385 +291,385 @@ def test_is_meta_or_etag_in_json_invalid(self): self.assertEqual(policy, None) -# # ToDo: Confirm what are the credentials required. Can we use the same service account created for url_signer_v4_test_account? -# _FAKE_SERVICE_ACCOUNT = None +# ToDo: Confirm what are the credentials required. Can we use the same service account created for url_signer_v4_test_account? +_FAKE_SERVICE_ACCOUNT = None -# def fake_service_account(): -# global _FAKE_SERVICE_ACCOUNT -# # validate and set fake service account +def fake_service_account(): + global _FAKE_SERVICE_ACCOUNT + # validate and set fake service account -# # ToDo: Confirm what are the credentials required. Can we use the same service account created for url_signer_v4_test_account? ) -# # _SERVICE_ACCOUNT_JSON = _read_local_json("") -# _CONFORMANCE_TESTS = _read_local_json("retry_strategy_test_data.json")[ -# "retryStrategyTests" -# ] -# # ToDo: Confirm the correct access endpoint. -# _API_ACCESS_ENDPOINT = _helpers._get_storage_host() -# _DEFAULT_STORAGE_HOST = u"https://storage.googleapis.com" -# _CONF_TEST_PROJECT_ID = "my-project-id" -# _CONF_TEST_SERVICE_ACCOUNT_EMAIL = ( -# "my-service-account@my-project-id.iam.gserviceaccount.com" -# ) +# ToDo: Confirm what are the credentials required. Can we use the same service account created for url_signer_v4_test_account? ) +# _SERVICE_ACCOUNT_JSON = _read_local_json("") +_CONFORMANCE_TESTS = _read_local_json("retry_strategy_test_data.json")[ + "retryStrategyTests" +] +# ToDo: Confirm the correct access endpoint. 
+_API_ACCESS_ENDPOINT = _helpers._get_storage_host() +_DEFAULT_STORAGE_HOST = u"https://storage.googleapis.com" +_CONF_TEST_PROJECT_ID = "my-project-id" +_CONF_TEST_SERVICE_ACCOUNT_EMAIL = ( + "my-service-account@my-project-id.iam.gserviceaccount.com" +) -# ######################################################################################################################################## -# ### Library methods for mapping ######################################################################################################## -# ######################################################################################################################################## +######################################################################################################################################## +### Library methods for mapping ######################################################################################################## +######################################################################################################################################## -# def list_buckets(client, _preconditions, **_): -# buckets = client.list_buckets() -# for b in buckets: -# break +def list_buckets(client, _preconditions, **_): + buckets = client.list_buckets() + for b in buckets: + break -# def list_blobs(client, _preconditions, bucket, **_): -# blobs = client.list_blobs(bucket.name) -# for b in blobs: -# break +def list_blobs(client, _preconditions, bucket, **_): + blobs = client.list_blobs(bucket.name) + for b in blobs: + break -# def get_blob(client, _preconditions, bucket, object): -# bucket = client.bucket(bucket.name) -# bucket.get_blob(object.name) +def get_blob(client, _preconditions, bucket, object): + bucket = client.bucket(bucket.name) + bucket.get_blob(object.name) -# def reload_bucket(client, _preconditions, bucket): -# bucket = client.bucket(bucket.name) -# bucket.reload() +def reload_bucket(client, _preconditions, bucket): + bucket = client.bucket(bucket.name) + bucket.reload() -# def get_bucket(client, _preconditions, bucket): -# client.get_bucket(bucket.name) +def get_bucket(client, _preconditions, bucket): + client.get_bucket(bucket.name) -# def update_blob(client, _preconditions, bucket, object): -# bucket = client.bucket(bucket.name) -# blob = bucket.blob(object.name) -# metadata = {"foo": "bar"} -# blob.metadata = metadata -# if _preconditions: -# metageneration = object.metageneration -# blob.patch(if_metageneration_match=metageneration) -# else: -# blob.patch() +def update_blob(client, _preconditions, bucket, object): + bucket = client.bucket(bucket.name) + blob = bucket.blob(object.name) + metadata = {"foo": "bar"} + blob.metadata = metadata + if _preconditions: + metageneration = object.metageneration + blob.patch(if_metageneration_match=metageneration) + else: + blob.patch() -# def create_bucket(client, _preconditions): -# bucket = client.bucket(uuid.uuid4().hex) -# client.create_bucket(bucket) +def create_bucket(client, _preconditions): + bucket = client.bucket(uuid.uuid4().hex) + client.create_bucket(bucket) -# # Q!!! upload_from_string did not retry. -# def upload_from_string(client, _preconditions, bucket): -# bucket = client.get_bucket(bucket.name) -# blob = bucket.blob(uuid.uuid4().hex) -# blob.upload_from_string("upload from string") +# Q!!! upload_from_string did not retry. 
+def upload_from_string(client, _preconditions, bucket): + bucket = client.get_bucket(bucket.name) + blob = bucket.blob(uuid.uuid4().hex) + blob.upload_from_string("upload from string") -# def create_notification(client, _preconditions, bucket): -# bucket = client.get_bucket(bucket.name) -# notification = bucket.notification() -# notification.create() +def create_notification(client, _preconditions, bucket): + bucket = client.get_bucket(bucket.name) + notification = bucket.notification() + notification.create() -# def list_notifications(client, _preconditions, bucket, **_): -# bucket = client.get_bucket(bucket.name) -# notifications = bucket.list_notifications() -# for n in notifications: -# break +def list_notifications(client, _preconditions, bucket, **_): + bucket = client.get_bucket(bucket.name) + notifications = bucket.list_notifications() + for n in notifications: + break -# def get_notification(client, _preconditions, bucket, notification): -# client.bucket(bucket.name).get_notification(notification.notification_id) +def get_notification(client, _preconditions, bucket, notification): + client.bucket(bucket.name).get_notification(notification.notification_id) -# def delete_notification(client, _preconditions, bucket, notification): -# notification = client.bucket(bucket.name).get_notification( -# notification.notification_id -# ) -# notification.delete() +def delete_notification(client, _preconditions, bucket, notification): + notification = client.bucket(bucket.name).get_notification( + notification.notification_id + ) + notification.delete() -# # Q!!! are there hmacKeys retryable endpoints in the emulator? -# def list_hmac_keys(client, _preconditions, **_): -# hmac_keys = client.list_hmac_keys() -# for k in hmac_keys: -# break +# Q!!! are there hmacKeys retryable endpoints in the emulator? +def list_hmac_keys(client, _preconditions, **_): + hmac_keys = client.list_hmac_keys() + for k in hmac_keys: + break -# def delete_bucket(client, _preconditions, bucket): -# bucket = client.bucket(bucket.name) -# bucket.delete() +def delete_bucket(client, _preconditions, bucket): + bucket = client.bucket(bucket.name) + bucket.delete() + + +def get_iam_policy(client, _preconditions, bucket): + bucket = client.bucket(bucket.name) + bucket.get_iam_policy() -# def get_iam_policy(client, _preconditions, bucket): +# Q: error - fixture 'client' not found +# def test_iam_permissions(client, _preconditions, bucket): # bucket = client.bucket(bucket.name) -# bucket.get_iam_policy() +# permissions = ["storage.buckets.get", "storage.buckets.create"] +# bucket.test_iam_permissions(permissions) + + +# Q: cannot find the corresponding endpoint in the Retry API +def get_service_account_email(client, _preconditions): + client.get_service_account_email() + + +# Q: not hitting the errors from the instructions +def make_bucket_public(client, _preconditions, bucket): + bucket = client.bucket(bucket.name) + bucket.make_public() + + +def delete_blob(client, _preconditions, bucket, object): + bucket = client.bucket(bucket.name) + if _preconditions: + generation = object.generation + bucket.delete_blob(object.name, if_generation_match=generation) + else: + bucket.delete_blob(object.name) + + +# Q: 1) cannot lock a locked bucket 2) currently using default "bucket" with metageneration +def lock_retention_policy(client, _preconditions, bucket): + bucket2 = client.bucket(bucket.name) + bucket2.retention_period = 60 + bucket2.patch() + bucket2.lock_retention_policy() + + +# Method invocation mapping. Methods to retry. 
This is a map whose keys are a string describing a standard +# API call (e.g. storage.objects.get) and values are a list of functions which +# wrap library methods that implement these calls. There may be multiple values +# because multiple library methods may use the same call (e.g. get could be a +# read or just a metadata get). +method_mapping = { + # "storage.bucket_acl.get": [], # S1 start # no library method mapped + # "storage.bucket_acl.list": [], # no library method mapped + "storage.buckets.delete": [delete_bucket], + "storage.buckets.get": [get_bucket, reload_bucket], + "storage.buckets.getIamPolicy": [get_iam_policy], + "storage.buckets.insert": [create_bucket], + "storage.buckets.list": [list_buckets], + # "storage.buckets.lockRententionPolicy": [], # lock_retention_policy + # "storage.buckets.testIamPermission": [], # test_iam_permissions + "storage.default_object_acl.get": [], + "storage.default_object_acl.list": [], + # "storage.hmacKey.delete": [], # emulator project related endpoints wip + # "storage.hmacKey.list": [], # emulator project related endpoints wip + # "storage.hmacKey.get": [], # emulator project related endpoints wip + "storage.notifications.delete": [delete_notification], + "storage.notifications.get": [get_notification], + "storage.notifications.list": [list_notifications], + "storage.object_acl.get": [], + "storage.object_acl.list": [], + "storage.objects.get": [get_blob], + "storage.objects.list": [list_blobs], + # "storage.serviceaccount.get": [], # S1 end # emulator project related endpoints wip + "storage.buckets.patch": [], # S2 start + "storage.buckets.setIamPolicy": [], + "storage.buckets.update": [], + "storage.hmacKey.update": [], + "storage.objects.compose": [], + "storage.objects.copy": [], + "storage.objects.delete": [delete_blob], + "storage.objects.insert": [], + "storage.objects.patch": [update_blob], + "storage.objects.rewrite": [], + "storage.objects.update": [], # S2 end + "storage.notifications.insert": [create_notification], # S4 +} + +######################################################################################################################################## +### Helper Methods for Populating Resources ############################################################################################ +######################################################################################################################################## + + +def _populate_resource_bucket(client, resources): + bucket = client.bucket(uuid.uuid4().hex) + client.create_bucket(bucket) + resources["bucket"] = bucket + + +def _populate_resource_object(client, resources): + bucket_name = resources["bucket"].name + bucket = client.get_bucket(bucket_name) + blob = bucket.blob(uuid.uuid4().hex) + blob.upload_from_string("hello world") + blob.reload() + resources["object"] = blob + + +def _populate_resource_notification(client, resources): + bucket_name = resources["bucket"].name + bucket = client.get_bucket(bucket_name) + notification = bucket.notification() + notification.create() + notification.reload() + resources["notification"] = notification + + +def _populate_resource_hmackey(client, resources): + hmac_key, secret = client.create_hmac_key( + service_account_email=_CONF_TEST_SERVICE_ACCOUNT_EMAIL, + project_id=_CONF_TEST_PROJECT_ID, + ) + resources["hmac_key"] = hmac_key -# # Q: error - fixture 'client' not found -# # def test_iam_permissions(client, _preconditions, bucket): -# # bucket = client.bucket(bucket.name) -# # permissions = 
["storage.buckets.get", "storage.buckets.create"] -# # bucket.test_iam_permissions(permissions) +resource_mapping = { + "BUCKET": _populate_resource_bucket, + "OBJECT": _populate_resource_object, + "NOTIFICATION": _populate_resource_notification, + "HMAC_KEY": _populate_resource_hmackey, +} -# # Q: cannot find the corresponding endpoint in the Retry API -# def get_service_account_email(client, _preconditions): -# client.get_service_account_email() +def _populate_resources(client, json_resource): + resources = {} + for r in json_resource: + func = resource_mapping[r] + func(client, resources) -# # Q: not hitting the errors from the instructions -# def make_bucket_public(client, _preconditions, bucket): -# bucket = client.bucket(bucket.name) -# bucket.make_public() + return resources -# def delete_blob(client, _preconditions, bucket, object): -# bucket = client.bucket(bucket.name) -# if _preconditions: -# generation = object.generation -# bucket.delete_blob(object.name, if_generation_match=generation) -# else: -# bucket.delete_blob(object.name) - - -# # Q: 1) cannot lock a locked bucket 2) currently using default "bucket" with metageneration -# def lock_retention_policy(client, _preconditions, bucket): -# bucket2 = client.bucket(bucket.name) -# bucket2.retention_period = 60 -# bucket2.patch() -# bucket2.lock_retention_policy() - - -# # Method invocation mapping. Methods to retry. This is a map whose keys are a string describing a standard -# # API call (e.g. storage.objects.get) and values are a list of functions which -# # wrap library methods that implement these calls. There may be multiple values -# # because multiple library methods may use the same call (e.g. get could be a -# # read or just a metadata get). -# method_mapping = { -# # "storage.bucket_acl.get": [], # S1 start # no library method mapped -# # "storage.bucket_acl.list": [], # no library method mapped -# "storage.buckets.delete": [delete_bucket], -# "storage.buckets.get": [get_bucket, reload_bucket], -# "storage.buckets.getIamPolicy": [get_iam_policy], -# "storage.buckets.insert": [create_bucket], -# "storage.buckets.list": [list_buckets], -# # "storage.buckets.lockRententionPolicy": [], # lock_retention_policy -# # "storage.buckets.testIamPermission": [], # test_iam_permissions -# "storage.default_object_acl.get": [], -# "storage.default_object_acl.list": [], -# # "storage.hmacKey.delete": [], # emulator project related endpoints wip -# # "storage.hmacKey.list": [], # emulator project related endpoints wip -# # "storage.hmacKey.get": [], # emulator project related endpoints wip -# "storage.notifications.delete": [delete_notification], -# "storage.notifications.get": [get_notification], -# "storage.notifications.list": [list_notifications], -# "storage.object_acl.get": [], -# "storage.object_acl.list": [], -# "storage.objects.get": [get_blob], -# "storage.objects.list": [list_blobs], -# # "storage.serviceaccount.get": [], # S1 end # emulator project related endpoints wip -# "storage.buckets.patch": [], # S2 start -# "storage.buckets.setIamPolicy": [], -# "storage.buckets.update": [], -# "storage.hmacKey.update": [], -# "storage.objects.compose": [], -# "storage.objects.copy": [], -# "storage.objects.delete": [delete_blob], -# "storage.objects.insert": [], -# "storage.objects.patch": [update_blob], -# "storage.objects.rewrite": [], -# "storage.objects.update": [], # S2 end -# "storage.notifications.insert": [create_notification], # S4 -# } - -# 
######################################################################################################################################## -# ### Helper Methods for Populating Resources ############################################################################################ -# ######################################################################################################################################## - - -# def _populate_resource_bucket(client, resources): -# bucket = client.bucket(uuid.uuid4().hex) -# client.create_bucket(bucket) -# resources["bucket"] = bucket - - -# def _populate_resource_object(client, resources): -# bucket_name = resources["bucket"].name -# bucket = client.get_bucket(bucket_name) -# blob = bucket.blob(uuid.uuid4().hex) -# blob.upload_from_string("hello world") -# blob.reload() -# resources["object"] = blob - - -# def _populate_resource_notification(client, resources): -# bucket_name = resources["bucket"].name -# bucket = client.get_bucket(bucket_name) -# notification = bucket.notification() -# notification.create() -# notification.reload() -# resources["notification"] = notification - - -# def _populate_resource_hmackey(client, resources): -# hmac_key, secret = client.create_hmac_key( -# service_account_email=_CONF_TEST_SERVICE_ACCOUNT_EMAIL, -# project_id=_CONF_TEST_PROJECT_ID, -# ) -# resources["hmac_key"] = hmac_key - - -# resource_mapping = { -# "BUCKET": _populate_resource_bucket, -# "OBJECT": _populate_resource_object, -# "NOTIFICATION": _populate_resource_notification, -# "HMAC_KEY": _populate_resource_hmackey, -# } - - -# def _populate_resources(client, json_resource): -# resources = {} - -# for r in json_resource: -# func = resource_mapping[r] -# func(client, resources) - -# return resources - - -# ######################################################################################################################################## -# ### Helper Methods for Emulator Retry API ############################################################################################## -# ######################################################################################################################################## - - -# def _create_retry_test(host, method_name, instructions): -# import json - -# preflight_post_uri = host + "/retry_test" -# headers = { -# "Content-Type": "application/json", -# } -# data_dict = {"instructions": {method_name: instructions}} -# data = json.dumps(data_dict) -# r = requests.post(preflight_post_uri, headers=headers, data=data) -# return r.json() - - -# def _check_retry_test(host, id): -# status_get_uri = "{base}{retry}/{id}".format(base=host, retry="/retry_test", id=id) -# r = requests.get(status_get_uri) -# return r.json() - - -# def _run_retry_test(host, id, func, _preconditions, **resources): -# # Create client using x-retry-test-id header. 
-# client = storage.Client(client_options={"api_endpoint": host}) -# client._http.headers.update({"x-retry-test-id": id}) -# func(client, _preconditions, **resources) - - -# def _delete_retry_test(host, id): -# status_get_uri = "{base}{retry}/{id}".format(base=host, retry="/retry_test", id=id) -# requests.delete(status_get_uri) - - -# ######################################################################################################################################## -# ### Run Conformance Tests for Retry Strategy ########################################################################################### -# ######################################################################################################################################## - - -# @pytest.mark.parametrize("test_data", _CONFORMANCE_TESTS) -# def test_conformance_retry_strategy(test_data): -# host = _API_ACCESS_ENDPOINT -# if host == _DEFAULT_STORAGE_HOST: -# pytest.skip( -# "This test must use the testbench emulator; set STORAGE_EMULATOR_HOST to run." -# ) - -# # Create client to use for setup steps. -# client = storage.Client(client_options={"api_endpoint": host}) -# methods = test_data["methods"] -# cases = test_data["cases"] -# expect_success = test_data["expectSuccess"] -# precondition_provided = test_data["preconditionProvided"] -# for c in cases: -# for m in methods: -# # Extract method name and instructions to create retry test. -# method_name = m["name"] -# instructions = c["instructions"] -# json_resources = m["resources"] - -# if method_name not in method_mapping: -# warnings.warn( -# "No tests for operation {}".format(method_name), -# UserWarning, -# stacklevel=1, -# ) -# continue - -# for function in method_mapping[method_name]: -# # Create the retry test in the emulator to handle instructions. -# try: -# r = _create_retry_test(host, method_name, instructions) -# id = r["id"] -# except Exception as e: -# warnings.warn( -# "Error creating retry test for {}: {}".format(method_name, e), -# UserWarning, -# stacklevel=1, -# ) -# continue - -# # Populate resources. -# try: -# resources = _populate_resources(client, json_resources) -# except Exception as e: -# warnings.warn( -# "Error populating resources for {}: {}".format(method_name, e), -# UserWarning, -# stacklevel=1, -# ) -# continue - -# # Run retry tests on library methods. -# try: -# _run_retry_test( -# host, id, function, precondition_provided, **resources -# ) -# except Exception as e: -# # Should we be catching specific exceptions -# print(e) -# success_results = False -# else: -# success_results = True - -# # Assert expected success for each scenario. -# assert expect_success == success_results - -# # Verify that all instructions were used up during the test -# # (indicates that the client sent the correct requests). -# try: -# status_response = _check_retry_test(host, id) -# assert status_response["completed"] is True -# except Exception as e: -# warnings.warn( -# "Error checking retry test status for {}: {}".format( -# method_name, e -# ), -# UserWarning, -# stacklevel=1, -# ) - -# # Clean up and close out test in emulator. 
-# try: -# _delete_retry_test(host, id) -# except Exception as e: -# warnings.warn( -# "Error deleting retry test for {}: {}".format(method_name, e), -# UserWarning, -# stacklevel=1, -# ) +######################################################################################################################################## +### Helper Methods for Emulator Retry API ############################################################################################## +######################################################################################################################################## + + +def _create_retry_test(host, method_name, instructions): + import json + + preflight_post_uri = host + "/retry_test" + headers = { + "Content-Type": "application/json", + } + data_dict = {"instructions": {method_name: instructions}} + data = json.dumps(data_dict) + r = requests.post(preflight_post_uri, headers=headers, data=data) + return r.json() + + +def _check_retry_test(host, id): + status_get_uri = "{base}{retry}/{id}".format(base=host, retry="/retry_test", id=id) + r = requests.get(status_get_uri) + return r.json() + + +def _run_retry_test(host, id, func, _preconditions, **resources): + # Create client using x-retry-test-id header. + client = storage.Client(client_options={"api_endpoint": host}) + client._http.headers.update({"x-retry-test-id": id}) + func(client, _preconditions, **resources) + + +def _delete_retry_test(host, id): + status_get_uri = "{base}{retry}/{id}".format(base=host, retry="/retry_test", id=id) + requests.delete(status_get_uri) + + +######################################################################################################################################## +### Run Conformance Tests for Retry Strategy ########################################################################################### +######################################################################################################################################## + + +@pytest.mark.parametrize("test_data", _CONFORMANCE_TESTS) +def test_conformance_retry_strategy(test_data): + host = _API_ACCESS_ENDPOINT + if host == _DEFAULT_STORAGE_HOST: + pytest.skip( + "This test must use the testbench emulator; set STORAGE_EMULATOR_HOST to run." + ) + + # Create client to use for setup steps. + client = storage.Client(client_options={"api_endpoint": host}) + methods = test_data["methods"] + cases = test_data["cases"] + expect_success = test_data["expectSuccess"] + precondition_provided = test_data["preconditionProvided"] + for c in cases: + for m in methods: + # Extract method name and instructions to create retry test. + method_name = m["name"] + instructions = c["instructions"] + json_resources = m["resources"] + + if method_name not in method_mapping: + warnings.warn( + "No tests for operation {}".format(method_name), + UserWarning, + stacklevel=1, + ) + continue + + for function in method_mapping[method_name]: + # Create the retry test in the emulator to handle instructions. + try: + r = _create_retry_test(host, method_name, instructions) + id = r["id"] + except Exception as e: + warnings.warn( + "Error creating retry test for {}: {}".format(method_name, e), + UserWarning, + stacklevel=1, + ) + continue + + # Populate resources. + try: + resources = _populate_resources(client, json_resources) + except Exception as e: + warnings.warn( + "Error populating resources for {}: {}".format(method_name, e), + UserWarning, + stacklevel=1, + ) + continue + + # Run retry tests on library methods. 
+ try: + _run_retry_test( + host, id, function, precondition_provided, **resources + ) + except Exception as e: + # Should we be catching specific exceptions + print(e) + success_results = False + else: + success_results = True + + # Assert expected success for each scenario. + assert expect_success == success_results + + # Verify that all instructions were used up during the test + # (indicates that the client sent the correct requests). + try: + status_response = _check_retry_test(host, id) + assert status_response["completed"] is True + except Exception as e: + warnings.warn( + "Error checking retry test status for {}: {}".format( + method_name, e + ), + UserWarning, + stacklevel=1, + ) + + # Clean up and close out test in emulator. + try: + _delete_retry_test(host, id) + except Exception as e: + warnings.warn( + "Error deleting retry test for {}: {}".format(method_name, e), + UserWarning, + stacklevel=1, + ) From 95a3cc8f5d14b38e4a8f1bb0c81ee5e230dcb13b Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Tue, 1 Jun 2021 14:36:36 -0700 Subject: [PATCH 28/45] relocate conformance tests --- tests/conformance/test_conformance.py | 1 - tests/unit/test_retry.py | 393 -------------------------- 2 files changed, 394 deletions(-) diff --git a/tests/conformance/test_conformance.py b/tests/conformance/test_conformance.py index 317a3c9fb..c637b7839 100644 --- a/tests/conformance/test_conformance.py +++ b/tests/conformance/test_conformance.py @@ -26,7 +26,6 @@ # http.client.HTTPConnection.debuglevel=5 - # ToDo: Confirm what are the credentials required. Can we use the same service account created for url_signer_v4_test_account? _FAKE_SERVICE_ACCOUNT = None diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 085d715dc..3111584cb 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -13,19 +13,10 @@ # limitations under the License. import unittest -import uuid -from google.cloud import storage from google.cloud.storage import _helpers -from . import _read_local_json - import mock -import pytest -import requests -import warnings - -# http.client.HTTPConnection.debuglevel=5 try: @@ -289,387 +280,3 @@ def test_is_meta_or_etag_in_json_invalid(self): query_params={"ifGenerationMatch": 1}, data="I am invalid JSON!" ) self.assertEqual(policy, None) - - -# ToDo: Confirm what are the credentials required. Can we use the same service account created for url_signer_v4_test_account? -_FAKE_SERVICE_ACCOUNT = None - - -def fake_service_account(): - global _FAKE_SERVICE_ACCOUNT - # validate and set fake service account - - -# ToDo: Confirm what are the credentials required. Can we use the same service account created for url_signer_v4_test_account? ) -# _SERVICE_ACCOUNT_JSON = _read_local_json("") -_CONFORMANCE_TESTS = _read_local_json("retry_strategy_test_data.json")[ - "retryStrategyTests" -] -# ToDo: Confirm the correct access endpoint. 
-_API_ACCESS_ENDPOINT = _helpers._get_storage_host() -_DEFAULT_STORAGE_HOST = u"https://storage.googleapis.com" -_CONF_TEST_PROJECT_ID = "my-project-id" -_CONF_TEST_SERVICE_ACCOUNT_EMAIL = ( - "my-service-account@my-project-id.iam.gserviceaccount.com" -) - -######################################################################################################################################## -### Library methods for mapping ######################################################################################################## -######################################################################################################################################## - - -def list_buckets(client, _preconditions, **_): - buckets = client.list_buckets() - for b in buckets: - break - - -def list_blobs(client, _preconditions, bucket, **_): - blobs = client.list_blobs(bucket.name) - for b in blobs: - break - - -def get_blob(client, _preconditions, bucket, object): - bucket = client.bucket(bucket.name) - bucket.get_blob(object.name) - - -def reload_bucket(client, _preconditions, bucket): - bucket = client.bucket(bucket.name) - bucket.reload() - - -def get_bucket(client, _preconditions, bucket): - client.get_bucket(bucket.name) - - -def update_blob(client, _preconditions, bucket, object): - bucket = client.bucket(bucket.name) - blob = bucket.blob(object.name) - metadata = {"foo": "bar"} - blob.metadata = metadata - if _preconditions: - metageneration = object.metageneration - blob.patch(if_metageneration_match=metageneration) - else: - blob.patch() - - -def create_bucket(client, _preconditions): - bucket = client.bucket(uuid.uuid4().hex) - client.create_bucket(bucket) - - -# Q!!! upload_from_string did not retry. -def upload_from_string(client, _preconditions, bucket): - bucket = client.get_bucket(bucket.name) - blob = bucket.blob(uuid.uuid4().hex) - blob.upload_from_string("upload from string") - - -def create_notification(client, _preconditions, bucket): - bucket = client.get_bucket(bucket.name) - notification = bucket.notification() - notification.create() - - -def list_notifications(client, _preconditions, bucket, **_): - bucket = client.get_bucket(bucket.name) - notifications = bucket.list_notifications() - for n in notifications: - break - - -def get_notification(client, _preconditions, bucket, notification): - client.bucket(bucket.name).get_notification(notification.notification_id) - - -def delete_notification(client, _preconditions, bucket, notification): - notification = client.bucket(bucket.name).get_notification( - notification.notification_id - ) - notification.delete() - - -# Q!!! are there hmacKeys retryable endpoints in the emulator? 
-def list_hmac_keys(client, _preconditions, **_): - hmac_keys = client.list_hmac_keys() - for k in hmac_keys: - break - - -def delete_bucket(client, _preconditions, bucket): - bucket = client.bucket(bucket.name) - bucket.delete() - - -def get_iam_policy(client, _preconditions, bucket): - bucket = client.bucket(bucket.name) - bucket.get_iam_policy() - - -# Q: error - fixture 'client' not found -# def test_iam_permissions(client, _preconditions, bucket): -# bucket = client.bucket(bucket.name) -# permissions = ["storage.buckets.get", "storage.buckets.create"] -# bucket.test_iam_permissions(permissions) - - -# Q: cannot find the corresponding endpoint in the Retry API -def get_service_account_email(client, _preconditions): - client.get_service_account_email() - - -# Q: not hitting the errors from the instructions -def make_bucket_public(client, _preconditions, bucket): - bucket = client.bucket(bucket.name) - bucket.make_public() - - -def delete_blob(client, _preconditions, bucket, object): - bucket = client.bucket(bucket.name) - if _preconditions: - generation = object.generation - bucket.delete_blob(object.name, if_generation_match=generation) - else: - bucket.delete_blob(object.name) - - -# Q: 1) cannot lock a locked bucket 2) currently using default "bucket" with metageneration -def lock_retention_policy(client, _preconditions, bucket): - bucket2 = client.bucket(bucket.name) - bucket2.retention_period = 60 - bucket2.patch() - bucket2.lock_retention_policy() - - -# Method invocation mapping. Methods to retry. This is a map whose keys are a string describing a standard -# API call (e.g. storage.objects.get) and values are a list of functions which -# wrap library methods that implement these calls. There may be multiple values -# because multiple library methods may use the same call (e.g. get could be a -# read or just a metadata get). 
-method_mapping = { - # "storage.bucket_acl.get": [], # S1 start # no library method mapped - # "storage.bucket_acl.list": [], # no library method mapped - "storage.buckets.delete": [delete_bucket], - "storage.buckets.get": [get_bucket, reload_bucket], - "storage.buckets.getIamPolicy": [get_iam_policy], - "storage.buckets.insert": [create_bucket], - "storage.buckets.list": [list_buckets], - # "storage.buckets.lockRententionPolicy": [], # lock_retention_policy - # "storage.buckets.testIamPermission": [], # test_iam_permissions - "storage.default_object_acl.get": [], - "storage.default_object_acl.list": [], - # "storage.hmacKey.delete": [], # emulator project related endpoints wip - # "storage.hmacKey.list": [], # emulator project related endpoints wip - # "storage.hmacKey.get": [], # emulator project related endpoints wip - "storage.notifications.delete": [delete_notification], - "storage.notifications.get": [get_notification], - "storage.notifications.list": [list_notifications], - "storage.object_acl.get": [], - "storage.object_acl.list": [], - "storage.objects.get": [get_blob], - "storage.objects.list": [list_blobs], - # "storage.serviceaccount.get": [], # S1 end # emulator project related endpoints wip - "storage.buckets.patch": [], # S2 start - "storage.buckets.setIamPolicy": [], - "storage.buckets.update": [], - "storage.hmacKey.update": [], - "storage.objects.compose": [], - "storage.objects.copy": [], - "storage.objects.delete": [delete_blob], - "storage.objects.insert": [], - "storage.objects.patch": [update_blob], - "storage.objects.rewrite": [], - "storage.objects.update": [], # S2 end - "storage.notifications.insert": [create_notification], # S4 -} - -######################################################################################################################################## -### Helper Methods for Populating Resources ############################################################################################ -######################################################################################################################################## - - -def _populate_resource_bucket(client, resources): - bucket = client.bucket(uuid.uuid4().hex) - client.create_bucket(bucket) - resources["bucket"] = bucket - - -def _populate_resource_object(client, resources): - bucket_name = resources["bucket"].name - bucket = client.get_bucket(bucket_name) - blob = bucket.blob(uuid.uuid4().hex) - blob.upload_from_string("hello world") - blob.reload() - resources["object"] = blob - - -def _populate_resource_notification(client, resources): - bucket_name = resources["bucket"].name - bucket = client.get_bucket(bucket_name) - notification = bucket.notification() - notification.create() - notification.reload() - resources["notification"] = notification - - -def _populate_resource_hmackey(client, resources): - hmac_key, secret = client.create_hmac_key( - service_account_email=_CONF_TEST_SERVICE_ACCOUNT_EMAIL, - project_id=_CONF_TEST_PROJECT_ID, - ) - resources["hmac_key"] = hmac_key - - -resource_mapping = { - "BUCKET": _populate_resource_bucket, - "OBJECT": _populate_resource_object, - "NOTIFICATION": _populate_resource_notification, - "HMAC_KEY": _populate_resource_hmackey, -} - - -def _populate_resources(client, json_resource): - resources = {} - - for r in json_resource: - func = resource_mapping[r] - func(client, resources) - - return resources - - -######################################################################################################################################## 
-### Helper Methods for Emulator Retry API ############################################################################################## -######################################################################################################################################## - - -def _create_retry_test(host, method_name, instructions): - import json - - preflight_post_uri = host + "/retry_test" - headers = { - "Content-Type": "application/json", - } - data_dict = {"instructions": {method_name: instructions}} - data = json.dumps(data_dict) - r = requests.post(preflight_post_uri, headers=headers, data=data) - return r.json() - - -def _check_retry_test(host, id): - status_get_uri = "{base}{retry}/{id}".format(base=host, retry="/retry_test", id=id) - r = requests.get(status_get_uri) - return r.json() - - -def _run_retry_test(host, id, func, _preconditions, **resources): - # Create client using x-retry-test-id header. - client = storage.Client(client_options={"api_endpoint": host}) - client._http.headers.update({"x-retry-test-id": id}) - func(client, _preconditions, **resources) - - -def _delete_retry_test(host, id): - status_get_uri = "{base}{retry}/{id}".format(base=host, retry="/retry_test", id=id) - requests.delete(status_get_uri) - - -######################################################################################################################################## -### Run Conformance Tests for Retry Strategy ########################################################################################### -######################################################################################################################################## - - -@pytest.mark.parametrize("test_data", _CONFORMANCE_TESTS) -def test_conformance_retry_strategy(test_data): - host = _API_ACCESS_ENDPOINT - if host == _DEFAULT_STORAGE_HOST: - pytest.skip( - "This test must use the testbench emulator; set STORAGE_EMULATOR_HOST to run." - ) - - # Create client to use for setup steps. - client = storage.Client(client_options={"api_endpoint": host}) - methods = test_data["methods"] - cases = test_data["cases"] - expect_success = test_data["expectSuccess"] - precondition_provided = test_data["preconditionProvided"] - for c in cases: - for m in methods: - # Extract method name and instructions to create retry test. - method_name = m["name"] - instructions = c["instructions"] - json_resources = m["resources"] - - if method_name not in method_mapping: - warnings.warn( - "No tests for operation {}".format(method_name), - UserWarning, - stacklevel=1, - ) - continue - - for function in method_mapping[method_name]: - # Create the retry test in the emulator to handle instructions. - try: - r = _create_retry_test(host, method_name, instructions) - id = r["id"] - except Exception as e: - warnings.warn( - "Error creating retry test for {}: {}".format(method_name, e), - UserWarning, - stacklevel=1, - ) - continue - - # Populate resources. - try: - resources = _populate_resources(client, json_resources) - except Exception as e: - warnings.warn( - "Error populating resources for {}: {}".format(method_name, e), - UserWarning, - stacklevel=1, - ) - continue - - # Run retry tests on library methods. - try: - _run_retry_test( - host, id, function, precondition_provided, **resources - ) - except Exception as e: - # Should we be catching specific exceptions - print(e) - success_results = False - else: - success_results = True - - # Assert expected success for each scenario. 
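# A minimal sketch (not part of the patch) of the Retry Test lifecycle that the
# helpers above wrap: register fault-injection instructions, send the matching
# request tagged with x-retry-test-id, then verify the instructions were consumed
# and clean up. Assumes a storage testbench is running locally; the fallback host
# below is a guess and not taken from this change.
import json
import os

import requests

host = os.environ.get("STORAGE_EMULATOR_HOST", "http://localhost:9000")

# 1. Create a Retry Test resource that forces one 503 on storage.buckets.list.
resp = requests.post(
    host + "/retry_test",
    headers={"Content-Type": "application/json"},
    data=json.dumps({"instructions": {"storage.buckets.list": ["return-503"]}}),
)
retry_id = resp.json()["id"]

# 2. Any request carrying the matching x-retry-test-id header pops the next
#    instruction queued for that method, so this bucket listing is served a 503.
requests.get(
    host + "/storage/v1/b",
    headers={"x-retry-test-id": retry_id},
    params={"project": "my-project-id"},
)

# 3. The Retry Test reports completed once every queued instruction has been used.
status = requests.get(host + "/retry_test/" + retry_id).json()
print(status["completed"])

# 4. Remove the Retry Test resource when done.
requests.delete(host + "/retry_test/" + retry_id)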
- assert expect_success == success_results - - # Verify that all instructions were used up during the test - # (indicates that the client sent the correct requests). - try: - status_response = _check_retry_test(host, id) - assert status_response["completed"] is True - except Exception as e: - warnings.warn( - "Error checking retry test status for {}: {}".format( - method_name, e - ), - UserWarning, - stacklevel=1, - ) - - # Clean up and close out test in emulator. - try: - _delete_retry_test(host, id) - except Exception as e: - warnings.warn( - "Error deleting retry test for {}: {}".format(method_name, e), - UserWarning, - stacklevel=1, - ) From 90b586dae16758e423ffae0e96e3cef0804e140e Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Wed, 2 Jun 2021 17:30:08 -0700 Subject: [PATCH 29/45] add S1 S2 tests and delete json file --- .../conformance/retry_strategy_test_data.json | 21 +- tests/conformance/test_conformance.py | 109 ++++--- tests/unit/retry_strategy_test_data.json | 292 ------------------ 3 files changed, 80 insertions(+), 342 deletions(-) delete mode 100644 tests/unit/retry_strategy_test_data.json diff --git a/tests/conformance/retry_strategy_test_data.json b/tests/conformance/retry_strategy_test_data.json index 9f1a9cc28..20ec281f3 100644 --- a/tests/conformance/retry_strategy_test_data.json +++ b/tests/conformance/retry_strategy_test_data.json @@ -4,13 +4,6 @@ "id": 1, "description": "always idempotent", "cases": [ - { - "instructions": [ - "return-503", - "return-503", - "return-503" - ] - }, { "instructions": [ "return-503", @@ -256,6 +249,20 @@ "resources": [ "BUCKET" ] + }, + { + "name": "storage.objects.patch", + "resources": [ + "BUCKET", + "OBJECT" + ] + }, + { + "name": "storage.objects.update", + "resources": [ + "BUCKET", + "OBJECT" + ] } ], "preconditionProvided": false, diff --git a/tests/conformance/test_conformance.py b/tests/conformance/test_conformance.py index c637b7839..392ff9efd 100644 --- a/tests/conformance/test_conformance.py +++ b/tests/conformance/test_conformance.py @@ -79,28 +79,17 @@ def get_bucket(client, _preconditions, bucket): client.get_bucket(bucket.name) -def update_blob(client, _preconditions, bucket, object): - bucket = client.bucket(bucket.name) - blob = bucket.blob(object.name) - metadata = {"foo": "bar"} - blob.metadata = metadata - if _preconditions: - metageneration = object.metageneration - blob.patch(if_metageneration_match=metageneration) - else: - blob.patch() - - def create_bucket(client, _preconditions): bucket = client.bucket(uuid.uuid4().hex) client.create_bucket(bucket) -# Q!!! upload_from_string did not retry. def upload_from_string(client, _preconditions, bucket): - bucket = client.get_bucket(bucket.name) - blob = bucket.blob(uuid.uuid4().hex) - blob.upload_from_string("upload from string") + blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) + if _preconditions: + blob.upload_from_string("upload from string", if_metageneration_match=0) + else: + blob.upload_from_string("upload from string") def create_notification(client, _preconditions, bucket): @@ -127,7 +116,6 @@ def delete_notification(client, _preconditions, bucket, notification): notification.delete() -# Q!!! are there hmacKeys retryable endpoints in the emulator? 
def list_hmac_keys(client, _preconditions, **_): hmac_keys = client.list_hmac_keys() for k in hmac_keys: @@ -144,19 +132,16 @@ def get_iam_policy(client, _preconditions, bucket): bucket.get_iam_policy() -# Q: error - fixture 'client' not found -# def test_iam_permissions(client, _preconditions, bucket): -# bucket = client.bucket(bucket.name) -# permissions = ["storage.buckets.get", "storage.buckets.create"] -# bucket.test_iam_permissions(permissions) +def get_iam_permissions(client, _preconditions, bucket): + bucket = client.bucket(bucket.name) + permissions = ["storage.buckets.get", "storage.buckets.create"] + bucket.test_iam_permissions(permissions) -# Q: cannot find the corresponding endpoint in the Retry API def get_service_account_email(client, _preconditions): client.get_service_account_email() -# Q: not hitting the errors from the instructions def make_bucket_public(client, _preconditions, bucket): bucket = client.bucket(bucket.name) bucket.make_public() @@ -171,7 +156,6 @@ def delete_blob(client, _preconditions, bucket, object): bucket.delete_blob(object.name) -# Q: 1) cannot lock a locked bucket 2) currently using default "bucket" with metageneration def lock_retention_policy(client, _preconditions, bucket): bucket2 = client.bucket(bucket.name) bucket2.retention_period = 60 @@ -179,45 +163,84 @@ def lock_retention_policy(client, _preconditions, bucket): bucket2.lock_retention_policy() +def patch_bucket(client, _preconditions, bucket): + bucket = client.get_bucket("bucket") + metageneration = bucket.metageneration + bucket.storage_class = "COLDLINE" + if _preconditions: + bucket.patch(if_metageneration_match=metageneration) + else: + bucket.patch() + + +def update_bucket(client, _preconditions, bucket): + bucket = client.get_bucket("bucket") + metageneration = bucket.metageneration + bucket._properties = { + "storageClass": "STANDARD" + } + if _preconditions: + bucket.update(if_metageneration_match=metageneration) + else: + bucket.update() + + +def patch_blob(client, _preconditions, bucket, object): + blob = client.bucket(bucket.name).blob(object.name) + blob.metadata = {"foo": "bar"} + if _preconditions: + blob.patch(if_metageneration_match=object.metageneration) + else: + blob.patch() + + +def update_blob(client, _preconditions, bucket, object): + blob = client.bucket(bucket.name).blob(object.name) + blob.metadata = {"foo": "bar"} + if _preconditions: + blob.update(if_metageneration_match=object.metageneration) + else: + blob.update() + # Method invocation mapping. Methods to retry. This is a map whose keys are a string describing a standard # API call (e.g. storage.objects.get) and values are a list of functions which # wrap library methods that implement these calls. There may be multiple values # because multiple library methods may use the same call (e.g. get could be a # read or just a metadata get). 
method_mapping = { - # "storage.bucket_acl.get": [], # S1 start # no library method mapped - # "storage.bucket_acl.list": [], # no library method mapped + # "storage.bucket_acl.get": [], # S1 start # pending retry strategy added to ACL + # "storage.bucket_acl.list": [], # pending retry strategy added to ACL "storage.buckets.delete": [delete_bucket], "storage.buckets.get": [get_bucket, reload_bucket], "storage.buckets.getIamPolicy": [get_iam_policy], "storage.buckets.insert": [create_bucket], "storage.buckets.list": [list_buckets], - # "storage.buckets.lockRententionPolicy": [], # lock_retention_policy - # "storage.buckets.testIamPermission": [], # test_iam_permissions - "storage.default_object_acl.get": [], - "storage.default_object_acl.list": [], - # "storage.hmacKey.delete": [], # emulator project related endpoints wip - # "storage.hmacKey.list": [], # emulator project related endpoints wip - # "storage.hmacKey.get": [], # emulator project related endpoints wip + "storage.buckets.lockRententionPolicy": [], # lock_retention_policy + "storage.buckets.testIamPermission": [get_iam_permissions], # test_iam_permissions + # "storage.default_object_acl.get": [], # pending retry strategy added to ACL + # "storage.default_object_acl.list": [], # pending retry strategy added to ACL + # "storage.hmacKey.delete": [], # wip emulator project related endpoints + # "storage.hmacKey.list": [], # wip emulator project related endpoints + # "storage.hmacKey.get": [], # wip emulator project related endpoints "storage.notifications.delete": [delete_notification], "storage.notifications.get": [get_notification], "storage.notifications.list": [list_notifications], - "storage.object_acl.get": [], - "storage.object_acl.list": [], + # "storage.object_acl.get": [], # pending retry strategy added to ACL + # "storage.object_acl.list": [], # pending retry strategy added to ACL "storage.objects.get": [get_blob], "storage.objects.list": [list_blobs], - # "storage.serviceaccount.get": [], # S1 end # emulator project related endpoints wip - "storage.buckets.patch": [], # S2 start + # "storage.serviceaccount.get": [], # S1 end # wip emulator project related endpoints + "storage.buckets.patch": [patch_bucket], # S2 start "storage.buckets.setIamPolicy": [], - "storage.buckets.update": [], - "storage.hmacKey.update": [], + "storage.buckets.update": [update_bucket], + # "storage.hmacKey.update": [], # wip emulator project related endpoints "storage.objects.compose": [], "storage.objects.copy": [], "storage.objects.delete": [delete_blob], - "storage.objects.insert": [], - "storage.objects.patch": [update_blob], + "storage.objects.insert": [upload_from_string], + "storage.objects.patch": [patch_blob], "storage.objects.rewrite": [], - "storage.objects.update": [], # S2 end + "storage.objects.update": [update_blob], # S2 end "storage.notifications.insert": [create_notification], # S4 } diff --git a/tests/unit/retry_strategy_test_data.json b/tests/unit/retry_strategy_test_data.json deleted file mode 100644 index 9f1a9cc28..000000000 --- a/tests/unit/retry_strategy_test_data.json +++ /dev/null @@ -1,292 +0,0 @@ -{ - "retryStrategyTests": [ - { - "id": 1, - "description": "always idempotent", - "cases": [ - { - "instructions": [ - "return-503", - "return-503", - "return-503" - ] - }, - { - "instructions": [ - "return-503", - "return-503" - ] - } - ], - "methods": [ - { - "name": "storage.bucket_acl.get", - "resources": [ - "BUCKET" - ] - }, - { - "name": "storage.bucket_acl.list", - "resources": [ - "BUCKET" - ] - }, - { - "name": 
"storage.buckets.delete", - "resources": [ - "BUCKET" - ] - }, - { - "name": "storage.buckets.get", - "resources": [ - "BUCKET" - ] - }, - { - "name": "storage.buckets.getIamPolicy", - "resources": [ - "BUCKET" - ] - }, - { - "name": "storage.buckets.insert", - "resources": [] - }, - { - "name": "storage.buckets.list", - "resources": [ - "BUCKET" - ] - }, - { - "name": "storage.buckets.lockRententionPolicy", - "resources": [ - "BUCKET" - ] - }, - { - "name": "storage.buckets.testIamPermission", - "resources": [ - "BUCKET" - ] - }, - { - "name": "storage.default_object_acl.get", - "resources": [ - "BUCKET", - "OBJECT" - ] - }, - { - "name": "storage.default_object_acl.list", - "resources": [ - "BUCKET", - "OBJECT" - ] - }, - { - "name": "storage.hmacKey.delete", - "resources": [] - }, - { - "name": "storage.hmacKey.get", - "resources": [] - }, - { - "name": "storage.hmacKey.list", - "resources": [] - }, - { - "name": "storage.notifications.delete", - "resources": [ - "BUCKET", - "NOTIFICATION" - ] - }, - { - "name": "storage.notifications.get", - "resources": [ - "BUCKET", - "NOTIFICATION" - ] - }, - { - "name": "storage.notifications.list", - "resources": [ - "BUCKET", - "NOTIFICATION" - ] - }, - { - "name": "storage.object_acl.get", - "resources": [ - "BUCKET", - "OBJECT" - ] - }, - { - "name": "storage.object_acl.list", - "resources": [ - "BUCKET", - "OBJECT" - ] - }, - { - "name": "storage.objects.get", - "resources": [ - "BUCKET", - "OBJECT" - ] - }, - { - "name": "storage.objects.list", - "resources": [ - "BUCKET", - "OBJECT" - ] - }, - { - "name": "storage.serviceaccount.get", - "resources": [] - } - ], - "preconditionProvided": false, - "expectSuccess": true - }, - { - "id": 2, - "description": "conditionally idempotent retries when precondition is present", - "cases": [ - { - "instructions": [ - "return-503", - "return-503" - ] - } - ], - "methods": [ - { - "name": "storage.buckets.patch", - "resources": [ - "BUCKET" - ] - }, - { - "name": "storage.buckets.setIamPolicy", - "resources": [ - "BUCKET" - ] - }, - { - "name": "storage.buckets.update", - "resources": [ - "BUCKET" - ] - }, - { - "name": "storage.hmacKey.update", - "resources": [] - }, - { - "name": "storage.objects.compose", - "resources": [ - "BUCKET", - "OBJECT" - ] - }, - { - "name": "storage.objects.copy", - "resources": [ - "BUCKET", - "OBJECT" - ] - }, - { - "name": "storage.objects.delete", - "resources": [ - "BUCKET", - "OBJECT" - ] - }, - { - "name": "storage.objects.insert", - "resources": [ - "BUCKET" - ] - }, - { - "name": "storage.objects.patch", - "resources": [ - "BUCKET", - "OBJECT" - ] - }, - { - "name": "storage.objects.rewrite", - "resources": [ - "BUCKET", - "OBJECT" - ] - }, - { - "name": "storage.objects.update", - "resources": [ - "BUCKET", - "OBJECT" - ] - } - ], - "preconditionProvided": true, - "expectSuccess": true - }, - { - "id": 3, - "description": "conditionally idempotent no retries when precondition is absent", - "cases": [ - { - "instructions": [ - "return-503" - ] - } - ], - "methods": [ - { - "name": "storage.buckets.patch", - "resources": [ - "BUCKET" - ] - } - ], - "preconditionProvided": false, - "expectSuccess": false - }, - { - "id": 4, - "description": "non idempotent", - "cases": [ - { - "instructions": [ - "return-503" - ] - } - ], - "methods": [ - { - "name": "storage.notifications.insert", - "resources": [ - "BUCKET" - ] - }, - { - "name": "storage.bucket_acl.patch", - "resources": [ - "BUCKET" - ] - } - ], - "preconditionProvided": false, - "expectSuccess": false - } - ] - } 
\ No newline at end of file From 0e051fe07e18d22133a166aeaa6205a15be1af0a Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Wed, 2 Jun 2021 17:32:43 -0700 Subject: [PATCH 30/45] lint and clean comments --- tests/conformance/test_conformance.py | 26 +++++++------------------- 1 file changed, 7 insertions(+), 19 deletions(-) diff --git a/tests/conformance/test_conformance.py b/tests/conformance/test_conformance.py index 392ff9efd..86165de9c 100644 --- a/tests/conformance/test_conformance.py +++ b/tests/conformance/test_conformance.py @@ -86,7 +86,7 @@ def create_bucket(client, _preconditions): def upload_from_string(client, _preconditions, bucket): blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) - if _preconditions: + if _preconditions: blob.upload_from_string("upload from string", if_metageneration_match=0) else: blob.upload_from_string("upload from string") @@ -176,9 +176,7 @@ def patch_bucket(client, _preconditions, bucket): def update_bucket(client, _preconditions, bucket): bucket = client.get_bucket("bucket") metageneration = bucket.metageneration - bucket._properties = { - "storageClass": "STANDARD" - } + bucket._properties = {"storageClass": "STANDARD"} if _preconditions: bucket.update(if_metageneration_match=metageneration) else: @@ -202,38 +200,28 @@ def update_blob(client, _preconditions, bucket, object): else: blob.update() + # Method invocation mapping. Methods to retry. This is a map whose keys are a string describing a standard # API call (e.g. storage.objects.get) and values are a list of functions which # wrap library methods that implement these calls. There may be multiple values # because multiple library methods may use the same call (e.g. get could be a # read or just a metadata get). method_mapping = { - # "storage.bucket_acl.get": [], # S1 start # pending retry strategy added to ACL - # "storage.bucket_acl.list": [], # pending retry strategy added to ACL - "storage.buckets.delete": [delete_bucket], + "storage.buckets.delete": [delete_bucket], # S1 start "storage.buckets.get": [get_bucket, reload_bucket], "storage.buckets.getIamPolicy": [get_iam_policy], "storage.buckets.insert": [create_bucket], "storage.buckets.list": [list_buckets], - "storage.buckets.lockRententionPolicy": [], # lock_retention_policy - "storage.buckets.testIamPermission": [get_iam_permissions], # test_iam_permissions - # "storage.default_object_acl.get": [], # pending retry strategy added to ACL - # "storage.default_object_acl.list": [], # pending retry strategy added to ACL - # "storage.hmacKey.delete": [], # wip emulator project related endpoints - # "storage.hmacKey.list": [], # wip emulator project related endpoints - # "storage.hmacKey.get": [], # wip emulator project related endpoints + "storage.buckets.lockRententionPolicy": [], # lock_retention_policy + "storage.buckets.testIamPermission": [get_iam_permissions], "storage.notifications.delete": [delete_notification], "storage.notifications.get": [get_notification], "storage.notifications.list": [list_notifications], - # "storage.object_acl.get": [], # pending retry strategy added to ACL - # "storage.object_acl.list": [], # pending retry strategy added to ACL "storage.objects.get": [get_blob], - "storage.objects.list": [list_blobs], - # "storage.serviceaccount.get": [], # S1 end # wip emulator project related endpoints + "storage.objects.list": [list_blobs], # S1 end "storage.buckets.patch": [patch_bucket], # S2 start "storage.buckets.setIamPolicy": [], "storage.buckets.update": [update_bucket], - # "storage.hmacKey.update": [], # wip 
emulator project related endpoints "storage.objects.compose": [], "storage.objects.copy": [], "storage.objects.delete": [delete_blob], From b121df3d7f846ffc06e3535601fa3044f631722f Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Fri, 4 Jun 2021 16:00:34 -0700 Subject: [PATCH 31/45] add S2 object library methods --- tests/conformance/test_conformance.py | 62 +++++++++++++++++++-------- 1 file changed, 45 insertions(+), 17 deletions(-) diff --git a/tests/conformance/test_conformance.py b/tests/conformance/test_conformance.py index 86165de9c..b0a92816d 100644 --- a/tests/conformance/test_conformance.py +++ b/tests/conformance/test_conformance.py @@ -23,24 +23,11 @@ import requests import warnings -# http.client.HTTPConnection.debuglevel=5 - -# ToDo: Confirm what are the credentials required. Can we use the same service account created for url_signer_v4_test_account? -_FAKE_SERVICE_ACCOUNT = None - - -def fake_service_account(): - global _FAKE_SERVICE_ACCOUNT - # validate and set fake service account - - -# ToDo: Confirm what are the credentials required. Can we use the same service account created for url_signer_v4_test_account? ) -# _SERVICE_ACCOUNT_JSON = _read_local_json("") _CONFORMANCE_TESTS = _read_local_json("retry_strategy_test_data.json")[ "retryStrategyTests" ] -# ToDo: Confirm the correct access endpoint. + _API_ACCESS_ENDPOINT = _helpers._get_storage_host() _DEFAULT_STORAGE_HOST = u"https://storage.googleapis.com" _CONF_TEST_PROJECT_ID = "my-project-id" @@ -201,6 +188,47 @@ def update_blob(client, _preconditions, bucket, object): blob.update() +def copy_blob(client, _preconditions, bucket, object): + bucket = client.bucket(bucket.name) + destination = client.bucket("bucket") + if _preconditions: + bucket.copy_blob( + object, destination, new_name=uuid.uuid4().hex, if_generation_match=0 + ) + else: + bucket.copy_blob(object, destination) + + +def rename_blob(client, _preconditions, bucket, object): + bucket = client.bucket(bucket.name) + new_name = uuid.uuid4().hex + if _preconditions: + bucket.rename_blob(object, new_name, if_generation_match=0) + else: + bucket.rename_blob(object, new_name) + + +def rewrite_blob(client, _preconditions, bucket, object): + new_blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) + new_blob.metadata = {"foo": "bar"} + if _preconditions: + new_blob.rewrite(object, if_generation_match=0) + else: + new_blob.rewrite(object) + + +def compose_blob(client, _preconditions, bucket, object): + blob = client.bucket(bucket.name).blob(object.name) + blob_2 = bucket.blob(uuid.uuid4().hex) + blob_2.upload_from_string("foo") + sources = [blob_2] + + if _preconditions: + blob.compose(sources, if_generation_match=object.generation) + else: + blob.compose(sources) + + # Method invocation mapping. Methods to retry. This is a map whose keys are a string describing a standard # API call (e.g. storage.objects.get) and values are a list of functions which # wrap library methods that implement these calls. 
There may be multiple values @@ -222,12 +250,12 @@ def update_blob(client, _preconditions, bucket, object): "storage.buckets.patch": [patch_bucket], # S2 start "storage.buckets.setIamPolicy": [], "storage.buckets.update": [update_bucket], - "storage.objects.compose": [], - "storage.objects.copy": [], + "storage.objects.compose": [], # compose_blob + "storage.objects.copy": [copy_blob, rename_blob], "storage.objects.delete": [delete_blob], "storage.objects.insert": [upload_from_string], "storage.objects.patch": [patch_blob], - "storage.objects.rewrite": [], + "storage.objects.rewrite": [rewrite_blob], "storage.objects.update": [update_blob], # S2 end "storage.notifications.insert": [create_notification], # S4 } From 77e79ccf8772b646149b60bb328c4956df8d0a62 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Thu, 10 Jun 2021 15:07:00 -0700 Subject: [PATCH 32/45] address comments --- noxfile.py | 2 +- tests/conformance/__init__.py | 2 +- tests/conformance/test_conformance.py | 10 ++++++---- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/noxfile.py b/noxfile.py index fd625ac2d..324a70bfd 100644 --- a/noxfile.py +++ b/noxfile.py @@ -29,7 +29,7 @@ DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"] UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] -CONFORMANCE_TEST_PYTHON_VERSIONS = ["2.7", "3.8"] +CONFORMANCE_TEST_PYTHON_VERSIONS = ["3.8"] _DEFAULT_STORAGE_HOST = "https://storage.googleapis.com" diff --git a/tests/conformance/__init__.py b/tests/conformance/__init__.py index a864e9eae..bff181aad 100644 --- a/tests/conformance/__init__.py +++ b/tests/conformance/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google LLC +# Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/conformance/test_conformance.py b/tests/conformance/test_conformance.py index b0a92816d..e86e9a775 100644 --- a/tests/conformance/test_conformance.py +++ b/tests/conformance/test_conformance.py @@ -19,6 +19,7 @@ from . import _read_local_json +import os import pytest import requests import warnings @@ -28,8 +29,9 @@ "retryStrategyTests" ] -_API_ACCESS_ENDPOINT = _helpers._get_storage_host() -_DEFAULT_STORAGE_HOST = u"https://storage.googleapis.com" +STORAGE_EMULATOR_ENV_VAR = "STORAGE_EMULATOR_HOST" +"""Environment variable defining host for Storage emulator.""" + _CONF_TEST_PROJECT_ID = "my-project-id" _CONF_TEST_SERVICE_ACCOUNT_EMAIL = ( "my-service-account@my-project-id.iam.gserviceaccount.com" @@ -358,8 +360,8 @@ def _delete_retry_test(host, id): @pytest.mark.parametrize("test_data", _CONFORMANCE_TESTS) def test_conformance_retry_strategy(test_data): - host = _API_ACCESS_ENDPOINT - if host == _DEFAULT_STORAGE_HOST: + host = os.environ.get(STORAGE_EMULATOR_ENV_VAR) + if host is None: pytest.skip( "This test must use the testbench emulator; set STORAGE_EMULATOR_HOST to run." 
) From f49b30fc7f305ad52cef16f810948e530fc8add5 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Fri, 11 Jun 2021 15:48:07 -0700 Subject: [PATCH 33/45] change test parametrization to separate test cases and address comments --- tests/conformance/test_conformance.py | 172 +++++++++++++------------- 1 file changed, 86 insertions(+), 86 deletions(-) diff --git a/tests/conformance/test_conformance.py b/tests/conformance/test_conformance.py index e86e9a775..445fa54d4 100644 --- a/tests/conformance/test_conformance.py +++ b/tests/conformance/test_conformance.py @@ -15,7 +15,6 @@ import uuid from google.cloud import storage -from google.cloud.storage import _helpers from . import _read_local_json @@ -323,6 +322,11 @@ def _populate_resources(client, json_resource): def _create_retry_test(host, method_name, instructions): + """ + Initialize a Retry Test resource with a list of instructions and an API method. + This offers a mechanism to send multiple retry instructions while sending a single, constant header through all the HTTP requests in a test. + See also: https://github.com/googleapis/google-cloud-cpp/tree/main/google/cloud/storage/emulator + """ import json preflight_post_uri = host + "/retry_test" @@ -335,14 +339,17 @@ def _create_retry_test(host, method_name, instructions): return r.json() -def _check_retry_test(host, id): +def _get_retry_test(host, id): status_get_uri = "{base}{retry}/{id}".format(base=host, retry="/retry_test", id=id) r = requests.get(status_get_uri) return r.json() def _run_retry_test(host, id, func, _preconditions, **resources): - # Create client using x-retry-test-id header. + """ + To execute tests against the list of instrucions sent to the Retry API, create a client to send the retry test ID using the x-retry-test-id header in each request. + For incoming requests which match the given API method, the emulator will pop off the next instruction from the list and force the listed failure case. + """ client = storage.Client(client_options={"api_endpoint": host}) client._http.headers.update({"x-retry-test-id": id}) func(client, _preconditions, **resources) @@ -358,8 +365,29 @@ def _delete_retry_test(host, id): ######################################################################################################################################## -@pytest.mark.parametrize("test_data", _CONFORMANCE_TESTS) -def test_conformance_retry_strategy(test_data): +def pytest_generate_tests(metafunc): + for test_data in _CONFORMANCE_TESTS: + scenario_id = test_data["id"] + m = "s{}method".format(scenario_id) + c = "s{}case".format(scenario_id) + s = "s{}".format(scenario_id) + if s in metafunc.fixturenames: + metafunc.parametrize(s, [scenario_id]) + if m in metafunc.fixturenames: + metafunc.parametrize(m, test_data["methods"]) + if c in metafunc.fixturenames: + metafunc.parametrize(c, test_data["cases"]) + + +def test_retry_s1_always_idempotent(s1, s1method, s1case): + run_retry_stragegy_conformance_test(s1, s1method, s1case) + + +def test_retry_s2_conditionally_idempotent_w_preconditions(s2, s2method, s2case): + run_retry_stragegy_conformance_test(s2, s2method, s2case) + + +def run_retry_stragegy_conformance_test(scenario_id, method, case): host = os.environ.get(STORAGE_EMULATOR_ENV_VAR) if host is None: pytest.skip( @@ -368,84 +396,56 @@ def test_conformance_retry_strategy(test_data): # Create client to use for setup steps. 
client = storage.Client(client_options={"api_endpoint": host}) - methods = test_data["methods"] - cases = test_data["cases"] - expect_success = test_data["expectSuccess"] - precondition_provided = test_data["preconditionProvided"] - for c in cases: - for m in methods: - # Extract method name and instructions to create retry test. - method_name = m["name"] - instructions = c["instructions"] - json_resources = m["resources"] - - if method_name not in method_mapping: - warnings.warn( - "No tests for operation {}".format(method_name), - UserWarning, - stacklevel=1, - ) - continue - - for function in method_mapping[method_name]: - # Create the retry test in the emulator to handle instructions. - try: - r = _create_retry_test(host, method_name, instructions) - id = r["id"] - except Exception as e: - warnings.warn( - "Error creating retry test for {}: {}".format(method_name, e), - UserWarning, - stacklevel=1, - ) - continue - - # Populate resources. - try: - resources = _populate_resources(client, json_resources) - except Exception as e: - warnings.warn( - "Error populating resources for {}: {}".format(method_name, e), - UserWarning, - stacklevel=1, - ) - continue - - # Run retry tests on library methods. - try: - _run_retry_test( - host, id, function, precondition_provided, **resources - ) - except Exception as e: - # Should we be catching specific exceptions - print(e) - success_results = False - else: - success_results = True - - # Assert expected success for each scenario. - assert expect_success == success_results - - # Verify that all instructions were used up during the test - # (indicates that the client sent the correct requests). - try: - status_response = _check_retry_test(host, id) - assert status_response["completed"] is True - except Exception as e: - warnings.warn( - "Error checking retry test status for {}: {}".format( - method_name, e - ), - UserWarning, - stacklevel=1, - ) - - # Clean up and close out test in emulator. - try: - _delete_retry_test(host, id) - except Exception as e: - warnings.warn( - "Error deleting retry test for {}: {}".format(method_name, e), - UserWarning, - stacklevel=1, - ) + scenario = _CONFORMANCE_TESTS[scenario_id - 1] + expect_success = scenario["expectSuccess"] + precondition_provided = scenario["preconditionProvided"] + json_resources = method["resources"] + method_name = method["name"] + instructions = case["instructions"] + + if method_name not in method_mapping: + pytest.skip("No tests for operation {}".format(method_name),) + + for function in method_mapping[method_name]: + # Create the retry test in the emulator to handle instructions. + try: + r = _create_retry_test(host, method_name, instructions) + id = r["id"] + except Exception as e: + warnings.warn( + "Error creating retry test for {}: {}".format(method_name, e), + UserWarning, + stacklevel=1, + ) + continue + + # Populate resources. + try: + resources = _populate_resources(client, json_resources) + except Exception as e: + warnings.warn( + "Error populating resources for {}: {}".format(method_name, e), + UserWarning, + stacklevel=1, + ) + continue + + # Run retry tests on library methods. + try: + _run_retry_test(host, id, function, precondition_provided, **resources) + except Exception as e: + print(e) + success_results = False + else: + success_results = True + + # Assert expected success for each scenario. + assert expect_success == success_results + + # Verify that all instructions were used up during the test + # (indicates that the client sent the correct requests). 
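# A condensed sketch (not the patch itself) of the parametrization pattern used
# above: pytest_generate_tests inspects the fixture names a test requests and
# parametrizes each one, so every (method, case) pair becomes its own test item.
# The scenario literal below is a trimmed stand-in for the loaded JSON data.
_SCENARIO_1 = {
    "methods": [{"name": "storage.buckets.list", "resources": ["BUCKET"]}],
    "cases": [{"instructions": ["return-503", "return-503"]}],
}


def pytest_generate_tests(metafunc):
    if "s1method" in metafunc.fixturenames:
        metafunc.parametrize("s1method", _SCENARIO_1["methods"])
    if "s1case" in metafunc.fixturenames:
        metafunc.parametrize("s1case", _SCENARIO_1["cases"])


def test_scenario_one(s1method, s1case):
    # One collected test item per method x case combination.
    assert s1method["name"].startswith("storage.")
    assert s1case["instructions"]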
+ status_response = _get_retry_test(host, id) + assert status_response["completed"] is True + + # Clean up and close out test in emulator. + _delete_retry_test(host, id) From de3ee4b6e01c46b3a6631690d094fd6f05987ff3 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Thu, 17 Jun 2021 10:16:27 -0700 Subject: [PATCH 34/45] add assertion message and display library method name --- tests/conformance/test_conformance.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/conformance/test_conformance.py b/tests/conformance/test_conformance.py index 445fa54d4..83622da1e 100644 --- a/tests/conformance/test_conformance.py +++ b/tests/conformance/test_conformance.py @@ -44,13 +44,13 @@ def list_buckets(client, _preconditions, **_): buckets = client.list_buckets() for b in buckets: - break + print(b) def list_blobs(client, _preconditions, bucket, **_): blobs = client.list_blobs(bucket.name) for b in blobs: - break + print(b) def get_blob(client, _preconditions, bucket, object): @@ -90,7 +90,7 @@ def list_notifications(client, _preconditions, bucket, **_): bucket = client.get_bucket(bucket.name) notifications = bucket.list_notifications() for n in notifications: - break + print(n) def get_notification(client, _preconditions, bucket, notification): @@ -107,7 +107,7 @@ def delete_notification(client, _preconditions, bucket, notification): def list_hmac_keys(client, _preconditions, **_): hmac_keys = client.list_hmac_keys() for k in hmac_keys: - break + print(k) def delete_bucket(client, _preconditions, bucket): @@ -440,12 +440,12 @@ def run_retry_stragegy_conformance_test(scenario_id, method, case): success_results = True # Assert expected success for each scenario. - assert expect_success == success_results + assert expect_success == success_results, "Scenario{}-{}: expected_success was {}, should be {}".format(scenario_id, function.__name__, success_results, expect_success) # Verify that all instructions were used up during the test # (indicates that the client sent the correct requests). status_response = _get_retry_test(host, id) - assert status_response["completed"] is True + assert status_response["completed"] is True, "Scenario{}-{}: test not completed; unused instructions:{}".format(scenario_id, function.__name__, status_response["instructions"]) # Clean up and close out test in emulator. _delete_retry_test(host, id) From b4096a2bfb79498c89a62e5b3187509a7d82e786 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Fri, 18 Jun 2021 15:56:58 -0700 Subject: [PATCH 35/45] add multiple lib methods and revise assertion message --- .../conformance/retry_strategy_test_data.json | 3 +- tests/conformance/test_conformance.py | 165 +++++++++++++++--- 2 files changed, 147 insertions(+), 21 deletions(-) diff --git a/tests/conformance/retry_strategy_test_data.json b/tests/conformance/retry_strategy_test_data.json index 20ec281f3..0ebf8adef 100644 --- a/tests/conformance/retry_strategy_test_data.json +++ b/tests/conformance/retry_strategy_test_data.json @@ -27,7 +27,8 @@ { "name": "storage.buckets.delete", "resources": [ - "BUCKET" + "BUCKET", + "OBJECT" ] }, { diff --git a/tests/conformance/test_conformance.py b/tests/conformance/test_conformance.py index 83622da1e..b97c24c9d 100644 --- a/tests/conformance/test_conformance.py +++ b/tests/conformance/test_conformance.py @@ -12,17 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import os +import pytest +import requests +import tempfile import uuid +import warnings from google.cloud import storage from . import _read_local_json -import os -import pytest -import requests -import warnings - _CONFORMANCE_TESTS = _read_local_json("retry_strategy_test_data.json")[ "retryStrategyTests" @@ -53,11 +53,52 @@ def list_blobs(client, _preconditions, bucket, **_): print(b) +def bucket_list_blobs(client, _preconditions, bucket, **_): + blobs = client.bucket(bucket.name).list_blobs() + for b in blobs: + print(b) + + def get_blob(client, _preconditions, bucket, object): bucket = client.bucket(bucket.name) bucket.get_blob(object.name) +def blob_exists(client, _preconditions, bucket, object): + blob = client.bucket(bucket.name).blob(object.name) + blob.exists() + + +def blob_download_as_bytes(client, _preconditions, bucket, object): + blob = client.bucket(bucket.name).blob(object.name) + blob.download_as_bytes() + + +def blob_download_as_text(client, _preconditions, bucket, object): + blob = client.bucket(bucket.name).blob(object.name) + blob.download_as_text() + + +def blob_download_to_filename(client, _preconditions, bucket, object): + blob = client.bucket(bucket.name).blob(object.name) + with tempfile.NamedTemporaryFile() as temp_f: + blob.download_to_filename(temp_f.name) + + +def client_download_to_file(client, _preconditions, object, **_): + with tempfile.NamedTemporaryFile() as temp_f: + with open(temp_f.name, "wb") as file_obj: + client.download_blob_to_file(object, file_obj) + + +def blobreader_read(client, _preconditions, bucket, object): + from google.cloud.storage.fileio import BlobReader + + blob = client.bucket(bucket.name).blob(object.name) + blob_reader = BlobReader(blob) + blob_reader.read() + + def reload_bucket(client, _preconditions, bucket): bucket = client.bucket(bucket.name) bucket.reload() @@ -67,11 +108,25 @@ def get_bucket(client, _preconditions, bucket): client.get_bucket(bucket.name) +def lookup_bucket(client, _preconditions, bucket): + client.lookup_bucket(bucket.name) + + +def bucket_exists(client, _preconditions, bucket): + bucket = client.bucket(bucket.name) + bucket.exists() + + def create_bucket(client, _preconditions): bucket = client.bucket(uuid.uuid4().hex) client.create_bucket(bucket) +def bucket_create(client, _preconditions): + bucket = client.bucket(uuid.uuid4().hex) + bucket.create() + + def upload_from_string(client, _preconditions, bucket): blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) if _preconditions: @@ -97,9 +152,23 @@ def get_notification(client, _preconditions, bucket, notification): client.bucket(bucket.name).get_notification(notification.notification_id) +def reload_notification(client, _preconditions, bucket, notification): + notification = client.bucket(bucket.name).notification( + notification_id=notification.notification_id + ) + notification.reload() + + +def notification_exists(client, _preconditions, bucket, notification): + notification = client.bucket(bucket.name).notification( + notification_id=notification.notification_id + ) + notification.exists() + + def delete_notification(client, _preconditions, bucket, notification): - notification = client.bucket(bucket.name).get_notification( - notification.notification_id + notification = client.bucket(bucket.name).notification( + notification_id=notification.notification_id ) notification.delete() @@ -110,9 +179,9 @@ def list_hmac_keys(client, _preconditions, **_): print(k) -def delete_bucket(client, _preconditions, bucket): +def delete_bucket(client, 
_preconditions, bucket, **_): bucket = client.bucket(bucket.name) - bucket.delete() + bucket.delete(force=True) def get_iam_policy(client, _preconditions, bucket): @@ -135,7 +204,7 @@ def make_bucket_public(client, _preconditions, bucket): bucket.make_public() -def delete_blob(client, _preconditions, bucket, object): +def bucket_delete_blob(client, _preconditions, bucket, object): bucket = client.bucket(bucket.name) if _preconditions: generation = object.generation @@ -144,6 +213,27 @@ def delete_blob(client, _preconditions, bucket, object): bucket.delete_blob(object.name) +def bucket_delete_blobs(client, _preconditions, bucket, object): + bucket = client.bucket(bucket.name) + blob_2 = bucket.blob(uuid.uuid4().hex) + blob_2.upload_from_string("foo") + sources = [object, blob_2] + source_generations = [object.generation, blob_2.generation] + if _preconditions: + bucket.delete_blobs(sources, if_generation_match=source_generations) + else: + bucket.delete_blobs(sources) + + +def blob_delete(client, _preconditions, bucket, object): + blob = client.bucket(bucket.name).blob(object.name) + if _preconditions: + blob.delete(if_generation_match=object.generation) + else: + blob.delete() + + +# TODO(cathyo@): fix emulator issue and assign metageneration to buckets.insert def lock_retention_policy(client, _preconditions, bucket): bucket2 = client.bucket(bucket.name) bucket2.retention_period = 60 @@ -204,7 +294,12 @@ def rename_blob(client, _preconditions, bucket, object): bucket = client.bucket(bucket.name) new_name = uuid.uuid4().hex if _preconditions: - bucket.rename_blob(object, new_name, if_generation_match=0) + bucket.rename_blob( + object, + new_name, + if_generation_match=0, + if_source_generation_match=object.generation, + ) else: bucket.rename_blob(object, new_name) @@ -230,30 +325,52 @@ def compose_blob(client, _preconditions, bucket, object): blob.compose(sources) +######################################################################################################################################## +### Method Invocation Mapping ########################################################################################################## +######################################################################################################################################## + # Method invocation mapping. Methods to retry. This is a map whose keys are a string describing a standard # API call (e.g. storage.objects.get) and values are a list of functions which # wrap library methods that implement these calls. There may be multiple values # because multiple library methods may use the same call (e.g. get could be a # read or just a metadata get). 
+ method_mapping = { "storage.buckets.delete": [delete_bucket], # S1 start - "storage.buckets.get": [get_bucket, reload_bucket], + "storage.buckets.get": [get_bucket, reload_bucket, lookup_bucket, bucket_exists], "storage.buckets.getIamPolicy": [get_iam_policy], - "storage.buckets.insert": [create_bucket], + "storage.buckets.insert": [create_bucket, bucket_create], "storage.buckets.list": [list_buckets], "storage.buckets.lockRententionPolicy": [], # lock_retention_policy "storage.buckets.testIamPermission": [get_iam_permissions], "storage.notifications.delete": [delete_notification], - "storage.notifications.get": [get_notification], + "storage.notifications.get": [ + get_notification, + notification_exists, + reload_notification, + ], "storage.notifications.list": [list_notifications], - "storage.objects.get": [get_blob], - "storage.objects.list": [list_blobs], # S1 end + "storage.objects.get": [ + get_blob, + blob_exists, + client_download_to_file, + blob_download_to_filename, + blob_download_as_bytes, + blob_download_as_text, + blobreader_read, + ], + "storage.objects.list": [list_blobs, bucket_list_blobs, delete_bucket], # S1 end "storage.buckets.patch": [patch_bucket], # S2 start "storage.buckets.setIamPolicy": [], "storage.buckets.update": [update_bucket], - "storage.objects.compose": [], # compose_blob + "storage.objects.compose": [compose_blob], "storage.objects.copy": [copy_blob, rename_blob], - "storage.objects.delete": [delete_blob], + "storage.objects.delete": [ + bucket_delete_blob, + bucket_delete_blobs, + delete_bucket, + blob_delete, + ], # rename_blob "storage.objects.insert": [upload_from_string], "storage.objects.patch": [patch_blob], "storage.objects.rewrite": [rewrite_blob], @@ -440,12 +557,20 @@ def run_retry_stragegy_conformance_test(scenario_id, method, case): success_results = True # Assert expected success for each scenario. - assert expect_success == success_results, "Scenario{}-{}: expected_success was {}, should be {}".format(scenario_id, function.__name__, success_results, expect_success) + assert ( + expect_success == success_results + ), "S{}-{}-{}: expected_success was {}, should be {}".format( + scenario_id, method_name, function.__name__, success_results, expect_success + ) # Verify that all instructions were used up during the test # (indicates that the client sent the correct requests). status_response = _get_retry_test(host, id) - assert status_response["completed"] is True, "Scenario{}-{}: test not completed; unused instructions:{}".format(scenario_id, function.__name__, status_response["instructions"]) + assert ( + status_response["completed"] is True + ), "S{}-{}-{}: test not completed; unused instructions:{}".format( + scenario_id, method_name, function.__name__, status_response["instructions"] + ) # Clean up and close out test in emulator. 
_delete_retry_test(host, id) From e49a9d2b7361ee166582f77b83e4e0ba9bb41e0c Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Wed, 23 Jun 2021 14:19:21 -0700 Subject: [PATCH 36/45] add S2 entry library methods after emulator fix --- tests/conformance/test_conformance.py | 73 +++++++++++++++++++++++++-- 1 file changed, 70 insertions(+), 3 deletions(-) diff --git a/tests/conformance/test_conformance.py b/tests/conformance/test_conformance.py index b97c24c9d..7e7eab1ff 100644 --- a/tests/conformance/test_conformance.py +++ b/tests/conformance/test_conformance.py @@ -135,6 +135,45 @@ def upload_from_string(client, _preconditions, bucket): blob.upload_from_string("upload from string") +def blob_upload_from_file(client, _preconditions, bucket): + from io import BytesIO + + file_obj = BytesIO() + blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) + if _preconditions: + blob.upload_from_file(file_obj, if_metageneration_match=0) + else: + blob.upload_from_file(file_obj) + + +def blob_upload_from_filename(client, _preconditions, bucket): + blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) + with tempfile.NamedTemporaryFile() as temp_f: + if _preconditions: + blob.upload_from_filename(temp_f.name, if_metageneration_match=0) + else: + blob.upload_from_filename(temp_f.name) + + +def blobwriter_write(client, _preconditions, bucket): + import os + from google.cloud.storage.fileio import BlobWriter + + chunk_size = 256 * 1024 + blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) + if _preconditions: + blob_writer = BlobWriter(blob, chunk_size=chunk_size, if_metageneration_match=0) + blob_writer.write(bytearray(os.urandom(262144))) + else: + blob_writer = BlobWriter(blob, chunk_size=chunk_size) + blob_writer.write(bytearray(os.urandom(262144))) + + +def blob_create_resumable_upload_session(client, _preconditions, bucket): + blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) + blob.create_resumable_upload_session() + + def create_notification(client, _preconditions, bucket): bucket = client.get_bucket(bucket.name) notification = bucket.notification() @@ -313,6 +352,15 @@ def rewrite_blob(client, _preconditions, bucket, object): new_blob.rewrite(object) +def blob_update_storage_class(client, _preconditions, bucket, object): + blob = client.bucket(bucket.name).blob(object.name) + storage_class = "STANDARD" + if _preconditions: + blob.update_storage_class(storage_class, if_generation_match=object.generation) + else: + blob.update_storage_class(storage_class) + + def compose_blob(client, _preconditions, bucket, object): blob = client.bucket(bucket.name).blob(object.name) blob_2 = bucket.blob(uuid.uuid4().hex) @@ -325,6 +373,20 @@ def compose_blob(client, _preconditions, bucket, object): blob.compose(sources) +def bucket_set_iam_policy(client, _preconditions, bucket): + bucket = client.get_bucket(bucket.name) + role = "roles/storage.objectViewer" + member = _CONF_TEST_SERVICE_ACCOUNT_EMAIL + + policy = bucket.get_iam_policy(requested_policy_version=3) + policy.bindings.append({"role": role, "members": {member}}) + + if _preconditions: + bucket.set_iam_policy(policy) + else: + bucket.set_iam_policy(policy) + + ######################################################################################################################################## ### Method Invocation Mapping ########################################################################################################## 
######################################################################################################################################## @@ -361,7 +423,7 @@ def compose_blob(client, _preconditions, bucket, object): ], "storage.objects.list": [list_blobs, bucket_list_blobs, delete_bucket], # S1 end "storage.buckets.patch": [patch_bucket], # S2 start - "storage.buckets.setIamPolicy": [], + "storage.buckets.setIamPolicy": [], # bucket_set_iam_policy "storage.buckets.update": [update_bucket], "storage.objects.compose": [compose_blob], "storage.objects.copy": [copy_blob, rename_blob], @@ -371,9 +433,14 @@ def compose_blob(client, _preconditions, bucket, object): delete_bucket, blob_delete, ], # rename_blob - "storage.objects.insert": [upload_from_string], + "storage.objects.insert": [ + upload_from_string, + blob_upload_from_file, + blob_upload_from_filename, + blobwriter_write, + ], "storage.objects.patch": [patch_blob], - "storage.objects.rewrite": [rewrite_blob], + "storage.objects.rewrite": [rewrite_blob, blob_update_storage_class], "storage.objects.update": [update_blob], # S2 end "storage.notifications.insert": [create_notification], # S4 } From 90a0438d55d6375092d7f4d9f409f1db292bdf88 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Fri, 25 Jun 2021 17:24:37 -0700 Subject: [PATCH 37/45] address comments --- tests/conformance/test_conformance.py | 49 ++++++++++++--------------- 1 file changed, 21 insertions(+), 28 deletions(-) diff --git a/tests/conformance/test_conformance.py b/tests/conformance/test_conformance.py index 7e7eab1ff..91f8ca623 100644 --- a/tests/conformance/test_conformance.py +++ b/tests/conformance/test_conformance.py @@ -17,7 +17,7 @@ import requests import tempfile import uuid -import warnings +import logging from google.cloud import storage @@ -44,19 +44,19 @@ def list_buckets(client, _preconditions, **_): buckets = client.list_buckets() for b in buckets: - print(b) + pass def list_blobs(client, _preconditions, bucket, **_): blobs = client.list_blobs(bucket.name) for b in blobs: - print(b) + pass def bucket_list_blobs(client, _preconditions, bucket, **_): blobs = client.bucket(bucket.name).list_blobs() for b in blobs: - print(b) + pass def get_blob(client, _preconditions, bucket, object): @@ -130,7 +130,7 @@ def bucket_create(client, _preconditions): def upload_from_string(client, _preconditions, bucket): blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) if _preconditions: - blob.upload_from_string("upload from string", if_metageneration_match=0) + blob.upload_from_string("upload from string", if_generation_match=0) else: blob.upload_from_string("upload from string") @@ -141,7 +141,7 @@ def blob_upload_from_file(client, _preconditions, bucket): file_obj = BytesIO() blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) if _preconditions: - blob.upload_from_file(file_obj, if_metageneration_match=0) + blob.upload_from_file(file_obj, if_generation_match=0) else: blob.upload_from_file(file_obj) @@ -150,7 +150,7 @@ def blob_upload_from_filename(client, _preconditions, bucket): blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) with tempfile.NamedTemporaryFile() as temp_f: if _preconditions: - blob.upload_from_filename(temp_f.name, if_metageneration_match=0) + blob.upload_from_filename(temp_f.name, if_generation_match=0) else: blob.upload_from_filename(temp_f.name) @@ -162,7 +162,7 @@ def blobwriter_write(client, _preconditions, bucket): chunk_size = 256 * 1024 blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) if _preconditions: - blob_writer = 
BlobWriter(blob, chunk_size=chunk_size, if_metageneration_match=0) + blob_writer = BlobWriter(blob, chunk_size=chunk_size, if_generation_match=0) blob_writer.write(bytearray(os.urandom(262144))) else: blob_writer = BlobWriter(blob, chunk_size=chunk_size) @@ -184,7 +184,7 @@ def list_notifications(client, _preconditions, bucket, **_): bucket = client.get_bucket(bucket.name) notifications = bucket.list_notifications() for n in notifications: - print(n) + pass def get_notification(client, _preconditions, bucket, notification): @@ -215,7 +215,7 @@ def delete_notification(client, _preconditions, bucket, notification): def list_hmac_keys(client, _preconditions, **_): hmac_keys = client.list_hmac_keys() for k in hmac_keys: - print(k) + pass def delete_bucket(client, _preconditions, bucket, **_): @@ -254,10 +254,8 @@ def bucket_delete_blob(client, _preconditions, bucket, object): def bucket_delete_blobs(client, _preconditions, bucket, object): bucket = client.bucket(bucket.name) - blob_2 = bucket.blob(uuid.uuid4().hex) - blob_2.upload_from_string("foo") - sources = [object, blob_2] - source_generations = [object.generation, blob_2.generation] + sources = [object] + source_generations = [object.generation] if _preconditions: bucket.delete_blobs(sources, if_generation_match=source_generations) else: @@ -492,7 +490,6 @@ def _populate_resource_hmackey(client, resources): def _populate_resources(client, json_resource): resources = {} - for r in json_resource: func = resource_mapping[r] func(client, resources) @@ -596,29 +593,25 @@ def run_retry_stragegy_conformance_test(scenario_id, method, case): r = _create_retry_test(host, method_name, instructions) id = r["id"] except Exception as e: - warnings.warn( - "Error creating retry test for {}: {}".format(method_name, e), - UserWarning, - stacklevel=1, - ) - continue + raise Exception( + "Error creating retry test for {}: {}".format(method_name, e) + ).with_traceback(e.__traceback__) # Populate resources. try: resources = _populate_resources(client, json_resources) except Exception as e: - warnings.warn( - "Error populating resources for {}: {}".format(method_name, e), - UserWarning, - stacklevel=1, - ) - continue + raise Exception( + "Error populating resources for {}: {}".format(method_name, e) + ).with_traceback(e.__traceback__) # Run retry tests on library methods. try: _run_retry_test(host, id, function, precondition_provided, **resources) except Exception as e: - print(e) + logging.exception( + "Caught an exception while running retry instructions\n {}".format(e) + ) success_results = False else: success_results = True From 05fc2613bcb63dad357a615fc373802da50a63d5 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Mon, 12 Jul 2021 12:43:06 -0700 Subject: [PATCH 38/45] revise test case structure using python globals --- tests/conformance/test_conformance.py | 156 +++++++++++++------------- 1 file changed, 77 insertions(+), 79 deletions(-) diff --git a/tests/conformance/test_conformance.py b/tests/conformance/test_conformance.py index 91f8ca623..55c26d681 100644 --- a/tests/conformance/test_conformance.py +++ b/tests/conformance/test_conformance.py @@ -13,11 +13,11 @@ # limitations under the License. 
import os -import pytest import requests import tempfile import uuid import logging +import functools from google.cloud import storage @@ -458,7 +458,9 @@ def _populate_resource_object(client, resources): bucket_name = resources["bucket"].name bucket = client.get_bucket(bucket_name) blob = bucket.blob(uuid.uuid4().hex) - blob.upload_from_string("hello world") + blob.upload_from_string( + "hello world", checksum="crc32c" + ) # add checksum to trigger emulator behavior blob.reload() resources["object"] = blob @@ -542,39 +544,11 @@ def _delete_retry_test(host, id): ######################################################################################################################################## -### Run Conformance Tests for Retry Strategy ########################################################################################### +### Run Test Case for Retry Strategy ################################################################################################### ######################################################################################################################################## -def pytest_generate_tests(metafunc): - for test_data in _CONFORMANCE_TESTS: - scenario_id = test_data["id"] - m = "s{}method".format(scenario_id) - c = "s{}case".format(scenario_id) - s = "s{}".format(scenario_id) - if s in metafunc.fixturenames: - metafunc.parametrize(s, [scenario_id]) - if m in metafunc.fixturenames: - metafunc.parametrize(m, test_data["methods"]) - if c in metafunc.fixturenames: - metafunc.parametrize(c, test_data["cases"]) - - -def test_retry_s1_always_idempotent(s1, s1method, s1case): - run_retry_stragegy_conformance_test(s1, s1method, s1case) - - -def test_retry_s2_conditionally_idempotent_w_preconditions(s2, s2method, s2case): - run_retry_stragegy_conformance_test(s2, s2method, s2case) - - -def run_retry_stragegy_conformance_test(scenario_id, method, case): - host = os.environ.get(STORAGE_EMULATOR_ENV_VAR) - if host is None: - pytest.skip( - "This test must use the testbench emulator; set STORAGE_EMULATOR_HOST to run." - ) - +def run_test_case(scenario_id, method, case, lib_func, host): # Create client to use for setup steps. client = storage.Client(client_options={"api_endpoint": host}) scenario = _CONFORMANCE_TESTS[scenario_id - 1] @@ -584,53 +558,77 @@ def run_retry_stragegy_conformance_test(scenario_id, method, case): method_name = method["name"] instructions = case["instructions"] - if method_name not in method_mapping: - pytest.skip("No tests for operation {}".format(method_name),) - - for function in method_mapping[method_name]: - # Create the retry test in the emulator to handle instructions. - try: - r = _create_retry_test(host, method_name, instructions) - id = r["id"] - except Exception as e: - raise Exception( - "Error creating retry test for {}: {}".format(method_name, e) - ).with_traceback(e.__traceback__) - - # Populate resources. - try: - resources = _populate_resources(client, json_resources) - except Exception as e: - raise Exception( - "Error populating resources for {}: {}".format(method_name, e) - ).with_traceback(e.__traceback__) - - # Run retry tests on library methods. - try: - _run_retry_test(host, id, function, precondition_provided, **resources) - except Exception as e: - logging.exception( - "Caught an exception while running retry instructions\n {}".format(e) - ) - success_results = False - else: - success_results = True - - # Assert expected success for each scenario. 
- assert ( - expect_success == success_results - ), "S{}-{}-{}: expected_success was {}, should be {}".format( - scenario_id, method_name, function.__name__, success_results, expect_success + try: + r = _create_retry_test(host, method_name, instructions) + id = r["id"] + except Exception as e: + raise Exception( + "Error creating retry test for {}: {}".format(method_name, e) + ).with_traceback(e.__traceback__) + + # Populate resources. + try: + resources = _populate_resources(client, json_resources) + except Exception as e: + raise Exception( + "Error populating resources for {}: {}".format(method_name, e) + ).with_traceback(e.__traceback__) + + # Run retry tests on library methods. + try: + _run_retry_test(host, id, lib_func, precondition_provided, **resources) + except Exception as e: + logging.exception( + "Caught an exception while running retry instructions\n {}".format(e) ) + success_results = False + else: + success_results = True - # Verify that all instructions were used up during the test - # (indicates that the client sent the correct requests). - status_response = _get_retry_test(host, id) - assert ( - status_response["completed"] is True - ), "S{}-{}-{}: test not completed; unused instructions:{}".format( - scenario_id, method_name, function.__name__, status_response["instructions"] - ) + # Assert expected success for each scenario. + assert ( + expect_success == success_results + ), "Retry API call expected_success was {}, should be {}".format( + success_results, expect_success + ) + + # Verify that all instructions were used up during the test + # (indicates that the client sent the correct requests). + status_response = _get_retry_test(host, id) + assert ( + status_response["completed"] is True + ), "Retry test not completed; unused instructions:{}".format( + status_response["instructions"] + ) + + # Clean up and close out test in emulator. + _delete_retry_test(host, id) + + +######################################################################################################################################## +### Run Conformance Tests for Retry Strategy ########################################################################################### +######################################################################################################################################## - # Clean up and close out test in emulator. - _delete_retry_test(host, id) +for scenario in _CONFORMANCE_TESTS: + host = os.environ.get(STORAGE_EMULATOR_ENV_VAR) + if host is None: + logging.error( + "This test must use the testbench emulator; set STORAGE_EMULATOR_HOST to run." 
+ ) + break + + id = scenario["id"] + methods = scenario["methods"] + cases = scenario["cases"] + for c in cases: + for m in methods: + method_name = m["name"] + if method_name not in method_mapping: + logging.info("No tests for operation {}".format(method_name)) + continue + + for lib_func in method_mapping[method_name]: + test_name = "test-S{}-{}-{}".format(id, method_name, lib_func.__name__) + globals()[test_name] = functools.partial( + run_test_case, id, m, c, lib_func, host + ) From d70b44bdb2129c3950125ec7feea8fa60403d587 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Mon, 12 Jul 2021 16:29:37 -0700 Subject: [PATCH 39/45] revise library methods naming to start with class name --- tests/conformance/test_conformance.py | 134 ++++++++++++++------------ 1 file changed, 74 insertions(+), 60 deletions(-) diff --git a/tests/conformance/test_conformance.py b/tests/conformance/test_conformance.py index 55c26d681..fd59609bd 100644 --- a/tests/conformance/test_conformance.py +++ b/tests/conformance/test_conformance.py @@ -41,13 +41,13 @@ ######################################################################################################################################## -def list_buckets(client, _preconditions, **_): +def client_list_buckets(client, _preconditions, **_): buckets = client.list_buckets() for b in buckets: pass -def list_blobs(client, _preconditions, bucket, **_): +def client_list_blobs(client, _preconditions, bucket, **_): blobs = client.list_blobs(bucket.name) for b in blobs: pass @@ -59,7 +59,7 @@ def bucket_list_blobs(client, _preconditions, bucket, **_): pass -def get_blob(client, _preconditions, bucket, object): +def bucket_get_blob(client, _preconditions, bucket, object): bucket = client.bucket(bucket.name) bucket.get_blob(object.name) @@ -85,7 +85,7 @@ def blob_download_to_filename(client, _preconditions, bucket, object): blob.download_to_filename(temp_f.name) -def client_download_to_file(client, _preconditions, object, **_): +def client_download_blob_to_file(client, _preconditions, object, **_): with tempfile.NamedTemporaryFile() as temp_f: with open(temp_f.name, "wb") as file_obj: client.download_blob_to_file(object, file_obj) @@ -99,16 +99,16 @@ def blobreader_read(client, _preconditions, bucket, object): blob_reader.read() -def reload_bucket(client, _preconditions, bucket): +def bucket_reload(client, _preconditions, bucket): bucket = client.bucket(bucket.name) bucket.reload() -def get_bucket(client, _preconditions, bucket): +def client_get_bucket(client, _preconditions, bucket): client.get_bucket(bucket.name) -def lookup_bucket(client, _preconditions, bucket): +def client_lookup_bucket(client, _preconditions, bucket): client.lookup_bucket(bucket.name) @@ -117,7 +117,7 @@ def bucket_exists(client, _preconditions, bucket): bucket.exists() -def create_bucket(client, _preconditions): +def client_create_bucket(client, _preconditions): bucket = client.bucket(uuid.uuid4().hex) client.create_bucket(bucket) @@ -127,7 +127,7 @@ def bucket_create(client, _preconditions): bucket.create() -def upload_from_string(client, _preconditions, bucket): +def blob_upload_from_string(client, _preconditions, bucket): blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) if _preconditions: blob.upload_from_string("upload from string", if_generation_match=0) @@ -171,27 +171,29 @@ def blobwriter_write(client, _preconditions, bucket): def blob_create_resumable_upload_session(client, _preconditions, bucket): blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) - 
blob.create_resumable_upload_session() + if _preconditions: + blob.create_resumable_upload_session(if_generation_match=0) + else: + blob.create_resumable_upload_session() -def create_notification(client, _preconditions, bucket): +def notification_create(client, _preconditions, bucket): bucket = client.get_bucket(bucket.name) notification = bucket.notification() notification.create() -def list_notifications(client, _preconditions, bucket, **_): - bucket = client.get_bucket(bucket.name) - notifications = bucket.list_notifications() +def bucket_list_notifications(client, _preconditions, bucket, **_): + notifications = client.bucket(bucket.name).list_notifications() for n in notifications: pass -def get_notification(client, _preconditions, bucket, notification): +def bucket_get_notification(client, _preconditions, bucket, notification): client.bucket(bucket.name).get_notification(notification.notification_id) -def reload_notification(client, _preconditions, bucket, notification): +def notification_reload(client, _preconditions, bucket, notification): notification = client.bucket(bucket.name).notification( notification_id=notification.notification_id ) @@ -205,40 +207,40 @@ def notification_exists(client, _preconditions, bucket, notification): notification.exists() -def delete_notification(client, _preconditions, bucket, notification): +def notification_delete(client, _preconditions, bucket, notification): notification = client.bucket(bucket.name).notification( notification_id=notification.notification_id ) notification.delete() -def list_hmac_keys(client, _preconditions, **_): +def client_list_hmac_keys(client, _preconditions, **_): hmac_keys = client.list_hmac_keys() for k in hmac_keys: pass -def delete_bucket(client, _preconditions, bucket, **_): +def bucket_delete(client, _preconditions, bucket, **_): bucket = client.bucket(bucket.name) bucket.delete(force=True) -def get_iam_policy(client, _preconditions, bucket): +def bucket_get_iam_policy(client, _preconditions, bucket): bucket = client.bucket(bucket.name) bucket.get_iam_policy() -def get_iam_permissions(client, _preconditions, bucket): +def bucket_test_iam_permissions(client, _preconditions, bucket): bucket = client.bucket(bucket.name) permissions = ["storage.buckets.get", "storage.buckets.create"] bucket.test_iam_permissions(permissions) -def get_service_account_email(client, _preconditions): +def client_get_service_account_email(client, _preconditions): client.get_service_account_email() -def make_bucket_public(client, _preconditions, bucket): +def bucket_make_public(client, _preconditions, bucket): bucket = client.bucket(bucket.name) bucket.make_public() @@ -271,14 +273,14 @@ def blob_delete(client, _preconditions, bucket, object): # TODO(cathyo@): fix emulator issue and assign metageneration to buckets.insert -def lock_retention_policy(client, _preconditions, bucket): +def bucket_lock_retention_policy(client, _preconditions, bucket): bucket2 = client.bucket(bucket.name) bucket2.retention_period = 60 bucket2.patch() bucket2.lock_retention_policy() -def patch_bucket(client, _preconditions, bucket): +def bucket_patch(client, _preconditions, bucket): bucket = client.get_bucket("bucket") metageneration = bucket.metageneration bucket.storage_class = "COLDLINE" @@ -288,7 +290,7 @@ def patch_bucket(client, _preconditions, bucket): bucket.patch() -def update_bucket(client, _preconditions, bucket): +def bucket_update(client, _preconditions, bucket): bucket = client.get_bucket("bucket") metageneration = bucket.metageneration bucket._properties 
= {"storageClass": "STANDARD"} @@ -298,7 +300,7 @@ def update_bucket(client, _preconditions, bucket): bucket.update() -def patch_blob(client, _preconditions, bucket, object): +def blob_patch(client, _preconditions, bucket, object): blob = client.bucket(bucket.name).blob(object.name) blob.metadata = {"foo": "bar"} if _preconditions: @@ -307,7 +309,7 @@ def patch_blob(client, _preconditions, bucket, object): blob.patch() -def update_blob(client, _preconditions, bucket, object): +def blob_update(client, _preconditions, bucket, object): blob = client.bucket(bucket.name).blob(object.name) blob.metadata = {"foo": "bar"} if _preconditions: @@ -316,7 +318,7 @@ def update_blob(client, _preconditions, bucket, object): blob.update() -def copy_blob(client, _preconditions, bucket, object): +def bucket_copy_blob(client, _preconditions, bucket, object): bucket = client.bucket(bucket.name) destination = client.bucket("bucket") if _preconditions: @@ -327,21 +329,22 @@ def copy_blob(client, _preconditions, bucket, object): bucket.copy_blob(object, destination) -def rename_blob(client, _preconditions, bucket, object): +def bucket_rename_blob(client, _preconditions, bucket, object): bucket = client.bucket(bucket.name) + blob = bucket.blob(object.name) new_name = uuid.uuid4().hex if _preconditions: bucket.rename_blob( - object, + blob, new_name, if_generation_match=0, if_source_generation_match=object.generation, ) else: - bucket.rename_blob(object, new_name) + bucket.rename_blob(blob, new_name) -def rewrite_blob(client, _preconditions, bucket, object): +def blob_rewrite(client, _preconditions, bucket, object): new_blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) new_blob.metadata = {"foo": "bar"} if _preconditions: @@ -359,7 +362,7 @@ def blob_update_storage_class(client, _preconditions, bucket, object): blob.update_storage_class(storage_class) -def compose_blob(client, _preconditions, bucket, object): +def blob_compose(client, _preconditions, bucket, object): blob = client.bucket(bucket.name).blob(object.name) blob_2 = bucket.blob(uuid.uuid4().hex) blob_2.upload_from_string("foo") @@ -396,51 +399,62 @@ def bucket_set_iam_policy(client, _preconditions, bucket): # read or just a metadata get). 
method_mapping = { - "storage.buckets.delete": [delete_bucket], # S1 start - "storage.buckets.get": [get_bucket, reload_bucket, lookup_bucket, bucket_exists], - "storage.buckets.getIamPolicy": [get_iam_policy], - "storage.buckets.insert": [create_bucket, bucket_create], - "storage.buckets.list": [list_buckets], - "storage.buckets.lockRententionPolicy": [], # lock_retention_policy - "storage.buckets.testIamPermission": [get_iam_permissions], - "storage.notifications.delete": [delete_notification], + "storage.buckets.delete": [bucket_delete], # S1 start + "storage.buckets.get": [ + client_get_bucket, + bucket_reload, + client_lookup_bucket, + bucket_exists, + ], + "storage.buckets.getIamPolicy": [bucket_get_iam_policy], + "storage.buckets.insert": [client_create_bucket, bucket_create], + "storage.buckets.list": [client_list_buckets], + "storage.buckets.lockRententionPolicy": [], # bucket_lock_retention_policy + "storage.buckets.testIamPermission": [bucket_test_iam_permissions], + "storage.notifications.delete": [notification_delete], "storage.notifications.get": [ - get_notification, + bucket_get_notification, notification_exists, - reload_notification, + notification_reload, ], - "storage.notifications.list": [list_notifications], + "storage.notifications.list": [bucket_list_notifications], "storage.objects.get": [ - get_blob, + bucket_get_blob, blob_exists, - client_download_to_file, + client_download_blob_to_file, blob_download_to_filename, blob_download_as_bytes, blob_download_as_text, blobreader_read, ], - "storage.objects.list": [list_blobs, bucket_list_blobs, delete_bucket], # S1 end - "storage.buckets.patch": [patch_bucket], # S2 start - "storage.buckets.setIamPolicy": [], # bucket_set_iam_policy - "storage.buckets.update": [update_bucket], - "storage.objects.compose": [compose_blob], - "storage.objects.copy": [copy_blob, rename_blob], + "storage.objects.list": [ + client_list_blobs, + bucket_list_blobs, + bucket_delete, + ], # S1 end + "storage.buckets.patch": [bucket_patch], # S2 start + "storage.buckets.setIamPolicy": [bucket_set_iam_policy], + "storage.buckets.update": [bucket_update], + "storage.objects.compose": [blob_compose], + "storage.objects.copy": [bucket_copy_blob, bucket_rename_blob], "storage.objects.delete": [ bucket_delete_blob, bucket_delete_blobs, - delete_bucket, + bucket_delete, blob_delete, - ], # rename_blob + bucket_rename_blob, + ], "storage.objects.insert": [ - upload_from_string, + blob_upload_from_string, blob_upload_from_file, blob_upload_from_filename, blobwriter_write, + blob_create_resumable_upload_session, ], - "storage.objects.patch": [patch_blob], - "storage.objects.rewrite": [rewrite_blob, blob_update_storage_class], - "storage.objects.update": [update_blob], # S2 end - "storage.notifications.insert": [create_notification], # S4 + "storage.objects.patch": [blob_patch], + "storage.objects.rewrite": [blob_rewrite, blob_update_storage_class], + "storage.objects.update": [blob_update], # S2 end + "storage.notifications.insert": [notification_create], # S4 } ######################################################################################################################################## From 0e8a111ac7605973883ab97cbf10519d1263b088 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Mon, 12 Jul 2021 17:38:23 -0700 Subject: [PATCH 40/45] unify library method signatures --- tests/conformance/test_conformance.py | 179 ++++++++++++++++---------- 1 file changed, 109 insertions(+), 70 deletions(-) diff --git a/tests/conformance/test_conformance.py 
b/tests/conformance/test_conformance.py index fd59609bd..9e28fa89e 100644 --- a/tests/conformance/test_conformance.py +++ b/tests/conformance/test_conformance.py @@ -47,87 +47,103 @@ def client_list_buckets(client, _preconditions, **_): pass -def client_list_blobs(client, _preconditions, bucket, **_): +def client_list_blobs(client, _preconditions, **resources): + bucket = resources.get("bucket") blobs = client.list_blobs(bucket.name) for b in blobs: pass -def bucket_list_blobs(client, _preconditions, bucket, **_): +def bucket_list_blobs(client, _preconditions, **resources): + bucket = resources.get("bucket") blobs = client.bucket(bucket.name).list_blobs() for b in blobs: pass -def bucket_get_blob(client, _preconditions, bucket, object): +def bucket_get_blob(client, _preconditions, **resources): + bucket = resources.get("bucket") + object = resources.get("object") bucket = client.bucket(bucket.name) bucket.get_blob(object.name) -def blob_exists(client, _preconditions, bucket, object): +def blob_exists(client, _preconditions, **resources): + bucket = resources.get("bucket") + object = resources.get("object") blob = client.bucket(bucket.name).blob(object.name) blob.exists() -def blob_download_as_bytes(client, _preconditions, bucket, object): +def blob_download_as_bytes(client, _preconditions, **resources): + bucket = resources.get("bucket") + object = resources.get("object") blob = client.bucket(bucket.name).blob(object.name) blob.download_as_bytes() -def blob_download_as_text(client, _preconditions, bucket, object): +def blob_download_as_text(client, _preconditions, **resources): + bucket = resources.get("bucket") + object = resources.get("object") blob = client.bucket(bucket.name).blob(object.name) blob.download_as_text() -def blob_download_to_filename(client, _preconditions, bucket, object): +def blob_download_to_filename(client, _preconditions, **resources): + bucket = resources.get("bucket") + object = resources.get("object") blob = client.bucket(bucket.name).blob(object.name) with tempfile.NamedTemporaryFile() as temp_f: blob.download_to_filename(temp_f.name) -def client_download_blob_to_file(client, _preconditions, object, **_): +def client_download_blob_to_file(client, _preconditions, **resources): + object = resources.get("object") with tempfile.NamedTemporaryFile() as temp_f: with open(temp_f.name, "wb") as file_obj: client.download_blob_to_file(object, file_obj) -def blobreader_read(client, _preconditions, bucket, object): +def blobreader_read(client, _preconditions, **resources): from google.cloud.storage.fileio import BlobReader + bucket = resources.get("bucket") + object = resources.get("object") blob = client.bucket(bucket.name).blob(object.name) blob_reader = BlobReader(blob) blob_reader.read() -def bucket_reload(client, _preconditions, bucket): - bucket = client.bucket(bucket.name) +def bucket_reload(client, _preconditions, **resources): + bucket = client.bucket(resources.get("bucket").name) bucket.reload() -def client_get_bucket(client, _preconditions, bucket): - client.get_bucket(bucket.name) +def client_get_bucket(client, _preconditions, **resources): + client.get_bucket(resources.get("bucket").name) -def client_lookup_bucket(client, _preconditions, bucket): - client.lookup_bucket(bucket.name) +def client_lookup_bucket(client, _preconditions, **resources): + client.lookup_bucket(resources.get("bucket").name) -def bucket_exists(client, _preconditions, bucket): - bucket = client.bucket(bucket.name) +def bucket_exists(client, _preconditions, **resources): + bucket = 
client.bucket(resources.get("bucket").name) bucket.exists() -def client_create_bucket(client, _preconditions): +def client_create_bucket(client, _preconditions, **_): bucket = client.bucket(uuid.uuid4().hex) client.create_bucket(bucket) -def bucket_create(client, _preconditions): +def bucket_create(client, _preconditions, **_): bucket = client.bucket(uuid.uuid4().hex) bucket.create() -def blob_upload_from_string(client, _preconditions, bucket): +def blob_upload_from_string(client, _preconditions, **resources): + bucket = resources.get("bucket") blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) if _preconditions: blob.upload_from_string("upload from string", if_generation_match=0) @@ -135,10 +151,11 @@ def blob_upload_from_string(client, _preconditions, bucket): blob.upload_from_string("upload from string") -def blob_upload_from_file(client, _preconditions, bucket): +def blob_upload_from_file(client, _preconditions, **resources): from io import BytesIO file_obj = BytesIO() + bucket = resources.get("bucket") blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) if _preconditions: blob.upload_from_file(file_obj, if_generation_match=0) @@ -146,7 +163,8 @@ def blob_upload_from_file(client, _preconditions, bucket): blob.upload_from_file(file_obj) -def blob_upload_from_filename(client, _preconditions, bucket): +def blob_upload_from_filename(client, _preconditions, **resources): + bucket = resources.get("bucket") blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) with tempfile.NamedTemporaryFile() as temp_f: if _preconditions: @@ -155,11 +173,12 @@ def blob_upload_from_filename(client, _preconditions, bucket): blob.upload_from_filename(temp_f.name) -def blobwriter_write(client, _preconditions, bucket): +def blobwriter_write(client, _preconditions, **resources): import os from google.cloud.storage.fileio import BlobWriter chunk_size = 256 * 1024 + bucket = resources.get("bucket") blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) if _preconditions: blob_writer = BlobWriter(blob, chunk_size=chunk_size, if_generation_match=0) @@ -169,7 +188,8 @@ def blobwriter_write(client, _preconditions, bucket): blob_writer.write(bytearray(os.urandom(262144))) -def blob_create_resumable_upload_session(client, _preconditions, bucket): +def blob_create_resumable_upload_session(client, _preconditions, **resources): + bucket = resources.get("bucket") blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) if _preconditions: blob.create_resumable_upload_session(if_generation_match=0) @@ -177,39 +197,42 @@ def blob_create_resumable_upload_session(client, _preconditions, bucket): blob.create_resumable_upload_session() -def notification_create(client, _preconditions, bucket): - bucket = client.get_bucket(bucket.name) +def notification_create(client, _preconditions, **resources): + bucket = client.get_bucket(resources.get("bucket").name) notification = bucket.notification() notification.create() -def bucket_list_notifications(client, _preconditions, bucket, **_): +def bucket_list_notifications(client, _preconditions, **resources): + bucket = resources.get("bucket") notifications = client.bucket(bucket.name).list_notifications() for n in notifications: pass -def bucket_get_notification(client, _preconditions, bucket, notification): +def bucket_get_notification(client, _preconditions, **resources): + bucket = resources.get("bucket") + notification = resources.get("notification") client.bucket(bucket.name).get_notification(notification.notification_id) -def notification_reload(client, _preconditions, 
bucket, notification): - notification = client.bucket(bucket.name).notification( - notification_id=notification.notification_id +def notification_reload(client, _preconditions, **resources): + notification = client.bucket(resources.get("bucket").name).notification( + notification_id=resources.get("notification").notification_id ) notification.reload() -def notification_exists(client, _preconditions, bucket, notification): - notification = client.bucket(bucket.name).notification( - notification_id=notification.notification_id +def notification_exists(client, _preconditions, **resources): + notification = client.bucket(resources.get("bucket").name).notification( + notification_id=resources.get("notification").notification_id ) notification.exists() -def notification_delete(client, _preconditions, bucket, notification): - notification = client.bucket(bucket.name).notification( - notification_id=notification.notification_id +def notification_delete(client, _preconditions, **resources): + notification = client.bucket(resources.get("bucket").name).notification( + notification_id=resources.get("notification").notification_id ) notification.delete() @@ -220,33 +243,34 @@ def client_list_hmac_keys(client, _preconditions, **_): pass -def bucket_delete(client, _preconditions, bucket, **_): - bucket = client.bucket(bucket.name) +def bucket_delete(client, _preconditions, **resources): + bucket = client.bucket(resources.get("bucket").name) bucket.delete(force=True) -def bucket_get_iam_policy(client, _preconditions, bucket): - bucket = client.bucket(bucket.name) +def bucket_get_iam_policy(client, _preconditions, **resources): + bucket = client.bucket(resources.get("bucket").name) bucket.get_iam_policy() -def bucket_test_iam_permissions(client, _preconditions, bucket): - bucket = client.bucket(bucket.name) +def bucket_test_iam_permissions(client, _preconditions, **resources): + bucket = client.bucket(resources.get("bucket").name) permissions = ["storage.buckets.get", "storage.buckets.create"] bucket.test_iam_permissions(permissions) -def client_get_service_account_email(client, _preconditions): +def client_get_service_account_email(client, _preconditions, **_): client.get_service_account_email() -def bucket_make_public(client, _preconditions, bucket): - bucket = client.bucket(bucket.name) +def bucket_make_public(client, _preconditions, **resources): + bucket = client.bucket(resources.get("bucket").name) bucket.make_public() -def bucket_delete_blob(client, _preconditions, bucket, object): - bucket = client.bucket(bucket.name) +def bucket_delete_blob(client, _preconditions, **resources): + object = resources.get("object") + bucket = client.bucket(resources.get("bucket").name) if _preconditions: generation = object.generation bucket.delete_blob(object.name, if_generation_match=generation) @@ -254,8 +278,9 @@ def bucket_delete_blob(client, _preconditions, bucket, object): bucket.delete_blob(object.name) -def bucket_delete_blobs(client, _preconditions, bucket, object): - bucket = client.bucket(bucket.name) +def bucket_delete_blobs(client, _preconditions, **resources): + object = resources.get("object") + bucket = client.bucket(resources.get("bucket").name) sources = [object] source_generations = [object.generation] if _preconditions: @@ -264,7 +289,9 @@ def bucket_delete_blobs(client, _preconditions, bucket, object): bucket.delete_blobs(sources) -def blob_delete(client, _preconditions, bucket, object): +def blob_delete(client, _preconditions, **resources): + bucket = resources.get("bucket") + object = 
resources.get("object") blob = client.bucket(bucket.name).blob(object.name) if _preconditions: blob.delete(if_generation_match=object.generation) @@ -273,14 +300,14 @@ def blob_delete(client, _preconditions, bucket, object): # TODO(cathyo@): fix emulator issue and assign metageneration to buckets.insert -def bucket_lock_retention_policy(client, _preconditions, bucket): - bucket2 = client.bucket(bucket.name) - bucket2.retention_period = 60 - bucket2.patch() - bucket2.lock_retention_policy() +def bucket_lock_retention_policy(client, _preconditions, **resources): + bucket = client.bucket(resources.get("bucket").name) + bucket.retention_period = 60 + bucket.patch() + bucket.lock_retention_policy() -def bucket_patch(client, _preconditions, bucket): +def bucket_patch(client, _preconditions, **_): bucket = client.get_bucket("bucket") metageneration = bucket.metageneration bucket.storage_class = "COLDLINE" @@ -290,7 +317,7 @@ def bucket_patch(client, _preconditions, bucket): bucket.patch() -def bucket_update(client, _preconditions, bucket): +def bucket_update(client, _preconditions, **resources): bucket = client.get_bucket("bucket") metageneration = bucket.metageneration bucket._properties = {"storageClass": "STANDARD"} @@ -300,7 +327,9 @@ def bucket_update(client, _preconditions, bucket): bucket.update() -def blob_patch(client, _preconditions, bucket, object): +def blob_patch(client, _preconditions, **resources): + bucket = resources.get("bucket") + object = resources.get("object") blob = client.bucket(bucket.name).blob(object.name) blob.metadata = {"foo": "bar"} if _preconditions: @@ -309,7 +338,9 @@ def blob_patch(client, _preconditions, bucket, object): blob.patch() -def blob_update(client, _preconditions, bucket, object): +def blob_update(client, _preconditions, **resources): + bucket = resources.get("bucket") + object = resources.get("object") blob = client.bucket(bucket.name).blob(object.name) blob.metadata = {"foo": "bar"} if _preconditions: @@ -318,8 +349,9 @@ def blob_update(client, _preconditions, bucket, object): blob.update() -def bucket_copy_blob(client, _preconditions, bucket, object): - bucket = client.bucket(bucket.name) +def bucket_copy_blob(client, _preconditions, **resources): + object = resources.get("object") + bucket = client.bucket(resources.get("bucket").name) destination = client.bucket("bucket") if _preconditions: bucket.copy_blob( @@ -329,9 +361,10 @@ def bucket_copy_blob(client, _preconditions, bucket, object): bucket.copy_blob(object, destination) -def bucket_rename_blob(client, _preconditions, bucket, object): - bucket = client.bucket(bucket.name) - blob = bucket.blob(object.name) +def bucket_rename_blob(client, _preconditions, **resources): + object = resources.get("object") + bucket = client.bucket(resources.get("bucket").name) + blob = bucket.blob(resources.get("object").name) new_name = uuid.uuid4().hex if _preconditions: bucket.rename_blob( @@ -344,7 +377,9 @@ def bucket_rename_blob(client, _preconditions, bucket, object): bucket.rename_blob(blob, new_name) -def blob_rewrite(client, _preconditions, bucket, object): +def blob_rewrite(client, _preconditions, **resources): + bucket = resources.get("bucket") + object = resources.get("object") new_blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) new_blob.metadata = {"foo": "bar"} if _preconditions: @@ -353,7 +388,9 @@ def blob_rewrite(client, _preconditions, bucket, object): new_blob.rewrite(object) -def blob_update_storage_class(client, _preconditions, bucket, object): +def blob_update_storage_class(client, 
_preconditions, **resources): + bucket = resources.get("bucket") + object = resources.get("object") blob = client.bucket(bucket.name).blob(object.name) storage_class = "STANDARD" if _preconditions: @@ -362,7 +399,9 @@ def blob_update_storage_class(client, _preconditions, bucket, object): blob.update_storage_class(storage_class) -def blob_compose(client, _preconditions, bucket, object): +def blob_compose(client, _preconditions, **resources): + bucket = resources.get("bucket") + object = resources.get("object") blob = client.bucket(bucket.name).blob(object.name) blob_2 = bucket.blob(uuid.uuid4().hex) blob_2.upload_from_string("foo") @@ -374,8 +413,8 @@ def blob_compose(client, _preconditions, bucket, object): blob.compose(sources) -def bucket_set_iam_policy(client, _preconditions, bucket): - bucket = client.get_bucket(bucket.name) +def bucket_set_iam_policy(client, _preconditions, **resources): + bucket = client.get_bucket(resources.get("bucket").name) role = "roles/storage.objectViewer" member = _CONF_TEST_SERVICE_ACCOUNT_EMAIL From efc75cc42c893e34d7acad2e847d109a7e7bfc96 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Tue, 13 Jul 2021 13:27:18 -0700 Subject: [PATCH 41/45] cleanup code --- noxfile.py | 11 - .../conformance/retry_strategy_test_data.json | 46 +--- tests/conformance/test_conformance.py | 237 +++++++++--------- 3 files changed, 120 insertions(+), 174 deletions(-) diff --git a/noxfile.py b/noxfile.py index e9f91367c..47642a3c2 100644 --- a/noxfile.py +++ b/noxfile.py @@ -173,19 +173,8 @@ def conformance(session): if not conformance_test_exists and not conformance_test_folder_exists: session.skip("Conformance tests were not found") - # Install all test dependencies, then install this package into the - # virtualenv's dist-packages. - # 2021-05-06: defer installing 'google-cloud-*' to after this package, - # in order to work around Python 2.7 googolapis-common-protos - # issue. session.install("pytest",) session.install("-e", ".") - session.install( - "google-cloud-testutils", - "google-cloud-iam", - "google-cloud-pubsub < 2.0.0", - "google-cloud-kms < 2.0dev", - ) # Run py.test against the conformance tests. 
if conformance_test_exists: diff --git a/tests/conformance/retry_strategy_test_data.json b/tests/conformance/retry_strategy_test_data.json index 0ebf8adef..c7ca3e52c 100644 --- a/tests/conformance/retry_strategy_test_data.json +++ b/tests/conformance/retry_strategy_test_data.json @@ -239,33 +239,10 @@ "description": "conditionally idempotent no retries when precondition is absent", "cases": [ { - "instructions": [ - "return-503" - ] - } - ], - "methods": [ - { - "name": "storage.buckets.patch", - "resources": [ - "BUCKET" - ] - }, - { - "name": "storage.objects.patch", - "resources": [ - "BUCKET", - "OBJECT" - ] - }, - { - "name": "storage.objects.update", - "resources": [ - "BUCKET", - "OBJECT" - ] + "instructions": [] } ], + "methods": [], "preconditionProvided": false, "expectSuccess": false }, @@ -274,25 +251,10 @@ "description": "non idempotent", "cases": [ { - "instructions": [ - "return-503" - ] - } - ], - "methods": [ - { - "name": "storage.notifications.insert", - "resources": [ - "BUCKET" - ] - }, - { - "name": "storage.bucket_acl.patch", - "resources": [ - "BUCKET" - ] + "instructions": [] } ], + "methods": [], "preconditionProvided": false, "expectSuccess": false } diff --git a/tests/conformance/test_conformance.py b/tests/conformance/test_conformance.py index 9e28fa89e..f7a534167 100644 --- a/tests/conformance/test_conformance.py +++ b/tests/conformance/test_conformance.py @@ -36,31 +36,12 @@ "my-service-account@my-project-id.iam.gserviceaccount.com" ) + ######################################################################################################################################## ### Library methods for mapping ######################################################################################################## ######################################################################################################################################## -def client_list_buckets(client, _preconditions, **_): - buckets = client.list_buckets() - for b in buckets: - pass - - -def client_list_blobs(client, _preconditions, **resources): - bucket = resources.get("bucket") - blobs = client.list_blobs(bucket.name) - for b in blobs: - pass - - -def bucket_list_blobs(client, _preconditions, **resources): - bucket = resources.get("bucket") - blobs = client.bucket(bucket.name).list_blobs() - for b in blobs: - pass - - def bucket_get_blob(client, _preconditions, **resources): bucket = resources.get("bucket") object = resources.get("object") @@ -114,6 +95,25 @@ def blobreader_read(client, _preconditions, **resources): blob_reader.read() +def client_list_blobs(client, _preconditions, **resources): + bucket = resources.get("bucket") + blobs = client.list_blobs(bucket.name) + for b in blobs: + pass + + +def bucket_list_blobs(client, _preconditions, **resources): + bucket = resources.get("bucket") + blobs = client.bucket(bucket.name).list_blobs() + for b in blobs: + pass + + +def bucket_delete(client, _preconditions, **resources): + bucket = client.bucket(resources.get("bucket").name) + bucket.delete(force=True) + + def bucket_reload(client, _preconditions, **resources): bucket = client.bucket(resources.get("bucket").name) bucket.reload() @@ -142,59 +142,29 @@ def bucket_create(client, _preconditions, **_): bucket.create() -def blob_upload_from_string(client, _preconditions, **resources): - bucket = resources.get("bucket") - blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) - if _preconditions: - blob.upload_from_string("upload from string", if_generation_match=0) - else: - 
blob.upload_from_string("upload from string") - - -def blob_upload_from_file(client, _preconditions, **resources): - from io import BytesIO - - file_obj = BytesIO() - bucket = resources.get("bucket") - blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) - if _preconditions: - blob.upload_from_file(file_obj, if_generation_match=0) - else: - blob.upload_from_file(file_obj) +def client_list_buckets(client, _preconditions, **_): + buckets = client.list_buckets() + for b in buckets: + pass -def blob_upload_from_filename(client, _preconditions, **resources): - bucket = resources.get("bucket") - blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) - with tempfile.NamedTemporaryFile() as temp_f: - if _preconditions: - blob.upload_from_filename(temp_f.name, if_generation_match=0) - else: - blob.upload_from_filename(temp_f.name) - +def bucket_get_iam_policy(client, _preconditions, **resources): + bucket = client.bucket(resources.get("bucket").name) + bucket.get_iam_policy() -def blobwriter_write(client, _preconditions, **resources): - import os - from google.cloud.storage.fileio import BlobWriter - chunk_size = 256 * 1024 - bucket = resources.get("bucket") - blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) - if _preconditions: - blob_writer = BlobWriter(blob, chunk_size=chunk_size, if_generation_match=0) - blob_writer.write(bytearray(os.urandom(262144))) - else: - blob_writer = BlobWriter(blob, chunk_size=chunk_size) - blob_writer.write(bytearray(os.urandom(262144))) +def bucket_test_iam_permissions(client, _preconditions, **resources): + bucket = client.bucket(resources.get("bucket").name) + permissions = ["storage.buckets.get", "storage.buckets.create"] + bucket.test_iam_permissions(permissions) -def blob_create_resumable_upload_session(client, _preconditions, **resources): - bucket = resources.get("bucket") - blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) - if _preconditions: - blob.create_resumable_upload_session(if_generation_match=0) - else: - blob.create_resumable_upload_session() +# TODO(cathyo@): fix emulator issue and assign metageneration to buckets.insert +def bucket_lock_retention_policy(client, _preconditions, **resources): + bucket = client.bucket(resources.get("bucket").name) + bucket.retention_period = 60 + bucket.patch() + bucket.lock_retention_policy() def notification_create(client, _preconditions, **resources): @@ -243,29 +213,41 @@ def client_list_hmac_keys(client, _preconditions, **_): pass -def bucket_delete(client, _preconditions, **resources): - bucket = client.bucket(resources.get("bucket").name) - bucket.delete(force=True) - +def client_get_service_account_email(client, _preconditions, **_): + client.get_service_account_email() -def bucket_get_iam_policy(client, _preconditions, **resources): - bucket = client.bucket(resources.get("bucket").name) - bucket.get_iam_policy() +def bucket_patch(client, _preconditions, **_): + bucket = client.get_bucket("bucket") + metageneration = bucket.metageneration + bucket.storage_class = "COLDLINE" + if _preconditions: + bucket.patch(if_metageneration_match=metageneration) + else: + bucket.patch() -def bucket_test_iam_permissions(client, _preconditions, **resources): - bucket = client.bucket(resources.get("bucket").name) - permissions = ["storage.buckets.get", "storage.buckets.create"] - bucket.test_iam_permissions(permissions) +def bucket_update(client, _preconditions, **resources): + bucket = client.get_bucket("bucket") + metageneration = bucket.metageneration + bucket._properties = {"storageClass": 
"STANDARD"} + if _preconditions: + bucket.update(if_metageneration_match=metageneration) + else: + bucket.update() -def client_get_service_account_email(client, _preconditions, **_): - client.get_service_account_email() +def bucket_set_iam_policy(client, _preconditions, **resources): + bucket = client.get_bucket(resources.get("bucket").name) + role = "roles/storage.objectViewer" + member = _CONF_TEST_SERVICE_ACCOUNT_EMAIL -def bucket_make_public(client, _preconditions, **resources): - bucket = client.bucket(resources.get("bucket").name) - bucket.make_public() + policy = bucket.get_iam_policy(requested_policy_version=3) + policy.bindings.append({"role": role, "members": {member}}) + if _preconditions: + bucket.set_iam_policy(policy) + else: + bucket.set_iam_policy(policy) def bucket_delete_blob(client, _preconditions, **resources): @@ -299,34 +281,6 @@ def blob_delete(client, _preconditions, **resources): blob.delete() -# TODO(cathyo@): fix emulator issue and assign metageneration to buckets.insert -def bucket_lock_retention_policy(client, _preconditions, **resources): - bucket = client.bucket(resources.get("bucket").name) - bucket.retention_period = 60 - bucket.patch() - bucket.lock_retention_policy() - - -def bucket_patch(client, _preconditions, **_): - bucket = client.get_bucket("bucket") - metageneration = bucket.metageneration - bucket.storage_class = "COLDLINE" - if _preconditions: - bucket.patch(if_metageneration_match=metageneration) - else: - bucket.patch() - - -def bucket_update(client, _preconditions, **resources): - bucket = client.get_bucket("bucket") - metageneration = bucket.metageneration - bucket._properties = {"storageClass": "STANDARD"} - if _preconditions: - bucket.update(if_metageneration_match=metageneration) - else: - bucket.update() - - def blob_patch(client, _preconditions, **resources): bucket = resources.get("bucket") object = resources.get("object") @@ -406,25 +360,66 @@ def blob_compose(client, _preconditions, **resources): blob_2 = bucket.blob(uuid.uuid4().hex) blob_2.upload_from_string("foo") sources = [blob_2] - if _preconditions: blob.compose(sources, if_generation_match=object.generation) else: blob.compose(sources) -def bucket_set_iam_policy(client, _preconditions, **resources): - bucket = client.get_bucket(resources.get("bucket").name) - role = "roles/storage.objectViewer" - member = _CONF_TEST_SERVICE_ACCOUNT_EMAIL +def blob_upload_from_string(client, _preconditions, **resources): + bucket = resources.get("bucket") + blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) + if _preconditions: + blob.upload_from_string("upload from string", if_generation_match=0) + else: + blob.upload_from_string("upload from string") - policy = bucket.get_iam_policy(requested_policy_version=3) - policy.bindings.append({"role": role, "members": {member}}) +def blob_upload_from_file(client, _preconditions, **resources): + from io import BytesIO + + file_obj = BytesIO() + bucket = resources.get("bucket") + blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) if _preconditions: - bucket.set_iam_policy(policy) + blob.upload_from_file(file_obj, if_generation_match=0) else: - bucket.set_iam_policy(policy) + blob.upload_from_file(file_obj) + + +def blob_upload_from_filename(client, _preconditions, **resources): + bucket = resources.get("bucket") + blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) + + with tempfile.NamedTemporaryFile() as temp_f: + if _preconditions: + blob.upload_from_filename(temp_f.name, if_generation_match=0) + else: + 
blob.upload_from_filename(temp_f.name) + + +def blobwriter_write(client, _preconditions, **resources): + import os + from google.cloud.storage.fileio import BlobWriter + + chunk_size = 256 * 1024 + bucket = resources.get("bucket") + blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) + if _preconditions: + blob_writer = BlobWriter(blob, chunk_size=chunk_size, if_generation_match=0) + blob_writer.write(bytearray(os.urandom(262144))) + else: + blob_writer = BlobWriter(blob, chunk_size=chunk_size) + blob_writer.write(bytearray(os.urandom(262144))) + + +def blob_create_resumable_upload_session(client, _preconditions, **resources): + bucket = resources.get("bucket") + blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) + if _preconditions: + blob.create_resumable_upload_session(if_generation_match=0) + else: + blob.create_resumable_upload_session() ######################################################################################################################################## @@ -448,7 +443,7 @@ def bucket_set_iam_policy(client, _preconditions, **resources): "storage.buckets.getIamPolicy": [bucket_get_iam_policy], "storage.buckets.insert": [client_create_bucket, bucket_create], "storage.buckets.list": [client_list_buckets], - "storage.buckets.lockRententionPolicy": [], # bucket_lock_retention_policy + "storage.buckets.lockRententionPolicy": [], "storage.buckets.testIamPermission": [bucket_test_iam_permissions], "storage.notifications.delete": [notification_delete], "storage.notifications.get": [ From 1ff387c0b9b698e49fdbba8f89d8cc93c7585d8b Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Wed, 14 Jul 2021 13:29:06 -0700 Subject: [PATCH 42/45] use pytest fixtures to populate resources --- tests/conformance/test_conformance.py | 117 +++++++++++++++----------- 1 file changed, 67 insertions(+), 50 deletions(-) diff --git a/tests/conformance/test_conformance.py b/tests/conformance/test_conformance.py index f7a534167..f42e5f492 100644 --- a/tests/conformance/test_conformance.py +++ b/tests/conformance/test_conformance.py @@ -18,6 +18,7 @@ import uuid import logging import functools +import pytest from google.cloud import storage @@ -36,7 +37,6 @@ "my-service-account@my-project-id.iam.gserviceaccount.com" ) - ######################################################################################################################################## ### Library methods for mapping ######################################################################################################## ######################################################################################################################################## @@ -488,63 +488,71 @@ def blob_create_resumable_upload_session(client, _preconditions, **resources): "storage.objects.patch": [blob_patch], "storage.objects.rewrite": [blob_rewrite, blob_update_storage_class], "storage.objects.update": [blob_update], # S2 end - "storage.notifications.insert": [notification_create], # S4 } + ######################################################################################################################################## -### Helper Methods for Populating Resources ############################################################################################ +### Pytest Fixtures for Populating Resources ############################################################################################ ######################################################################################################################################## -def 
_populate_resource_bucket(client, resources): +@pytest.fixture +def client(): + host = os.environ.get(STORAGE_EMULATOR_ENV_VAR) + client = storage.Client(client_options={"api_endpoint": host}) + return client + + +@pytest.fixture +def bucket(client): bucket = client.bucket(uuid.uuid4().hex) client.create_bucket(bucket) - resources["bucket"] = bucket + yield bucket + try: + bucket.delete(force=True) + except Exception: # in cases where resources are deleted within the test + pass -def _populate_resource_object(client, resources): - bucket_name = resources["bucket"].name - bucket = client.get_bucket(bucket_name) +@pytest.fixture +def object(client, bucket): + bucket = client.get_bucket(bucket.name) blob = bucket.blob(uuid.uuid4().hex) - blob.upload_from_string( - "hello world", checksum="crc32c" - ) # add checksum to trigger emulator behavior + blob.upload_from_string("hello world", checksum="crc32c") blob.reload() - resources["object"] = blob + yield blob + try: + blob.delete() + except Exception: # in cases where resources are deleted within the test + pass -def _populate_resource_notification(client, resources): - bucket_name = resources["bucket"].name - bucket = client.get_bucket(bucket_name) +@pytest.fixture +def notification(client, bucket): + bucket = client.get_bucket(bucket.name) notification = bucket.notification() notification.create() notification.reload() - resources["notification"] = notification + yield notification + try: + notification.delete() + except Exception: # in cases where resources are deleted within the test + pass -def _populate_resource_hmackey(client, resources): +@pytest.fixture +def hmac_key(client): hmac_key, secret = client.create_hmac_key( service_account_email=_CONF_TEST_SERVICE_ACCOUNT_EMAIL, project_id=_CONF_TEST_PROJECT_ID, ) - resources["hmac_key"] = hmac_key - - -resource_mapping = { - "BUCKET": _populate_resource_bucket, - "OBJECT": _populate_resource_object, - "NOTIFICATION": _populate_resource_notification, - "HMAC_KEY": _populate_resource_hmackey, -} - - -def _populate_resources(client, json_resource): - resources = {} - for r in json_resource: - func = resource_mapping[r] - func(client, resources) - - return resources + yield hmac_key + try: + hmac_key.state = "INACTIVE" + hmac_key.update() + hmac_key.delete() + except Exception: # in cases where resources are deleted within the test + pass ######################################################################################################################################## @@ -576,14 +584,23 @@ def _get_retry_test(host, id): return r.json() -def _run_retry_test(host, id, func, _preconditions, **resources): +def _run_retry_test( + host, id, lib_func, _preconditions, bucket, object, notification, hmac_key +): """ To execute tests against the list of instrucions sent to the Retry API, create a client to send the retry test ID using the x-retry-test-id header in each request. For incoming requests which match the given API method, the emulator will pop off the next instruction from the list and force the listed failure case. 
""" client = storage.Client(client_options={"api_endpoint": host}) client._http.headers.update({"x-retry-test-id": id}) - func(client, _preconditions, **resources) + lib_func( + client, + _preconditions, + bucket=bucket, + object=object, + notification=notification, + hmac_key=hmac_key, + ) def _delete_retry_test(host, id): @@ -596,13 +613,12 @@ def _delete_retry_test(host, id): ######################################################################################################################################## -def run_test_case(scenario_id, method, case, lib_func, host): - # Create client to use for setup steps. - client = storage.Client(client_options={"api_endpoint": host}) +def run_test_case( + scenario_id, method, case, lib_func, host, bucket, object, notification, hmac_key +): scenario = _CONFORMANCE_TESTS[scenario_id - 1] expect_success = scenario["expectSuccess"] precondition_provided = scenario["preconditionProvided"] - json_resources = method["resources"] method_name = method["name"] instructions = case["instructions"] @@ -614,17 +630,18 @@ def run_test_case(scenario_id, method, case, lib_func, host): "Error creating retry test for {}: {}".format(method_name, e) ).with_traceback(e.__traceback__) - # Populate resources. - try: - resources = _populate_resources(client, json_resources) - except Exception as e: - raise Exception( - "Error populating resources for {}: {}".format(method_name, e) - ).with_traceback(e.__traceback__) - # Run retry tests on library methods. try: - _run_retry_test(host, id, lib_func, precondition_provided, **resources) + _run_retry_test( + host, + id, + lib_func, + precondition_provided, + bucket, + object, + notification, + hmac_key, + ) except Exception as e: logging.exception( "Caught an exception while running retry instructions\n {}".format(e) From 994a2f6f9e32ab317d0a39ecdce948ee3e2c002f Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Wed, 28 Jul 2021 16:30:18 -0700 Subject: [PATCH 43/45] address comments and update docstrings --- tests/conformance/test_conformance.py | 59 ++++++++++++++++----------- 1 file changed, 36 insertions(+), 23 deletions(-) diff --git a/tests/conformance/test_conformance.py b/tests/conformance/test_conformance.py index f42e5f492..b3bac7d3e 100644 --- a/tests/conformance/test_conformance.py +++ b/tests/conformance/test_conformance.py @@ -29,7 +29,7 @@ "retryStrategyTests" ] -STORAGE_EMULATOR_ENV_VAR = "STORAGE_EMULATOR_HOST" +_STORAGE_EMULATOR_ENV_VAR = "STORAGE_EMULATOR_HOST" """Environment variable defining host for Storage emulator.""" _CONF_TEST_PROJECT_ID = "my-project-id" @@ -37,6 +37,10 @@ "my-service-account@my-project-id.iam.gserviceaccount.com" ) +_STRING_CONTENT = "hello world" +_BYTE_CONTENT = b"12345678" + + ######################################################################################################################################## ### Library methods for mapping ######################################################################################################## ######################################################################################################################################## @@ -86,13 +90,11 @@ def client_download_blob_to_file(client, _preconditions, **resources): def blobreader_read(client, _preconditions, **resources): - from google.cloud.storage.fileio import BlobReader - bucket = resources.get("bucket") object = resources.get("object") blob = client.bucket(bucket.name).blob(object.name) - blob_reader = BlobReader(blob) - blob_reader.read() + with blob.open() as reader: 
+ reader.read() def client_list_blobs(client, _preconditions, **resources): @@ -358,7 +360,7 @@ def blob_compose(client, _preconditions, **resources): object = resources.get("object") blob = client.bucket(bucket.name).blob(object.name) blob_2 = bucket.blob(uuid.uuid4().hex) - blob_2.upload_from_string("foo") + blob_2.upload_from_string(_STRING_CONTENT) sources = [blob_2] if _preconditions: blob.compose(sources, if_generation_match=object.generation) @@ -370,9 +372,9 @@ def blob_upload_from_string(client, _preconditions, **resources): bucket = resources.get("bucket") blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) if _preconditions: - blob.upload_from_string("upload from string", if_generation_match=0) + blob.upload_from_string(_STRING_CONTENT, if_generation_match=0) else: - blob.upload_from_string("upload from string") + blob.upload_from_string(_STRING_CONTENT) def blob_upload_from_file(client, _preconditions, **resources): @@ -399,18 +401,15 @@ def blob_upload_from_filename(client, _preconditions, **resources): def blobwriter_write(client, _preconditions, **resources): - import os - from google.cloud.storage.fileio import BlobWriter - chunk_size = 256 * 1024 bucket = resources.get("bucket") blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) if _preconditions: - blob_writer = BlobWriter(blob, chunk_size=chunk_size, if_generation_match=0) - blob_writer.write(bytearray(os.urandom(262144))) + with blob.open("wb", chunk_size=chunk_size, if_generation_match=0) as writer: + writer.write(_BYTE_CONTENT) else: - blob_writer = BlobWriter(blob, chunk_size=chunk_size) - blob_writer.write(bytearray(os.urandom(262144))) + with blob.open("wb", chunk_size=chunk_size) as writer: + writer.write(_BYTE_CONTENT) def blob_create_resumable_upload_session(client, _preconditions, **resources): @@ -498,7 +497,7 @@ def blob_create_resumable_upload_session(client, _preconditions, **resources): @pytest.fixture def client(): - host = os.environ.get(STORAGE_EMULATOR_ENV_VAR) + host = os.environ.get(_STORAGE_EMULATOR_ENV_VAR) client = storage.Client(client_options={"api_endpoint": host}) return client @@ -518,7 +517,7 @@ def bucket(client): def object(client, bucket): bucket = client.get_bucket(bucket.name) blob = bucket.blob(uuid.uuid4().hex) - blob.upload_from_string("hello world", checksum="crc32c") + blob.upload_from_string(_STRING_CONTENT, checksum="crc32c") blob.reload() yield blob try: @@ -562,9 +561,12 @@ def hmac_key(client): def _create_retry_test(host, method_name, instructions): """ - Initialize a Retry Test resource with a list of instructions and an API method. - This offers a mechanism to send multiple retry instructions while sending a single, constant header through all the HTTP requests in a test. - See also: https://github.com/googleapis/google-cloud-cpp/tree/main/google/cloud/storage/emulator + For each test case, initialize a Retry Test resource by loading a set of + instructions to the emulator host. The instructions include an API method + and a list of errors. An unique id is created for each Retry Test resouce. + This offers a mechanism to send multiple retry instructions while sending a + single, constant header through all the HTTP requests in a test. + See also: https://github.com/googleapis/storage-testbench """ import json @@ -579,6 +581,11 @@ def _create_retry_test(host, method_name, instructions): def _get_retry_test(host, id): + """ + Retrieve the state of the Retry Test resource, including the unique id, + instructions, and a boolean status "completed". 
This can be used to verify + if all instructions were used as expected. + """ status_get_uri = "{base}{retry}/{id}".format(base=host, retry="/retry_test", id=id) r = requests.get(status_get_uri) return r.json() @@ -588,8 +595,11 @@ def _run_retry_test( host, id, lib_func, _preconditions, bucket, object, notification, hmac_key ): """ - To execute tests against the list of instrucions sent to the Retry API, create a client to send the retry test ID using the x-retry-test-id header in each request. - For incoming requests which match the given API method, the emulator will pop off the next instruction from the list and force the listed failure case. + To execute tests against the list of instrucions sent to the Retry API, + create a client to send the retry test ID using the x-retry-test-id header + in each request. For incoming requests which match the given API method, + the emulator will pop off the next instruction from the list and force the + listed failure case. """ client = storage.Client(client_options={"api_endpoint": host}) client._http.headers.update({"x-retry-test-id": id}) @@ -604,6 +614,9 @@ def _run_retry_test( def _delete_retry_test(host, id): + """ + Delete the Retry Test resource by id. + """ status_get_uri = "{base}{retry}/{id}".format(base=host, retry="/retry_test", id=id) requests.delete(status_get_uri) @@ -675,7 +688,7 @@ def run_test_case( ######################################################################################################################################## for scenario in _CONFORMANCE_TESTS: - host = os.environ.get(STORAGE_EMULATOR_ENV_VAR) + host = os.environ.get(_STORAGE_EMULATOR_ENV_VAR) if host is None: logging.error( "This test must use the testbench emulator; set STORAGE_EMULATOR_HOST to run." From f35ccedb3c66c61193dc2db87a2092d52602eec1 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Fri, 6 Aug 2021 13:37:13 -0700 Subject: [PATCH 44/45] address comments. change to use anonymous credentials --- noxfile.py | 7 ++--- tests/conformance/test_conformance.py | 37 +++++++++++++++++---------- tests/unit/test__signing.py | 2 ++ 3 files changed, 28 insertions(+), 18 deletions(-) diff --git a/noxfile.py b/noxfile.py index 47642a3c2..d57287b73 100644 --- a/noxfile.py +++ b/noxfile.py @@ -152,8 +152,8 @@ def system(session): @nox.session(python=CONFORMANCE_TEST_PYTHON_VERSIONS) -def conformance(session): - """Run the conformance test suite.""" +def conftest_retry(session): + """Run the retry conformance test suite.""" conformance_test_path = os.path.join("tests", "conformance.py") conformance_test_folder_path = os.path.join("tests", "conformance") @@ -163,9 +163,6 @@ def conformance(session): == _DEFAULT_STORAGE_HOST ): session.skip("Set STORAGE_EMULATOR_HOST to run, skipping") - # Environment check: Only run tests if the environment variable is set. - if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable") conformance_test_exists = os.path.exists(conformance_test_path) conformance_test_folder_exists = os.path.exists(conformance_test_folder_path) diff --git a/tests/conformance/test_conformance.py b/tests/conformance/test_conformance.py index b3bac7d3e..d490788c7 100644 --- a/tests/conformance/test_conformance.py +++ b/tests/conformance/test_conformance.py @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +"""Conformance tests for retry. Verifies correct behavior around retryable errors, idempotency and preconditions.""" + import os import requests import tempfile @@ -21,6 +23,7 @@ import pytest from google.cloud import storage +from google.auth.credentials import AnonymousCredentials from . import _read_local_json @@ -378,15 +381,13 @@ def blob_upload_from_string(client, _preconditions, **resources): def blob_upload_from_file(client, _preconditions, **resources): - from io import BytesIO - - file_obj = BytesIO() bucket = resources.get("bucket") blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) - if _preconditions: - blob.upload_from_file(file_obj, if_generation_match=0) - else: - blob.upload_from_file(file_obj) + with tempfile.NamedTemporaryFile() as temp_f: + if _preconditions: + blob.upload_from_file(temp_f, if_generation_match=0) + else: + blob.upload_from_file(temp_f) def blob_upload_from_filename(client, _preconditions, **resources): @@ -425,7 +426,7 @@ def blob_create_resumable_upload_session(client, _preconditions, **resources): ### Method Invocation Mapping ########################################################################################################## ######################################################################################################################################## -# Method invocation mapping. Methods to retry. This is a map whose keys are a string describing a standard +# Method invocation mapping is a map whose keys are a string describing a standard # API call (e.g. storage.objects.get) and values are a list of functions which # wrap library methods that implement these calls. There may be multiple values # because multiple library methods may use the same call (e.g. get could be a @@ -498,7 +499,11 @@ def blob_create_resumable_upload_session(client, _preconditions, **resources): @pytest.fixture def client(): host = os.environ.get(_STORAGE_EMULATOR_ENV_VAR) - client = storage.Client(client_options={"api_endpoint": host}) + client = storage.Client( + project=_CONF_TEST_PROJECT_ID, + credentials=AnonymousCredentials(), + client_options={"api_endpoint": host}, + ) return client @@ -509,7 +514,9 @@ def bucket(client): yield bucket try: bucket.delete(force=True) - except Exception: # in cases where resources are deleted within the test + except Exception: + # in cases where resources are deleted within the test + # TODO(cathyo@): narrow except to NotFound once the emulator response issue is resolved pass @@ -597,11 +604,15 @@ def _run_retry_test( """ To execute tests against the list of instrucions sent to the Retry API, create a client to send the retry test ID using the x-retry-test-id header - in each request. For incoming requests which match the given API method, + in each request. For incoming requests that match the test ID and API method, the emulator will pop off the next instruction from the list and force the listed failure case. 
""" - client = storage.Client(client_options={"api_endpoint": host}) + client = storage.Client( + project=_CONF_TEST_PROJECT_ID, + credentials=AnonymousCredentials(), + client_options={"api_endpoint": host}, + ) client._http.headers.update({"x-retry-test-id": id}) lib_func( client, diff --git a/tests/unit/test__signing.py b/tests/unit/test__signing.py index 3eac70cc1..92c66feb9 100644 --- a/tests/unit/test__signing.py +++ b/tests/unit/test__signing.py @@ -818,6 +818,8 @@ def test_get_v4_now_dtstamps(self): self.assertEqual(datestamp, "20200312") +"""Conformance tests for v4 signed URLs.""" + _FAKE_SERVICE_ACCOUNT = None From 0b52219909cb123346cccfff55cebab0e5c2ca14 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Wed, 18 Aug 2021 15:07:54 -0700 Subject: [PATCH 45/45] update descriptions --- tests/conformance/test_conformance.py | 52 +++++++++++++++------------ 1 file changed, 29 insertions(+), 23 deletions(-) diff --git a/tests/conformance/test_conformance.py b/tests/conformance/test_conformance.py index d490788c7..8f6c5ef87 100644 --- a/tests/conformance/test_conformance.py +++ b/tests/conformance/test_conformance.py @@ -33,7 +33,7 @@ ] _STORAGE_EMULATOR_ENV_VAR = "STORAGE_EMULATOR_HOST" -"""Environment variable defining host for Storage emulator.""" +"""Environment variable defining host for Storage testbench emulator.""" _CONF_TEST_PROJECT_ID = "my-project-id" _CONF_TEST_SERVICE_ACCOUNT_EMAIL = ( @@ -164,7 +164,7 @@ def bucket_test_iam_permissions(client, _preconditions, **resources): bucket.test_iam_permissions(permissions) -# TODO(cathyo@): fix emulator issue and assign metageneration to buckets.insert +# TODO(cathyo@): issue resolved in the new testbench where buckets have a valid metageneration def bucket_lock_retention_policy(client, _preconditions, **resources): bucket = client.bucket(resources.get("bucket").name) bucket.retention_period = 60 @@ -492,7 +492,7 @@ def blob_create_resumable_upload_session(client, _preconditions, **resources): ######################################################################################################################################## -### Pytest Fixtures for Populating Resources ############################################################################################ +### Pytest Fixtures to Populate Resources ############################################################################################## ######################################################################################################################################## @@ -522,8 +522,7 @@ def bucket(client): @pytest.fixture def object(client, bucket): - bucket = client.get_bucket(bucket.name) - blob = bucket.blob(uuid.uuid4().hex) + blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) blob.upload_from_string(_STRING_CONTENT, checksum="crc32c") blob.reload() yield blob @@ -535,8 +534,7 @@ def object(client, bucket): @pytest.fixture def notification(client, bucket): - bucket = client.get_bucket(bucket.name) - notification = bucket.notification() + notification = client.bucket(bucket.name).notification() notification.create() notification.reload() yield notification @@ -548,7 +546,7 @@ def notification(client, bucket): @pytest.fixture def hmac_key(client): - hmac_key, secret = client.create_hmac_key( + hmac_key, _secret = client.create_hmac_key( service_account_email=_CONF_TEST_SERVICE_ACCOUNT_EMAIL, project_id=_CONF_TEST_PROJECT_ID, ) @@ -562,28 +560,32 @@ def hmac_key(client): 
######################################################################################################################################## -### Helper Methods for Emulator Retry API ############################################################################################## +### Helper Methods for Testbench Retry Test API ######################################################################################## ######################################################################################################################################## +""" +The Retry Test API in the testbench is used to run the retry conformance tests. It offers a mechanism to describe more complex +retry scenarios while sending a single, constant header through all the HTTP requests from a test program. The Retry Test API +can be accessed by adding the path "/retry-test" to the host. See also: https://github.com/googleapis/storage-testbench +""" + + def _create_retry_test(host, method_name, instructions): """ For each test case, initialize a Retry Test resource by loading a set of - instructions to the emulator host. The instructions include an API method - and a list of errors. An unique id is created for each Retry Test resouce. - This offers a mechanism to send multiple retry instructions while sending a - single, constant header through all the HTTP requests in a test. - See also: https://github.com/googleapis/storage-testbench + instructions to the testbench host. The instructions include an API method + and a list of errors. An unique id is created for each Retry Test resource. """ import json - preflight_post_uri = host + "/retry_test" + retry_test_uri = host + "/retry_test" headers = { "Content-Type": "application/json", } data_dict = {"instructions": {method_name: instructions}} data = json.dumps(data_dict) - r = requests.post(preflight_post_uri, headers=headers, data=data) + r = requests.post(retry_test_uri, headers=headers, data=data) return r.json() @@ -593,8 +595,10 @@ def _get_retry_test(host, id): instructions, and a boolean status "completed". This can be used to verify if all instructions were used as expected. """ - status_get_uri = "{base}{retry}/{id}".format(base=host, retry="/retry_test", id=id) - r = requests.get(status_get_uri) + get_retry_test_uri = "{base}{retry}/{id}".format( + base=host, retry="/retry_test", id=id + ) + r = requests.get(get_retry_test_uri) return r.json() @@ -602,10 +606,10 @@ def _run_retry_test( host, id, lib_func, _preconditions, bucket, object, notification, hmac_key ): """ - To execute tests against the list of instrucions sent to the Retry API, + To execute tests against the list of instrucions sent to the Retry Test API, create a client to send the retry test ID using the x-retry-test-id header in each request. For incoming requests that match the test ID and API method, - the emulator will pop off the next instruction from the list and force the + the testbench will pop off the next instruction from the list and force the listed failure case. """ client = storage.Client( @@ -628,8 +632,10 @@ def _delete_retry_test(host, id): """ Delete the Retry Test resource by id. 
""" - status_get_uri = "{base}{retry}/{id}".format(base=host, retry="/retry_test", id=id) - requests.delete(status_get_uri) + get_retry_test_uri = "{base}{retry}/{id}".format( + base=host, retry="/retry_test", id=id + ) + requests.delete(get_retry_test_uri) ######################################################################################################################################## @@ -690,7 +696,7 @@ def run_test_case( status_response["instructions"] ) - # Clean up and close out test in emulator. + # Clean up and close out test in testbench. _delete_retry_test(host, id)