Thanks to visit codestin.com
Credit goes to github.com

Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 6 additions & 8 deletions .github/workflows/conformance.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -24,17 +24,15 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
test-version: [ "v0.0.2" ]
test-version: [ "v0.0.4" ]
py-version: [ 3.8 ]
client-type: [ "async", "sync", "legacy" ]
client-type: [ "async", "sync"]
# None of the clients currently support reverse scans, execute query plan refresh, retry info, or routing cookie
include:
- client-type: "async"
test_args: "-skip \"PlanRefresh|_Reverse|_WithRetryInfo|_WithRoutingCookie\""
- client-type: "sync"
# sync client does not support concurrent streams
test_args: "-skip _Generic_MultiStream"
- client-type: "legacy"
# legacy client is synchronous and does not support concurrent streams
# legacy client does not expose mutate_row. Disable those tests
test_args: "-skip _Generic_MultiStream -skip TestMutateRow_"
test_args: "-skip \"PlanRefresh|_Reverse|_WithRetryInfo|_WithRoutingCookie|_Generic_MultiStream\""
fail-fast: false
name: "${{ matrix.client-type }} client / python ${{ matrix.py-version }} / test tag ${{ matrix.test-version }}"
steps:
Expand Down
3 changes: 3 additions & 0 deletions google/cloud/bigtable/data/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -331,6 +331,9 @@ def __init__(
class InvalidExecuteQueryResponse(core_exceptions.GoogleAPICallError):
"""Exception raised to invalid query response data from back-end."""

# Set to internal. This is representative of an internal error.
code = 13


class ParameterTypeInferenceFailed(ValueError):
"""Exception raised when query parameter types were not provided and cannot be inferred."""
Expand Down
2 changes: 1 addition & 1 deletion test_proxy/handlers/client_handler_data_async.py
Original file line number Diff line number Diff line change
Expand Up @@ -276,7 +276,7 @@ async def ExecuteQuery(self, request, **kwargs):
prepare_operation_timeout=operation_timeout,
)
)
rows = [r async for r in result]
rows = CrossSync.rm_aio([r async for r in result])
md = result.metadata
proto_rows = []
for r in rows:
Expand Down
2 changes: 1 addition & 1 deletion test_proxy/handlers/client_handler_data_sync_autogen.py
Original file line number Diff line number Diff line change
Expand Up @@ -205,7 +205,7 @@ async def ExecuteQuery(self, request, **kwargs):
operation_timeout=operation_timeout,
prepare_operation_timeout=operation_timeout,
)
rows = [r async for r in result]
rows = [r for r in result]
md = result.metadata
proto_rows = []
for r in rows:
Expand Down
63 changes: 44 additions & 19 deletions test_proxy/handlers/grpc_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,17 @@
from google.protobuf import json_format


def correct_cancelled(status):
    """
    Normalize client-side cancellation statuses to deadline-exceeded.

    Deadline exceeded errors are a race between client side cancellation and
    server side deadline exceeded. For the purpose of these tests, the client
    will never cancel, so a CANCELLED status is rewritten as DEADLINE_EXCEEDED
    for consistency; any other status is passed through untouched.
    """
    # gRPC status code 1 == CANCELLED; 4 == DEADLINE_EXCEEDED.
    if status.code != 1:
        return status
    return Status(code=4, message="deadlineexceeded")


class TestProxyGrpcServer(test_proxy_pb2_grpc.CloudBigtableV2TestProxyServicer):
"""
Implements a grpc server that proxies conformance test requests to the client library
Expand Down Expand Up @@ -75,7 +86,7 @@ def ReadRows(self, request, context, client_response=None):
status = Status()
rows = []
if isinstance(client_response, dict) and "error" in client_response:
status = Status(code=5, message=client_response["error"])
status = correct_cancelled(Status(code=5, message=client_response["error"]))
else:
rows = [data_pb2.Row(**d) for d in client_response]
result = test_proxy_pb2.RowsResult(rows=rows, status=status)
Expand All @@ -86,9 +97,11 @@ def ReadRow(self, request, context, client_response=None):
status = Status()
row = None
if isinstance(client_response, dict) and "error" in client_response:
status = Status(
code=client_response.get("code", 5),
message=client_response.get("error"),
status = correct_cancelled(
Status(
code=client_response.get("code", 5),
message=client_response.get("error"),
)
)
elif client_response != "None":
row = data_pb2.Row(**client_response)
Expand All @@ -99,8 +112,11 @@ def ReadRow(self, request, context, client_response=None):
def MutateRow(self, request, context, client_response=None):
status = Status()
if isinstance(client_response, dict) and "error" in client_response:
status = Status(
code=client_response.get("code", 5), message=client_response["error"]
status = correct_cancelled(
Status(
code=client_response.get("code", 5),
message=client_response["error"],
)
)
return test_proxy_pb2.MutateRowResult(status=status)

Expand All @@ -112,24 +128,27 @@ def BulkMutateRows(self, request, context, client_response=None):
entries = [
bigtable_pb2.MutateRowsResponse.Entry(
index=exc_dict.get("index", 1),
status=Status(code=exc_dict.get("code", 5)),
status=correct_cancelled(Status(code=exc_dict.get("code", 5))),
)
for exc_dict in client_response.get("subexceptions", [])
]
if not entries:
# only return failure on the overall request if there are failed entries
status = Status(
status = correct_cancelled(
Status(
code=client_response.get("code", 5),
message=client_response["error"],
)
)
response = test_proxy_pb2.MutateRowsResult(status=status, entries=entries)
return response

@delegate_to_client_handler
def CheckAndMutateRow(self, request, context, client_response=None):
if isinstance(client_response, dict) and "error" in client_response:
status = Status(
code=client_response.get("code", 5), message=client_response["error"]
status = correct_cancelled(
Status(
code=client_response.get("code", 5),
message=client_response["error"],
)
)
response = test_proxy_pb2.CheckAndMutateRowResult(status=status)
else:
Expand All @@ -146,9 +165,11 @@ def ReadModifyWriteRow(self, request, context, client_response=None):
status = Status()
row = None
if isinstance(client_response, dict) and "error" in client_response:
status = Status(
code=client_response.get("code", 5),
message=client_response.get("error"),
status = correct_cancelled(
Status(
code=client_response.get("code", 5),
message=client_response.get("error"),
)
)
elif client_response != "None":
row = data_pb2.Row(**client_response)
Expand All @@ -160,9 +181,11 @@ def SampleRowKeys(self, request, context, client_response=None):
status = Status()
sample_list = []
if isinstance(client_response, dict) and "error" in client_response:
status = Status(
code=client_response.get("code", 5),
message=client_response.get("error"),
status = correct_cancelled(
Status(
code=client_response.get("code", 5),
message=client_response.get("error"),
)
)
else:
for sample in client_response:
Expand All @@ -177,7 +200,9 @@ def SampleRowKeys(self, request, context, client_response=None):
def ExecuteQuery(self, request, context, client_response=None):
if isinstance(client_response, dict) and "error" in client_response:
return test_proxy_pb2.ExecuteQueryResult(
status=Status(code=13, message=client_response["error"])
status=correct_cancelled(
Status(code=client_response.get("code", 13), message=client_response["error"])
)
)
else:
return test_proxy_pb2.ExecuteQueryResult(
Expand Down
Loading