114 changes: 96 additions & 18 deletions test_proxy/handlers/client_handler_data_async.py
@@ -19,6 +19,7 @@
from google.cloud.environment_vars import BIGTABLE_EMULATOR
from google.cloud.bigtable.data import BigtableDataClientAsync
from google.cloud.bigtable.data._cross_sync import CrossSync
from helpers import sql_encoding_helpers

if not CrossSync.is_async:
from client_handler_data_async import error_safe
@@ -32,6 +33,7 @@ def error_safe(func):
Catch and pass errors back to the grpc_server_process
Also check if client is closed before processing requests
"""

async def wrapper(self, *args, **kwargs):
try:
if self.closed:
@@ -50,6 +52,7 @@ def encode_exception(exc):
Encode an exception or chain of exceptions to pass back to grpc_handler
"""
from google.api_core.exceptions import GoogleAPICallError

error_msg = f"{type(exc).__name__}: {exc}"
result = {"error": error_msg}
if exc.__cause__:
@@ -113,7 +116,9 @@ async def ReadRows(self, request, **kwargs):
table_id = request.pop("table_name").split("/")[-1]
app_profile_id = self.app_profile_id or request.get("app_profile_id", None)
table = self.client.get_table(self.instance_id, table_id, app_profile_id)
kwargs["operation_timeout"] = kwargs.get("operation_timeout", self.per_operation_timeout) or 20
kwargs["operation_timeout"] = (
kwargs.get("operation_timeout", self.per_operation_timeout) or 20
)
result_list = CrossSync.rm_aio(await table.read_rows(request, **kwargs))
# pack results back into protobuf-parsable format
serialized_response = [row._to_dict() for row in result_list]
@@ -124,7 +129,9 @@ async def ReadRow(self, row_key, **kwargs):
table_id = kwargs.pop("table_name").split("/")[-1]
app_profile_id = self.app_profile_id or kwargs.get("app_profile_id", None)
table = self.client.get_table(self.instance_id, table_id, app_profile_id)
kwargs["operation_timeout"] = kwargs.get("operation_timeout", self.per_operation_timeout) or 20
kwargs["operation_timeout"] = (
kwargs.get("operation_timeout", self.per_operation_timeout) or 20
)
result_row = CrossSync.rm_aio(await table.read_row(row_key, **kwargs))
# pack results back into protobuf-parsable format
if result_row:
@@ -135,10 +142,13 @@ async def ReadRow(self, row_key, **kwargs):
@error_safe
async def MutateRow(self, request, **kwargs):
from google.cloud.bigtable.data.mutations import Mutation

table_id = request["table_name"].split("/")[-1]
app_profile_id = self.app_profile_id or request.get("app_profile_id", None)
table = self.client.get_table(self.instance_id, table_id, app_profile_id)
kwargs["operation_timeout"] = kwargs.get("operation_timeout", self.per_operation_timeout) or 20
kwargs["operation_timeout"] = (
kwargs.get("operation_timeout", self.per_operation_timeout) or 20
)
row_key = request["row_key"]
mutations = [Mutation._from_dict(d) for d in request["mutations"]]
CrossSync.rm_aio(await table.mutate_row(row_key, mutations, **kwargs))
@@ -147,21 +157,29 @@ async def MutateRow(self, request, **kwargs):
@error_safe
async def BulkMutateRows(self, request, **kwargs):
from google.cloud.bigtable.data.mutations import RowMutationEntry

table_id = request["table_name"].split("/")[-1]
app_profile_id = self.app_profile_id or request.get("app_profile_id", None)
table = self.client.get_table(self.instance_id, table_id, app_profile_id)
kwargs["operation_timeout"] = kwargs.get("operation_timeout", self.per_operation_timeout) or 20
entry_list = [RowMutationEntry._from_dict(entry) for entry in request["entries"]]
kwargs["operation_timeout"] = (
kwargs.get("operation_timeout", self.per_operation_timeout) or 20
)
entry_list = [
RowMutationEntry._from_dict(entry) for entry in request["entries"]
]
CrossSync.rm_aio(await table.bulk_mutate_rows(entry_list, **kwargs))
return "OK"

@error_safe
async def CheckAndMutateRow(self, request, **kwargs):
from google.cloud.bigtable.data.mutations import Mutation, SetCell

table_id = request["table_name"].split("/")[-1]
app_profile_id = self.app_profile_id or request.get("app_profile_id", None)
table = self.client.get_table(self.instance_id, table_id, app_profile_id)
kwargs["operation_timeout"] = kwargs.get("operation_timeout", self.per_operation_timeout) or 20
kwargs["operation_timeout"] = (
kwargs.get("operation_timeout", self.per_operation_timeout) or 20
)
row_key = request["row_key"]
# add default values for incomplete dicts, so they can still be parsed to objects
true_mutations = []
@@ -180,33 +198,44 @@ async def CheckAndMutateRow(self, request, **kwargs):
# invalid mutation type. Conformance test may be sending generic empty request
false_mutations.append(SetCell("", "", "", 0))
predicate_filter = request.get("predicate_filter", None)
result = CrossSync.rm_aio(await table.check_and_mutate_row(
row_key,
predicate_filter,
true_case_mutations=true_mutations,
false_case_mutations=false_mutations,
**kwargs,
))
result = CrossSync.rm_aio(
await table.check_and_mutate_row(
row_key,
predicate_filter,
true_case_mutations=true_mutations,
false_case_mutations=false_mutations,
**kwargs,
)
)
return result

@error_safe
async def ReadModifyWriteRow(self, request, **kwargs):
from google.cloud.bigtable.data.read_modify_write_rules import IncrementRule
from google.cloud.bigtable.data.read_modify_write_rules import AppendValueRule

table_id = request["table_name"].split("/")[-1]
app_profile_id = self.app_profile_id or request.get("app_profile_id", None)
table = self.client.get_table(self.instance_id, table_id, app_profile_id)
kwargs["operation_timeout"] = kwargs.get("operation_timeout", self.per_operation_timeout) or 20
kwargs["operation_timeout"] = (
kwargs.get("operation_timeout", self.per_operation_timeout) or 20
)
row_key = request["row_key"]
rules = []
for rule_dict in request.get("rules", []):
qualifier = rule_dict["column_qualifier"]
if "append_value" in rule_dict:
new_rule = AppendValueRule(rule_dict["family_name"], qualifier, rule_dict["append_value"])
new_rule = AppendValueRule(
rule_dict["family_name"], qualifier, rule_dict["append_value"]
)
else:
new_rule = IncrementRule(rule_dict["family_name"], qualifier, rule_dict["increment_amount"])
new_rule = IncrementRule(
rule_dict["family_name"], qualifier, rule_dict["increment_amount"]
)
rules.append(new_rule)
result = CrossSync.rm_aio(await table.read_modify_write_row(row_key, rules, **kwargs))
result = CrossSync.rm_aio(
await table.read_modify_write_row(row_key, rules, **kwargs)
)
# pack results back into protobuf-parsable format
if result:
return result._to_dict()
@@ -218,6 +247,55 @@ async def SampleRowKeys(self, request, **kwargs):
table_id = request["table_name"].split("/")[-1]
app_profile_id = self.app_profile_id or request.get("app_profile_id", None)
table = self.client.get_table(self.instance_id, table_id, app_profile_id)
kwargs["operation_timeout"] = kwargs.get("operation_timeout", self.per_operation_timeout) or 20
kwargs["operation_timeout"] = (
kwargs.get("operation_timeout", self.per_operation_timeout) or 20
)
result = CrossSync.rm_aio(await table.sample_row_keys(**kwargs))
return result

@error_safe
async def ExecuteQuery(self, request, **kwargs):
app_profile_id = self.app_profile_id or request.get("app_profile_id", None)
query = request.get("query")
params = request.get("params") or {}
# Note that the request has been converted to JSON, and the code for this converts
# query param names to snake case. convert_params reverses this conversion. For this
# reason, snake_case params will have issues if they're used in the conformance tests.
formatted_params, parameter_types = sql_encoding_helpers.convert_params(params)
operation_timeout = (
kwargs.get("operation_timeout", self.per_operation_timeout) or 20
)
result = CrossSync.rm_aio(
await self.client.execute_query(
query,
self.instance_id,
parameters=formatted_params,
parameter_types=parameter_types,
app_profile_id=app_profile_id,
operation_timeout=operation_timeout,
prepare_operation_timeout=operation_timeout,
)
)
rows = [r async for r in result]
md = result.metadata
proto_rows = []
for r in rows:
vals = []
for c in md.columns:
vals.append(sql_encoding_helpers.convert_value(c.column_type, r[c.column_name]))

proto_rows.append({"values": vals})

proto_columns = []
for c in md.columns:
proto_columns.append(
{
"name": c.column_name,
"type": sql_encoding_helpers.convert_type(c.column_type),
}
)

return {
"metadata": {"columns": proto_columns},
"rows": proto_rows,
}
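
For reference, the row and column packing that the new ExecuteQuery handler performs above can be summarized as the following standalone sketch. It uses the same sql_encoding_helpers calls shown in the diff; the function name and free-standing form are illustrative only, not part of the change.

```python
from helpers import sql_encoding_helpers


def pack_query_result(metadata, rows):
    # Column descriptors: the column name plus the proto-friendly type
    # produced by convert_type.
    proto_columns = [
        {
            "name": c.column_name,
            "type": sql_encoding_helpers.convert_type(c.column_type),
        }
        for c in metadata.columns
    ]
    # Each row becomes a dict of converted values, ordered to match the columns.
    proto_rows = [
        {
            "values": [
                sql_encoding_helpers.convert_value(c.column_type, row[c.column_name])
                for c in metadata.columns
            ]
        }
        for row in rows
    ]
    return {"metadata": {"columns": proto_columns}, "rows": proto_rows}
```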
41 changes: 41 additions & 0 deletions test_proxy/handlers/client_handler_data_sync_autogen.py
@@ -20,6 +20,7 @@
import os
from google.cloud.environment_vars import BIGTABLE_EMULATOR
from google.cloud.bigtable.data._cross_sync import CrossSync
from helpers import sql_encoding_helpers
from client_handler_data_async import error_safe


@@ -183,3 +184,43 @@ async def SampleRowKeys(self, request, **kwargs):
)
result = table.sample_row_keys(**kwargs)
return result

@error_safe
async def ExecuteQuery(self, request, **kwargs):
app_profile_id = self.app_profile_id or request.get("app_profile_id", None)
query = request.get("query")
params = request.get("params") or {}
(formatted_params, parameter_types) = sql_encoding_helpers.convert_params(
params
)
operation_timeout = (
kwargs.get("operation_timeout", self.per_operation_timeout) or 20
)
result = self.client.execute_query(
query,
self.instance_id,
parameters=formatted_params,
parameter_types=parameter_types,
app_profile_id=app_profile_id,
operation_timeout=operation_timeout,
prepare_operation_timeout=operation_timeout,
)
rows = [r async for r in result]
md = result.metadata
proto_rows = []
for r in rows:
vals = []
for c in md.columns:
vals.append(
sql_encoding_helpers.convert_value(c.column_type, r[c.column_name])
)
proto_rows.append({"values": vals})
proto_columns = []
for c in md.columns:
proto_columns.append(
{
"name": c.column_name,
"type": sql_encoding_helpers.convert_type(c.column_type),
}
)
return {"metadata": {"columns": proto_columns}, "rows": proto_rows}
71 changes: 54 additions & 17 deletions test_proxy/handlers/grpc_handler.py
@@ -1,4 +1,3 @@

import time

import test_proxy_pb2
@@ -59,7 +58,6 @@ def wrapper(self, request, context, **kwargs):

return wrapper


@delegate_to_client_handler
def CreateClient(self, request, context, client_response=None):
return test_proxy_pb2.CreateClientResponse()
@@ -80,15 +78,18 @@ def ReadRows(self, request, context, client_response=None):
status = Status(code=5, message=client_response["error"])
else:
rows = [data_pb2.Row(**d) for d in client_response]
result = test_proxy_pb2.RowsResult(row=rows, status=status)
result = test_proxy_pb2.RowsResult(rows=rows, status=status)
return result

@delegate_to_client_handler
def ReadRow(self, request, context, client_response=None):
status = Status()
row = None
if isinstance(client_response, dict) and "error" in client_response:
status=Status(code=client_response.get("code", 5), message=client_response.get("error"))
status = Status(
code=client_response.get("code", 5),
message=client_response.get("error"),
)
elif client_response != "None":
row = data_pb2.Row(**client_response)
result = test_proxy_pb2.RowResult(row=row, status=status)
@@ -98,38 +99,57 @@ def ReadRow(self, request, context, client_response=None):
def MutateRow(self, request, context, client_response=None):
status = Status()
if isinstance(client_response, dict) and "error" in client_response:
status = Status(code=client_response.get("code", 5), message=client_response["error"])
status = Status(
code=client_response.get("code", 5), message=client_response["error"]
)
return test_proxy_pb2.MutateRowResult(status=status)

@delegate_to_client_handler
def BulkMutateRows(self, request, context, client_response=None):
status = Status()
entries = []
if isinstance(client_response, dict) and "error" in client_response:
entries = [bigtable_pb2.MutateRowsResponse.Entry(index=exc_dict.get("index",1), status=Status(code=exc_dict.get("code", 5))) for exc_dict in client_response.get("subexceptions", [])]
entries = [
bigtable_pb2.MutateRowsResponse.Entry(
index=exc_dict.get("index", 1),
status=Status(code=exc_dict.get("code", 5)),
)
for exc_dict in client_response.get("subexceptions", [])
]
if not entries:
# only return failure on the overall request if there are failed entries
status = Status(code=client_response.get("code", 5), message=client_response["error"])
# TODO: protos were updated. entry is now entries: https://github.com/googleapis/cndb-client-testing-protos/commit/e6205a2bba04acc10d12421a1402870b4a525fb3
response = test_proxy_pb2.MutateRowsResult(status=status, entry=entries)
status = Status(
code=client_response.get("code", 5),
message=client_response["error"],
)
response = test_proxy_pb2.MutateRowsResult(status=status, entries=entries)
return response

@delegate_to_client_handler
def CheckAndMutateRow(self, request, context, client_response=None):
if isinstance(client_response, dict) and "error" in client_response:
status = Status(code=client_response.get("code", 5), message=client_response["error"])
status = Status(
code=client_response.get("code", 5), message=client_response["error"]
)
response = test_proxy_pb2.CheckAndMutateRowResult(status=status)
else:
result = bigtable_pb2.CheckAndMutateRowResponse(predicate_matched=client_response)
response = test_proxy_pb2.CheckAndMutateRowResult(result=result, status=Status())
result = bigtable_pb2.CheckAndMutateRowResponse(
predicate_matched=client_response
)
response = test_proxy_pb2.CheckAndMutateRowResult(
result=result, status=Status()
)
return response

@delegate_to_client_handler
def ReadModifyWriteRow(self, request, context, client_response=None):
status = Status()
row = None
if isinstance(client_response, dict) and "error" in client_response:
status = Status(code=client_response.get("code", 5), message=client_response.get("error"))
status = Status(
code=client_response.get("code", 5),
message=client_response.get("error"),
)
elif client_response != "None":
row = data_pb2.Row(**client_response)
result = test_proxy_pb2.RowResult(row=row, status=status)
@@ -140,9 +160,26 @@ def SampleRowKeys(self, request, context, client_response=None):
status = Status()
sample_list = []
if isinstance(client_response, dict) and "error" in client_response:
status = Status(code=client_response.get("code", 5), message=client_response.get("error"))
status = Status(
code=client_response.get("code", 5),
message=client_response.get("error"),
)
else:
for sample in client_response:
sample_list.append(bigtable_pb2.SampleRowKeysResponse(offset_bytes=sample[1], row_key=sample[0]))
# TODO: protos were updated. sample is now samples: https://github.com/googleapis/cndb-client-testing-protos/commit/e6205a2bba04acc10d12421a1402870b4a525fb3
return test_proxy_pb2.SampleRowKeysResult(status=status, sample=sample_list)
sample_list.append(
bigtable_pb2.SampleRowKeysResponse(
offset_bytes=sample[1], row_key=sample[0]
)
)
return test_proxy_pb2.SampleRowKeysResult(status=status, samples=sample_list)

@delegate_to_client_handler
def ExecuteQuery(self, request, context, client_response=None):
if isinstance(client_response, dict) and "error" in client_response:
return test_proxy_pb2.ExecuteQueryResult(
status=Status(code=13, message=client_response["error"])
)
else:
return test_proxy_pb2.ExecuteQueryResult(
metadata=client_response["metadata"], rows=client_response["rows"]
)
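
The grpc_handler methods above all map the client handler's error dict onto a Status the same way; a minimal sketch of that convention, assuming Status is google.rpc.status_pb2.Status (the import sits outside the visible diff) and using an illustrative helper name:

```python
from google.rpc.status_pb2 import Status  # assumed import; matches the Status(...) usage above


def status_from_client_response(client_response, default_code=5):
    # Client handlers report failures as {"error": <message>, "code": <grpc code>};
    # anything else is treated as success and gets an empty (OK) Status.
    if isinstance(client_response, dict) and "error" in client_response:
        return Status(
            code=client_response.get("code", default_code),
            message=client_response["error"],
        )
    return Status()
```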