Skip to content

Commit a2151fe

Browse files
feat: Refactor Metadata, add system tests, remove preview warning (#1099)
1 parent eb2cd26 commit a2151fe

File tree

8 files changed

+117
-43
lines changed

8 files changed

+117
-43
lines changed

google/cloud/bigtable/data/_async/client.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -610,10 +610,6 @@ async def execute_query(
610610
google.cloud.bigtable.data.exceptions.ParameterTypeInferenceFailed: Raised if
611611
a parameter is passed without an explicit type, and the type cannot be inferred
612612
"""
613-
warnings.warn(
614-
"ExecuteQuery is in preview and may change in the future.",
615-
category=RuntimeWarning,
616-
)
617613
instance_name = self._gapic_client.instance_path(self.project, instance_id)
618614
converted_param_types = _to_param_types(parameters, parameter_types)
619615
prepare_request = {

google/cloud/bigtable/data/_sync_autogen/client.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -471,10 +471,6 @@ def execute_query(
471471
google.cloud.bigtable.data.exceptions.ParameterTypeInferenceFailed: Raised if
472472
a parameter is passed without an explicit type, and the type cannot be inferred
473473
"""
474-
warnings.warn(
475-
"ExecuteQuery is in preview and may change in the future.",
476-
category=RuntimeWarning,
477-
)
478474
instance_name = self._gapic_client.instance_path(self.project, instance_id)
479475
converted_param_types = _to_param_types(parameters, parameter_types)
480476
prepare_request = {

google/cloud/bigtable/data/execute_query/__init__.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,6 @@
2020
)
2121
from google.cloud.bigtable.data.execute_query.metadata import (
2222
Metadata,
23-
ProtoMetadata,
2423
SqlType,
2524
)
2625
from google.cloud.bigtable.data.execute_query.values import (
@@ -39,7 +38,6 @@
3938
"QueryResultRow",
4039
"Struct",
4140
"Metadata",
42-
"ProtoMetadata",
4341
"ExecuteQueryIteratorAsync",
4442
"ExecuteQueryIterator",
4543
]

google/cloud/bigtable/data/execute_query/_reader.py

Lines changed: 4 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@
1919
Iterable,
2020
Optional,
2121
Sequence,
22-
cast,
2322
)
2423
from abc import ABC, abstractmethod
2524

@@ -32,7 +31,7 @@
3231
from google.cloud.bigtable.helpers import batched
3332

3433
from google.cloud.bigtable.data.execute_query.values import QueryResultRow
35-
from google.cloud.bigtable.data.execute_query.metadata import Metadata, ProtoMetadata
34+
from google.cloud.bigtable.data.execute_query.metadata import Metadata
3635

3736

3837
T = TypeVar("T")
@@ -90,7 +89,7 @@ def _parse_proto_rows(self, bytes_to_parse: bytes) -> Iterable[PBValue]:
9089
return proto_rows.values
9190

9291
def _construct_query_result_row(
93-
self, values: Sequence[PBValue], metadata: ProtoMetadata
92+
self, values: Sequence[PBValue], metadata: Metadata
9493
) -> QueryResultRow:
9594
result = QueryResultRow()
9695
columns = metadata.columns
@@ -107,16 +106,13 @@ def _construct_query_result_row(
107106
def consume(
108107
self, batches_to_consume: List[bytes], metadata: Metadata
109108
) -> Optional[Iterable[QueryResultRow]]:
110-
proto_metadata = cast(ProtoMetadata, metadata)
111-
num_columns = len(proto_metadata.columns)
109+
num_columns = len(metadata.columns)
112110
rows = []
113111
for batch_bytes in batches_to_consume:
114112
values = self._parse_proto_rows(batch_bytes)
115113
for row_data in batched(values, n=num_columns):
116114
if len(row_data) == num_columns:
117-
rows.append(
118-
self._construct_query_result_row(row_data, proto_metadata)
119-
)
115+
rows.append(self._construct_query_result_row(row_data, metadata))
120116
else:
121117
raise ValueError(
122118
"Unexpected error, received bad number of values. "

google/cloud/bigtable/data/execute_query/metadata.py

Lines changed: 3 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -298,14 +298,6 @@ def _to_value_pb_dict(self, value: Any) -> Dict[str, Any]:
298298

299299

300300
class Metadata:
301-
"""
302-
Base class for metadata returned by the ExecuteQuery operation.
303-
"""
304-
305-
pass
306-
307-
308-
class ProtoMetadata(Metadata):
309301
"""
310302
Metadata class for the ExecuteQuery operation.
311303
@@ -335,7 +327,7 @@ def columns(self) -> List[Column]:
335327
def __init__(
336328
self, columns: Optional[List[Tuple[Optional[str], SqlType.Type]]] = None
337329
):
338-
self._columns: List[ProtoMetadata.Column] = []
330+
self._columns: List[Metadata.Column] = []
339331
self._column_indexes: Dict[str, List[int]] = defaultdict(list)
340332
self._duplicate_names: Set[str] = set()
341333

@@ -345,7 +337,7 @@ def __init__(
345337
if column_name in self._column_indexes:
346338
self._duplicate_names.add(column_name)
347339
self._column_indexes[column_name].append(len(self._columns))
348-
self._columns.append(ProtoMetadata.Column(column_name, column_type))
340+
self._columns.append(Metadata.Column(column_name, column_type))
349341

350342
def __getitem__(self, index_or_name: Union[str, int]) -> Column:
351343
if isinstance(index_or_name, str):
@@ -381,7 +373,7 @@ def _pb_metadata_to_metadata_types(
381373
fields.append(
382374
(column_metadata.name, _pb_type_to_metadata_type(column_metadata.type))
383375
)
384-
return ProtoMetadata(fields)
376+
return Metadata(fields)
385377
raise ValueError("Invalid ResultSetMetadata object received.")
386378

387379

tests/system/data/test_system_async.py

Lines changed: 55 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1063,6 +1063,36 @@ async def test_execute_query_simple(self, client, table_id, instance_id):
10631063
assert row["a"] == 1
10641064
assert row["b"] == "foo"
10651065

1066+
@CrossSync.pytest
1067+
@pytest.mark.usefixtures("table")
1068+
@CrossSync.Retry(
1069+
predicate=retry.if_exception_type(ClientError), initial=1, maximum=5
1070+
)
1071+
async def test_execute_against_table(
1072+
self, client, instance_id, table_id, temp_rows
1073+
):
1074+
await temp_rows.add_row(b"row_key_1")
1075+
result = await client.execute_query(
1076+
"SELECT * FROM `" + table_id + "`", instance_id
1077+
)
1078+
rows = [r async for r in result]
1079+
1080+
assert len(rows) == 1
1081+
assert rows[0]["_key"] == b"row_key_1"
1082+
family_map = rows[0][TEST_FAMILY]
1083+
assert len(family_map) == 1
1084+
assert family_map[b"q"] == b"test-value"
1085+
assert len(rows[0][TEST_FAMILY_2]) == 0
1086+
md = result.metadata
1087+
assert len(md) == 3
1088+
assert md["_key"].column_type == SqlType.Bytes()
1089+
assert md[TEST_FAMILY].column_type == SqlType.Map(
1090+
SqlType.Bytes(), SqlType.Bytes()
1091+
)
1092+
assert md[TEST_FAMILY_2].column_type == SqlType.Map(
1093+
SqlType.Bytes(), SqlType.Bytes()
1094+
)
1095+
10661096
@CrossSync.pytest
10671097
@pytest.mark.usefixtures("client")
10681098
@CrossSync.Retry(
@@ -1149,3 +1179,28 @@ async def test_execute_query_params(self, client, table_id, instance_id):
11491179
date_pb2.Date(year=2025, month=1, day=17),
11501180
None,
11511181
]
1182+
1183+
@CrossSync.pytest
1184+
@pytest.mark.usefixtures("table")
1185+
@CrossSync.Retry(
1186+
predicate=retry.if_exception_type(ClientError), initial=1, maximum=5
1187+
)
1188+
async def test_execute_metadata_on_empty_response(
1189+
self, client, instance_id, table_id, temp_rows
1190+
):
1191+
await temp_rows.add_row(b"row_key_1")
1192+
result = await client.execute_query(
1193+
"SELECT * FROM `" + table_id + "` WHERE _key='non-existent'", instance_id
1194+
)
1195+
rows = [r async for r in result]
1196+
1197+
assert len(rows) == 0
1198+
md = result.metadata
1199+
assert len(md) == 3
1200+
assert md["_key"].column_type == SqlType.Bytes()
1201+
assert md[TEST_FAMILY].column_type == SqlType.Map(
1202+
SqlType.Bytes(), SqlType.Bytes()
1203+
)
1204+
assert md[TEST_FAMILY_2].column_type == SqlType.Map(
1205+
SqlType.Bytes(), SqlType.Bytes()
1206+
)

tests/system/data/test_system_autogen.py

Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -869,6 +869,30 @@ def test_execute_query_simple(self, client, table_id, instance_id):
869869
assert row["a"] == 1
870870
assert row["b"] == "foo"
871871

872+
@pytest.mark.usefixtures("table")
873+
@CrossSync._Sync_Impl.Retry(
874+
predicate=retry.if_exception_type(ClientError), initial=1, maximum=5
875+
)
876+
def test_execute_against_table(self, client, instance_id, table_id, temp_rows):
877+
temp_rows.add_row(b"row_key_1")
878+
result = client.execute_query("SELECT * FROM `" + table_id + "`", instance_id)
879+
rows = [r for r in result]
880+
assert len(rows) == 1
881+
assert rows[0]["_key"] == b"row_key_1"
882+
family_map = rows[0][TEST_FAMILY]
883+
assert len(family_map) == 1
884+
assert family_map[b"q"] == b"test-value"
885+
assert len(rows[0][TEST_FAMILY_2]) == 0
886+
md = result.metadata
887+
assert len(md) == 3
888+
assert md["_key"].column_type == SqlType.Bytes()
889+
assert md[TEST_FAMILY].column_type == SqlType.Map(
890+
SqlType.Bytes(), SqlType.Bytes()
891+
)
892+
assert md[TEST_FAMILY_2].column_type == SqlType.Map(
893+
SqlType.Bytes(), SqlType.Bytes()
894+
)
895+
872896
@pytest.mark.usefixtures("client")
873897
@CrossSync._Sync_Impl.Retry(
874898
predicate=retry.if_exception_type(ClientError), initial=1, maximum=5
@@ -945,3 +969,26 @@ def test_execute_query_params(self, client, table_id, instance_id):
945969
date_pb2.Date(year=2025, month=1, day=17),
946970
None,
947971
]
972+
973+
@pytest.mark.usefixtures("table")
974+
@CrossSync._Sync_Impl.Retry(
975+
predicate=retry.if_exception_type(ClientError), initial=1, maximum=5
976+
)
977+
def test_execute_metadata_on_empty_response(
978+
self, client, instance_id, table_id, temp_rows
979+
):
980+
temp_rows.add_row(b"row_key_1")
981+
result = client.execute_query(
982+
"SELECT * FROM `" + table_id + "` WHERE _key='non-existent'", instance_id
983+
)
984+
rows = [r for r in result]
985+
assert len(rows) == 0
986+
md = result.metadata
987+
assert len(md) == 3
988+
assert md["_key"].column_type == SqlType.Bytes()
989+
assert md[TEST_FAMILY].column_type == SqlType.Map(
990+
SqlType.Bytes(), SqlType.Bytes()
991+
)
992+
assert md[TEST_FAMILY_2].column_type == SqlType.Map(
993+
SqlType.Bytes(), SqlType.Bytes()
994+
)

tests/unit/data/execute_query/test_query_result_row_reader.py

Lines changed: 8 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
from google.cloud.bigtable.data.execute_query._reader import _QueryResultRowReader
1919

2020
from google.cloud.bigtable.data.execute_query.metadata import (
21-
ProtoMetadata,
21+
Metadata,
2222
SqlType,
2323
_pb_metadata_to_metadata_types,
2424
)
@@ -37,9 +37,7 @@
3737

3838
class TestQueryResultRowReader:
3939
def test__single_values_received(self):
40-
metadata = ProtoMetadata(
41-
[("test1", SqlType.Int64()), ("test2", SqlType.Int64())]
42-
)
40+
metadata = Metadata([("test1", SqlType.Int64()), ("test2", SqlType.Int64())])
4341
values = [
4442
proto_rows_bytes(int_val(1), int_val(2)),
4543
proto_rows_bytes(int_val(3), int_val(4)),
@@ -61,9 +59,7 @@ def test__multiple_rows_received(self):
6159
proto_rows_bytes(int_val(7), int_val(8)),
6260
]
6361

64-
metadata = ProtoMetadata(
65-
[("test1", SqlType.Int64()), ("test2", SqlType.Int64())]
66-
)
62+
metadata = Metadata([("test1", SqlType.Int64()), ("test2", SqlType.Int64())])
6763
reader = _QueryResultRowReader()
6864

6965
result = reader.consume(values[0:1], metadata)
@@ -89,9 +85,7 @@ def test__multiple_rows_received(self):
8985
assert result[0][1] == result[0]["test2"] == 8
9086

9187
def test__received_values_are_passed_to_parser_in_batches(self):
92-
metadata = ProtoMetadata(
93-
[("test1", SqlType.Int64()), ("test2", SqlType.Int64())]
94-
)
88+
metadata = Metadata([("test1", SqlType.Int64()), ("test2", SqlType.Int64())])
9589

9690
# TODO move to a SqlType test
9791
assert SqlType.Struct([("a", SqlType.Int64())]) == SqlType.Struct(
@@ -128,7 +122,7 @@ def test__received_values_are_passed_to_parser_in_batches(self):
128122
)
129123

130124
def test__parser_errors_are_forwarded(self):
131-
metadata = ProtoMetadata([("test1", SqlType.Int64())])
125+
metadata = Metadata([("test1", SqlType.Int64())])
132126

133127
values = [str_val("test")]
134128

@@ -236,7 +230,7 @@ def test_multiple_batches(self):
236230
]
237231
results = reader.consume(
238232
batches,
239-
ProtoMetadata([("test1", SqlType.Int64()), ("test2", SqlType.Int64())]),
233+
Metadata([("test1", SqlType.Int64()), ("test2", SqlType.Int64())]),
240234
)
241235
assert len(results) == 4
242236
[row1, row2, row3, row4] = results
@@ -250,9 +244,9 @@ def test_multiple_batches(self):
250244
assert row4["test2"] == 8
251245

252246

253-
class TestProtoMetadata:
247+
class TestMetadata:
254248
def test__duplicate_column_names(self):
255-
metadata = ProtoMetadata(
249+
metadata = Metadata(
256250
[
257251
("test1", SqlType.Int64()),
258252
("test2", SqlType.Bytes()),

0 commit comments

Comments
 (0)