diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 2567653c..ee94722a 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -1,3 +1,3 @@
 docker:
   image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
-  digest: sha256:87eee22d276554e4e52863ec9b1cb6a7245815dfae20439712bf644348215a5a
+  digest: sha256:6e7328583be8edd3ba8f35311c76a1ecbc823010279ccb6ab46b7a76e25eafcc
diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg
index 2c9b236b..b59a4877 100644
--- a/.kokoro/samples/lint/common.cfg
+++ b/.kokoro/samples/lint/common.cfg
@@ -31,4 +31,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
 gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
 
 # Use the trampoline script to run in docker.
-build_file: "python-bigquery-storage/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-bigquery-storage/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg
index 868f64fb..54527096 100644
--- a/.kokoro/samples/python3.6/common.cfg
+++ b/.kokoro/samples/python3.6/common.cfg
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
 gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
 
 # Use the trampoline script to run in docker.
-build_file: "python-bigquery-storage/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-bigquery-storage/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.6/periodic.cfg
index 50fec964..71cd1e59 100644
--- a/.kokoro/samples/python3.6/periodic.cfg
+++ b/.kokoro/samples/python3.6/periodic.cfg
@@ -3,4 +3,4 @@
 env_vars: {
     key: "INSTALL_LIBRARY_FROM_SOURCE"
     value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
index 21827044..bf6c95ce 100644
--- a/.kokoro/samples/python3.7/common.cfg
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
 gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
 
 # Use the trampoline script to run in docker.
-build_file: "python-bigquery-storage/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-bigquery-storage/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/periodic.cfg b/.kokoro/samples/python3.7/periodic.cfg
index 50fec964..71cd1e59 100644
--- a/.kokoro/samples/python3.7/periodic.cfg
+++ b/.kokoro/samples/python3.7/periodic.cfg
@@ -3,4 +3,4 @@
 env_vars: {
     key: "INSTALL_LIBRARY_FROM_SOURCE"
     value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
index 0ea18f57..29afe106 100644
--- a/.kokoro/samples/python3.8/common.cfg
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
 gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
 
 # Use the trampoline script to run in docker.
-build_file: "python-bigquery-storage/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-bigquery-storage/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.8/periodic.cfg b/.kokoro/samples/python3.8/periodic.cfg index 50fec964..71cd1e59 100644 --- a/.kokoro/samples/python3.8/periodic.cfg +++ b/.kokoro/samples/python3.8/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg index 4e0e4083..68474ce6 100644 --- a/.kokoro/samples/python3.9/common.cfg +++ b/.kokoro/samples/python3.9/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-bigquery-storage/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-bigquery-storage/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.9/periodic.cfg b/.kokoro/samples/python3.9/periodic.cfg index 50fec964..71cd1e59 100644 --- a/.kokoro/samples/python3.9/periodic.cfg +++ b/.kokoro/samples/python3.9/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh index 36ba35b5..ba3a707b 100755 --- a/.kokoro/test-samples-against-head.sh +++ b/.kokoro/test-samples-against-head.sh @@ -23,6 +23,4 @@ set -eo pipefail # Enables `**` to include files nested inside sub-folders shopt -s globstar -cd github/python-bigquery-storage - exec .kokoro/test-samples-impl.sh diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index 3ef99a86..11c042d3 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -24,8 +24,6 @@ set -eo pipefail # Enables `**` to include files nested inside sub-folders shopt -s globstar -cd github/python-bigquery-storage - # Run periodic samples tests at latest release if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then # preserving the test runner implementation. 
diff --git a/.repo-metadata.json b/.repo-metadata.json
index 2b3d291e..d4d8049b 100644
--- a/.repo-metadata.json
+++ b/.repo-metadata.json
@@ -1,14 +1,16 @@
 {
-  "name": "bigquerystorage",
-  "name_pretty": "Google BigQuery Storage",
-  "product_documentation": "https://cloud.google.com/bigquery/docs/reference/storage/",
-  "client_documentation": "https://googleapis.dev/python/bigquerystorage/latest",
-  "issue_tracker": "https://issuetracker.google.com/savedsearches/559654",
-  "release_level": "ga",
-  "language": "python",
-  "library_type": "GAPIC_COMBO",
-  "repo": "googleapis/python-bigquery-storage",
-  "distribution_name": "google-cloud-bigquery-storage",
-  "api_id": "bigquerystorage.googleapis.com",
-  "requires_billing": true
-}
\ No newline at end of file
+    "name": "bigquerystorage",
+    "name_pretty": "Google BigQuery Storage",
+    "product_documentation": "https://cloud.google.com/bigquery/docs/reference/storage/",
+    "client_documentation": "https://googleapis.dev/python/bigquerystorage/latest",
+    "issue_tracker": "https://issuetracker.google.com/savedsearches/559654",
+    "release_level": "ga",
+    "language": "python",
+    "library_type": "GAPIC_COMBO",
+    "repo": "googleapis/python-bigquery-storage",
+    "distribution_name": "google-cloud-bigquery-storage",
+    "api_id": "bigquerystorage.googleapis.com",
+    "requires_billing": true,
+    "default_version": "v1",
+    "codeowner_team": "@googleapis/api-bigquery"
+}
diff --git a/.trampolinerc b/.trampolinerc
index 383b6ec8..0eee72ab 100644
--- a/.trampolinerc
+++ b/.trampolinerc
@@ -16,15 +16,26 @@
 
 # Add required env vars here.
 required_envvars+=(
-    "STAGING_BUCKET"
-    "V2_STAGING_BUCKET"
 )
 
 # Add env vars which are passed down into the container here.
 pass_down_envvars+=(
+    "NOX_SESSION"
+    ###############
+    # Docs builds
+    ###############
     "STAGING_BUCKET"
     "V2_STAGING_BUCKET"
-    "NOX_SESSION"
+    ##################
+    # Samples builds
+    ##################
+    "INSTALL_LIBRARY_FROM_SOURCE"
+    "RUN_TESTS_SESSION"
+    "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    # Target directories.
+    "RUN_TESTS_DIRS"
+    # The nox session to run.
+    "RUN_TESTS_SESSION"
 )
 
 # Prevent unintentional override on the default image.
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 012f52b1..2bd3bb18 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,13 @@
 
 [1]: https://pypi.org/project/google-cloud-bigquery-storage/#history
 
+### [2.9.1](https://www.github.com/googleapis/python-bigquery-storage/compare/v2.9.0...v2.9.1) (2021-10-06)
+
+
+### Documentation
+
+* **samples:** Add minimal sample to show Write API in pending mode ([#322](https://www.github.com/googleapis/python-bigquery-storage/issues/322)) ([db51469](https://www.github.com/googleapis/python-bigquery-storage/commit/db5146980bd1a358413c56f6e090c07277bfac26))
+
 ## [2.9.0](https://www.github.com/googleapis/python-bigquery-storage/compare/v2.8.0...v2.9.0) (2021-09-27)
 
 
diff --git a/samples/quickstart/noxfile.py b/samples/quickstart/noxfile.py
index b008613f..1fd8956f 100644
--- a/samples/quickstart/noxfile.py
+++ b/samples/quickstart/noxfile.py
@@ -98,6 +98,10 @@ def get_pytest_env_vars() -> Dict[str, str]:
     "True",
     "true",
 )
+
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
+
 #
 # Style Checks
 #
diff --git a/samples/quickstart/requirements.txt b/samples/quickstart/requirements.txt
index 0891ed5a..a3ecaa6e 100644
--- a/samples/quickstart/requirements.txt
+++ b/samples/quickstart/requirements.txt
@@ -1,2 +1,2 @@
 fastavro
-google-cloud-bigquery-storage==2.8.0
+google-cloud-bigquery-storage==2.9.0
diff --git a/samples/snippets/append_rows_pending.py b/samples/snippets/append_rows_pending.py
new file mode 100644
index 00000000..af780ffa
--- /dev/null
+++ b/samples/snippets/append_rows_pending.py
@@ -0,0 +1,134 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# [START bigquerystorage_append_rows_pending]
+"""
+This code sample demonstrates how to write records in pending mode
+using the low-level generated client for Python.
+"""
+
+from google.cloud import bigquery_storage_v1
+from google.cloud.bigquery_storage_v1 import types
+from google.cloud.bigquery_storage_v1 import writer
+from google.protobuf import descriptor_pb2
+
+# If you update the customer_record.proto protocol buffer definition, run:
+#
+#   protoc --python_out=. customer_record.proto
+#
+# from the samples/snippets directory to generate the customer_record_pb2.py module.
+from . import customer_record_pb2
+
+
+def create_row_data(row_num: int, name: str):
+    row = customer_record_pb2.CustomerRecord()
+    row.row_num = row_num
+    row.customer_name = name
+    return row.SerializeToString()
+
+
+def append_rows_pending(project_id: str, dataset_id: str, table_id: str):
+
+    """Create a write stream, write some sample data, and commit the stream."""
+    write_client = bigquery_storage_v1.BigQueryWriteClient()
+    parent = write_client.table_path(project_id, dataset_id, table_id)
+    write_stream = types.WriteStream()
+
+    # When creating the stream, choose the type. Use the PENDING type to wait
+    # until the stream is committed before it is visible. See:
+    # https://cloud.google.com/bigquery/docs/reference/storage/rpc/google.cloud.bigquery.storage.v1#google.cloud.bigquery.storage.v1.WriteStream.Type
+    write_stream.type_ = types.WriteStream.Type.PENDING
+    write_stream = write_client.create_write_stream(
+        parent=parent, write_stream=write_stream
+    )
+    stream_name = write_stream.name
+
+    # Create a template with fields needed for the first request.
+    request_template = types.AppendRowsRequest()
+
+    # The initial request must contain the stream name.
+    request_template.write_stream = stream_name
+
+    # So that BigQuery knows how to parse the serialized_rows, generate a
+    # protocol buffer representation of your message descriptor.
+    proto_schema = types.ProtoSchema()
+    proto_descriptor = descriptor_pb2.DescriptorProto()
+    customer_record_pb2.CustomerRecord.DESCRIPTOR.CopyToProto(proto_descriptor)
+    proto_schema.proto_descriptor = proto_descriptor
+    proto_data = types.AppendRowsRequest.ProtoData()
+    proto_data.writer_schema = proto_schema
+    request_template.proto_rows = proto_data
+
+    # Some stream types support an unbounded number of requests. Construct an
+    # AppendRowsStream to send an arbitrary number of requests to a stream.
+    append_rows_stream = writer.AppendRowsStream(write_client, request_template)
+
+    # Create a batch of row data by appending proto2 serialized bytes to the
+    # serialized_rows repeated field.
+    proto_rows = types.ProtoRows()
+    proto_rows.serialized_rows.append(create_row_data(1, "Alice"))
+    proto_rows.serialized_rows.append(create_row_data(2, "Bob"))
+
+    # Set an offset to allow resuming this stream if the connection breaks.
+    # Keep track of which requests the server has acknowledged and resume the
+    # stream at the first non-acknowledged message. If the server has already
+    # processed a message with that offset, it will return an ALREADY_EXISTS
+    # error, which can be safely ignored.
+    #
+    # The first request must always have an offset of 0.
+    request = types.AppendRowsRequest()
+    request.offset = 0
+    proto_data = types.AppendRowsRequest.ProtoData()
+    proto_data.rows = proto_rows
+    request.proto_rows = proto_data
+
+    response_future_1 = append_rows_stream.send(request)
+
+    # Send another batch.
+    proto_rows = types.ProtoRows()
+    proto_rows.serialized_rows.append(create_row_data(3, "Charles"))
+
+    # Since this is the second request, you only need to include the row data.
+    # The name of the stream and protocol buffers DESCRIPTOR is only needed in
+    # the first request.
+    request = types.AppendRowsRequest()
+    proto_data = types.AppendRowsRequest.ProtoData()
+    proto_data.rows = proto_rows
+    request.proto_rows = proto_data
+
+    # Offset must equal the number of rows that were previously sent.
+    request.offset = 2
+
+    response_future_2 = append_rows_stream.send(request)
+
+    print(response_future_1.result())
+    print(response_future_2.result())
+
+    # Shutdown background threads and close the streaming connection.
+    append_rows_stream.close()
+
+    # A PENDING type stream must be "finalized" before being committed. No new
+    # records can be written to the stream after this method has been called.
+    write_client.finalize_write_stream(name=write_stream.name)
+
+    # Commit the stream you created earlier.
+    batch_commit_write_streams_request = types.BatchCommitWriteStreamsRequest()
+    batch_commit_write_streams_request.parent = parent
+    batch_commit_write_streams_request.write_streams = [write_stream.name]
+    write_client.batch_commit_write_streams(batch_commit_write_streams_request)
+
+    print(f"Writes to stream: '{write_stream.name}' have been committed.")
+
+
+# [END bigquerystorage_append_rows_pending]
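
Note: the snippet above is meant to be imported and called (the test below does exactly that). For readers who want to exercise it by hand, a minimal driver might look like the following sketch; the project, dataset, and table names are placeholders rather than part of this change, and the target table must already exist with a schema matching customer_record_schema.json.

# Illustrative driver only -- not part of the committed sample. Run it from a
# context where append_rows_pending is importable as part of the
# samples/snippets package, so its relative import of customer_record_pb2 resolves.
from . import append_rows_pending

append_rows_pending.append_rows_pending(
    project_id="my-project",      # placeholder
    dataset_id="my_dataset",      # placeholder; dataset must already exist
    table_id="customer_records",  # placeholder; schema must match customer_record_schema.json
)
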
diff --git a/samples/snippets/append_rows_pending_test.py b/samples/snippets/append_rows_pending_test.py
new file mode 100644
index 00000000..dc0e690d
--- /dev/null
+++ b/samples/snippets/append_rows_pending_test.py
@@ -0,0 +1,73 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pathlib
+import random
+
+from google.cloud import bigquery
+import pytest
+
+from . import append_rows_pending
+
+
+DIR = pathlib.Path(__file__).parent
+
+
+regions = ["US", "non-US"]
+
+
+@pytest.fixture(params=regions)
+def sample_data_table(
+    request: pytest.FixtureRequest,
+    bigquery_client: bigquery.Client,
+    project_id: str,
+    dataset_id: str,
+    dataset_id_non_us: str,
+) -> str:
+    dataset = dataset_id
+    if request.param != "US":
+        dataset = dataset_id_non_us
+    schema = bigquery_client.schema_from_json(str(DIR / "customer_record_schema.json"))
+    table_id = f"append_rows_proto2_{random.randrange(10000)}"
+    full_table_id = f"{project_id}.{dataset}.{table_id}"
+    table = bigquery.Table(full_table_id, schema=schema)
+    table = bigquery_client.create_table(table, exists_ok=True)
+    yield full_table_id
+    bigquery_client.delete_table(table, not_found_ok=True)
+
+
+def test_append_rows_pending(
+    capsys: pytest.CaptureFixture,
+    bigquery_client: bigquery.Client,
+    sample_data_table: str,
+):
+    project_id, dataset_id, table_id = sample_data_table.split(".")
+    append_rows_pending.append_rows_pending(
+        project_id=project_id, dataset_id=dataset_id, table_id=table_id
+    )
+    out, _ = capsys.readouterr()
+    assert "have been committed" in out
+
+    rows = bigquery_client.query(
+        f"SELECT * FROM `{project_id}.{dataset_id}.{table_id}`"
+    ).result()
+    row_items = [
+        # Convert to sorted tuple of items to more easily search for expected rows.
+        tuple(sorted(row.items()))
+        for row in rows
+    ]
+
+    assert (("customer_name", "Alice"), ("row_num", 1)) in row_items
+    assert (("customer_name", "Bob"), ("row_num", 2)) in row_items
+    assert (("customer_name", "Charles"), ("row_num", 3)) in row_items
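
The test relies on bigquery_client, project_id, dataset_id, and dataset_id_non_us fixtures that are not part of this diff; they come from the existing conftest.py in samples/snippets. For readers following along without the full repository, the sketch below shows the kind of fixtures the test assumes — the session scoping, dataset naming, and region choice are assumptions for illustration, not the actual conftest.

# Sketch of conftest.py-style fixtures the test assumes; the real fixtures in
# samples/snippets/conftest.py may differ. Only the fixture names match the test.
import random

import pytest
from google.cloud import bigquery


@pytest.fixture(scope="session")
def bigquery_client() -> bigquery.Client:
    return bigquery.Client()


@pytest.fixture(scope="session")
def project_id(bigquery_client: bigquery.Client) -> str:
    return bigquery_client.project


@pytest.fixture(scope="session")
def dataset_id(bigquery_client: bigquery.Client, project_id: str):
    # Dataset in the default (US multi-region) location.
    dataset_id = f"samples_snippets_{random.randrange(10000)}"
    bigquery_client.create_dataset(f"{project_id}.{dataset_id}", exists_ok=True)
    yield dataset_id
    bigquery_client.delete_dataset(dataset_id, delete_contents=True, not_found_ok=True)


@pytest.fixture(scope="session")
def dataset_id_non_us(bigquery_client: bigquery.Client, project_id: str):
    # Same idea, but created in a single region outside the US (region is illustrative).
    dataset_id = f"samples_snippets_non_us_{random.randrange(10000)}"
    dataset = bigquery.Dataset(f"{project_id}.{dataset_id}")
    dataset.location = "asia-northeast1"
    bigquery_client.create_dataset(dataset, exists_ok=True)
    yield dataset_id
    bigquery_client.delete_dataset(dataset_id, delete_contents=True, not_found_ok=True)
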
diff --git a/samples/snippets/customer_record.proto b/samples/snippets/customer_record.proto
new file mode 100644
index 00000000..06142c3b
--- /dev/null
+++ b/samples/snippets/customer_record.proto
@@ -0,0 +1,28 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// The BigQuery Storage API expects protocol buffer data to be encoded in the
+// proto2 wire format. This allows it to disambiguate missing optional fields
+// from default values without the need for wrapper types.
+syntax = "proto2";
+
+// Define a message type representing the rows in your table. The message
+// cannot contain fields which are not present in the table.
+message CustomerRecord {
+
+  optional string customer_name = 1;
+
+  // Use the required keyword for client-side validation of required fields.
+  required int64 row_num = 2;
+}
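
The proto2 comment above — disambiguating missing optional fields from default values — can be observed directly on the generated message class: proto2 messages track per-field presence, and required fields are validated client-side when serializing. A quick illustration follows; it assumes customer_record_pb2.py has been regenerated with protoc as described in append_rows_pending.py, and the import path should be adjusted to wherever that module lives.

# Illustrative only; not part of this change.
import customer_record_pb2

row = customer_record_pb2.CustomerRecord()
row.row_num = 1

# proto2 keeps per-field presence, so an unset optional field is distinguishable
# from one explicitly set to its default value ("" for strings).
assert not row.HasField("customer_name")
row.customer_name = ""
assert row.HasField("customer_name")

# The `required` keyword gives client-side validation: serializing without
# row_num raises google.protobuf.message.EncodeError.
row.ClearField("row_num")
try:
    row.SerializeToString()
except Exception as exc:
    print(f"serialization rejected: {exc}")
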
diff --git a/samples/snippets/customer_record_pb2.py b/samples/snippets/customer_record_pb2.py
new file mode 100644
index 00000000..14201ea9
--- /dev/null
+++ b/samples/snippets/customer_record_pb2.py
@@ -0,0 +1,99 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: customer_record.proto
+
+import sys
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+    name="customer_record.proto",
+    package="",
+    syntax="proto2",
+    serialized_options=None,
+    serialized_pb=_b(
+        '\n\x15\x63ustomer_record.proto"8\n\x0e\x43ustomerRecord\x12\x15\n\rcustomer_name\x18\x01 \x01(\t\x12\x0f\n\x07row_num\x18\x02 \x02(\x03'
+    ),
+)
+
+
+_CUSTOMERRECORD = _descriptor.Descriptor(
+    name="CustomerRecord",
+    full_name="CustomerRecord",
+    filename=None,
+    file=DESCRIPTOR,
+    containing_type=None,
+    fields=[
+        _descriptor.FieldDescriptor(
+            name="customer_name",
+            full_name="CustomerRecord.customer_name",
+            index=0,
+            number=1,
+            type=9,
+            cpp_type=9,
+            label=1,
+            has_default_value=False,
+            default_value=_b("").decode("utf-8"),
+            message_type=None,
+            enum_type=None,
+            containing_type=None,
+            is_extension=False,
+            extension_scope=None,
+            serialized_options=None,
+            file=DESCRIPTOR,
+        ),
+        _descriptor.FieldDescriptor(
+            name="row_num",
+            full_name="CustomerRecord.row_num",
+            index=1,
+            number=2,
+            type=3,
+            cpp_type=2,
+            label=2,
+            has_default_value=False,
+            default_value=0,
+            message_type=None,
+            enum_type=None,
+            containing_type=None,
+            is_extension=False,
+            extension_scope=None,
+            serialized_options=None,
+            file=DESCRIPTOR,
+        ),
+    ],
+    extensions=[],
+    nested_types=[],
+    enum_types=[],
+    serialized_options=None,
+    is_extendable=False,
+    syntax="proto2",
+    extension_ranges=[],
+    oneofs=[],
+    serialized_start=25,
+    serialized_end=81,
+)
+
+DESCRIPTOR.message_types_by_name["CustomerRecord"] = _CUSTOMERRECORD
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+CustomerRecord = _reflection.GeneratedProtocolMessageType(
+    "CustomerRecord",
+    (_message.Message,),
+    dict(
+        DESCRIPTOR=_CUSTOMERRECORD,
+        __module__="customer_record_pb2"
+        # @@protoc_insertion_point(class_scope:CustomerRecord)
+    ),
+)
+_sym_db.RegisterMessage(CustomerRecord)
+
+
+# @@protoc_insertion_point(module_scope)
diff --git a/samples/snippets/customer_record_schema.json b/samples/snippets/customer_record_schema.json
new file mode 100644
index 00000000..e04b31a7
--- /dev/null
+++ b/samples/snippets/customer_record_schema.json
@@ -0,0 +1,11 @@
+[
+  {
+    "name": "customer_name",
+    "type": "STRING"
+  },
+  {
+    "name": "row_num",
+    "type": "INTEGER",
+    "mode": "REQUIRED"
+  }
+]
diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py
index b008613f..1fd8956f 100644
--- a/samples/snippets/noxfile.py
+++ b/samples/snippets/noxfile.py
@@ -98,6 +98,10 @@ def get_pytest_env_vars() -> Dict[str, str]:
     "True",
     "true",
 )
+
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
+
 #
 # Style Checks
 #
diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt
index 858829ae..a357f805 100644
--- a/samples/snippets/requirements.txt
+++ b/samples/snippets/requirements.txt
@@ -1,3 +1,3 @@
-google-cloud-bigquery-storage==2.8.0
-google-cloud-bigquery==2.27.0
-protobuf==3.18.0
+google-cloud-bigquery-storage==2.9.0
+google-cloud-bigquery==2.28.0
+protobuf==3.18.1
diff --git a/samples/to_dataframe/noxfile.py b/samples/to_dataframe/noxfile.py
index b008613f..1fd8956f 100644
--- a/samples/to_dataframe/noxfile.py
+++ b/samples/to_dataframe/noxfile.py
@@ -98,6 +98,10 @@ def get_pytest_env_vars() -> Dict[str, str]:
     "True",
     "true",
 )
+
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
+
 #
 # Style Checks
 #
diff --git a/samples/to_dataframe/requirements.txt b/samples/to_dataframe/requirements.txt
index 1ca261ea..aa993c31 100644
--- a/samples/to_dataframe/requirements.txt
+++ b/samples/to_dataframe/requirements.txt
@@ -1,6 +1,6 @@
-google-auth==2.2.0
-google-cloud-bigquery-storage==2.8.0
-google-cloud-bigquery==2.27.0
+google-auth==2.2.1
+google-cloud-bigquery-storage==2.9.0
+google-cloud-bigquery==2.28.0
 pyarrow==5.0.0
 ipython==7.24.0; python_version > '3.6'
 ipython==7.16.1; python_version <= '3.6'
diff --git a/setup.py b/setup.py
index a7cd2f4a..1af3d1d8 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@
 
 name = "google-cloud-bigquery-storage"
 description = "BigQuery Storage API API client library"
-version = "2.9.0"
+version = "2.9.1"
 release_status = "Development Status :: 5 - Production/Stable"
 dependencies = [
     # NOTE: Maintainers, please do not require google-api-core>=2.x.x