From 1807f98a1f46c0a212748dabc6f31631e0217465 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 1 Mar 2022 12:12:17 +0000 Subject: [PATCH 1/9] chore(deps): update actions/setup-python action to v3 (#520) Source-Link: https://github.com/googleapis/synthtool/commit/571ee2c3b26182429eddcf115122ee545d7d3787 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:660abdf857d3ab9aabcd967c163c70e657fcc5653595c709263af5f3fa23ef67 --- .github/.OwlBot.lock.yaml | 2 +- .github/workflows/docs.yml | 4 ++-- .github/workflows/lint.yml | 2 +- .github/workflows/unittest.yml | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 8cb43804d..d9a55fa40 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 + digest: sha256:660abdf857d3ab9aabcd967c163c70e657fcc5653595c709263af5f3fa23ef67 diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index f7b8344c4..cca4e98bf 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -10,7 +10,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" - name: Install nox @@ -26,7 +26,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" - name: Install nox diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 1e8b05c3d..f687324ef 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -10,7 +10,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" - name: Install nox diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index 074ee2504..d3003e09e 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -13,7 +13,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: ${{ matrix.python }} - name: Install nox @@ -39,7 +39,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" - name: Install coverage From a60d8c8ef4e774aad2a05fdeb623fe1e5c25437d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 1 Mar 2022 23:16:42 +0100 Subject: [PATCH 2/9] chore(deps): update all dependencies (#519) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .github/workflows/mypy.yml | 4 ++-- .github/workflows/system_emulated.yml | 4 ++-- samples/beam/requirements.txt | 2 +- samples/hello/requirements.txt | 2 +- samples/instanceadmin/requirements.txt | 2 +- samples/metricscaler/requirements.txt | 4 
++-- samples/quickstart/requirements.txt | 2 +- samples/snippets/filters/requirements.txt | 2 +- samples/snippets/reads/requirements.txt | 2 +- samples/snippets/writes/requirements.txt | 2 +- samples/tableadmin/requirements.txt | 2 +- 11 files changed, 14 insertions(+), 14 deletions(-) diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml index 5a0f0e090..f9f07f4de 100644 --- a/.github/workflows/mypy.yml +++ b/.github/workflows/mypy.yml @@ -8,9 +8,9 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.8" - name: Install nox diff --git a/.github/workflows/system_emulated.yml b/.github/workflows/system_emulated.yml index 8e6c0cfcf..c974d6b11 100644 --- a/.github/workflows/system_emulated.yml +++ b/.github/workflows/system_emulated.yml @@ -12,10 +12,10 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: '3.8' diff --git a/samples/beam/requirements.txt b/samples/beam/requirements.txt index 4aed9a55d..2bd731ab7 100644 --- a/samples/beam/requirements.txt +++ b/samples/beam/requirements.txt @@ -1,3 +1,3 @@ apache-beam==2.36.0 -google-cloud-bigtable==2.5.2 +google-cloud-bigtable==2.6.0 google-cloud-core==2.2.2 diff --git a/samples/hello/requirements.txt b/samples/hello/requirements.txt index f3158ef18..117a6e939 100644 --- a/samples/hello/requirements.txt +++ b/samples/hello/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigtable==2.5.2 +google-cloud-bigtable==2.6.0 google-cloud-core==2.2.2 diff --git a/samples/instanceadmin/requirements.txt b/samples/instanceadmin/requirements.txt index b3fa08aa0..1877e7a06 100644 --- a/samples/instanceadmin/requirements.txt +++ b/samples/instanceadmin/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigtable==2.5.2 +google-cloud-bigtable==2.6.0 backoff==1.11.1 diff --git a/samples/metricscaler/requirements.txt b/samples/metricscaler/requirements.txt index ffb371b56..20596a095 100644 --- a/samples/metricscaler/requirements.txt +++ b/samples/metricscaler/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigtable==2.5.2 -google-cloud-monitoring==2.8.0 +google-cloud-bigtable==2.6.0 +google-cloud-monitoring==2.9.0 diff --git a/samples/quickstart/requirements.txt b/samples/quickstart/requirements.txt index 307f5ffa4..89f83eb34 100644 --- a/samples/quickstart/requirements.txt +++ b/samples/quickstart/requirements.txt @@ -1 +1 @@ -google-cloud-bigtable==2.5.2 +google-cloud-bigtable==2.6.0 diff --git a/samples/snippets/filters/requirements.txt b/samples/snippets/filters/requirements.txt index 711d7d2cd..7f67da9d1 100644 --- a/samples/snippets/filters/requirements.txt +++ b/samples/snippets/filters/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigtable==2.5.2 +google-cloud-bigtable==2.6.0 snapshottest==0.6.0 \ No newline at end of file diff --git a/samples/snippets/reads/requirements.txt b/samples/snippets/reads/requirements.txt index 711d7d2cd..7f67da9d1 100644 --- a/samples/snippets/reads/requirements.txt +++ b/samples/snippets/reads/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigtable==2.5.2 +google-cloud-bigtable==2.6.0 snapshottest==0.6.0 \ No newline at end of file diff --git a/samples/snippets/writes/requirements.txt b/samples/snippets/writes/requirements.txt index 0ed46fa41..b523e0991 100644 --- a/samples/snippets/writes/requirements.txt +++ 
b/samples/snippets/writes/requirements.txt @@ -1 +1 @@ -google-cloud-bigtable==2.5.2 \ No newline at end of file +google-cloud-bigtable==2.6.0 \ No newline at end of file diff --git a/samples/tableadmin/requirements.txt b/samples/tableadmin/requirements.txt index 307f5ffa4..89f83eb34 100644 --- a/samples/tableadmin/requirements.txt +++ b/samples/tableadmin/requirements.txt @@ -1 +1 @@ -google-cloud-bigtable==2.5.2 +google-cloud-bigtable==2.6.0 From 59087c8316b4326c2ac6b4797bec57acba0e0792 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 3 Mar 2022 00:22:37 +0000 Subject: [PATCH 3/9] chore(deps): update actions/checkout action to v3 (#522) Source-Link: https://github.com/googleapis/synthtool/commit/ca879097772aeec2cbb971c3cea8ecc81522b68a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:6162c384d685c5fe22521d3f37f6fc732bf99a085f6d47b677dbcae97fc21392 --- .github/.OwlBot.lock.yaml | 2 +- .github/workflows/docs.yml | 4 ++-- .github/workflows/lint.yml | 2 +- .github/workflows/unittest.yml | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index d9a55fa40..480226ac0 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:660abdf857d3ab9aabcd967c163c70e657fcc5653595c709263af5f3fa23ef67 + digest: sha256:6162c384d685c5fe22521d3f37f6fc732bf99a085f6d47b677dbcae97fc21392 diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index cca4e98bf..b46d7305d 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: @@ -24,7 +24,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index f687324ef..f512a4960 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index d3003e09e..e87fe5b7b 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -11,7 +11,7 @@ jobs: python: ['3.6', '3.7', '3.8', '3.9', '3.10'] steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: @@ -37,7 +37,7 @@ jobs: - unit steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: From a8a92ee1b6bd284055fee3e1029a9a6aacbc5f1c Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 3 Mar 2022 14:55:34 -0500 Subject: [PATCH 4/9] fix(deps): require google-api-core>=1.31.5, >=2.3.2 (#526) fix(deps): require proto-plus>=1.15.0 --- setup.py | 4 ++-- testing/constraints-3.6.txt | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/setup.py b/setup.py index ffedd10a7..a3f51e450 100644 --- a/setup.py +++ 
b/setup.py @@ -32,13 +32,13 @@ # NOTE: Maintainers, please do not require google-api-core>=2.x.x # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.28.0, <3.0.0dev", + "google-api-core[grpc] >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0", # NOTE: Maintainers, please do not require google-api-core>=2.x.x # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 "google-cloud-core >= 1.4.1, <3.0.0dev", "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", - "proto-plus >= 1.13.0", + "proto-plus >= 1.15.0", ] extras = {"libcst": "libcst >= 0.2.5"} diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index 1e50717bf..3d010787d 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -5,8 +5,8 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==1.28.0 +google-api-core==1.31.5 google-cloud-core==1.4.1 grpc-google-iam-v1==0.12.3 -proto-plus==1.13.0 +proto-plus==1.15.0 libcst==0.2.5 From 8f4e197148644ded934190814ff44fa132a2dda6 Mon Sep 17 00:00:00 2001 From: Mariatta Wijaya Date: Fri, 4 Mar 2022 08:16:11 -0800 Subject: [PATCH 5/9] feat: Add support for autoscaling (#509) * feat: Add support for autoscaling - Add the parameters min_serve_nodes, max_serve_nodes, and cpu_utilization_percent - Create disable_autoscaling function - Update documentation and tests - Add validation when scaling config was not set correctly. --- docs/snippets.py | 19 ++ google/cloud/bigtable/cluster.py | 176 +++++++++- google/cloud/bigtable/instance.py | 15 + tests/system/conftest.py | 33 ++ tests/system/test_instance_admin.py | 138 ++++++++ tests/unit/test_cluster.py | 511 +++++++++++++++++++++++++++- 6 files changed, 877 insertions(+), 15 deletions(-) diff --git a/docs/snippets.py b/docs/snippets.py index eeb39c3bb..ee5490afe 100644 --- a/docs/snippets.py +++ b/docs/snippets.py @@ -401,6 +401,25 @@ def test_bigtable_update_cluster(): assert cluster.serve_nodes == 4 +def test_bigtable_cluster_disable_autoscaling(): + # [START bigtable_api_cluster_disable_autoscaling] + from google.cloud.bigtable import Client + + client = Client(admin=True) + instance = client.instance(INSTANCE_ID) + # Create a cluster with autoscaling enabled + cluster = instance.cluster( + CLUSTER_ID, min_serve_nodes=1, max_serve_nodes=2, cpu_utilization_percent=10 + ) + instance.create(clusters=[cluster]) + + # Disable autoscaling + cluster.disable_autoscaling(serve_nodes=4) + # [END bigtable_api_cluster_disable_autoscaling] + + assert cluster.serve_nodes == 4 + + def test_bigtable_create_table(): # [START bigtable_api_create_table] from google.api_core import exceptions diff --git a/google/cloud/bigtable/cluster.py b/google/cloud/bigtable/cluster.py index f3e79c6c2..1d0af2c69 100644 --- a/google/cloud/bigtable/cluster.py +++ b/google/cloud/bigtable/cluster.py @@ -18,6 +18,7 @@ import re from google.cloud.bigtable_admin_v2.types import instance from google.api_core.exceptions import NotFound +from google.protobuf import field_mask_pb2 _CLUSTER_NAME_RE = re.compile( @@ -36,6 +37,7 @@ class Cluster(object): * :meth:`create` itself * :meth:`update` itself * :meth:`delete` itself + * :meth:`disable_autoscaling` itself :type cluster_id: str :param cluster_id: The ID of the cluster. 
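The docstring hunks below document the new scaling parameters. As a usage sketch (not part of the diff; the instance and cluster IDs are hypothetical, and the pattern follows the docs/snippets.py example added above):

    from google.cloud.bigtable import Client

    client = Client(admin=True)
    instance = client.instance("my-instance")  # hypothetical instance ID

    # All three autoscaling parameters must be supplied together; serve_nodes
    # is omitted because manual scaling and autoscaling are mutually exclusive.
    cluster = instance.cluster(
        "my-cluster",  # hypothetical cluster ID
        location_id="us-central1-f",
        min_serve_nodes=1,
        max_serve_nodes=8,
        cpu_utilization_percent=20,
    )
    operation = instance.create(clusters=[cluster])
    operation.result(timeout=60)  # Block until the long-running operation completes.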
@@ -52,7 +54,9 @@ class Cluster(object):
                         https://cloud.google.com/bigtable/docs/locations
 
     :type serve_nodes: int
-    :param serve_nodes: (Optional) The number of nodes in the cluster.
+    :param serve_nodes: (Optional) The number of nodes in the cluster for manual scaling. If any of the
+                        autoscaling configuration fields are specified, then the autoscaling
+                        configuration takes precedence.
 
     :type default_storage_type: int
     :param default_storage_type: (Optional) The type of storage
@@ -85,6 +89,28 @@ class Cluster(object):
           :data:`google.cloud.bigtable.enums.Cluster.State.CREATING`.
           :data:`google.cloud.bigtable.enums.Cluster.State.RESIZING`.
           :data:`google.cloud.bigtable.enums.Cluster.State.DISABLED`.
+
+    :type min_serve_nodes: int
+    :param min_serve_nodes: (Optional) The minimum number of nodes to be set in the cluster for autoscaling.
+                            Must be 1 or greater.
+                            If specified, this configuration takes precedence over
+                            ``serve_nodes``.
+                            If specified, then
+                            ``max_serve_nodes`` and ``cpu_utilization_percent`` must be
+                            specified too.
+
+    :type max_serve_nodes: int
+    :param max_serve_nodes: (Optional) The maximum number of nodes to be set in the cluster for autoscaling.
+                            If specified, this configuration
+                            takes precedence over ``serve_nodes``. If specified, then
+                            ``min_serve_nodes`` and ``cpu_utilization_percent`` must be
+                            specified too.
+
+    :type cpu_utilization_percent: int
+    :param cpu_utilization_percent: (Optional) The CPU utilization target for the cluster's workload for autoscaling.
+                                    If specified, this configuration takes precedence over ``serve_nodes``. If specified, then
+                                    ``min_serve_nodes`` and ``max_serve_nodes`` must be
+                                    specified too.
     """
 
     def __init__(
@@ -96,6 +121,9 @@ def __init__(
         default_storage_type=None,
         kms_key_name=None,
         _state=None,
+        min_serve_nodes=None,
+        max_serve_nodes=None,
+        cpu_utilization_percent=None,
     ):
         self.cluster_id = cluster_id
         self._instance = instance
@@ -104,10 +132,13 @@ def __init__(
         self.default_storage_type = default_storage_type
         self._kms_key_name = kms_key_name
         self._state = _state
+        self.min_serve_nodes = min_serve_nodes
+        self.max_serve_nodes = max_serve_nodes
+        self.cpu_utilization_percent = cpu_utilization_percent
 
     @classmethod
     def from_pb(cls, cluster_pb, instance):
-        """Creates an cluster instance from a protobuf.
+        """Creates a cluster instance from a protobuf.
 
         For example:
 
@@ -159,6 +190,17 @@ def _update_from_pb(self, cluster_pb):
 
         self.location_id = cluster_pb.location.split("/")[-1]
         self.serve_nodes = cluster_pb.serve_nodes
+
+        self.min_serve_nodes = (
+            cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_limits.min_serve_nodes
+        )
+        self.max_serve_nodes = (
+            cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_limits.max_serve_nodes
+        )
+        self.cpu_utilization_percent = (
+            cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_targets.cpu_utilization_percent
+        )
+
         self.default_storage_type = cluster_pb.default_storage_type
         if cluster_pb.encryption_config:
             self._kms_key_name = cluster_pb.encryption_config.kms_key_name
@@ -211,6 +253,42 @@ def kms_key_name(self):
         """str: Customer managed encryption key for the cluster."""
         return self._kms_key_name
 
+    def _validate_scaling_config(self):
+        """Validate auto/manual scaling configuration before creating or updating."""
+
+        if (
+            not self.serve_nodes
+            and not self.min_serve_nodes
+            and not self.max_serve_nodes
+            and not self.cpu_utilization_percent
+        ):
+            raise ValueError(
+                "Must specify either serve_nodes or all of the autoscaling configurations (min_serve_nodes, max_serve_nodes, and cpu_utilization_percent)."
+            )
+        if self.serve_nodes and (
+            self.max_serve_nodes or self.min_serve_nodes or self.cpu_utilization_percent
+        ):
+            raise ValueError(
+                "Cannot specify both serve_nodes and autoscaling configurations (min_serve_nodes, max_serve_nodes, and cpu_utilization_percent)."
+            )
+        if (
+            (
+                self.min_serve_nodes
+                and (not self.max_serve_nodes or not self.cpu_utilization_percent)
+            )
+            or (
+                self.max_serve_nodes
+                and (not self.min_serve_nodes or not self.cpu_utilization_percent)
+            )
+            or (
+                self.cpu_utilization_percent
+                and (not self.min_serve_nodes or not self.max_serve_nodes)
+            )
+        ):
+            raise ValueError(
+                "All of autoscaling configurations must be specified at the same time (min_serve_nodes, max_serve_nodes, and cpu_utilization_percent)."
+            )
+
     def __eq__(self, other):
         if not isinstance(other, self.__class__):
             return NotImplemented
@@ -290,7 +368,15 @@ def create(self):
         :rtype: :class:`~google.api_core.operation.Operation`
         :returns: The long-running operation corresponding to the
                   create operation.
+
+        :raises: :class:`ValueError` if both ``serve_nodes`` and the autoscaling configurations
+                 are set at the same time, if neither ``serve_nodes`` nor the autoscaling configurations are set,
+                 or if the autoscaling configurations are only partially set.
+
         """
+
+        self._validate_scaling_config()
+
         client = self._instance._client
 
         cluster_pb = self._to_pb()
@@ -323,20 +409,73 @@ def update(self):
             before calling :meth:`update`.
 
+        If autoscaling is already enabled, manual scaling will be silently ignored.
+        To disable autoscaling and enable manual scaling, use :meth:`disable_autoscaling` instead.
+
         :rtype: :class:`Operation`
         :returns: The long-running operation corresponding to the
                   update operation.
+
         """
+
         client = self._instance._client
-        # We are passing `None` for third argument location.
-        # Location is set only at the time of creation of a cluster
-        # and can not be changed after cluster has been created.
- return client.instance_admin_client.update_cluster( - request={ - "serve_nodes": self.serve_nodes, - "name": self.name, - "location": None, - } + + update_mask_pb = field_mask_pb2.FieldMask() + + if self.serve_nodes: + update_mask_pb.paths.append("serve_nodes") + + if self.min_serve_nodes: + update_mask_pb.paths.append( + "cluster_config.cluster_autoscaling_config.autoscaling_limits.min_serve_nodes" + ) + if self.max_serve_nodes: + update_mask_pb.paths.append( + "cluster_config.cluster_autoscaling_config.autoscaling_limits.max_serve_nodes" + ) + if self.cpu_utilization_percent: + update_mask_pb.paths.append( + "cluster_config.cluster_autoscaling_config.autoscaling_targets.cpu_utilization_percent" + ) + + cluster_pb = self._to_pb() + cluster_pb.name = self.name + + return client.instance_admin_client.partial_update_cluster( + request={"cluster": cluster_pb, "update_mask": update_mask_pb} + ) + + def disable_autoscaling(self, serve_nodes): + """ + Disable autoscaling by specifying the number of nodes. + + For example: + + .. literalinclude:: snippets.py + :start-after: [START bigtable_api_cluster_disable_autoscaling] + :end-before: [END bigtable_api_cluster_disable_autoscaling] + :dedent: 4 + + :type serve_nodes: int + :param serve_nodes: The number of nodes in the cluster. + """ + + client = self._instance._client + + update_mask_pb = field_mask_pb2.FieldMask() + + self.serve_nodes = serve_nodes + self.min_serve_nodes = 0 + self.max_serve_nodes = 0 + self.cpu_utilization_percent = 0 + + update_mask_pb.paths.append("serve_nodes") + update_mask_pb.paths.append("cluster_config.cluster_autoscaling_config") + cluster_pb = self._to_pb() + cluster_pb.name = self.name + + return client.instance_admin_client.partial_update_cluster( + request={"cluster": cluster_pb, "update_mask": update_mask_pb} ) def delete(self): @@ -375,6 +514,7 @@ def _to_pb(self): location = client.instance_admin_client.common_location_path( client.project, self.location_id ) + cluster_pb = instance.Cluster( location=location, serve_nodes=self.serve_nodes, @@ -384,4 +524,18 @@ def _to_pb(self): cluster_pb.encryption_config = instance.Cluster.EncryptionConfig( kms_key_name=self._kms_key_name, ) + + if self.min_serve_nodes: + cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_limits.min_serve_nodes = ( + self.min_serve_nodes + ) + if self.max_serve_nodes: + cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_limits.max_serve_nodes = ( + self.max_serve_nodes + ) + if self.cpu_utilization_percent: + cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_targets.cpu_utilization_percent = ( + self.cpu_utilization_percent + ) + return cluster_pb diff --git a/google/cloud/bigtable/instance.py b/google/cloud/bigtable/instance.py index 9c22aaa79..e838ec9ad 100644 --- a/google/cloud/bigtable/instance.py +++ b/google/cloud/bigtable/instance.py @@ -228,6 +228,9 @@ def create( serve_nodes=None, default_storage_type=None, clusters=None, + min_serve_nodes=None, + max_serve_nodes=None, + cpu_utilization_percent=None, ): """Create this instance. 
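The rewritten update() in the cluster.py hunks above only appends FieldMask paths for attributes that are actually set, so a partial autoscaling change touches nothing else on the server. A sketch under those semantics (cluster ID and zone are hypothetical; this mirrors the system test added below):

    # A handle constructed with only the new value; update() sends a FieldMask
    # containing just the min_serve_nodes path, leaving the other bounds as-is.
    cluster = instance.cluster(
        "my-cluster",
        location_id="us-central1-f",
        min_serve_nodes=2,
    )
    operation = cluster.update()
    operation.result(timeout=60)  # Block until the partial update completes.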
@@ -303,12 +306,18 @@ def create(
                     location_id=location_id,
                     serve_nodes=serve_nodes,
                     default_storage_type=default_storage_type,
+                    min_serve_nodes=min_serve_nodes,
+                    max_serve_nodes=max_serve_nodes,
+                    cpu_utilization_percent=cpu_utilization_percent,
                 )
             ]
         elif (
             location_id is not None
             or serve_nodes is not None
             or default_storage_type is not None
+            or min_serve_nodes is not None
+            or max_serve_nodes is not None
+            or cpu_utilization_percent is not None
         ):
             raise ValueError(
                 "clusters and one of location_id, serve_nodes, \
                 default_storage_type can not be set simultaneously."
             )
@@ -546,6 +555,9 @@ def cluster(
         serve_nodes=None,
         default_storage_type=None,
         kms_key_name=None,
+        min_serve_nodes=None,
+        max_serve_nodes=None,
+        cpu_utilization_percent=None,
     ):
         """Factory to create a cluster associated with this instance.
 
@@ -605,6 +617,9 @@ def cluster(
             serve_nodes=serve_nodes,
             default_storage_type=default_storage_type,
             kms_key_name=kms_key_name,
+            min_serve_nodes=min_serve_nodes,
+            max_serve_nodes=max_serve_nodes,
+            cpu_utilization_percent=cpu_utilization_percent,
         )
 
     def list_clusters(self):
diff --git a/tests/system/conftest.py b/tests/system/conftest.py
index 6f6cdc2d1..fdf111a53 100644
--- a/tests/system/conftest.py
+++ b/tests/system/conftest.py
@@ -107,6 +107,24 @@ def admin_cluster(admin_instance, admin_cluster_id, location_id, serve_nodes):
     )
 
 
+@pytest.fixture(scope="session")
+def admin_cluster_with_autoscaling(
+    admin_instance,
+    admin_cluster_id,
+    location_id,
+    min_serve_nodes,
+    max_serve_nodes,
+    cpu_utilization_percent,
+):
+    return admin_instance.cluster(
+        admin_cluster_id,
+        location_id=location_id,
+        min_serve_nodes=min_serve_nodes,
+        max_serve_nodes=max_serve_nodes,
+        cpu_utilization_percent=cpu_utilization_percent,
+    )
+
+
 @pytest.fixture(scope="session")
 def admin_instance_populated(admin_instance, admin_cluster, in_emulator):
     # Emulator does not support instance admin operations (create / delete).
@@ -170,3 +188,18 @@ def instances_to_delete():
 
     for instance in instances_to_delete:
         _helpers.retry_429(instance.delete)()
+
+
+@pytest.fixture(scope="session")
+def min_serve_nodes(in_emulator):
+    return 1
+
+
+@pytest.fixture(scope="session")
+def max_serve_nodes(in_emulator):
+    return 8
+
+
+@pytest.fixture(scope="session")
+def cpu_utilization_percent(in_emulator):
+    return 10
diff --git a/tests/system/test_instance_admin.py b/tests/system/test_instance_admin.py
index c2cf21291..36b61d6dd 100644
--- a/tests/system/test_instance_admin.py
+++ b/tests/system/test_instance_admin.py
@@ -591,6 +591,44 @@ def test_cluster_create(
     assert not cluster_2.exists()
 
 
+def test_cluster_create_w_autoscaling(
+    admin_instance_populated, admin_instance_id, skip_on_emulator,
+):
+    alt_cluster_id = f"{admin_instance_id}-c2"
+    alt_location_id = "us-central1-f"
+    min_serve_nodes = 1
+    max_serve_nodes = 8
+    cpu_utilization_percent = 20
+
+    cluster_2 = admin_instance_populated.cluster(
+        alt_cluster_id,
+        location_id=alt_location_id,
+        min_serve_nodes=min_serve_nodes,
+        max_serve_nodes=max_serve_nodes,
+        cpu_utilization_percent=cpu_utilization_percent,
+        default_storage_type=(enums.StorageType.SSD),
+    )
+    operation = cluster_2.create()
+    operation.result(timeout=60)  # Ensure the operation completes.
+
+    # Create a new object instance, reload and make sure it is the same.
+ alt_cluster = admin_instance_populated.cluster(alt_cluster_id) + alt_cluster.reload() + + assert cluster_2 == alt_cluster + assert cluster_2.location_id == alt_cluster.location_id + assert alt_cluster.state == enums.Cluster.State.READY + assert cluster_2.min_serve_nodes == alt_cluster.min_serve_nodes + assert cluster_2.max_serve_nodes == alt_cluster.max_serve_nodes + assert cluster_2.cpu_utilization_percent == alt_cluster.cpu_utilization_percent + assert cluster_2.default_storage_type == alt_cluster.default_storage_type + + # Delete the newly created cluster and confirm + assert cluster_2.exists() + cluster_2.delete() + assert not cluster_2.exists() + + def test_cluster_update( admin_instance_populated, admin_cluster_id, @@ -614,3 +652,103 @@ def test_cluster_update( admin_cluster.serve_nodes = serve_nodes operation = admin_cluster.update() operation.result(timeout=60) # Ensure the operation completes. + + +def test_cluster_update_w_autoscaling( + admin_instance_populated, + admin_cluster_id, + admin_cluster_with_autoscaling, + min_serve_nodes, + max_serve_nodes, + cpu_utilization_percent, + skip_on_emulator, +): + new_min_serve_nodes = min_serve_nodes + 1 + new_max_serve_nodes = max_serve_nodes + 1 + new_cpu_utilization_percent = cpu_utilization_percent + 10 + admin_cluster_with_autoscaling.min_serve_nodes = new_min_serve_nodes + admin_cluster_with_autoscaling.max_serve_nodes = new_max_serve_nodes + admin_cluster_with_autoscaling.cpu_utilization_percent = new_cpu_utilization_percent + + operation = admin_cluster_with_autoscaling.update() + operation.result(timeout=60) # Ensure the operation completes. + + # Create a new cluster instance and reload it. + alt_cluster = admin_instance_populated.cluster(admin_cluster_id) + alt_cluster.reload() + assert alt_cluster.min_serve_nodes == new_min_serve_nodes + assert alt_cluster.max_serve_nodes == new_max_serve_nodes + assert alt_cluster.cpu_utilization_percent == new_cpu_utilization_percent + + # Put the cluster back the way it was for the other test cases. + admin_cluster_with_autoscaling.min_serve_nodes = min_serve_nodes + admin_cluster_with_autoscaling.max_serve_nodes = max_serve_nodes + admin_cluster_with_autoscaling.cpu_utilization_percent = cpu_utilization_percent + operation = admin_cluster_with_autoscaling.update() + operation.result(timeout=60) # Ensure the operation completes. + + +def test_cluster_update_w_autoscaling_partial( + admin_instance_populated, + admin_cluster_id, + admin_cluster_with_autoscaling, + min_serve_nodes, + max_serve_nodes, + cpu_utilization_percent, + skip_on_emulator, +): + new_min_serve_nodes = min_serve_nodes + 1 + + admin_cluster_with_autoscaling.min_serve_nodes = new_min_serve_nodes + + operation = admin_cluster_with_autoscaling.update() + operation.result(timeout=60) # Ensure the operation completes. + + # Create a new cluster instance and reload it. + alt_cluster = admin_instance_populated.cluster(admin_cluster_id) + alt_cluster.reload() + + # assert that only the min_serve_nodes was changed + + assert alt_cluster.min_serve_nodes == new_min_serve_nodes + assert alt_cluster.max_serve_nodes == max_serve_nodes + assert alt_cluster.cpu_utilization_percent == cpu_utilization_percent + + # Put the cluster back the way it was for the other test cases. 
+ admin_cluster_with_autoscaling.min_serve_nodes = min_serve_nodes + admin_cluster_with_autoscaling.max_serve_nodes = max_serve_nodes + admin_cluster_with_autoscaling.cpu_utilization_percent = cpu_utilization_percent + operation = admin_cluster_with_autoscaling.update() + operation.result(timeout=60) # Ensure the operation completes. + + +def test_cluster_disable_autoscaling( + admin_instance_populated, + admin_cluster_id, + admin_cluster_with_autoscaling, + serve_nodes, + min_serve_nodes, + max_serve_nodes, + cpu_utilization_percent, + skip_on_emulator, +): + operation = admin_cluster_with_autoscaling.disable_autoscaling( + serve_nodes=serve_nodes + ) + operation.result(timeout=60) # Ensure the operation completes. + + # Create a new cluster instance and reload it. + alt_cluster = admin_instance_populated.cluster(admin_cluster_id) + alt_cluster.reload() + assert alt_cluster.min_serve_nodes == 0 + assert alt_cluster.max_serve_nodes == 0 + assert alt_cluster.cpu_utilization_percent == 0 + assert alt_cluster.serve_nodes == serve_nodes + + # Put the cluster back the way it was for the other test cases. + admin_cluster_with_autoscaling.min_serve_nodes = min_serve_nodes + admin_cluster_with_autoscaling.max_serve_nodes = max_serve_nodes + admin_cluster_with_autoscaling.cpu_utilization_percent = cpu_utilization_percent + admin_cluster_with_autoscaling.serve_nodes = 0 + operation = admin_cluster_with_autoscaling.update() + operation.result(timeout=60) # Ensure the operation completes. diff --git a/tests/unit/test_cluster.py b/tests/unit/test_cluster.py index 74ca98830..56c0a3cc5 100644 --- a/tests/unit/test_cluster.py +++ b/tests/unit/test_cluster.py @@ -36,6 +36,10 @@ CRYPTO_KEY_ID = "crypto-key-id" KMS_KEY_NAME = f"{LOCATION_PATH}/keyRings/{KEY_RING_ID}/cryptoKeys/{CRYPTO_KEY_ID}" +MIN_SERVE_NODES = 1 +MAX_SERVE_NODES = 8 +CPU_UTILIZATION_PERCENT = 20 + def _make_cluster(*args, **kwargs): from google.cloud.bigtable.cluster import Cluster @@ -62,6 +66,9 @@ def test_cluster_constructor_defaults(): assert cluster.serve_nodes is None assert cluster.default_storage_type is None assert cluster.kms_key_name is None + assert cluster.min_serve_nodes is None + assert cluster.max_serve_nodes is None + assert cluster.cpu_utilization_percent is None def test_cluster_constructor_explicit(): @@ -148,6 +155,9 @@ def test_cluster_from_pb_success(): assert cluster.serve_nodes == SERVE_NODES assert cluster.default_storage_type == storage_type assert cluster.kms_key_name == KMS_KEY_NAME + assert cluster.min_serve_nodes == 0 + assert cluster.max_serve_nodes == 0 + assert cluster.cpu_utilization_percent == 0 def test_cluster_from_pb_w_bad_cluster_name(): @@ -192,6 +202,53 @@ def test_cluster_from_pb_w_project_mistmatch(): Cluster.from_pb(cluster_pb, instance) +def test_cluster_from_pb_w_autoscaling(): + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 + from google.cloud.bigtable.cluster import Cluster + from google.cloud.bigtable import enums + + client = _Client(PROJECT) + instance = _Instance(INSTANCE_ID, client) + + location = LOCATION_PATH + LOCATION_ID + state = enums.Cluster.State.RESIZING + storage_type = enums.StorageType.SSD + + cluster_config = data_v2_pb2.Cluster.ClusterConfig( + cluster_autoscaling_config=data_v2_pb2.Cluster.ClusterAutoscalingConfig( + autoscaling_limits=data_v2_pb2.AutoscalingLimits( + min_serve_nodes=MIN_SERVE_NODES, max_serve_nodes=MAX_SERVE_NODES, + ), + autoscaling_targets=data_v2_pb2.AutoscalingTargets( + cpu_utilization_percent=CPU_UTILIZATION_PERCENT + ), 
+ ), + ) + cluster_pb = data_v2_pb2.Cluster( + name=CLUSTER_NAME, + location=location, + state=state, + cluster_config=cluster_config, + default_storage_type=storage_type, + encryption_config=data_v2_pb2.Cluster.EncryptionConfig( + kms_key_name=KMS_KEY_NAME, + ), + ) + + cluster = Cluster.from_pb(cluster_pb, instance) + assert isinstance(cluster, Cluster) + assert cluster._instance == instance + assert cluster.cluster_id == CLUSTER_ID + assert cluster.location_id == LOCATION_ID + assert cluster.state == state + assert cluster.serve_nodes == 0 + assert cluster.default_storage_type == storage_type + assert cluster.kms_key_name == KMS_KEY_NAME + assert cluster.min_serve_nodes == MIN_SERVE_NODES + assert cluster.max_serve_nodes == MAX_SERVE_NODES + assert cluster.cpu_utilization_percent == CPU_UTILIZATION_PERCENT + + def test_cluster___eq__(): client = _Client(PROJECT) instance = _Instance(INSTANCE_ID, client) @@ -465,9 +522,77 @@ def test_cluster_create_w_cmek(): api.create_cluster.assert_called_once_with(request=expected_request) +def test_cluster_create_w_autoscaling(): + import datetime + from google.longrunning import operations_pb2 + from google.protobuf.any_pb2 import Any + from google.cloud.bigtable_admin_v2.types import ( + bigtable_instance_admin as messages_v2_pb2, + ) + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.bigtable.instance import Instance + from google.cloud.bigtable_admin_v2.types import instance as instance_v2_pb2 + from google.cloud.bigtable.enums import StorageType + + NOW = datetime.datetime.utcnow() + NOW_PB = _datetime_to_pb_timestamp(NOW) + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + STORAGE_TYPE_SSD = StorageType.SSD + LOCATION = LOCATION_PATH + LOCATION_ID + instance = Instance(INSTANCE_ID, client) + cluster = _make_cluster( + CLUSTER_ID, + instance, + location_id=LOCATION_ID, + default_storage_type=STORAGE_TYPE_SSD, + min_serve_nodes=MIN_SERVE_NODES, + max_serve_nodes=MAX_SERVE_NODES, + cpu_utilization_percent=CPU_UTILIZATION_PERCENT, + ) + metadata = messages_v2_pb2.CreateClusterMetadata(request_time=NOW_PB) + type_url = "type.googleapis.com/{}".format( + messages_v2_pb2.CreateClusterMetadata._meta._pb.DESCRIPTOR.full_name + ) + response_pb = operations_pb2.Operation( + name=OP_NAME, + metadata=Any(type_url=type_url, value=metadata._pb.SerializeToString()), + ) + + api = client._instance_admin_client = _make_instance_admin_client() + api.common_location_path.return_value = LOCATION + api.instance_path.return_value = instance.name + api.create_cluster.return_value = response_pb + + cluster.create() + + cluster_config = instance_v2_pb2.Cluster.ClusterConfig( + cluster_autoscaling_config=instance_v2_pb2.Cluster.ClusterAutoscalingConfig( + autoscaling_limits=instance_v2_pb2.AutoscalingLimits( + min_serve_nodes=MIN_SERVE_NODES, max_serve_nodes=MAX_SERVE_NODES, + ), + autoscaling_targets=instance_v2_pb2.AutoscalingTargets( + cpu_utilization_percent=CPU_UTILIZATION_PERCENT + ), + ), + ) + expected_request_cluster = instance_v2_pb2.Cluster( + location=LOCATION, + default_storage_type=cluster.default_storage_type, + cluster_config=cluster_config, + ) + expected_request = { + "parent": instance.name, + "cluster_id": CLUSTER_ID, + "cluster": expected_request_cluster, + } + api.create_cluster.assert_called_once_with(request=expected_request) + + def test_cluster_update(): import datetime from google.longrunning import operations_pb2 + from google.protobuf import 
field_mask_pb2 from google.protobuf.any_pb2 import Any from google.cloud._helpers import _datetime_to_pb_timestamp from google.cloud.bigtable_admin_v2.types import ( @@ -481,6 +606,7 @@ def test_cluster_update(): credentials = _make_credentials() client = _make_client(project=PROJECT, credentials=credentials, admin=True) STORAGE_TYPE_SSD = StorageType.SSD + LOCATION = LOCATION_PATH + LOCATION_ID instance = _Instance(INSTANCE_ID, client) cluster = _make_cluster( CLUSTER_ID, @@ -503,15 +629,392 @@ def test_cluster_update(): "projects/project/instances/instance-id/clusters/cluster-id" ) api.update_cluster.return_value = response_pb + api.common_location_path.return_value = LOCATION + + cluster.update() + cluster_pb = cluster._to_pb() + cluster_pb.name = cluster.name + update_mask_pb = field_mask_pb2.FieldMask(paths=["serve_nodes"]) + + expected_request = { + "cluster": cluster_pb, + "update_mask": update_mask_pb, + } + api.partial_update_cluster.assert_called_once_with(request=expected_request) + + assert ( + cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_limits.min_serve_nodes + == 0 + ) + assert ( + cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_limits.max_serve_nodes + == 0 + ) + assert ( + cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_targets.cpu_utilization_percent + == 0 + ) + + +def test_cluster_update_w_autoscaling(): + import datetime + from google.longrunning import operations_pb2 + from google.protobuf import field_mask_pb2 + from google.protobuf.any_pb2 import Any + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.bigtable_admin_v2.types import ( + bigtable_instance_admin as messages_v2_pb2, + ) + from google.cloud.bigtable.enums import StorageType + + NOW = datetime.datetime.utcnow() + NOW_PB = _datetime_to_pb_timestamp(NOW) + + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + STORAGE_TYPE_SSD = StorageType.SSD + LOCATION = LOCATION_PATH + LOCATION_ID + instance = _Instance(INSTANCE_ID, client) + cluster = _make_cluster( + CLUSTER_ID, + instance, + location_id=LOCATION_ID, + default_storage_type=STORAGE_TYPE_SSD, + min_serve_nodes=2, + ) + metadata = messages_v2_pb2.UpdateClusterMetadata(request_time=NOW_PB) + type_url = "type.googleapis.com/{}".format( + messages_v2_pb2.UpdateClusterMetadata._meta._pb.DESCRIPTOR.full_name + ) + response_pb = operations_pb2.Operation( + name=OP_NAME, + metadata=Any(type_url=type_url, value=metadata._pb.SerializeToString()), + ) + cluster.min_serve_nodes = 2 + + api = client._instance_admin_client = _make_instance_admin_client() + api.cluster_path.return_value = ( + "projects/project/instances/instance-id/clusters/cluster-id" + ) + api.update_cluster.return_value = response_pb + api.common_location_path.return_value = LOCATION cluster.update() + cluster_pb = cluster._to_pb() + cluster_pb.name = cluster.name + update_mask_pb = field_mask_pb2.FieldMask( + paths=[ + "cluster_config.cluster_autoscaling_config.autoscaling_limits.min_serve_nodes" + ] + ) expected_request = { - "name": "projects/project/instances/instance-id/clusters/cluster-id", - "serve_nodes": 5, - "location": None, + "cluster": cluster_pb, + "update_mask": update_mask_pb, } - api.update_cluster.assert_called_once_with(request=expected_request) + api.partial_update_cluster.assert_called_once_with(request=expected_request) + + +def test_cluster_update_w_partial_autoscaling_config(): + import datetime + from google.longrunning import 
operations_pb2 + from google.protobuf import field_mask_pb2 + from google.protobuf.any_pb2 import Any + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.bigtable_admin_v2.types import ( + bigtable_instance_admin as messages_v2_pb2, + ) + from google.cloud.bigtable.enums import StorageType + + NOW = datetime.datetime.utcnow() + NOW_PB = _datetime_to_pb_timestamp(NOW) + + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + STORAGE_TYPE_SSD = StorageType.SSD + LOCATION = LOCATION_PATH + LOCATION_ID + instance = _Instance(INSTANCE_ID, client) + + cluster_config = [ + {"min_serve_nodes": MIN_SERVE_NODES}, + {"max_serve_nodes": MAX_SERVE_NODES}, + {"cpu_utilization_percent": CPU_UTILIZATION_PERCENT}, + { + "min_serve_nodes": MIN_SERVE_NODES, + "cpu_utilization_percent": CPU_UTILIZATION_PERCENT, + }, + {"min_serve_nodes": MIN_SERVE_NODES, "max_serve_nodes": MAX_SERVE_NODES}, + { + "max_serve_nodes": MAX_SERVE_NODES, + "cpu_utilization_percent": CPU_UTILIZATION_PERCENT, + }, + ] + for config in cluster_config: + + cluster = _make_cluster( + CLUSTER_ID, + instance, + location_id=LOCATION_ID, + default_storage_type=STORAGE_TYPE_SSD, + **config, + ) + metadata = messages_v2_pb2.UpdateClusterMetadata(request_time=NOW_PB) + type_url = "type.googleapis.com/{}".format( + messages_v2_pb2.UpdateClusterMetadata._meta._pb.DESCRIPTOR.full_name + ) + response_pb = operations_pb2.Operation( + name=OP_NAME, + metadata=Any(type_url=type_url, value=metadata._pb.SerializeToString()), + ) + api = client._instance_admin_client = _make_instance_admin_client() + api.cluster_path.return_value = ( + "projects/project/instances/instance-id/clusters/cluster-id" + ) + api.update_cluster.return_value = response_pb + api.common_location_path.return_value = LOCATION + + cluster.update() + cluster_pb = cluster._to_pb() + cluster_pb.name = cluster.name + + expected_paths = [] + for key, _ in config.items(): + if key == "min_serve_nodes": + expected_paths.append( + "cluster_config.cluster_autoscaling_config.autoscaling_limits.min_serve_nodes" + ) + if key == "max_serve_nodes": + expected_paths.append( + "cluster_config.cluster_autoscaling_config.autoscaling_limits.max_serve_nodes" + ) + if key == "cpu_utilization_percent": + expected_paths.append( + "cluster_config.cluster_autoscaling_config.autoscaling_targets.cpu_utilization_percent" + ) + update_mask_pb = field_mask_pb2.FieldMask(paths=expected_paths) + + expected_request = { + "cluster": cluster_pb, + "update_mask": update_mask_pb, + } + api.partial_update_cluster.assert_called_once_with(request=expected_request) + + +def test_cluster_update_w_both_manual_and_autoscaling(): + import datetime + from google.longrunning import operations_pb2 + from google.protobuf import field_mask_pb2 + from google.protobuf.any_pb2 import Any + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.bigtable_admin_v2.types import ( + bigtable_instance_admin as messages_v2_pb2, + ) + from google.cloud.bigtable.enums import StorageType + + NOW = datetime.datetime.utcnow() + NOW_PB = _datetime_to_pb_timestamp(NOW) + + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + STORAGE_TYPE_SSD = StorageType.SSD + LOCATION = LOCATION_PATH + LOCATION_ID + instance = _Instance(INSTANCE_ID, client) + cluster = _make_cluster( + CLUSTER_ID, + instance, + location_id=LOCATION_ID, + default_storage_type=STORAGE_TYPE_SSD, + ) + 
cluster.max_serve_nodes = 2 + cluster.serve_nodes = SERVE_NODES + metadata = messages_v2_pb2.UpdateClusterMetadata(request_time=NOW_PB) + type_url = "type.googleapis.com/{}".format( + messages_v2_pb2.UpdateClusterMetadata._meta._pb.DESCRIPTOR.full_name + ) + response_pb = operations_pb2.Operation( + name=OP_NAME, + metadata=Any(type_url=type_url, value=metadata._pb.SerializeToString()), + ) + api = client._instance_admin_client = _make_instance_admin_client() + api.cluster_path.return_value = ( + "projects/project/instances/instance-id/clusters/cluster-id" + ) + api.update_cluster.return_value = response_pb + api.common_location_path.return_value = LOCATION + + cluster.update() + cluster_pb = cluster._to_pb() + cluster_pb.name = cluster.name + + expected_paths = [ + "serve_nodes", + "cluster_config.cluster_autoscaling_config.autoscaling_limits.max_serve_nodes", + ] + + update_mask_pb = field_mask_pb2.FieldMask(paths=expected_paths) + + expected_request = { + "cluster": cluster_pb, + "update_mask": update_mask_pb, + } + api.partial_update_cluster.assert_called_once_with(request=expected_request) + + +def test_cluster_disable_autoscaling(): + import datetime + from google.longrunning import operations_pb2 + from google.protobuf import field_mask_pb2 + from google.protobuf.any_pb2 import Any + from google.cloud.bigtable_admin_v2.types import ( + bigtable_instance_admin as messages_v2_pb2, + ) + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.bigtable.instance import Instance + from google.cloud.bigtable.enums import StorageType + + NOW = datetime.datetime.utcnow() + NOW_PB = _datetime_to_pb_timestamp(NOW) + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + STORAGE_TYPE_SSD = StorageType.SSD + LOCATION = LOCATION_PATH + LOCATION_ID + instance = Instance(INSTANCE_ID, client) + cluster = _make_cluster( + CLUSTER_ID, + instance, + location_id=LOCATION_ID, + default_storage_type=STORAGE_TYPE_SSD, + min_serve_nodes=MIN_SERVE_NODES, + max_serve_nodes=MAX_SERVE_NODES, + cpu_utilization_percent=CPU_UTILIZATION_PERCENT, + ) + metadata = messages_v2_pb2.CreateClusterMetadata(request_time=NOW_PB) + type_url = "type.googleapis.com/{}".format( + messages_v2_pb2.CreateClusterMetadata._meta._pb.DESCRIPTOR.full_name + ) + response_pb = operations_pb2.Operation( + name=OP_NAME, + metadata=Any(type_url=type_url, value=metadata._pb.SerializeToString()), + ) + + api = client._instance_admin_client = _make_instance_admin_client() + api.common_location_path.return_value = LOCATION + api.instance_path.return_value = instance.name + api.create_cluster.return_value = response_pb + api.cluster_path.return_value = CLUSTER_NAME + + cluster.create() + + cluster.disable_autoscaling(serve_nodes=SERVE_NODES) + + cluster_pb = cluster._to_pb() + cluster_pb.name = cluster.name + update_mask_pb = field_mask_pb2.FieldMask( + paths=["serve_nodes", "cluster_config.cluster_autoscaling_config"] + ) + + expected_request = { + "cluster": cluster_pb, + "update_mask": update_mask_pb, + } + api.partial_update_cluster.assert_called_once_with(request=expected_request) + + assert cluster.min_serve_nodes == 0 + assert cluster.max_serve_nodes == 0 + assert cluster.cpu_utilization_percent == 0 + + +def test_create_cluster_with_both_manual_and_autoscaling(): + + from google.cloud.bigtable.instance import Instance + from google.cloud.bigtable.enums import StorageType + + credentials = _make_credentials() + client = _make_client(project=PROJECT, 
credentials=credentials, admin=True) + STORAGE_TYPE_SSD = StorageType.SSD + instance = Instance(INSTANCE_ID, client) + cluster = _make_cluster( + CLUSTER_ID, + instance, + location_id=LOCATION_ID, + serve_nodes=SERVE_NODES, + default_storage_type=STORAGE_TYPE_SSD, + min_serve_nodes=MIN_SERVE_NODES, + max_serve_nodes=MAX_SERVE_NODES, + cpu_utilization_percent=CPU_UTILIZATION_PERCENT, + ) + + with pytest.raises(ValueError) as excinfo: + cluster.create() + assert ( + str(excinfo.value) + == "Cannot specify both serve_nodes and autoscaling configurations (min_serve_nodes, max_serve_nodes, and cpu_utilization_percent)." + ) + + +def test_create_cluster_with_partial_autoscaling_config(): + + from google.cloud.bigtable.instance import Instance + from google.cloud.bigtable.enums import StorageType + + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + STORAGE_TYPE_SSD = StorageType.SSD + instance = Instance(INSTANCE_ID, client) + + cluster_config = [ + {"min_serve_nodes": MIN_SERVE_NODES}, + {"max_serve_nodes": MAX_SERVE_NODES}, + {"cpu_utilization_percent": CPU_UTILIZATION_PERCENT}, + { + "min_serve_nodes": MIN_SERVE_NODES, + "cpu_utilization_percent": CPU_UTILIZATION_PERCENT, + }, + {"min_serve_nodes": MIN_SERVE_NODES, "max_serve_nodes": MAX_SERVE_NODES}, + { + "max_serve_nodes": MAX_SERVE_NODES, + "cpu_utilization_percent": CPU_UTILIZATION_PERCENT, + }, + ] + for config in cluster_config: + cluster = _make_cluster( + CLUSTER_ID, + instance, + location_id=LOCATION_ID, + default_storage_type=STORAGE_TYPE_SSD, + **config, + ) + + with pytest.raises(ValueError) as excinfo: + cluster.create() + assert ( + str(excinfo.value) + == "All of autoscaling configurations must be specified at the same time (min_serve_nodes, max_serve_nodes, and cpu_utilization_percent)." + ) + + +def test_create_cluster_with_no_scaling_config(): + + from google.cloud.bigtable.instance import Instance + from google.cloud.bigtable.enums import StorageType + + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + STORAGE_TYPE_SSD = StorageType.SSD + instance = Instance(INSTANCE_ID, client) + cluster = _make_cluster( + CLUSTER_ID, + instance, + location_id=LOCATION_ID, + default_storage_type=STORAGE_TYPE_SSD, + ) + + with pytest.raises(ValueError) as excinfo: + cluster.create() + assert ( + str(excinfo.value) + == "Must specify either serve_nodes or all of the autoscaling configurations (min_serve_nodes, max_serve_nodes, and cpu_utilization_percent)." 
+ ) def test_cluster_delete(): From 9fa039d58d6f30b43f1b15330a05034f23f3be29 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Mar 2022 12:56:16 -0500 Subject: [PATCH 6/9] chore: Adding support for pytest-xdist and pytest-parallel (#528) Source-Link: https://github.com/googleapis/synthtool/commit/82f5cb283efffe96e1b6cd634738e0e7de2cd90a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:5d8da01438ece4021d135433f2cf3227aa39ef0eaccc941d62aa35e6902832ae Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- samples/beam/noxfile.py | 80 ++++++++++++++----------- samples/hello/noxfile.py | 80 ++++++++++++++----------- samples/hello_happybase/noxfile.py | 80 ++++++++++++++----------- samples/instanceadmin/noxfile.py | 80 ++++++++++++++----------- samples/metricscaler/noxfile.py | 80 ++++++++++++++----------- samples/quickstart/noxfile.py | 80 ++++++++++++++----------- samples/quickstart_happybase/noxfile.py | 80 ++++++++++++++----------- samples/snippets/filters/noxfile.py | 80 ++++++++++++++----------- samples/snippets/reads/noxfile.py | 80 ++++++++++++++----------- samples/snippets/writes/noxfile.py | 80 ++++++++++++++----------- samples/tableadmin/noxfile.py | 80 ++++++++++++++----------- 12 files changed, 507 insertions(+), 375 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 480226ac0..7e08e05a3 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:6162c384d685c5fe22521d3f37f6fc732bf99a085f6d47b677dbcae97fc21392 + digest: sha256:5d8da01438ece4021d135433f2cf3227aa39ef0eaccc941d62aa35e6902832ae diff --git a/samples/beam/noxfile.py b/samples/beam/noxfile.py index 5b10d2811..d9d4d1469 100644 --- a/samples/beam/noxfile.py +++ b/samples/beam/noxfile.py @@ -186,42 +186,54 @@ def _session_tests( # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") test_list.extend(glob.glob("tests")) + if len(test_list) == 0: print("No tests found, skipping directory.") - else: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + elif "pytest-xdist" in packages: + concurrent_args.extend(['-n', 'auto']) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/samples/hello/noxfile.py b/samples/hello/noxfile.py index 20cdfc620..4c808af73 100644 --- a/samples/hello/noxfile.py +++ b/samples/hello/noxfile.py @@ -188,42 +188,54 @@ def _session_tests( # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") test_list.extend(glob.glob("tests")) + if len(test_list) == 0: print("No tests found, skipping directory.") - else: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + elif "pytest-xdist" in packages: + concurrent_args.extend(['-n', 'auto']) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/samples/hello_happybase/noxfile.py b/samples/hello_happybase/noxfile.py index 20cdfc620..4c808af73 100644 --- a/samples/hello_happybase/noxfile.py +++ b/samples/hello_happybase/noxfile.py @@ -188,42 +188,54 @@ def _session_tests( # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") test_list.extend(glob.glob("tests")) + if len(test_list) == 0: print("No tests found, skipping directory.") - else: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + elif "pytest-xdist" in packages: + concurrent_args.extend(['-n', 'auto']) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/samples/instanceadmin/noxfile.py b/samples/instanceadmin/noxfile.py index 20cdfc620..4c808af73 100644 --- a/samples/instanceadmin/noxfile.py +++ b/samples/instanceadmin/noxfile.py @@ -188,42 +188,54 @@ def _session_tests( # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") test_list.extend(glob.glob("tests")) + if len(test_list) == 0: print("No tests found, skipping directory.") - else: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + elif "pytest-xdist" in packages: + concurrent_args.extend(['-n', 'auto']) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/samples/metricscaler/noxfile.py b/samples/metricscaler/noxfile.py index 20cdfc620..4c808af73 100644 --- a/samples/metricscaler/noxfile.py +++ b/samples/metricscaler/noxfile.py @@ -188,42 +188,54 @@ def _session_tests( # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") test_list.extend(glob.glob("tests")) + if len(test_list) == 0: print("No tests found, skipping directory.") - else: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + elif "pytest-xdist" in packages: + concurrent_args.extend(['-n', 'auto']) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/samples/quickstart/noxfile.py b/samples/quickstart/noxfile.py index 20cdfc620..4c808af73 100644 --- a/samples/quickstart/noxfile.py +++ b/samples/quickstart/noxfile.py @@ -188,42 +188,54 @@ def _session_tests( # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") test_list.extend(glob.glob("tests")) + if len(test_list) == 0: print("No tests found, skipping directory.") - else: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + elif "pytest-xdist" in packages: + concurrent_args.extend(['-n', 'auto']) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/samples/quickstart_happybase/noxfile.py b/samples/quickstart_happybase/noxfile.py index 20cdfc620..4c808af73 100644 --- a/samples/quickstart_happybase/noxfile.py +++ b/samples/quickstart_happybase/noxfile.py @@ -188,42 +188,54 @@ def _session_tests( # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") test_list.extend(glob.glob("tests")) + if len(test_list) == 0: print("No tests found, skipping directory.") - else: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + elif "pytest-xdist" in packages: + concurrent_args.extend(['-n', 'auto']) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/samples/snippets/filters/noxfile.py b/samples/snippets/filters/noxfile.py index 20cdfc620..4c808af73 100644 --- a/samples/snippets/filters/noxfile.py +++ b/samples/snippets/filters/noxfile.py @@ -188,42 +188,54 @@ def _session_tests( # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") test_list.extend(glob.glob("tests")) + if len(test_list) == 0: print("No tests found, skipping directory.") - else: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + elif "pytest-xdist" in packages: + concurrent_args.extend(['-n', 'auto']) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/samples/snippets/reads/noxfile.py b/samples/snippets/reads/noxfile.py index 20cdfc620..4c808af73 100644 --- a/samples/snippets/reads/noxfile.py +++ b/samples/snippets/reads/noxfile.py @@ -188,42 +188,54 @@ def _session_tests( # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") test_list.extend(glob.glob("tests")) + if len(test_list) == 0: print("No tests found, skipping directory.") - else: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + elif "pytest-xdist" in packages: + concurrent_args.extend(['-n', 'auto']) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/samples/snippets/writes/noxfile.py b/samples/snippets/writes/noxfile.py index 20cdfc620..4c808af73 100644 --- a/samples/snippets/writes/noxfile.py +++ b/samples/snippets/writes/noxfile.py @@ -188,42 +188,54 @@ def _session_tests( # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") test_list.extend(glob.glob("tests")) + if len(test_list) == 0: print("No tests found, skipping directory.") - else: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + elif "pytest-xdist" in packages: + concurrent_args.extend(['-n', 'auto']) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/samples/tableadmin/noxfile.py b/samples/tableadmin/noxfile.py index 20cdfc620..4c808af73 100644 --- a/samples/tableadmin/noxfile.py +++ b/samples/tableadmin/noxfile.py @@ -188,42 +188,54 @@ def _session_tests( # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") test_list.extend(glob.glob("tests")) + if len(test_list) == 0: print("No tests found, skipping directory.") - else: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + elif "pytest-xdist" in packages: + concurrent_args.extend(['-n', 'auto']) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) From d07122a9ea4f04d30fe2e7b6929aa3b72c5e3621 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 5 Mar 2022 00:24:23 +0000 Subject: [PATCH 7/9] chore(deps): update actions/download-artifact action to v3 (#529) Source-Link: https://github.com/googleapis/synthtool/commit/38e11ad1104dcc1e63b52691ddf2fe4015d06955 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 --- .github/.OwlBot.lock.yaml | 2 +- .github/workflows/unittest.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 7e08e05a3..44c78f7cc 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5d8da01438ece4021d135433f2cf3227aa39ef0eaccc941d62aa35e6902832ae + digest: sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index e87fe5b7b..e5be6edbd 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -26,7 +26,7 @@ jobs: run: | nox -s unit-${{ matrix.python }} - name: Upload coverage results - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage-artifacts path: .coverage-${{ matrix.python }} @@ -47,7 +47,7 @@ jobs: python -m pip install --upgrade setuptools pip wheel python -m pip install coverage - name: Download coverage results - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v3 with: name: coverage-artifacts path: .coverage-results/ From d38d699ce2db3f375eae0e3b5d665bb56314a9cf Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sun, 6 Mar 2022 01:04:30 +0100 Subject: [PATCH 8/9] chore(deps): update dependency apache-beam to v2.37.0 (#530) --- samples/beam/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/beam/requirements.txt b/samples/beam/requirements.txt index 2bd731ab7..8296b1372 100644 --- a/samples/beam/requirements.txt +++ b/samples/beam/requirements.txt @@ -1,3 +1,3 @@ -apache-beam==2.36.0 +apache-beam==2.37.0 google-cloud-bigtable==2.6.0 google-cloud-core==2.2.2 From e640783421ddda8108112e63ce8cf69fabbadec9 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 7 Mar 2022 11:24:51 -0500 Subject: [PATCH 9/9] chore(main): release 2.7.0 (#527) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 13 +++++++++++++ setup.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 62c060c69..7226b2090 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,19 @@ [1]: https://pypi.org/project/google-cloud-bigtable/#history +## [2.7.0](https://github.com/googleapis/python-bigtable/compare/v2.6.0...v2.7.0) (2022-03-06) + + +### Features + +* Add support for autoscaling ([#509](https://github.com/googleapis/python-bigtable/issues/509)) ([8f4e197](https://github.com/googleapis/python-bigtable/commit/8f4e197148644ded934190814ff44fa132a2dda6)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#526](https://github.com/googleapis/python-bigtable/issues/526)) ([a8a92ee](https://github.com/googleapis/python-bigtable/commit/a8a92ee1b6bd284055fee3e1029a9a6aacbc5f1c)) +* **deps:** require proto-plus>=1.15.0 ([a8a92ee](https://github.com/googleapis/python-bigtable/commit/a8a92ee1b6bd284055fee3e1029a9a6aacbc5f1c)) + ## [2.6.0](https://github.com/googleapis/python-bigtable/compare/v2.5.2...v2.6.0) (2022-02-26) diff --git a/setup.py b/setup.py index a3f51e450..78bfefcec 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-bigtable" 
description = "Google Cloud Bigtable API client library" -version = "2.6.0" +version = "2.7.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta'