diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index b8edda51..98994f47 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6 + digest: sha256:2d816f26f728ac8b24248741e7d4c461c09764ef9f7be3684d557c9632e46dbd +# created: 2023-06-28T17:03:33.371210701Z diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index 44d0079c..0caac568 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -38,3 +38,12 @@ env_vars: { key: "SECRET_MANAGER_KEYS" value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } + +# Store the packages we uploaded to PyPI. That way, we have a record of exactly +# what we published, which we can use to generate SBOMs and attestations. +action { + define_artifacts { + regex: "github/python-scheduler/**/*.tar.gz" + strip_prefix: "github/python-scheduler" + } +} diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 66a2172a..c7929db6 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -113,28 +113,26 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==39.0.1 \ - --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ - --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ - --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \ - --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ - --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ - --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ - --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ - --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \ - --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ - --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ - --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ - --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \ - --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ - --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ - --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ - --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ - --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ - --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ - --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ - --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ - --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 +cryptography==41.0.0 \ + --hash=sha256:0ddaee209d1cf1f180f1efa338a68c4621154de0afaef92b89486f5f96047c55 \ + --hash=sha256:14754bcdae909d66ff24b7b5f166d69340ccc6cb15731670435efd5719294895 \ + --hash=sha256:344c6de9f8bda3c425b3a41b319522ba3208551b70c2ae00099c205f0d9fd3be \ + 
--hash=sha256:34d405ea69a8b34566ba3dfb0521379b210ea5d560fafedf9f800a9a94a41928 \ + --hash=sha256:3680248309d340fda9611498a5319b0193a8dbdb73586a1acf8109d06f25b92d \ + --hash=sha256:3c5ef25d060c80d6d9f7f9892e1d41bb1c79b78ce74805b8cb4aa373cb7d5ec8 \ + --hash=sha256:4ab14d567f7bbe7f1cdff1c53d5324ed4d3fc8bd17c481b395db224fb405c237 \ + --hash=sha256:5c1f7293c31ebc72163a9a0df246f890d65f66b4a40d9ec80081969ba8c78cc9 \ + --hash=sha256:6b71f64beeea341c9b4f963b48ee3b62d62d57ba93eb120e1196b31dc1025e78 \ + --hash=sha256:7d92f0248d38faa411d17f4107fc0bce0c42cae0b0ba5415505df72d751bf62d \ + --hash=sha256:8362565b3835ceacf4dc8f3b56471a2289cf51ac80946f9087e66dc283a810e0 \ + --hash=sha256:84a165379cb9d411d58ed739e4af3396e544eac190805a54ba2e0322feb55c46 \ + --hash=sha256:88ff107f211ea696455ea8d911389f6d2b276aabf3231bf72c8853d22db755c5 \ + --hash=sha256:9f65e842cb02550fac96536edb1d17f24c0a338fd84eaf582be25926e993dde4 \ + --hash=sha256:a4fc68d1c5b951cfb72dfd54702afdbbf0fb7acdc9b7dc4301bbf2225a27714d \ + --hash=sha256:b7f2f5c525a642cecad24ee8670443ba27ac1fab81bba4cc24c7b6b41f2d0c75 \ + --hash=sha256:b846d59a8d5a9ba87e2c3d757ca019fa576793e8758174d3868aecb88d6fc8eb \ + --hash=sha256:bf8fc66012ca857d62f6a347007e166ed59c0bc150cefa49f28376ebe7d992a2 \ + --hash=sha256:f5d0bf9b252f30a31664b6f64432b4730bb7038339bd18b1fafe129cfc2be9be # via # gcp-releasetool # secretstorage @@ -419,9 +417,9 @@ readme-renderer==37.3 \ --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 # via twine -requests==2.28.1 \ - --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ - --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 +requests==2.31.0 \ + --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ + --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 # via # gcp-releasetool # google-api-core diff --git a/.release-please-manifest.json b/.release-please-manifest.json index a9b8e02a..f58b956e 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.11.0" + ".": "2.11.1" } diff --git a/CHANGELOG.md b/CHANGELOG.md index bcad480a..871b73f2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-scheduler/#history +## [2.11.1](https://github.com/googleapis/python-scheduler/compare/v2.11.0...v2.11.1) (2023-07-04) + + +### Bug Fixes + +* Add async context manager return types ([#340](https://github.com/googleapis/python-scheduler/issues/340)) ([7e65978](https://github.com/googleapis/python-scheduler/commit/7e65978db72b38d1fed273562df86dd058cfd271)) + ## [2.11.0](https://github.com/googleapis/python-scheduler/compare/v2.10.0...v2.11.0) (2023-03-23) diff --git a/google/cloud/scheduler/gapic_version.py b/google/cloud/scheduler/gapic_version.py index bb74f811..9ac1d4a8 100644 --- a/google/cloud/scheduler/gapic_version.py +++ b/google/cloud/scheduler/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.11.0" # {x-release-please-version} +__version__ = "2.11.1" # {x-release-please-version} diff --git a/google/cloud/scheduler_v1/gapic_version.py b/google/cloud/scheduler_v1/gapic_version.py index bb74f811..9ac1d4a8 100644 --- a/google/cloud/scheduler_v1/gapic_version.py +++ b/google/cloud/scheduler_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.11.0" # {x-release-please-version} +__version__ = "2.11.1" # {x-release-please-version} diff --git a/google/cloud/scheduler_v1/services/cloud_scheduler/async_client.py b/google/cloud/scheduler_v1/services/cloud_scheduler/async_client.py index ac38f9b5..cff7ba3f 100644 --- a/google/cloud/scheduler_v1/services/cloud_scheduler/async_client.py +++ b/google/cloud/scheduler_v1/services/cloud_scheduler/async_client.py @@ -1252,7 +1252,7 @@ async def list_locations( # Done; return the response. return response - async def __aenter__(self): + async def __aenter__(self) -> "CloudSchedulerAsyncClient": return self async def __aexit__(self, exc_type, exc, tb): diff --git a/google/cloud/scheduler_v1beta1/gapic_version.py b/google/cloud/scheduler_v1beta1/gapic_version.py index bb74f811..9ac1d4a8 100644 --- a/google/cloud/scheduler_v1beta1/gapic_version.py +++ b/google/cloud/scheduler_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.11.0" # {x-release-please-version} +__version__ = "2.11.1" # {x-release-please-version} diff --git a/google/cloud/scheduler_v1beta1/services/cloud_scheduler/async_client.py b/google/cloud/scheduler_v1beta1/services/cloud_scheduler/async_client.py index 49700fd5..1355e234 100644 --- a/google/cloud/scheduler_v1beta1/services/cloud_scheduler/async_client.py +++ b/google/cloud/scheduler_v1beta1/services/cloud_scheduler/async_client.py @@ -1273,7 +1273,7 @@ async def list_locations( # Done; return the response. 
return response - async def __aenter__(self): + async def __aenter__(self) -> "CloudSchedulerAsyncClient": return self async def __aexit__(self, exc_type, exc, tb): diff --git a/noxfile.py b/noxfile.py index 95e58c52..7d411fea 100644 --- a/noxfile.py +++ b/noxfile.py @@ -305,10 +305,9 @@ def docfx(session): session.install("-e", ".") session.install( - "sphinx==4.0.1", + "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", - "gcp-sphinx-docfx-yaml", ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) @@ -380,6 +379,7 @@ def prerelease_deps(session): "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", + "google-auth", "proto-plus", "google-cloud-testutils", # dependencies of google-cloud-testutils" @@ -392,7 +392,6 @@ def prerelease_deps(session): # Remaining dependencies other_deps = [ "requests", - "google-auth", ] session.install(*other_deps) diff --git a/samples/generated_samples/snippet_metadata_google.cloud.scheduler.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.scheduler.v1.json index 70172b6b..cecfa453 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.scheduler.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.scheduler.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-scheduler", - "version": "2.11.0" + "version": "2.11.1" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.cloud.scheduler.v1beta1.json b/samples/generated_samples/snippet_metadata_google.cloud.scheduler.v1beta1.json index 921376b4..1ba54876 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.scheduler.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.scheduler.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-scheduler", - "version": "2.11.0" + "version": "2.11.1" }, "snippets": [ { diff --git a/samples/snippets/README.md b/samples/snippets/README.md index e69de29b..643674ca 100644 --- a/samples/snippets/README.md +++ b/samples/snippets/README.md @@ -0,0 +1,4 @@ +Samples migrated +================ + +The samples have moved to a new location: https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/scheduler/snippets diff --git a/samples/snippets/app.yaml b/samples/snippets/app.yaml deleted file mode 100644 index 8afa3473..00000000 --- a/samples/snippets/app.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# [START cloud_scheduler_python_yaml] -runtime: python37 -service: my-service -# [END cloud_scheduler_python_yaml] diff --git a/samples/snippets/create_job.py b/samples/snippets/create_job.py deleted file mode 100644 index 01638283..00000000 --- a/samples/snippets/create_job.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def create_scheduler_job(project_id, location_id, service_id): - """Create a job with an App Engine target via the Cloud Scheduler API""" - # [START cloud_scheduler_create_job] - from google.cloud import scheduler - - # Create a client. - client = scheduler.CloudSchedulerClient() - - # TODO(developer): Uncomment and set the following variables - # project_id = 'PROJECT_ID' - # location_id = 'LOCATION_ID' - # service_id = 'my-service' - - # Construct the fully qualified location path. - parent = f"projects/{project_id}/locations/{location_id}" - - # Construct the request body. - job = { - "app_engine_http_target": { - "app_engine_routing": {"service": service_id}, - "relative_uri": "/log_payload", - "http_method": 1, - "body": "Hello World".encode(), - }, - "schedule": "* * * * *", - "time_zone": "America/Los_Angeles", - } - - # Use the client to send the job creation request. - response = client.create_job(request={"parent": parent, "job": job}) - - print("Created job: {}".format(response.name)) - # [END cloud_scheduler_create_job] - return response - - -def delete_scheduler_job(project_id, location_id, job_id): - """Delete a job via the Cloud Scheduler API""" - # [START cloud_scheduler_delete_job] - from google.api_core.exceptions import GoogleAPICallError - from google.cloud import scheduler - - # Create a client. - client = scheduler.CloudSchedulerClient() - - # TODO(developer): Uncomment and set the following variables - # project_id = 'PROJECT_ID' - # location_id = 'LOCATION_ID' - # job_id = 'JOB_ID' - - # Construct the fully qualified job path. - job = f"projects/{project_id}/locations/{location_id}/jobs/{job_id}" - - # Use the client to send the job deletion request. - try: - client.delete_job(name=job) - print("Job deleted.") - except GoogleAPICallError as e: - print("Error: %s" % e) - # [END cloud_scheduler_delete_job] diff --git a/samples/snippets/create_job_test.py b/samples/snippets/create_job_test.py deleted file mode 100644 index da61f0ef..00000000 --- a/samples/snippets/create_job_test.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2019 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os - -import create_job - -TEST_PROJECT_ID = os.getenv("GOOGLE_CLOUD_PROJECT") -TEST_LOCATION = os.getenv("LOCATION_ID", "us-central1") - - -def test_create_job(capsys): - create_result = create_job.create_scheduler_job( - TEST_PROJECT_ID, TEST_LOCATION, "my-service" - ) - out, _ = capsys.readouterr() - assert "Created job:" in out - - job_name = create_result.name.split("/")[-1] - create_job.delete_scheduler_job(TEST_PROJECT_ID, TEST_LOCATION, job_name) - - out, _ = capsys.readouterr() - assert "Job deleted." in out diff --git a/samples/snippets/main.py b/samples/snippets/main.py deleted file mode 100644 index 4dad6afe..00000000 --- a/samples/snippets/main.py +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""App Engine app to serve as an endpoint for Cloud Scheduler samples.""" - -# [START cloud_scheduler_app] -from flask import Flask, request - -app = Flask(__name__) - - -# Define relative URI for job endpoint -@app.route("/log_payload", methods=["POST"]) -def example_task_handler(): - """Log the job payload.""" - payload = request.get_data(as_text=True) or "(empty payload)" - print("Received job with payload: {}".format(payload)) - return "Printed job payload: {}".format(payload) - - -# [END cloud_scheduler_app] - - -@app.route("/") -def hello(): - """Basic index to verify app is serving.""" - return "Hello World!" - - -if __name__ == "__main__": - # This is used when running locally. Gunicorn is used to run the - # application on Google App Engine. See entrypoint in app.yaml. - app.run(host="127.0.0.1", port=8080, debug=True) diff --git a/samples/snippets/main_test.py b/samples/snippets/main_test.py deleted file mode 100644 index 75371c4b..00000000 --- a/samples/snippets/main_test.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2019 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import pytest - - -@pytest.fixture -def app(): - import main - - main.app.testing = True - return main.app.test_client() - - -def test_index(app): - r = app.get("/") - assert r.status_code == 200 - - -def test_log_payload(capsys, app): - payload = "test_payload" - - r = app.post("/log_payload", data=payload) - assert r.status_code == 200 - - out, _ = capsys.readouterr() - assert payload in out - - -def test_empty_payload(capsys, app): - r = app.post("/log_payload") - assert r.status_code == 200 - - out, _ = capsys.readouterr() - assert "empty payload" in out diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py deleted file mode 100644 index de104dbc..00000000 --- a/samples/snippets/noxfile.py +++ /dev/null @@ -1,292 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function - -import glob -import os -from pathlib import Path -import sys -from typing import Callable, Dict, Optional - -import nox - -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING -# DO NOT EDIT THIS FILE EVER! -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING - -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" - -# Copy `noxfile_config.py` to your directory and modify it instead. - -# `TEST_CONFIG` dict is a configuration hook that allows users to -# modify the test configurations. The values here should be in sync -# with `noxfile_config.py`. Users will copy `noxfile_config.py` into -# their directory and modify it. - -TEST_CONFIG = { - # You can opt out from the test for specific Python versions. - "ignored_versions": [], - # Old samples are opted out of enforcing Python type hints - # All new samples should feature them - "enforce_type_hints": False, - # An envvar key for determining the project id to use. Change it - # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a - # build specific Cloud project. You can also use your own string - # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", - # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - # If you need to use a specific version of pip, - # change pip_version_override to the string representation - # of the version number, for example, "20.2.4" - "pip_version_override": None, - # A dictionary you want to inject into your test. Don't put any - # secrets here. These values will override predefined values. - "envs": {}, -} - - -try: - # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") - from noxfile_config import TEST_CONFIG_OVERRIDE -except ImportError as e: - print("No user noxfile_config found: detail: {}".format(e)) - TEST_CONFIG_OVERRIDE = {} - -# Update the TEST_CONFIG with the user supplied values. 
-TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) - - -def get_pytest_env_vars() -> Dict[str, str]: - """Returns a dict for pytest invocation.""" - ret = {} - - # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] - # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] - - # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) - return ret - - -# DO NOT EDIT - automatically generated. -# All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] - -# Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] - -TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) - -INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( - "True", - "true", -) - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - -# -# Style Checks -# - - -# Linting with flake8. -# -# We ignore the following rules: -# E203: whitespace before ‘:’ -# E266: too many leading ‘#’ for block comment -# E501: line too long -# I202: Additional newline in a section of imports -# -# We also need to specify the rules which are ignored by default: -# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] -FLAKE8_COMMON_ARGS = [ - "--show-source", - "--builtin=gettext", - "--max-complexity=20", - "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", - "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", - "--max-line-length=88", -] - - -@nox.session -def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG["enforce_type_hints"]: - session.install("flake8") - else: - session.install("flake8", "flake8-annotations") - - args = FLAKE8_COMMON_ARGS + [ - ".", - ] - session.run("flake8", *args) - - -# -# Black -# - - -@nox.session -def blacken(session: nox.sessions.Session) -> None: - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - session.run("black", *python_files) - - -# -# format = isort + black -# - - -@nox.session -def format(session: nox.sessions.Session) -> None: - """ - Run isort to sort imports. Then run black - to format code to uniform standard. - """ - session.install(BLACK_VERSION, ISORT_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - # Use the --fss option to sort imports using strict alphabetical order. 
- # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run("isort", "--fss", *python_files) - session.run("black", *python_files) - - -# -# Sample Tests -# - - -PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] - - -def _session_tests( - session: nox.sessions.Session, post_install: Callable = None -) -> None: - # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( - "**/test_*.py", recursive=True - ) - test_list.extend(glob.glob("**/tests", recursive=True)) - - if len(test_list) == 0: - print("No tests found, skipping directory.") - return - - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - concurrent_args = [] - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - with open("requirements.txt") as rfile: - packages = rfile.read() - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - with open("requirements-test.txt") as rtfile: - packages += rtfile.read() - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - if "pytest-parallel" in packages: - concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) - elif "pytest-xdist" in packages: - concurrent_args.extend(["-n", "auto"]) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) - - -@nox.session(python=ALL_VERSIONS) -def py(session: nox.sessions.Session) -> None: - """Runs py.test for a sample using the specified version of Python.""" - if session.python in TESTED_VERSIONS: - _session_tests(session) - else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) - - -# -# Readmegen -# - - -def _get_repo_root() -> Optional[str]: - """Returns the root folder of the project.""" - # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
- p = Path(os.getcwd()) - for i in range(10): - if p is None: - break - if Path(p / ".git").exists(): - return str(p) - # .git is not available in repos cloned via Cloud Build - # setup.py is always in the library's root, so use that instead - # https://github.com/googleapis/synthtool/issues/792 - if Path(p / "setup.py").exists(): - return str(p) - p = p.parent - raise Exception("Unable to detect repository root.") - - -GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) - - -@nox.session -@nox.parametrize("path", GENERATED_READMES) -def readmegen(session: nox.sessions.Session, path: str) -> None: - """(Re-)generates the readme for a sample.""" - session.install("jinja2", "pyyaml") - dir_ = os.path.dirname(path) - - if os.path.exists(os.path.join(dir_, "requirements.txt")): - session.install("-r", os.path.join(dir_, "requirements.txt")) - - in_file = os.path.join(dir_, "README.rst.in") - session.run( - "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file - ) diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt deleted file mode 100644 index c021c5b5..00000000 --- a/samples/snippets/requirements-test.txt +++ /dev/null @@ -1 +0,0 @@ -pytest==7.2.2 diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt deleted file mode 100644 index 1e3e69c8..00000000 --- a/samples/snippets/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -Flask==2.2.3 -gunicorn==20.1.0 -google-cloud-scheduler==2.10.0 diff --git a/tests/unit/gapic/scheduler_v1/test_cloud_scheduler.py b/tests/unit/gapic/scheduler_v1/test_cloud_scheduler.py index f5863228..e011e914 100644 --- a/tests/unit/gapic/scheduler_v1/test_cloud_scheduler.py +++ b/tests/unit/gapic/scheduler_v1/test_cloud_scheduler.py @@ -1123,9 +1123,11 @@ async def test_list_jobs_async_pages(): RuntimeError, ) pages = [] - async for page_ in ( + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch await client.list_jobs(request={}) - ).pages: # pragma: no branch + ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1157,7 +1159,6 @@ def test_get_job(request_type, transport: str = "grpc"): schedule="schedule_value", time_zone="time_zone_value", state=job.Job.State.ENABLED, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) response = client.get_job(request) @@ -1402,7 +1403,6 @@ def test_create_job(request_type, transport: str = "grpc"): schedule="schedule_value", time_zone="time_zone_value", state=gcs_job.Job.State.ENABLED, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) response = client.create_job(request) @@ -1657,7 +1657,6 @@ def test_update_job(request_type, transport: str = "grpc"): schedule="schedule_value", time_zone="time_zone_value", state=gcs_job.Job.State.ENABLED, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) response = client.update_job(request) @@ -2132,7 +2131,6 @@ def test_pause_job(request_type, transport: str = "grpc"): schedule="schedule_value", time_zone="time_zone_value", state=job.Job.State.ENABLED, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) response = client.pause_job(request) @@ -2377,7 
+2375,6 @@ def test_resume_job(request_type, transport: str = "grpc"): schedule="schedule_value", time_zone="time_zone_value", state=job.Job.State.ENABLED, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) response = client.resume_job(request) @@ -2622,7 +2619,6 @@ def test_run_job(request_type, transport: str = "grpc"): schedule="schedule_value", time_zone="time_zone_value", state=job.Job.State.ENABLED, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) response = client.run_job(request) @@ -3200,7 +3196,6 @@ def test_get_job_rest(request_type): schedule="schedule_value", time_zone="time_zone_value", state=job.Job.State.ENABLED, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) # Wrap the value into a proper Response obj @@ -3529,7 +3524,6 @@ def test_create_job_rest(request_type): schedule="schedule_value", time_zone="time_zone_value", state=gcs_job.Job.State.ENABLED, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) # Wrap the value into a proper Response obj @@ -3930,7 +3924,6 @@ def test_update_job_rest(request_type): schedule="schedule_value", time_zone="time_zone_value", state=gcs_job.Job.State.ENABLED, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) # Wrap the value into a proper Response obj @@ -4513,7 +4506,6 @@ def test_pause_job_rest(request_type): schedule="schedule_value", time_zone="time_zone_value", state=job.Job.State.ENABLED, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) # Wrap the value into a proper Response obj @@ -4784,7 +4776,6 @@ def test_resume_job_rest(request_type): schedule="schedule_value", time_zone="time_zone_value", state=job.Job.State.ENABLED, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) # Wrap the value into a proper Response obj @@ -5058,7 +5049,6 @@ def test_run_job_rest(request_type): schedule="schedule_value", time_zone="time_zone_value", state=job.Job.State.ENABLED, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) # Wrap the value into a proper Response obj diff --git a/tests/unit/gapic/scheduler_v1beta1/test_cloud_scheduler.py b/tests/unit/gapic/scheduler_v1beta1/test_cloud_scheduler.py index 4cf7e404..43b23e15 100644 --- a/tests/unit/gapic/scheduler_v1beta1/test_cloud_scheduler.py +++ b/tests/unit/gapic/scheduler_v1beta1/test_cloud_scheduler.py @@ -1123,9 +1123,11 @@ async def test_list_jobs_async_pages(): RuntimeError, ) pages = [] - async for page_ in ( + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch await client.list_jobs(request={}) - ).pages: # pragma: no branch + ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1158,7 +1160,6 @@ def test_get_job(request_type, transport: str = "grpc"): time_zone="time_zone_value", state=job.Job.State.ENABLED, legacy_app_engine_cron=True, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) response = client.get_job(request) @@ -1407,7 +1408,6 @@ def test_create_job(request_type, transport: str = "grpc"): time_zone="time_zone_value", state=gcs_job.Job.State.ENABLED, legacy_app_engine_cron=True, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) response = client.create_job(request) @@ 
-1666,7 +1666,6 @@ def test_update_job(request_type, transport: str = "grpc"): time_zone="time_zone_value", state=gcs_job.Job.State.ENABLED, legacy_app_engine_cron=True, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) response = client.update_job(request) @@ -2145,7 +2144,6 @@ def test_pause_job(request_type, transport: str = "grpc"): time_zone="time_zone_value", state=job.Job.State.ENABLED, legacy_app_engine_cron=True, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) response = client.pause_job(request) @@ -2394,7 +2392,6 @@ def test_resume_job(request_type, transport: str = "grpc"): time_zone="time_zone_value", state=job.Job.State.ENABLED, legacy_app_engine_cron=True, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) response = client.resume_job(request) @@ -2643,7 +2640,6 @@ def test_run_job(request_type, transport: str = "grpc"): time_zone="time_zone_value", state=job.Job.State.ENABLED, legacy_app_engine_cron=True, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) response = client.run_job(request) @@ -3229,7 +3225,6 @@ def test_get_job_rest(request_type): time_zone="time_zone_value", state=job.Job.State.ENABLED, legacy_app_engine_cron=True, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) # Wrap the value into a proper Response obj @@ -3561,7 +3556,6 @@ def test_create_job_rest(request_type): time_zone="time_zone_value", state=gcs_job.Job.State.ENABLED, legacy_app_engine_cron=True, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) # Wrap the value into a proper Response obj @@ -3966,7 +3960,6 @@ def test_update_job_rest(request_type): time_zone="time_zone_value", state=gcs_job.Job.State.ENABLED, legacy_app_engine_cron=True, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) # Wrap the value into a proper Response obj @@ -4555,7 +4548,6 @@ def test_pause_job_rest(request_type): time_zone="time_zone_value", state=job.Job.State.ENABLED, legacy_app_engine_cron=True, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) # Wrap the value into a proper Response obj @@ -4829,7 +4821,6 @@ def test_resume_job_rest(request_type): time_zone="time_zone_value", state=job.Job.State.ENABLED, legacy_app_engine_cron=True, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) # Wrap the value into a proper Response obj @@ -5105,7 +5096,6 @@ def test_run_job_rest(request_type): time_zone="time_zone_value", state=job.Job.State.ENABLED, legacy_app_engine_cron=True, - pubsub_target=target.PubsubTarget(topic_name="topic_name_value"), ) # Wrap the value into a proper Response obj
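
Context for the headline fix in this release ("Add async context manager return types", #340): annotating `__aenter__` with `-> "CloudSchedulerAsyncClient"` lets static type checkers infer the client type inside an `async with` block instead of treating it as `Any`. The sketch below is illustrative only and not part of the diff — the project ID and location are placeholders, and it assumes google-cloud-scheduler >= 2.11.1 is installed with Application Default Credentials configured.

```python
# Minimal sketch of what the typed __aenter__ buys: with the annotation,
# mypy/pyright see `client` as CloudSchedulerAsyncClient rather than Any.
import asyncio

from google.cloud import scheduler_v1


async def main() -> None:
    # __aenter__ now returns "CloudSchedulerAsyncClient", so the client's
    # methods are visible to type checkers inside this block.
    async with scheduler_v1.CloudSchedulerAsyncClient() as client:
        parent = "projects/my-project/locations/us-central1"  # placeholder
        pager = await client.list_jobs(parent=parent)
        async for job in pager:  # ListJobsAsyncPager supports async iteration
            print(job.name)


if __name__ == "__main__":
    asyncio.run(main())
```

The diff applies the same annotation to both the v1 and v1beta1 async clients, so the sketch works identically with `scheduler_v1beta1`.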