diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index cb89b2e3..eecb84c2 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -1,3 +1,3 @@
 docker:
   image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
-  digest: sha256:ec49167c606648a063d1222220b48119c912562849a0528f35bfb592a9f72737
+  digest: sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 22f60bb5..6fc955c3 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -3,10 +3,10 @@
 #
 # For syntax help see:
 # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
+# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json.
-
-# @googleapis/yoshi-python and @googleapis/api-dataproc are the default owners for changes in this repo
+# @googleapis/yoshi-python @googleapis/api-dataproc are the default owners for changes in this repo
 * @googleapis/yoshi-python @googleapis/api-dataproc
-# @googleapis/python-samples-owners and @googleapis/api-dataproc are the default owners for samples changes
-/samples/ @googleapis/python-samples-owners @googleapis/api-dataproc
+# @googleapis/python-samples-reviewers @googleapis/api-dataproc are the default owners for samples changes
+/samples/ @googleapis/python-samples-reviewers @googleapis/api-dataproc
diff --git a/.github/release-please.yml b/.github/release-please.yml
index 4507ad05..466597e5 100644
--- a/.github/release-please.yml
+++ b/.github/release-please.yml
@@ -1 +1,2 @@
 releaseType: python
+handleGHRelease: true
diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml
new file mode 100644
index 00000000..d4ca9418
--- /dev/null
+++ b/.github/release-trigger.yml
@@ -0,0 +1 @@
+enabled: true
diff --git a/.kokoro/release.sh b/.kokoro/release.sh
index e14485f8..40d1226a 100755
--- a/.kokoro/release.sh
+++ b/.kokoro/release.sh
@@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools
 export PYTHONUNBUFFERED=1
 
 # Move into the package, build the distribution and upload.
-TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token")
+TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1")
 cd github/python-dataproc
 python3 setup.py sdist bdist_wheel
 twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/*
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
index 6d3ff232..a9f05935 100644
--- a/.kokoro/release/common.cfg
+++ b/.kokoro/release/common.cfg
@@ -23,8 +23,18 @@ env_vars: {
     value: "github/python-dataproc/.kokoro/release.sh"
 }
 
+# Fetch PyPI password
+before_action {
+  fetch_keystore {
+    keystore_resource {
+      keystore_config_id: 73713
+      keyname: "google-cloud-pypi-token-keystore-1"
+    }
+  }
+}
+
 # Tokens needed to report release status back to GitHub
 env_vars: {
   key: "SECRET_MANAGER_KEYS"
-  value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token"
+  value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem"
 }
diff --git a/.repo-metadata.json b/.repo-metadata.json
index 18c4c351..d93929a9 100644
--- a/.repo-metadata.json
+++ b/.repo-metadata.json
@@ -2,9 +2,9 @@
   "name": "dataproc",
   "name_pretty": "Google Cloud Dataproc",
   "product_documentation": "https://cloud.google.com/dataproc",
-  "client_documentation": "https://googleapis.dev/python/dataproc/latest",
+  "client_documentation": "https://cloud.google.com/python/docs/reference/dataproc/latest",
   "issue_tracker": "https://issuetracker.google.com/savedsearches/559745",
-  "release_level": "ga",
+  "release_level": "stable",
   "language": "python",
   "library_type": "GAPIC_AUTO",
   "repo": "googleapis/python-dataproc",
@@ -12,5 +12,6 @@
   "api_id": "dataproc.googleapis.com",
   "requires_billing": true,
   "codeowner_team": "@googleapis/api-dataproc",
-  "default_version": "v1"
+  "default_version": "v1",
+  "api_shortname": "dataproc"
 }
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d0ce5701..f7449390 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,16 @@
 
 [1]: https://pypi.org/project/google-cloud-dataproc/#history
 
+## [3.2.0](https://github.com/googleapis/python-dataproc/compare/v3.1.1...v3.2.0) (2022-01-17)
+
+
+### Features
+
+* add Spark runtime versioning for Spark batches ([#318](https://github.com/googleapis/python-dataproc/issues/318)) ([f2e35d9](https://github.com/googleapis/python-dataproc/commit/f2e35d9735cbd0dd5a0e32d78631d70820380846))
+* auto-diagnostic of failed Spark batches ([f2e35d9](https://github.com/googleapis/python-dataproc/commit/f2e35d9735cbd0dd5a0e32d78631d70820380846))
+* custom image containers for Spark batches ([f2e35d9](https://github.com/googleapis/python-dataproc/commit/f2e35d9735cbd0dd5a0e32d78631d70820380846))
+* local SSD NVME interface support for GCE clusters ([f2e35d9](https://github.com/googleapis/python-dataproc/commit/f2e35d9735cbd0dd5a0e32d78631d70820380846))
+
 ### [3.1.1](https://www.github.com/googleapis/python-dataproc/compare/v3.1.0...v3.1.1) (2021-11-01)
diff --git a/README.rst b/README.rst
index 146c49da..e7103b8f 100644
--- a/README.rst
+++ b/README.rst
@@ -15,7 +15,7 @@ Python Client for Google Cloud Dataproc API
 .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-dataproc.svg
    :target: https://pypi.org/project/google-cloud-dataproc/
 .. _Google Cloud Dataproc API: https://cloud.google.com/dataproc
-.. _Client Library Documentation: https://googleapis.dev/python/dataproc/latest
+.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/dataproc/latest
 .. _Product Documentation: https://cloud.google.com/dataproc
 
 Quick Start
diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py
index 7f99f540..7ab786bd 100644
--- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py
+++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py
@@ -19,14 +19,17 @@
 from typing import Dict, Sequence, Tuple, Type, Union
 import pkg_resources
 
-from google.api_core.client_options import ClientOptions  # type: ignore
-from google.api_core import exceptions as core_exceptions  # type: ignore
-from google.api_core import gapic_v1  # type: ignore
-from google.api_core import retry as retries  # type: ignore
+from google.api_core.client_options import ClientOptions
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.oauth2 import service_account  # type: ignore
 
-OptionalRetry = Union[retries.Retry, object]
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
 
 from google.cloud.dataproc_v1.services.autoscaling_policy_service import pagers
 from google.cloud.dataproc_v1.types import autoscaling_policies
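The `OptionalRetry` change above (repeated for every generated client in this diff) is a version-compatibility shim: newer releases of google-api-core type the `gapic_v1.method.DEFAULT` sentinel as `_MethodDefault`, while older releases have no such attribute, so the alias falls back to plain `object`. A minimal standalone sketch of the same pattern, assuming only that some version of google-api-core is installed (the `list_policies` function name is illustrative, not part of the library):

```python
from typing import Union

from google.api_core import gapic_v1
from google.api_core import retry as retries

try:
    # Newer google-api-core: the DEFAULT sentinel has a dedicated type, so
    # type checkers can tell "no retry passed" apart from a real Retry.
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # older google-api-core without _MethodDefault
    OptionalRetry = Union[retries.Retry, object]


def list_policies(retry: "OptionalRetry" = gapic_v1.method.DEFAULT) -> None:
    # gapic_v1.method.DEFAULT means "use the method's default retry policy".
    if retry is gapic_v1.method.DEFAULT:
        print("using default retry policy")
    else:
        print(f"using caller-supplied retry: {retry}")


list_policies()
```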
 #
 from collections import OrderedDict
-from distutils import util
 import os
 import re
 from typing import Dict, Optional, Sequence, Tuple, Type, Union
 import pkg_resources
 
-from google.api_core import client_options as client_options_lib  # type: ignore
-from google.api_core import exceptions as core_exceptions  # type: ignore
-from google.api_core import gapic_v1  # type: ignore
-from google.api_core import retry as retries  # type: ignore
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport import mtls  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
 from google.auth.exceptions import MutualTLSChannelError  # type: ignore
 from google.oauth2 import service_account  # type: ignore
 
-OptionalRetry = Union[retries.Retry, object]
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
 
 from google.cloud.dataproc_v1.services.autoscaling_policy_service import pagers
 from google.cloud.dataproc_v1.types import autoscaling_policies
@@ -289,8 +291,15 @@ def __init__(
             client_options = client_options_lib.ClientOptions()
 
         # Create SSL credentials for mutual TLS if needed.
-        use_client_cert = bool(
-            util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+        if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in (
+            "true",
+            "false",
+        ):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        use_client_cert = (
+            os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true"
         )
 
         client_cert_source_func = None
diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py
index 1b89a691..c0d90b49 100644
--- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py
+++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py
@@ -18,10 +18,10 @@
 import pkg_resources
 
 import google.auth  # type: ignore
-import google.api_core  # type: ignore
-from google.api_core import exceptions as core_exceptions  # type: ignore
-from google.api_core import gapic_v1  # type: ignore
-from google.api_core import retry as retries  # type: ignore
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.oauth2 import service_account  # type: ignore
 
@@ -100,7 +100,6 @@ def __init__(
             credentials, _ = google.auth.load_credentials_from_file(
                 credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
             )
-
         elif credentials is None:
             credentials, _ = google.auth.default(
                 **scopes_kwargs, quota_project_id=quota_project_id
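Dropping `from distutils import util` removes the clients' dependency on `distutils.util.strtobool`, deprecated along with the rest of distutils (PEP 632). The replacement is stricter: it accepts only the literal strings `true` and `false`, where `strtobool` also accepted `1`, `yes`, `on`, and friends. A standalone sketch of the new behavior (the helper name is illustrative, not part of the library):

```python
import os


def _use_client_cert() -> bool:
    # Mirrors the generated clients' handling of the mTLS toggle: anything
    # other than the two documented spellings is rejected outright.
    value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    if value not in ("true", "false"):
        raise ValueError(
            "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` "
            "must be either `true` or `false`"
        )
    return value == "true"


print(_use_client_cert())  # False unless the variable is set to "true"
```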
diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py
index 4d5eefcd..bfd3ac76 100644
--- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py
+++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py
@@ -16,8 +16,8 @@
 import warnings
 from typing import Callable, Dict, Optional, Sequence, Tuple, Union
 
-from google.api_core import grpc_helpers  # type: ignore
-from google.api_core import gapic_v1  # type: ignore
+from google.api_core import grpc_helpers
+from google.api_core import gapic_v1
 import google.auth  # type: ignore
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py
index baa7dba8..078a4211 100644
--- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py
+++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py
@@ -16,8 +16,8 @@
 import warnings
 from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
 
-from google.api_core import gapic_v1  # type: ignore
-from google.api_core import grpc_helpers_async  # type: ignore
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
diff --git a/google/cloud/dataproc_v1/services/batch_controller/async_client.py b/google/cloud/dataproc_v1/services/batch_controller/async_client.py
index 62744335..c2477e29 100644
--- a/google/cloud/dataproc_v1/services/batch_controller/async_client.py
+++ b/google/cloud/dataproc_v1/services/batch_controller/async_client.py
@@ -19,14 +19,17 @@
 from typing import Dict, Sequence, Tuple, Type, Union
 import pkg_resources
 
-from google.api_core.client_options import ClientOptions  # type: ignore
-from google.api_core import exceptions as core_exceptions  # type: ignore
-from google.api_core import gapic_v1  # type: ignore
-from google.api_core import retry as retries  # type: ignore
+from google.api_core.client_options import ClientOptions
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.oauth2 import service_account  # type: ignore
 
-OptionalRetry = Union[retries.Retry, object]
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
 
 from google.api_core import operation  # type: ignore
 from google.api_core import operation_async  # type: ignore
diff --git a/google/cloud/dataproc_v1/services/batch_controller/client.py b/google/cloud/dataproc_v1/services/batch_controller/client.py
index d9ce3d90..897682f5 100644
--- a/google/cloud/dataproc_v1/services/batch_controller/client.py
+++ b/google/cloud/dataproc_v1/services/batch_controller/client.py
@@ -14,23 +14,25 @@
 # limitations under the License.
 #
 from collections import OrderedDict
-from distutils import util
 import os
 import re
 from typing import Dict, Optional, Sequence, Tuple, Type, Union
 import pkg_resources
 
-from google.api_core import client_options as client_options_lib  # type: ignore
-from google.api_core import exceptions as core_exceptions  # type: ignore
-from google.api_core import gapic_v1  # type: ignore
-from google.api_core import retry as retries  # type: ignore
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport import mtls  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
 from google.auth.exceptions import MutualTLSChannelError  # type: ignore
 from google.oauth2 import service_account  # type: ignore
 
-OptionalRetry = Union[retries.Retry, object]
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
 
 from google.api_core import operation  # type: ignore
 from google.api_core import operation_async  # type: ignore
@@ -290,8 +292,15 @@ def __init__(
             client_options = client_options_lib.ClientOptions()
 
         # Create SSL credentials for mutual TLS if needed.
-        use_client_cert = bool(
-            util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+        if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in (
+            "true",
+            "false",
+        ):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        use_client_cert = (
+            os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true"
         )
 
         client_cert_source_func = None
diff --git a/google/cloud/dataproc_v1/services/batch_controller/transports/base.py b/google/cloud/dataproc_v1/services/batch_controller/transports/base.py
index e30b5f75..be0ea88a 100644
--- a/google/cloud/dataproc_v1/services/batch_controller/transports/base.py
+++ b/google/cloud/dataproc_v1/services/batch_controller/transports/base.py
@@ -18,11 +18,11 @@
 import pkg_resources
 
 import google.auth  # type: ignore
-import google.api_core  # type: ignore
-from google.api_core import exceptions as core_exceptions  # type: ignore
-from google.api_core import gapic_v1  # type: ignore
-from google.api_core import retry as retries  # type: ignore
-from google.api_core import operations_v1  # type: ignore
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.api_core import operations_v1
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.oauth2 import service_account  # type: ignore
 
@@ -102,7 +102,6 @@ def __init__(
             credentials, _ = google.auth.load_credentials_from_file(
                 credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
             )
-
         elif credentials is None:
             credentials, _ = google.auth.default(
                 **scopes_kwargs, quota_project_id=quota_project_id
diff --git a/google/cloud/dataproc_v1/services/batch_controller/transports/grpc.py b/google/cloud/dataproc_v1/services/batch_controller/transports/grpc.py
index e47ba06a..ef59085a 100644
--- a/google/cloud/dataproc_v1/services/batch_controller/transports/grpc.py
+++ b/google/cloud/dataproc_v1/services/batch_controller/transports/grpc.py
@@ -16,9 +16,9 @@
 import warnings
 from typing import Callable, Dict, Optional, Sequence, Tuple, Union
 
-from google.api_core import grpc_helpers  # type: ignore
-from google.api_core import operations_v1  # type: ignore
-from google.api_core import gapic_v1  # type: ignore
+from google.api_core import grpc_helpers
+from google.api_core import operations_v1
+from google.api_core import gapic_v1
 import google.auth  # type: ignore
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
diff --git a/google/cloud/dataproc_v1/services/batch_controller/transports/grpc_asyncio.py b/google/cloud/dataproc_v1/services/batch_controller/transports/grpc_asyncio.py
index ec125b60..c2f7e393 100644
--- a/google/cloud/dataproc_v1/services/batch_controller/transports/grpc_asyncio.py
+++ b/google/cloud/dataproc_v1/services/batch_controller/transports/grpc_asyncio.py
@@ -16,9 +16,9 @@
 import warnings
 from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
 
-from google.api_core import gapic_v1  # type: ignore
-from google.api_core import grpc_helpers_async  # type: ignore
-from google.api_core import operations_v1  # type: ignore
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import operations_v1
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
diff --git a/google/cloud/dataproc_v1/services/cluster_controller/async_client.py b/google/cloud/dataproc_v1/services/cluster_controller/async_client.py
index e21cd14e..ed69b3b4 100644
--- a/google/cloud/dataproc_v1/services/cluster_controller/async_client.py
+++ b/google/cloud/dataproc_v1/services/cluster_controller/async_client.py
@@ -19,14 +19,17 @@
 from typing import Dict, Sequence, Tuple, Type, Union
 import pkg_resources
 
-from google.api_core.client_options import ClientOptions  # type: ignore
-from google.api_core import exceptions as core_exceptions  # type: ignore
-from google.api_core import gapic_v1  # type: ignore
-from google.api_core import retry as retries  # type: ignore
+from google.api_core.client_options import ClientOptions
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.oauth2 import service_account  # type: ignore
 
-OptionalRetry = Union[retries.Retry, object]
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
 
 from google.api_core import operation  # type: ignore
 from google.api_core import operation_async  # type: ignore
diff --git a/google/cloud/dataproc_v1/services/cluster_controller/client.py b/google/cloud/dataproc_v1/services/cluster_controller/client.py
index 140d0f20..88fcd0be 100644
--- a/google/cloud/dataproc_v1/services/cluster_controller/client.py
+++ b/google/cloud/dataproc_v1/services/cluster_controller/client.py
@@ -14,23 +14,25 @@
 # limitations under the License.
 #
 from collections import OrderedDict
-from distutils import util
 import os
 import re
 from typing import Dict, Optional, Sequence, Tuple, Type, Union
 import pkg_resources
 
-from google.api_core import client_options as client_options_lib  # type: ignore
-from google.api_core import exceptions as core_exceptions  # type: ignore
-from google.api_core import gapic_v1  # type: ignore
-from google.api_core import retry as retries  # type: ignore
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport import mtls  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
 from google.auth.exceptions import MutualTLSChannelError  # type: ignore
 from google.oauth2 import service_account  # type: ignore
 
-OptionalRetry = Union[retries.Retry, object]
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
 
 from google.api_core import operation  # type: ignore
 from google.api_core import operation_async  # type: ignore
@@ -308,8 +310,15 @@ def __init__(
             client_options = client_options_lib.ClientOptions()
 
         # Create SSL credentials for mutual TLS if needed.
-        use_client_cert = bool(
-            util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+        if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in (
+            "true",
+            "false",
+        ):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        use_client_cert = (
+            os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true"
         )
 
         client_cert_source_func = None
diff --git a/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py b/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py
index 7e78b0cb..54539ec0 100644
--- a/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py
+++ b/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py
@@ -18,11 +18,11 @@
 import pkg_resources
 
 import google.auth  # type: ignore
-import google.api_core  # type: ignore
-from google.api_core import exceptions as core_exceptions  # type: ignore
-from google.api_core import gapic_v1  # type: ignore
-from google.api_core import retry as retries  # type: ignore
-from google.api_core import operations_v1  # type: ignore
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.api_core import operations_v1
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.oauth2 import service_account  # type: ignore
 
@@ -101,7 +101,6 @@ def __init__(
             credentials, _ = google.auth.load_credentials_from_file(
                 credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
             )
-
         elif credentials is None:
             credentials, _ = google.auth.default(
                 **scopes_kwargs, quota_project_id=quota_project_id
diff --git a/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py b/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py
index 69360855..e6b062c3 100644
--- a/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py
+++ b/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py
@@ -16,9 +16,9 @@
 import warnings
 from typing import Callable, Dict, Optional, Sequence, Tuple, Union
 
-from google.api_core import grpc_helpers  # type: ignore
-from google.api_core import operations_v1  # type: ignore
-from google.api_core import gapic_v1  # type: ignore
+from google.api_core import grpc_helpers
+from google.api_core import operations_v1
+from google.api_core import gapic_v1
 import google.auth  # type: ignore
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
diff --git a/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py b/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py
index e035c45a..cbba42bb 100644
--- a/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py
+++ b/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py
@@ -16,9 +16,9 @@
 import warnings
 from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
 
-from google.api_core import gapic_v1  # type: ignore
-from google.api_core import grpc_helpers_async  # type: ignore
-from google.api_core import operations_v1  # type: ignore
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import operations_v1
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
diff --git a/google/cloud/dataproc_v1/services/job_controller/async_client.py b/google/cloud/dataproc_v1/services/job_controller/async_client.py
index 916881e6..1f21893a 100644
--- a/google/cloud/dataproc_v1/services/job_controller/async_client.py
+++ b/google/cloud/dataproc_v1/services/job_controller/async_client.py
@@ -19,14 +19,17 @@
 from typing import Dict, Sequence, Tuple, Type, Union
 import pkg_resources
 
-from google.api_core.client_options import ClientOptions  # type: ignore
-from google.api_core import exceptions as core_exceptions  # type: ignore
-from google.api_core import gapic_v1  # type: ignore
-from google.api_core import retry as retries  # type: ignore
+from google.api_core.client_options import ClientOptions
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.oauth2 import service_account  # type: ignore
 
-OptionalRetry = Union[retries.Retry, object]
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
 
 from google.api_core import operation  # type: ignore
 from google.api_core import operation_async  # type: ignore
diff --git a/google/cloud/dataproc_v1/services/job_controller/client.py b/google/cloud/dataproc_v1/services/job_controller/client.py
index 3e9a47ee..6b318b2f 100644
--- a/google/cloud/dataproc_v1/services/job_controller/client.py
+++ b/google/cloud/dataproc_v1/services/job_controller/client.py
@@ -14,23 +14,25 @@
 # limitations under the License.
 #
 from collections import OrderedDict
-from distutils import util
 import os
 import re
 from typing import Dict, Optional, Sequence, Tuple, Type, Union
 import pkg_resources
 
-from google.api_core import client_options as client_options_lib  # type: ignore
-from google.api_core import exceptions as core_exceptions  # type: ignore
-from google.api_core import gapic_v1  # type: ignore
-from google.api_core import retry as retries  # type: ignore
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport import mtls  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
 from google.auth.exceptions import MutualTLSChannelError  # type: ignore
 from google.oauth2 import service_account  # type: ignore
 
-OptionalRetry = Union[retries.Retry, object]
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
 
 from google.api_core import operation  # type: ignore
 from google.api_core import operation_async  # type: ignore
@@ -267,8 +269,15 @@ def __init__(
             client_options = client_options_lib.ClientOptions()
 
         # Create SSL credentials for mutual TLS if needed.
-        use_client_cert = bool(
-            util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+        if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in (
+            "true",
+            "false",
+        ):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        use_client_cert = (
+            os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true"
         )
 
         client_cert_source_func = None
diff --git a/google/cloud/dataproc_v1/services/job_controller/transports/base.py b/google/cloud/dataproc_v1/services/job_controller/transports/base.py
index 9bd8fb9a..d99fa89e 100644
--- a/google/cloud/dataproc_v1/services/job_controller/transports/base.py
+++ b/google/cloud/dataproc_v1/services/job_controller/transports/base.py
@@ -18,11 +18,11 @@
 import pkg_resources
 
 import google.auth  # type: ignore
-import google.api_core  # type: ignore
-from google.api_core import exceptions as core_exceptions  # type: ignore
-from google.api_core import gapic_v1  # type: ignore
-from google.api_core import retry as retries  # type: ignore
-from google.api_core import operations_v1  # type: ignore
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.api_core import operations_v1
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.oauth2 import service_account  # type: ignore
 
@@ -102,7 +102,6 @@ def __init__(
             credentials, _ = google.auth.load_credentials_from_file(
                 credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
             )
-
         elif credentials is None:
             credentials, _ = google.auth.default(
                 **scopes_kwargs, quota_project_id=quota_project_id
diff --git a/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py b/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py
index c5a8218f..1c119715 100644
--- a/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py
+++ b/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py
@@ -16,9 +16,9 @@
 import warnings
 from typing import Callable, Dict, Optional, Sequence, Tuple, Union
 
-from google.api_core import grpc_helpers  # type: ignore
-from google.api_core import operations_v1  # type: ignore
-from google.api_core import gapic_v1  # type: ignore
+from google.api_core import grpc_helpers
+from google.api_core import operations_v1
+from google.api_core import gapic_v1
 import google.auth  # type: ignore
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
diff --git a/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py b/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py
index 645d1a8c..fed32978 100644
--- a/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py
+++ b/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py
@@ -16,9 +16,9 @@
 import warnings
 from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
 
-from google.api_core import gapic_v1  # type: ignore
-from google.api_core import grpc_helpers_async  # type: ignore
-from google.api_core import operations_v1  # type: ignore
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import operations_v1
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py b/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py
index 4d58508b..c85eac66 100644
--- a/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py
+++ b/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py
@@ -19,14 +19,17 @@
 from typing import Dict, Sequence, Tuple, Type, Union
 import pkg_resources
 
-from google.api_core.client_options import ClientOptions  # type: ignore
-from google.api_core import exceptions as core_exceptions  # type: ignore
-from google.api_core import gapic_v1  # type: ignore
-from google.api_core import retry as retries  # type: ignore
+from google.api_core.client_options import ClientOptions
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.oauth2 import service_account  # type: ignore
 
-OptionalRetry = Union[retries.Retry, object]
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
 
 from google.api_core import operation  # type: ignore
 from google.api_core import operation_async  # type: ignore
@@ -204,7 +207,7 @@ async def create_workflow_template(
                 as described in
                 https://cloud.google.com/apis/design/resource_names.
 
-                -  For ``projects.regions.workflowTemplates,create``,
+                -  For ``projects.regions.workflowTemplates.create``,
                    the resource name of the region has the following format:
                    ``projects/{project_id}/regions/{region}``
diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/client.py b/google/cloud/dataproc_v1/services/workflow_template_service/client.py
index 7c645c64..309a9e86 100644
--- a/google/cloud/dataproc_v1/services/workflow_template_service/client.py
+++ b/google/cloud/dataproc_v1/services/workflow_template_service/client.py
@@ -14,23 +14,25 @@
 # limitations under the License.
 #
 from collections import OrderedDict
-from distutils import util
 import os
 import re
 from typing import Dict, Optional, Sequence, Tuple, Type, Union
 import pkg_resources
 
-from google.api_core import client_options as client_options_lib  # type: ignore
-from google.api_core import exceptions as core_exceptions  # type: ignore
-from google.api_core import gapic_v1  # type: ignore
-from google.api_core import retry as retries  # type: ignore
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport import mtls  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
 from google.auth.exceptions import MutualTLSChannelError  # type: ignore
 from google.oauth2 import service_account  # type: ignore
 
-OptionalRetry = Union[retries.Retry, object]
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
 
 from google.api_core import operation  # type: ignore
 from google.api_core import operation_async  # type: ignore
@@ -326,8 +328,15 @@ def __init__(
             client_options = client_options_lib.ClientOptions()
 
         # Create SSL credentials for mutual TLS if needed.
-        use_client_cert = bool(
-            util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+        if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in (
+            "true",
+            "false",
+        ):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        use_client_cert = (
+            os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true"
         )
 
         client_cert_source_func = None
@@ -413,7 +422,7 @@ def create_workflow_template(
             as described in
             https://cloud.google.com/apis/design/resource_names.
 
-            -  For ``projects.regions.workflowTemplates,create``, the
+            -  For ``projects.regions.workflowTemplates.create``, the
               resource name of the region has the following format:
               ``projects/{project_id}/regions/{region}``
diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py b/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py
index cf059b67..925922fd 100644
--- a/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py
+++ b/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py
@@ -18,11 +18,11 @@
 import pkg_resources
 
 import google.auth  # type: ignore
-import google.api_core  # type: ignore
-from google.api_core import exceptions as core_exceptions  # type: ignore
-from google.api_core import gapic_v1  # type: ignore
-from google.api_core import retry as retries  # type: ignore
-from google.api_core import operations_v1  # type: ignore
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.api_core import operations_v1
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.oauth2 import service_account  # type: ignore
 
@@ -102,7 +102,6 @@ def __init__(
             credentials, _ = google.auth.load_credentials_from_file(
                 credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
             )
-
         elif credentials is None:
             credentials, _ = google.auth.default(
                 **scopes_kwargs, quota_project_id=quota_project_id
diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py b/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py
index 4140dead..78583da7 100644
--- a/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py
+++ b/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py
@@ -16,9 +16,9 @@
 import warnings
 from typing import Callable, Dict, Optional, Sequence, Tuple, Union
 
-from google.api_core import grpc_helpers  # type: ignore
-from google.api_core import operations_v1  # type: ignore
-from google.api_core import gapic_v1  # type: ignore
+from google.api_core import grpc_helpers
+from google.api_core import operations_v1
+from google.api_core import gapic_v1
 import google.auth  # type: ignore
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py b/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py
index b71c7b0d..fea4290a 100644
--- a/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py
+++ b/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py
@@ -16,9 +16,9 @@
 import warnings
 from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
 
-from google.api_core import gapic_v1  # type: ignore
-from google.api_core import grpc_helpers_async  # type: ignore
-from google.api_core import operations_v1  # type: ignore
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import operations_v1
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
diff --git a/google/cloud/dataproc_v1/types/batches.py b/google/cloud/dataproc_v1/types/batches.py
index 165a1076..2dee4eac 100644
--- a/google/cloud/dataproc_v1/types/batches.py
+++ b/google/cloud/dataproc_v1/types/batches.py
@@ -162,15 +162,19 @@ class Batch(proto.Message):
             created.
         pyspark_batch (google.cloud.dataproc_v1.types.PySparkBatch):
             Optional. PySpark batch config.
+
            This field is a member of `oneof`_ ``batch_config``.
         spark_batch (google.cloud.dataproc_v1.types.SparkBatch):
             Optional. Spark batch config.
+
            This field is a member of `oneof`_ ``batch_config``.
         spark_r_batch (google.cloud.dataproc_v1.types.SparkRBatch):
             Optional. SparkR batch config.
+
            This field is a member of `oneof`_ ``batch_config``.
         spark_sql_batch (google.cloud.dataproc_v1.types.SparkSqlBatch):
             Optional. SparkSql batch config.
+
            This field is a member of `oneof`_ ``batch_config``.
         runtime_info (google.cloud.dataproc_v1.types.RuntimeInfo):
             Output only. Runtime information about batch
@@ -324,11 +328,13 @@ class SparkBatch(proto.Message):
         main_jar_file_uri (str):
             Optional. The HCFS URI of the jar file that
             contains the main class.
+
            This field is a member of `oneof`_ ``driver``.
         main_class (str):
             Optional. The name of the driver main class.
             The jar file that contains the class must be in
             the classpath or specified in ``jar_file_uris``.
+
            This field is a member of `oneof`_ ``driver``.
         args (Sequence[str]):
             Optional. The arguments to pass to the driver. Do not
diff --git a/google/cloud/dataproc_v1/types/clusters.py b/google/cloud/dataproc_v1/types/clusters.py
index ce82e9d5..acc3c631 100644
--- a/google/cloud/dataproc_v1/types/clusters.py
+++ b/google/cloud/dataproc_v1/types/clusters.py
@@ -75,9 +75,10 @@ class Cluster(proto.Message):
             within a project must be unique. Names of deleted
             clusters can be reused.
         config (google.cloud.dataproc_v1.types.ClusterConfig):
-            Required. The cluster config. Note that
-            Dataproc may set default values, and values may
-            change when clusters are updated.
+            Optional. The cluster config for a cluster of
+            Compute Engine Instances. Note that Dataproc may
+            set default values, and values may change when
+            clusters are updated.
         labels (Sequence[google.cloud.dataproc_v1.types.Cluster.LabelsEntry]):
             Optional. The labels to associate with this cluster. Label
             **keys** must contain 1 to 63 characters, and must conform
@@ -682,11 +683,17 @@ class DiskConfig(proto.Message):
             data. If one or more SSDs are attached, this
             runtime bulk data is spread across them, and the
             boot disk contains only basic config and
             installed binaries.
+        local_ssd_interface (str):
+            Optional. Interface type of local SSDs (default is "scsi").
+            Valid values: "scsi" (Small Computer System Interface),
+            "nvme" (Non-Volatile Memory Express). See `SSD Interface
+            types `__.
     """
 
     boot_disk_type = proto.Field(proto.STRING, number=3,)
     boot_disk_size_gb = proto.Field(proto.INT32, number=1,)
     num_local_ssds = proto.Field(proto.INT32, number=2,)
+    local_ssd_interface = proto.Field(proto.STRING, number=4,)
 
 
 class NodeInitializationAction(proto.Message):
@@ -946,6 +953,7 @@ class LifecycleConfig(proto.Message):
             Optional. The time when cluster will be
             auto-deleted (see JSON representation of
             `Timestamp `__).
+
            This field is a member of `oneof`_ ``ttl``.
         auto_delete_ttl (google.protobuf.duration_pb2.Duration):
             Optional. The lifetime duration of cluster. The cluster will
@@ -953,6 +961,7 @@
             be auto-deleted at the end of this period. Minimum value is
             10 minutes; maximum value is 14 days (see JSON representation
             of `Duration `__).
+
            This field is a member of `oneof`_ ``ttl``.
         idle_start_time (google.protobuf.timestamp_pb2.Timestamp):
             Output only. The time when cluster became idle
             (most recent
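The new `DiskConfig.local_ssd_interface` field (proto field 4) is the "local SSD NVME interface support" feature from the changelog; per the docstring it accepts `"scsi"` (the default) or `"nvme"`. A hedged sketch of requesting NVMe local SSDs on a cluster's workers; the project, cluster name, and sizes are placeholders:

```python
from google.cloud import dataproc_v1

# Two local SSDs attached over NVMe instead of the default SCSI interface.
disk_config = dataproc_v1.DiskConfig(
    boot_disk_type="pd-standard",
    boot_disk_size_gb=500,
    num_local_ssds=2,
    local_ssd_interface="nvme",  # new in this release; default is "scsi"
)

cluster = dataproc_v1.Cluster(
    cluster_name="example-cluster",
    config=dataproc_v1.ClusterConfig(
        worker_config=dataproc_v1.InstanceGroupConfig(
            num_instances=2,
            disk_config=disk_config,
        ),
    ),
)

# Message construction alone needs no credentials; submitting the cluster
# through ClusterControllerClient would.
print(cluster.config.worker_config.disk_config.local_ssd_interface)
```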
diff --git a/google/cloud/dataproc_v1/types/jobs.py b/google/cloud/dataproc_v1/types/jobs.py
index 332ac310..d6cfc361 100644
--- a/google/cloud/dataproc_v1/types/jobs.py
+++ b/google/cloud/dataproc_v1/types/jobs.py
@@ -102,11 +102,13 @@ class HadoopJob(proto.Message):
             samples/custom-wordcount.jar'
             'file:///home/usr/lib/hadoop-mapreduce/hadoop-
             mapreduce-examples.jar'
+
            This field is a member of `oneof`_ ``driver``.
         main_class (str):
             The name of the driver's main class. The jar
             file containing the class must be in the default
             CLASSPATH or specified in ``jar_file_uris``.
+
            This field is a member of `oneof`_ ``driver``.
         args (Sequence[str]):
             Optional. The arguments to pass to the driver. Do not
@@ -162,11 +164,13 @@ class SparkJob(proto.Message):
         main_jar_file_uri (str):
             The HCFS URI of the jar file that contains
             the main class.
+
            This field is a member of `oneof`_ ``driver``.
         main_class (str):
             The name of the driver's main class. The jar
             file that contains the class must be in the
             default CLASSPATH or specified in ``jar_file_uris``.
+
            This field is a member of `oneof`_ ``driver``.
         args (Sequence[str]):
             Optional. The arguments to pass to the driver. Do not
@@ -302,9 +306,11 @@ class HiveJob(proto.Message):
         query_file_uri (str):
             The HCFS URI of the script that contains Hive
             queries.
+
            This field is a member of `oneof`_ ``queries``.
         query_list (google.cloud.dataproc_v1.types.QueryList):
             A list of queries.
+
            This field is a member of `oneof`_ ``queries``.
         continue_on_failure (bool):
             Optional. Whether to continue executing queries if a query
@@ -351,9 +357,11 @@ class SparkSqlJob(proto.Message):
         query_file_uri (str):
             The HCFS URI of the script that contains SQL
             queries.
+
            This field is a member of `oneof`_ ``queries``.
         query_list (google.cloud.dataproc_v1.types.QueryList):
             A list of queries.
+
            This field is a member of `oneof`_ ``queries``.
         script_variables (Sequence[google.cloud.dataproc_v1.types.SparkSqlJob.ScriptVariablesEntry]):
             Optional. Mapping of query variable names to values
@@ -397,9 +405,11 @@ class PigJob(proto.Message):
         query_file_uri (str):
             The HCFS URI of the script that contains the
             Pig queries.
+
            This field is a member of `oneof`_ ``queries``.
         query_list (google.cloud.dataproc_v1.types.QueryList):
             A list of queries.
+
            This field is a member of `oneof`_ ``queries``.
         continue_on_failure (bool):
             Optional. Whether to continue executing queries if a query
@@ -496,9 +506,11 @@ class PrestoJob(proto.Message):
         query_file_uri (str):
             The HCFS URI of the script that contains SQL
             queries.
+
            This field is a member of `oneof`_ ``queries``.
         query_list (google.cloud.dataproc_v1.types.QueryList):
             A list of queries.
+
            This field is a member of `oneof`_ ``queries``.
         continue_on_failure (bool):
             Optional. Whether to continue executing queries if a query
@@ -689,27 +701,35 @@ class Job(proto.Message):
             when, and where to run the job.
         hadoop_job (google.cloud.dataproc_v1.types.HadoopJob):
             Optional. Job is a Hadoop job.
+
            This field is a member of `oneof`_ ``type_job``.
         spark_job (google.cloud.dataproc_v1.types.SparkJob):
             Optional. Job is a Spark job.
+
            This field is a member of `oneof`_ ``type_job``.
         pyspark_job (google.cloud.dataproc_v1.types.PySparkJob):
             Optional. Job is a PySpark job.
+
            This field is a member of `oneof`_ ``type_job``.
         hive_job (google.cloud.dataproc_v1.types.HiveJob):
             Optional. Job is a Hive job.
+
            This field is a member of `oneof`_ ``type_job``.
         pig_job (google.cloud.dataproc_v1.types.PigJob):
             Optional. Job is a Pig job.
+
            This field is a member of `oneof`_ ``type_job``.
         spark_r_job (google.cloud.dataproc_v1.types.SparkRJob):
             Optional. Job is a SparkR job.
+
            This field is a member of `oneof`_ ``type_job``.
         spark_sql_job (google.cloud.dataproc_v1.types.SparkSqlJob):
             Optional. Job is a SparkSql job.
+
            This field is a member of `oneof`_ ``type_job``.
         presto_job (google.cloud.dataproc_v1.types.PrestoJob):
             Optional. Job is a Presto job.
+
            This field is a member of `oneof`_ ``type_job``.
         status (google.cloud.dataproc_v1.types.JobStatus):
             Output only. The job status. Additional application-specific
@@ -794,21 +814,28 @@ class JobScheduling(proto.Message):
 
     Attributes:
         max_failures_per_hour (int):
-            Optional. Maximum number of times per hour a
-            driver may be restarted as a result of driver
-            exiting with non-zero code before job is
-            reported failed.
+            Optional. Maximum number of times per hour a driver may be
+            restarted as a result of driver exiting with non-zero code
+            before job is reported failed.
 
-            A job may be reported as thrashing if driver
-            exits with non-zero code 4 times within 10
-            minute window.
+            A job may be reported as thrashing if driver exits with
+            non-zero code 4 times within 10 minute window.
 
             Maximum value is 10.
+
+            **Note:** Currently, this restartable job option is not
+            supported in Dataproc `workflow
+            template `__
+            jobs.
         max_failures_total (int):
-            Optional. Maximum number of times in total a
-            driver may be restarted as a result of driver
-            exiting with non-zero code before job is
-            reported failed. Maximum value is 240.
+            Optional. Maximum number of times in total a driver may be
+            restarted as a result of driver exiting with non-zero code
+            before job is reported failed. Maximum value is 240.
+
+            **Note:** Currently, this restartable job option is not
+            supported in Dataproc `workflow
+            template `__
+            jobs.
     """
 
     max_failures_per_hour = proto.Field(proto.INT32, number=1,)
diff --git a/google/cloud/dataproc_v1/types/shared.py b/google/cloud/dataproc_v1/types/shared.py
index 62df63e2..990bef54 100644
--- a/google/cloud/dataproc_v1/types/shared.py
+++ b/google/cloud/dataproc_v1/types/shared.py
@@ -61,12 +61,20 @@ class RuntimeConfig(proto.Message):
     r"""Runtime configuration for a workload.
 
     Attributes:
+        version (str):
+            Optional. Version of the batch runtime.
+        container_image (str):
+            Optional. Optional custom container image for
+            the job runtime environment. If not specified, a
+            default container image will be used.
         properties (Sequence[google.cloud.dataproc_v1.types.RuntimeConfig.PropertiesEntry]):
             Optional. A mapping of property names to
             values, which are used to configure workload
             execution.
     """
 
+    version = proto.Field(proto.STRING, number=1,)
+    container_image = proto.Field(proto.STRING, number=2,)
     properties = proto.MapField(proto.STRING, proto.STRING, number=3,)
 
 
@@ -104,10 +112,12 @@ class ExecutionConfig(proto.Message):
             execute workload.
         network_uri (str):
             Optional. Network URI to connect workload to.
+
            This field is a member of `oneof`_ ``network``.
         subnetwork_uri (str):
             Optional. Subnetwork URI to connect workload
             to.
+
            This field is a member of `oneof`_ ``network``.
         network_tags (Sequence[str]):
             Optional. Tags used for network traffic
@@ -172,10 +182,14 @@ class RuntimeInfo(proto.Message):
         output_uri (str):
             Output only. A URI pointing to the location
             of the stdout and stderr of the workload.
+        diagnostic_output_uri (str):
+            Output only. A URI pointing to the location
+            of the diagnostics tarball.
     """
 
     endpoints = proto.MapField(proto.STRING, proto.STRING, number=1,)
     output_uri = proto.Field(proto.STRING, number=2,)
+    diagnostic_output_uri = proto.Field(proto.STRING, number=3,)
 
 
 __all__ = tuple(sorted(__protobuf__.manifest))
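The `shared.py` additions map directly to the serverless-batches features in the changelog: `RuntimeConfig.version` pins the Spark batch runtime, `RuntimeConfig.container_image` supplies a custom container for the job environment, and `RuntimeInfo.diagnostic_output_uri` points at the diagnostics tarball collected for failed batches. A hedged sketch of how these surface through `BatchControllerClient`; the endpoint, project, and image are placeholders, and the version string is only an example:

```python
from google.cloud import dataproc_v1

batch = dataproc_v1.Batch(
    spark_batch=dataproc_v1.SparkBatch(
        main_class="org.apache.spark.examples.SparkPi",
        jar_file_uris=["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
    ),
    runtime_config=dataproc_v1.RuntimeConfig(
        version="1.0",  # new: pin the batch runtime version
        container_image="gcr.io/my-project/my-spark-image:latest",  # new: custom image
    ),
)

# Batches are regional; running this for real requires ADC credentials
# and an existing project.
client = dataproc_v1.BatchControllerClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)
operation = client.create_batch(
    parent="projects/my-project/locations/us-central1", batch=batch
)
result = operation.result()

# new: where the auto-collected diagnostics for a failed batch end up
print(result.runtime_info.diagnostic_output_uri)
```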
""" endpoints = proto.MapField(proto.STRING, proto.STRING, number=1,) output_uri = proto.Field(proto.STRING, number=2,) + diagnostic_output_uri = proto.Field(proto.STRING, number=3,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/dataproc_v1/types/workflow_templates.py b/google/cloud/dataproc_v1/types/workflow_templates.py index 3f35da74..21a8e174 100644 --- a/google/cloud/dataproc_v1/types/workflow_templates.py +++ b/google/cloud/dataproc_v1/types/workflow_templates.py @@ -154,6 +154,7 @@ class WorkflowTemplatePlacement(proto.Message): Attributes: managed_cluster (google.cloud.dataproc_v1.types.ManagedCluster): A cluster that is managed by the workflow. + This field is a member of `oneof`_ ``placement``. cluster_selector (google.cloud.dataproc_v1.types.ClusterSelector): Optional. A selector that chooses target @@ -161,6 +162,7 @@ class WorkflowTemplatePlacement(proto.Message): The selector is evaluated at the time each job is submitted. + This field is a member of `oneof`_ ``placement``. """ @@ -253,27 +255,35 @@ class OrderedJob(proto.Message): characters. hadoop_job (google.cloud.dataproc_v1.types.HadoopJob): Optional. Job is a Hadoop job. + This field is a member of `oneof`_ ``job_type``. spark_job (google.cloud.dataproc_v1.types.SparkJob): Optional. Job is a Spark job. + This field is a member of `oneof`_ ``job_type``. pyspark_job (google.cloud.dataproc_v1.types.PySparkJob): Optional. Job is a PySpark job. + This field is a member of `oneof`_ ``job_type``. hive_job (google.cloud.dataproc_v1.types.HiveJob): Optional. Job is a Hive job. + This field is a member of `oneof`_ ``job_type``. pig_job (google.cloud.dataproc_v1.types.PigJob): Optional. Job is a Pig job. + This field is a member of `oneof`_ ``job_type``. spark_r_job (google.cloud.dataproc_v1.types.SparkRJob): Optional. Job is a SparkR job. + This field is a member of `oneof`_ ``job_type``. spark_sql_job (google.cloud.dataproc_v1.types.SparkSqlJob): Optional. Job is a SparkSql job. + This field is a member of `oneof`_ ``job_type``. presto_job (google.cloud.dataproc_v1.types.PrestoJob): Optional. Job is a Presto job. + This field is a member of `oneof`_ ``job_type``. labels (Sequence[google.cloud.dataproc_v1.types.OrderedJob.LabelsEntry]): Optional. The labels to associate with this job. @@ -423,9 +433,11 @@ class ParameterValidation(proto.Message): Attributes: regex (google.cloud.dataproc_v1.types.RegexValidation): Validation based on regular expressions. + This field is a member of `oneof`_ ``validation_type``. values (google.cloud.dataproc_v1.types.ValueValidation): Validation based on a list of allowed values. + This field is a member of `oneof`_ ``validation_type``. """ @@ -614,7 +626,7 @@ class CreateWorkflowTemplateRequest(proto.Message): described in https://cloud.google.com/apis/design/resource_names. 
 
-            -  For ``projects.regions.workflowTemplates,create``, the
+            -  For ``projects.regions.workflowTemplates.create``, the
               resource name of the region has the following format:
               ``projects/{project_id}/regions/{region}``
diff --git a/samples/AUTHORING_GUIDE.md b/samples/AUTHORING_GUIDE.md
index 55c97b32..8249522f 100644
--- a/samples/AUTHORING_GUIDE.md
+++ b/samples/AUTHORING_GUIDE.md
@@ -1 +1 @@
-See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md
\ No newline at end of file
+See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/AUTHORING_GUIDE.md
\ No newline at end of file
diff --git a/samples/CONTRIBUTING.md b/samples/CONTRIBUTING.md
index 34c882b6..f5fe2e6b 100644
--- a/samples/CONTRIBUTING.md
+++ b/samples/CONTRIBUTING.md
@@ -1 +1 @@
-See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md
\ No newline at end of file
+See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/CONTRIBUTING.md
\ No newline at end of file
diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py
index 93a9122c..3bbef5d5 100644
--- a/samples/snippets/noxfile.py
+++ b/samples/snippets/noxfile.py
@@ -14,6 +14,7 @@
 
 from __future__ import print_function
 
+import glob
 import os
 from pathlib import Path
 import sys
@@ -184,37 +185,44 @@ def blacken(session: nox.sessions.Session) -> None:
 def _session_tests(
     session: nox.sessions.Session, post_install: Callable = None
 ) -> None:
-    if TEST_CONFIG["pip_version_override"]:
-        pip_version = TEST_CONFIG["pip_version_override"]
-        session.install(f"pip=={pip_version}")
-    """Runs py.test for a particular project."""
-    if os.path.exists("requirements.txt"):
-        if os.path.exists("constraints.txt"):
-            session.install("-r", "requirements.txt", "-c", "constraints.txt")
-        else:
-            session.install("-r", "requirements.txt")
-
-    if os.path.exists("requirements-test.txt"):
-        if os.path.exists("constraints-test.txt"):
-            session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt")
-        else:
-            session.install("-r", "requirements-test.txt")
-
-    if INSTALL_LIBRARY_FROM_SOURCE:
-        session.install("-e", _get_repo_root())
-
-    if post_install:
-        post_install(session)
-
-    session.run(
-        "pytest",
-        *(PYTEST_COMMON_ARGS + session.posargs),
-        # Pytest will return 5 when no tests are collected. This can happen
-        # on travis where slow and flaky tests are excluded.
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/samples/snippets/python-api-walkthrough.md b/samples/snippets/python-api-walkthrough.md index 1a8d436f..29161bba 100644 --- a/samples/snippets/python-api-walkthrough.md +++ b/samples/snippets/python-api-walkthrough.md @@ -6,7 +6,7 @@ Estimated completion time: - -1. Click the link below to enable the Dataproc, Compute Engine, and Cloud Storage APIs - in a separate GCP console tab in your browser. - - **Note:** After you select your project and enable the APIs, return to this tutorial by clicking - on the **Cloud Shell** tab in your browser. - - * [Enable APIs](https://console.cloud.google.com/flows/enableapi?apiid=dataproc,compute_component,storage-component.googleapis.com&redirect=https://console.cloud.google.com) + + +1. Create or select a Google Cloud project to use for this +tutorial. +* + +1. Enable the Dataproc, Compute Engine, and Cloud Storage APIs in your +project. 
+```sh +gcloud services enable dataproc.googleapis.com \ +compute.googleapis.com \ +storage-component.googleapis.com \ +--project={{project_id}} +``` ## Prerequisites (2) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index d22c8b7a..7cecd031 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,8 +1,8 @@ backoff==1.11.1 -grpcio==1.41.1 +grpcio==1.43.0 google-auth==2.3.3 google-auth-httplib2==0.1.0 google-cloud==0.34.0 -google-cloud-storage==1.42.3 -google-cloud-dataproc==3.1.0 +google-cloud-storage==2.0.0 +google-cloud-dataproc==3.1.1 diff --git a/samples/snippets/submit_job_to_cluster.py b/samples/snippets/submit_job_to_cluster.py index 68a547c4..b1024675 100644 --- a/samples/snippets/submit_job_to_cluster.py +++ b/samples/snippets/submit_job_to_cluster.py @@ -85,7 +85,7 @@ def download_output(project, cluster_id, output_bucket, job_id): return bucket.blob(output_blob).download_as_string() -# [START dataproc_create_cluster] +# [START dataproc_submit_job_create_cluster] def create_cluster(dataproc, project, zone, region, cluster_name): """Create the cluster.""" print("Creating cluster...") @@ -110,7 +110,7 @@ def create_cluster(dataproc, project, zone, region, cluster_name): waiting_callback = True -# [END dataproc_create_cluster] +# [END dataproc_submit_job_create_cluster] def callback(operation_future): @@ -202,9 +202,9 @@ def wait_for_job(dataproc, project, region, job_id): request={"project_id": project, "region": region, "job_id": job_id} ) # Handle exceptions - if job.status.State.Name(job.status.state) == "ERROR": + if job.status.State(job.status.state).name == "ERROR": raise Exception(job.status.details) - elif job.status.State.Name(job.status.state) == "DONE": + if job.status.State(job.status.state).name == "DONE": print("Job finished.") return job diff --git a/setup.py b/setup.py index 8997af59..916f42b6 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-dataproc" description = "Google Cloud Dataproc API client library" -version = "3.1.1" +version = "3.2.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' diff --git a/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py b/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py index 1ee09b40..88fabb79 100644 --- a/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py +++ b/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py @@ -262,20 +262,20 @@ def test_autoscaling_policy_service_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
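The `submit_job_to_cluster.py` hunk above swaps protobuf's `State.Name(value)` helper for `State(value).name`. The reason, sketched assuming the generated types module: proto-plus enums are `enum.IntEnum` subclasses, so they expose `.name` but no `Name()` classmethod.

```python
# Hedged sketch: enum name lookup with proto-plus enums.
from google.cloud.dataproc_v1 import types

state = types.JobStatus.State.DONE
assert types.JobStatus.State(state).name == "DONE"
# The old spelling would fail here:
# types.JobStatus.State.Name(state)  # AttributeError under proto-plus
```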
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -344,7 +344,7 @@ def test_autoscaling_policy_service_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -443,7 +443,7 @@ def test_autoscaling_policy_service_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -478,7 +478,7 @@ def test_autoscaling_policy_service_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -511,10 +511,10 @@ def test_autoscaling_policy_service_client_client_options_from_dict(): ) -def test_create_autoscaling_policy( - transport: str = "grpc", - request_type=autoscaling_policies.CreateAutoscalingPolicyRequest, -): +@pytest.mark.parametrize( + "request_type", [autoscaling_policies.CreateAutoscalingPolicyRequest, dict,] +) +def test_create_autoscaling_policy(request_type, transport: str = "grpc"): client = AutoscalingPolicyServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -550,10 +550,6 @@ def test_create_autoscaling_policy( assert response.name == "name_value" -def test_create_autoscaling_policy_from_dict(): - test_create_autoscaling_policy(request_type=dict) - - def test_create_autoscaling_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -691,8 +687,12 @@ def test_create_autoscaling_policy_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].policy == autoscaling_policies.AutoscalingPolicy(id="id_value") + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].policy + mock_val = autoscaling_policies.AutoscalingPolicy(id="id_value") + assert arg == mock_val def test_create_autoscaling_policy_flattened_error(): @@ -737,8 +737,12 @@ async def test_create_autoscaling_policy_flattened_async(): # request object values. 
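The refactor repeated across these test files, in isolation: each `test_*_from_dict` wrapper is deleted because `request_type` is now parametrized over the request class and plain `dict`. A self-contained sketch of the pattern; `FakeRequest` is a hypothetical stand-in, not part of the library:

```python
import pytest


class FakeRequest:
    """Hypothetical stand-in for a generated request class."""

    def __init__(self, mapping=None):
        self.mapping = dict(mapping or {})


# One test now runs twice, once per way of constructing the request, so the
# dict-based path no longer needs its own wrapper function.
@pytest.mark.parametrize("request_type", [FakeRequest, dict])
def test_roundtrip(request_type):
    request = request_type()
    assert isinstance(request, (FakeRequest, dict))
```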
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].policy == autoscaling_policies.AutoscalingPolicy(id="id_value") + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].policy + mock_val = autoscaling_policies.AutoscalingPolicy(id="id_value") + assert arg == mock_val @pytest.mark.asyncio @@ -757,10 +761,10 @@ async def test_create_autoscaling_policy_flattened_error_async(): ) -def test_update_autoscaling_policy( - transport: str = "grpc", - request_type=autoscaling_policies.UpdateAutoscalingPolicyRequest, -): +@pytest.mark.parametrize( + "request_type", [autoscaling_policies.UpdateAutoscalingPolicyRequest, dict,] +) +def test_update_autoscaling_policy(request_type, transport: str = "grpc"): client = AutoscalingPolicyServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -796,10 +800,6 @@ def test_update_autoscaling_policy( assert response.name == "name_value" -def test_update_autoscaling_policy_from_dict(): - test_update_autoscaling_policy(request_type=dict) - - def test_update_autoscaling_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -936,7 +936,9 @@ def test_update_autoscaling_policy_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].policy == autoscaling_policies.AutoscalingPolicy(id="id_value") + arg = args[0].policy + mock_val = autoscaling_policies.AutoscalingPolicy(id="id_value") + assert arg == mock_val def test_update_autoscaling_policy_flattened_error(): @@ -979,7 +981,9 @@ async def test_update_autoscaling_policy_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].policy == autoscaling_policies.AutoscalingPolicy(id="id_value") + arg = args[0].policy + mock_val = autoscaling_policies.AutoscalingPolicy(id="id_value") + assert arg == mock_val @pytest.mark.asyncio @@ -997,10 +1001,10 @@ async def test_update_autoscaling_policy_flattened_error_async(): ) -def test_get_autoscaling_policy( - transport: str = "grpc", - request_type=autoscaling_policies.GetAutoscalingPolicyRequest, -): +@pytest.mark.parametrize( + "request_type", [autoscaling_policies.GetAutoscalingPolicyRequest, dict,] +) +def test_get_autoscaling_policy(request_type, transport: str = "grpc"): client = AutoscalingPolicyServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1036,10 +1040,6 @@ def test_get_autoscaling_policy( assert response.name == "name_value" -def test_get_autoscaling_policy_from_dict(): - test_get_autoscaling_policy(request_type=dict) - - def test_get_autoscaling_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1174,7 +1174,9 @@ def test_get_autoscaling_policy_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_get_autoscaling_policy_flattened_error(): @@ -1214,7 +1216,9 @@ async def test_get_autoscaling_policy_flattened_async(): # request object values. 
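Several hunks here reshape assertions around the `_, args, _ = call.mock_calls[0]` idiom. For readers unfamiliar with it: each entry of `mock_calls` unpacks to a `(name, args, kwargs)` triple, which is what lets the tests pull the request object out as `args[0]`.

```python
from unittest import mock

m = mock.Mock()
m("positional", key="value")

# Every recorded call is a (name, args, kwargs) triple.
name, args, kwargs = m.mock_calls[0]
assert args == ("positional",)
assert kwargs == {"key": "value"}
```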
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1231,10 +1235,10 @@ async def test_get_autoscaling_policy_flattened_error_async(): ) -def test_list_autoscaling_policies( - transport: str = "grpc", - request_type=autoscaling_policies.ListAutoscalingPoliciesRequest, -): +@pytest.mark.parametrize( + "request_type", [autoscaling_policies.ListAutoscalingPoliciesRequest, dict,] +) +def test_list_autoscaling_policies(request_type, transport: str = "grpc"): client = AutoscalingPolicyServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1263,10 +1267,6 @@ def test_list_autoscaling_policies( assert response.next_page_token == "next_page_token_value" -def test_list_autoscaling_policies_from_dict(): - test_list_autoscaling_policies(request_type=dict) - - def test_list_autoscaling_policies_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1402,7 +1402,9 @@ def test_list_autoscaling_policies_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_autoscaling_policies_flattened_error(): @@ -1443,7 +1445,9 @@ async def test_list_autoscaling_policies_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1461,9 +1465,9 @@ async def test_list_autoscaling_policies_flattened_error_async(): ) -def test_list_autoscaling_policies_pager(): +def test_list_autoscaling_policies_pager(transport_name: str = "grpc"): client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1511,9 +1515,9 @@ def test_list_autoscaling_policies_pager(): ) -def test_list_autoscaling_policies_pages(): +def test_list_autoscaling_policies_pages(transport_name: str = "grpc"): client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1643,10 +1647,10 @@ async def test_list_autoscaling_policies_async_pages(): assert page_.raw_page.next_page_token == token -def test_delete_autoscaling_policy( - transport: str = "grpc", - request_type=autoscaling_policies.DeleteAutoscalingPolicyRequest, -): +@pytest.mark.parametrize( + "request_type", [autoscaling_policies.DeleteAutoscalingPolicyRequest, dict,] +) +def test_delete_autoscaling_policy(request_type, transport: str = "grpc"): client = AutoscalingPolicyServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1672,10 +1676,6 @@ def test_delete_autoscaling_policy( assert response is None -def test_delete_autoscaling_policy_from_dict(): - test_delete_autoscaling_policy(request_type=dict) - - def test_delete_autoscaling_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. @@ -1804,7 +1804,9 @@ def test_delete_autoscaling_policy_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_delete_autoscaling_policy_flattened_error(): @@ -1842,7 +1844,9 @@ async def test_delete_autoscaling_policy_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -2387,7 +2391,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( diff --git a/tests/unit/gapic/dataproc_v1/test_batch_controller.py b/tests/unit/gapic/dataproc_v1/test_batch_controller.py index b0053cb3..fbbc3ffe 100644 --- a/tests/unit/gapic/dataproc_v1/test_batch_controller.py +++ b/tests/unit/gapic/dataproc_v1/test_batch_controller.py @@ -255,20 +255,20 @@ def test_batch_controller_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -337,7 +337,7 @@ def test_batch_controller_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -432,7 +432,7 @@ def test_batch_controller_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -463,7 +463,7 @@ def test_batch_controller_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -496,7 +496,8 @@ def 
test_batch_controller_client_client_options_from_dict(): ) -def test_create_batch(transport: str = "grpc", request_type=batches.CreateBatchRequest): +@pytest.mark.parametrize("request_type", [batches.CreateBatchRequest, dict,]) +def test_create_batch(request_type, transport: str = "grpc"): client = BatchControllerClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -520,10 +521,6 @@ def test_create_batch(transport: str = "grpc", request_type=batches.CreateBatchR assert isinstance(response, future.Future) -def test_create_batch_from_dict(): - test_create_batch(request_type=dict) - - def test_create_batch_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -645,9 +642,15 @@ def test_create_batch_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].batch == batches.Batch(name="name_value") - assert args[0].batch_id == "batch_id_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].batch + mock_val = batches.Batch(name="name_value") + assert arg == mock_val + arg = args[0].batch_id + mock_val = "batch_id_value" + assert arg == mock_val def test_create_batch_flattened_error(): @@ -690,9 +693,15 @@ async def test_create_batch_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].batch == batches.Batch(name="name_value") - assert args[0].batch_id == "batch_id_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].batch + mock_val = batches.Batch(name="name_value") + assert arg == mock_val + arg = args[0].batch_id + mock_val = "batch_id_value" + assert arg == mock_val @pytest.mark.asyncio @@ -712,7 +721,8 @@ async def test_create_batch_flattened_error_async(): ) -def test_get_batch(transport: str = "grpc", request_type=batches.GetBatchRequest): +@pytest.mark.parametrize("request_type", [batches.GetBatchRequest, dict,]) +def test_get_batch(request_type, transport: str = "grpc"): client = BatchControllerClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -752,10 +762,6 @@ def test_get_batch(transport: str = "grpc", request_type=batches.GetBatchRequest assert response.operation == "operation_value" -def test_get_batch_from_dict(): - test_get_batch(request_type=dict) - - def test_get_batch_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -884,7 +890,9 @@ def test_get_batch_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_get_batch_flattened_error(): @@ -918,7 +926,9 @@ async def test_get_batch_flattened_async(): # request object values. 
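For orientation, the three flattened arguments asserted in `test_create_batch_flattened` map onto a call shaped like the sketch below. Resource names are placeholders, and the `batch_config` oneof name is an assumption from the v1 `batches` proto rather than something shown in this diff:

```python
from google.cloud.dataproc_v1 import types

# Flattened create_batch signature: (parent, batch, batch_id).
parent = "projects/my-project/locations/us-central1"  # placeholder
batch_id = "my-batch"                                 # placeholder
batch = types.Batch(
    pyspark_batch=types.PySparkBatch(
        main_python_file_uri="gs://my-bucket/job.py"  # placeholder
    ),
)
# Assumed oneof name; one workload config may be set per batch.
assert types.Batch.pb(batch).WhichOneof("batch_config") == "pyspark_batch"
```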
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -935,7 +945,8 @@ async def test_get_batch_flattened_error_async(): ) -def test_list_batches(transport: str = "grpc", request_type=batches.ListBatchesRequest): +@pytest.mark.parametrize("request_type", [batches.ListBatchesRequest, dict,]) +def test_list_batches(request_type, transport: str = "grpc"): client = BatchControllerClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -962,10 +973,6 @@ def test_list_batches(transport: str = "grpc", request_type=batches.ListBatchesR assert response.next_page_token == "next_page_token_value" -def test_list_batches_from_dict(): - test_list_batches(request_type=dict) - - def test_list_batches_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1084,7 +1091,9 @@ def test_list_batches_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_batches_flattened_error(): @@ -1120,7 +1129,9 @@ async def test_list_batches_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1137,8 +1148,10 @@ async def test_list_batches_flattened_error_async(): ) -def test_list_batches_pager(): - client = BatchControllerClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_batches_pager(transport_name: str = "grpc"): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_batches), "__call__") as call: @@ -1169,8 +1182,10 @@ def test_list_batches_pager(): assert all(isinstance(i, batches.Batch) for i in results) -def test_list_batches_pages(): - client = BatchControllerClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_batches_pages(transport_name: str = "grpc"): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_batches), "__call__") as call: @@ -1255,7 +1270,8 @@ async def test_list_batches_async_pages(): assert page_.raw_page.next_page_token == token -def test_delete_batch(transport: str = "grpc", request_type=batches.DeleteBatchRequest): +@pytest.mark.parametrize("request_type", [batches.DeleteBatchRequest, dict,]) +def test_delete_batch(request_type, transport: str = "grpc"): client = BatchControllerClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1279,10 +1295,6 @@ def test_delete_batch(transport: str = "grpc", request_type=batches.DeleteBatchR assert response is None -def test_delete_batch_from_dict(): - test_delete_batch(request_type=dict) - - def test_delete_batch_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
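The pager tests above now thread a `transport_name` into the client constructor, but the paging contract they exercise is unchanged: iterating the pager walks `next_page_token` boundaries transparently. A toy sketch with no Dataproc dependency:

```python
# Toy pager: flattening pages on iteration is what the
# ``results = [i for i in pager]`` assertions rely on.
class FakePager:
    def __init__(self, pages):
        self._pages = pages

    def __iter__(self):
        for page in self._pages:
            yield from page


pager = FakePager([["batch-1", "batch-2"], [], ["batch-3"]])
assert list(pager) == ["batch-1", "batch-2", "batch-3"]
```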
@@ -1396,7 +1408,9 @@ def test_delete_batch_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_delete_batch_flattened_error(): @@ -1430,7 +1444,9 @@ async def test_delete_batch_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1995,7 +2011,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( diff --git a/tests/unit/gapic/dataproc_v1/test_cluster_controller.py b/tests/unit/gapic/dataproc_v1/test_cluster_controller.py index 12b1ccce..67b4a140 100644 --- a/tests/unit/gapic/dataproc_v1/test_cluster_controller.py +++ b/tests/unit/gapic/dataproc_v1/test_cluster_controller.py @@ -258,20 +258,20 @@ def test_cluster_controller_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -340,7 +340,7 @@ def test_cluster_controller_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -435,7 +435,7 @@ def test_cluster_controller_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -466,7 +466,7 @@ def test_cluster_controller_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -499,9 +499,8 @@ def test_cluster_controller_client_client_options_from_dict(): ) -def test_create_cluster( - 
transport: str = "grpc", request_type=clusters.CreateClusterRequest -): +@pytest.mark.parametrize("request_type", [clusters.CreateClusterRequest, dict,]) +def test_create_cluster(request_type, transport: str = "grpc"): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -525,10 +524,6 @@ def test_create_cluster( assert isinstance(response, future.Future) -def test_create_cluster_from_dict(): - test_create_cluster(request_type=dict) - - def test_create_cluster_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -597,9 +592,15 @@ def test_create_cluster_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster == clusters.Cluster(project_id="project_id_value") + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].cluster + mock_val = clusters.Cluster(project_id="project_id_value") + assert arg == mock_val def test_create_cluster_flattened_error(): @@ -642,9 +643,15 @@ async def test_create_cluster_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster == clusters.Cluster(project_id="project_id_value") + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].cluster + mock_val = clusters.Cluster(project_id="project_id_value") + assert arg == mock_val @pytest.mark.asyncio @@ -664,9 +671,8 @@ async def test_create_cluster_flattened_error_async(): ) -def test_update_cluster( - transport: str = "grpc", request_type=clusters.UpdateClusterRequest -): +@pytest.mark.parametrize("request_type", [clusters.UpdateClusterRequest, dict,]) +def test_update_cluster(request_type, transport: str = "grpc"): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -690,10 +696,6 @@ def test_update_cluster( assert isinstance(response, future.Future) -def test_update_cluster_from_dict(): - test_update_cluster(request_type=dict) - - def test_update_cluster_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -764,11 +766,21 @@ def test_update_cluster_flattened(): # request object values. 
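The flattened `create_cluster` triple being asserted, `(project_id, region, cluster)`, corresponds to message construction like this sketch; identifiers are placeholders:

```python
from google.cloud.dataproc_v1 import types

cluster = types.Cluster(
    project_id="my-project",    # placeholder
    cluster_name="my-cluster",  # placeholder
)
assert cluster.project_id == "my-project"
```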
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" - assert args[0].cluster == clusters.Cluster(project_id="project_id_value") - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].cluster_name + mock_val = "cluster_name_value" + assert arg == mock_val + arg = args[0].cluster + mock_val = clusters.Cluster(project_id="project_id_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val def test_update_cluster_flattened_error(): @@ -815,11 +827,21 @@ async def test_update_cluster_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" - assert args[0].cluster == clusters.Cluster(project_id="project_id_value") - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].cluster_name + mock_val = "cluster_name_value" + assert arg == mock_val + arg = args[0].cluster + mock_val = clusters.Cluster(project_id="project_id_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val @pytest.mark.asyncio @@ -841,9 +863,8 @@ async def test_update_cluster_flattened_error_async(): ) -def test_stop_cluster( - transport: str = "grpc", request_type=clusters.StopClusterRequest -): +@pytest.mark.parametrize("request_type", [clusters.StopClusterRequest, dict,]) +def test_stop_cluster(request_type, transport: str = "grpc"): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -867,10 +888,6 @@ def test_stop_cluster( assert isinstance(response, future.Future) -def test_stop_cluster_from_dict(): - test_stop_cluster(request_type=dict) - - def test_stop_cluster_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -920,9 +937,8 @@ async def test_stop_cluster_async_from_dict(): await test_stop_cluster_async(request_type=dict) -def test_start_cluster( - transport: str = "grpc", request_type=clusters.StartClusterRequest -): +@pytest.mark.parametrize("request_type", [clusters.StartClusterRequest, dict,]) +def test_start_cluster(request_type, transport: str = "grpc"): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -946,10 +962,6 @@ def test_start_cluster( assert isinstance(response, future.Future) -def test_start_cluster_from_dict(): - test_start_cluster(request_type=dict) - - def test_start_cluster_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
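`update_cluster` is the one RPC here whose flattened form also carries a `FieldMask`; its `paths` name exactly the cluster fields the update is allowed to touch. A brief sketch, using a worker-resize path of the kind Dataproc documents for cluster updates:

```python
from google.protobuf import field_mask_pb2

# The mask names the single field being changed by the update.
mask = field_mask_pb2.FieldMask(paths=["config.worker_config.num_instances"])
assert list(mask.paths) == ["config.worker_config.num_instances"]
```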
@@ -999,9 +1011,8 @@ async def test_start_cluster_async_from_dict(): await test_start_cluster_async(request_type=dict) -def test_delete_cluster( - transport: str = "grpc", request_type=clusters.DeleteClusterRequest -): +@pytest.mark.parametrize("request_type", [clusters.DeleteClusterRequest, dict,]) +def test_delete_cluster(request_type, transport: str = "grpc"): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1025,10 +1036,6 @@ def test_delete_cluster( assert isinstance(response, future.Future) -def test_delete_cluster_from_dict(): - test_delete_cluster(request_type=dict) - - def test_delete_cluster_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1097,9 +1104,15 @@ def test_delete_cluster_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].cluster_name + mock_val = "cluster_name_value" + assert arg == mock_val def test_delete_cluster_flattened_error(): @@ -1142,9 +1155,15 @@ async def test_delete_cluster_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].cluster_name + mock_val = "cluster_name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1164,7 +1183,8 @@ async def test_delete_cluster_flattened_error_async(): ) -def test_get_cluster(transport: str = "grpc", request_type=clusters.GetClusterRequest): +@pytest.mark.parametrize("request_type", [clusters.GetClusterRequest, dict,]) +def test_get_cluster(request_type, transport: str = "grpc"): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1195,10 +1215,6 @@ def test_get_cluster(transport: str = "grpc", request_type=clusters.GetClusterRe assert response.cluster_uuid == "cluster_uuid_value" -def test_get_cluster_from_dict(): - test_get_cluster(request_type=dict) - - def test_get_cluster_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1274,9 +1290,15 @@ def test_get_cluster_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].cluster_name + mock_val = "cluster_name_value" + assert arg == mock_val def test_get_cluster_flattened_error(): @@ -1317,9 +1339,15 @@ async def test_get_cluster_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].cluster_name + mock_val = "cluster_name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1339,9 +1367,8 @@ async def test_get_cluster_flattened_error_async(): ) -def test_list_clusters( - transport: str = "grpc", request_type=clusters.ListClustersRequest -): +@pytest.mark.parametrize("request_type", [clusters.ListClustersRequest, dict,]) +def test_list_clusters(request_type, transport: str = "grpc"): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1368,10 +1395,6 @@ def test_list_clusters( assert response.next_page_token == "next_page_token_value" -def test_list_clusters_from_dict(): - test_list_clusters(request_type=dict) - - def test_list_clusters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1439,9 +1462,15 @@ def test_list_clusters_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].filter == "filter_value" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].filter + mock_val = "filter_value" + assert arg == mock_val def test_list_clusters_flattened_error(): @@ -1482,9 +1511,15 @@ async def test_list_clusters_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].filter == "filter_value" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].filter + mock_val = "filter_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1504,8 +1539,10 @@ async def test_list_clusters_flattened_error_async(): ) -def test_list_clusters_pager(): - client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_clusters_pager(transport_name: str = "grpc"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: @@ -1535,8 +1572,10 @@ def test_list_clusters_pager(): assert all(isinstance(i, clusters.Cluster) for i in results) -def test_list_clusters_pages(): - client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_clusters_pages(transport_name: str = "grpc"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: @@ -1627,9 +1666,8 @@ async def test_list_clusters_async_pages(): assert page_.raw_page.next_page_token == token -def test_diagnose_cluster( - transport: str = "grpc", request_type=clusters.DiagnoseClusterRequest -): +@pytest.mark.parametrize("request_type", [clusters.DiagnoseClusterRequest, dict,]) +def test_diagnose_cluster(request_type, transport: str = "grpc"): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1653,10 +1691,6 @@ def test_diagnose_cluster( assert isinstance(response, future.Future) -def test_diagnose_cluster_from_dict(): - test_diagnose_cluster(request_type=dict) - - def test_diagnose_cluster_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1725,9 +1759,15 @@ def test_diagnose_cluster_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].cluster_name + mock_val = "cluster_name_value" + assert arg == mock_val def test_diagnose_cluster_flattened_error(): @@ -1770,9 +1810,15 @@ async def test_diagnose_cluster_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].cluster_name + mock_val = "cluster_name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -2368,7 +2414,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( diff --git a/tests/unit/gapic/dataproc_v1/test_job_controller.py b/tests/unit/gapic/dataproc_v1/test_job_controller.py index 7202d50e..6c2074bf 100644 --- a/tests/unit/gapic/dataproc_v1/test_job_controller.py +++ b/tests/unit/gapic/dataproc_v1/test_job_controller.py @@ -252,20 +252,20 @@ def test_job_controller_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -324,7 +324,7 @@ def test_job_controller_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -419,7 +419,7 @@ def test_job_controller_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -450,7 +450,7 @@ def test_job_controller_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -483,7 +483,8 @@ def test_job_controller_client_client_options_from_dict(): ) -def test_submit_job(transport: str = "grpc", request_type=jobs.SubmitJobRequest): +@pytest.mark.parametrize("request_type", [jobs.SubmitJobRequest, dict,]) +def test_submit_job(request_type, transport: str = "grpc"): client = JobControllerClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -517,10 +518,6 @@ def test_submit_job(transport: str = "grpc", request_type=jobs.SubmitJobRequest) assert response.done is True -def test_submit_job_from_dict(): - test_submit_job(request_type=dict) - - def test_submit_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -598,11 +595,15 @@ def test_submit_job_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job == jobs.Job( - reference=jobs.JobReference(project_id="project_id_value") - ) + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].job + mock_val = jobs.Job(reference=jobs.JobReference(project_id="project_id_value")) + assert arg == mock_val def test_submit_job_flattened_error(): @@ -643,11 +644,15 @@ async def test_submit_job_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job == jobs.Job( - reference=jobs.JobReference(project_id="project_id_value") - ) + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].job + mock_val = jobs.Job(reference=jobs.JobReference(project_id="project_id_value")) + assert arg == mock_val @pytest.mark.asyncio @@ -667,9 +672,8 @@ async def test_submit_job_flattened_error_async(): ) -def test_submit_job_as_operation( - transport: str = "grpc", request_type=jobs.SubmitJobRequest -): +@pytest.mark.parametrize("request_type", [jobs.SubmitJobRequest, dict,]) +def test_submit_job_as_operation(request_type, transport: str = "grpc"): client = JobControllerClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -695,10 +699,6 @@ def test_submit_job_as_operation( assert isinstance(response, future.Future) -def test_submit_job_as_operation_from_dict(): - test_submit_job_as_operation(request_type=dict) - - def test_submit_job_as_operation_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -773,11 +773,15 @@ def test_submit_job_as_operation_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job == jobs.Job( - reference=jobs.JobReference(project_id="project_id_value") - ) + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].job + mock_val = jobs.Job(reference=jobs.JobReference(project_id="project_id_value")) + assert arg == mock_val def test_submit_job_as_operation_flattened_error(): @@ -822,11 +826,15 @@ async def test_submit_job_as_operation_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job == jobs.Job( - reference=jobs.JobReference(project_id="project_id_value") - ) + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].job + mock_val = jobs.Job(reference=jobs.JobReference(project_id="project_id_value")) + assert arg == mock_val @pytest.mark.asyncio @@ -846,7 +854,8 @@ async def test_submit_job_as_operation_flattened_error_async(): ) -def test_get_job(transport: str = "grpc", request_type=jobs.GetJobRequest): +@pytest.mark.parametrize("request_type", [jobs.GetJobRequest, dict,]) +def test_get_job(request_type, transport: str = "grpc"): client = JobControllerClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -880,10 +889,6 @@ def test_get_job(transport: str = "grpc", request_type=jobs.GetJobRequest): assert response.done is True -def test_get_job_from_dict(): - test_get_job(request_type=dict) - - def test_get_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -959,9 +964,15 @@ def test_get_job_flattened(): # request object values. 
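The `job` argument in the `submit_job` assertions is itself a nested message. Spelled out as a sketch, with placeholder values and a hypothetical cluster placement added for completeness:

```python
from google.cloud.dataproc_v1 import types

job = types.Job(
    reference=types.JobReference(project_id="my-project"),    # placeholder
    placement=types.JobPlacement(cluster_name="my-cluster"),  # placeholder
)
assert job.reference.project_id == "my-project"
```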
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job_id == "job_id_value" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].job_id + mock_val = "job_id_value" + assert arg == mock_val def test_get_job_flattened_error(): @@ -1000,9 +1011,15 @@ async def test_get_job_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job_id == "job_id_value" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].job_id + mock_val = "job_id_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1022,7 +1039,8 @@ async def test_get_job_flattened_error_async(): ) -def test_list_jobs(transport: str = "grpc", request_type=jobs.ListJobsRequest): +@pytest.mark.parametrize("request_type", [jobs.ListJobsRequest, dict,]) +def test_list_jobs(request_type, transport: str = "grpc"): client = JobControllerClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1049,10 +1067,6 @@ def test_list_jobs(transport: str = "grpc", request_type=jobs.ListJobsRequest): assert response.next_page_token == "next_page_token_value" -def test_list_jobs_from_dict(): - test_list_jobs(request_type=dict) - - def test_list_jobs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1120,9 +1134,15 @@ def test_list_jobs_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].filter == "filter_value" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].filter + mock_val = "filter_value" + assert arg == mock_val def test_list_jobs_flattened_error(): @@ -1163,9 +1183,15 @@ async def test_list_jobs_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].filter == "filter_value" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].filter + mock_val = "filter_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1185,8 +1211,10 @@ async def test_list_jobs_flattened_error_async(): ) -def test_list_jobs_pager(): - client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_jobs_pager(transport_name: str = "grpc"): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: @@ -1211,8 +1239,10 @@ def test_list_jobs_pager(): assert all(isinstance(i, jobs.Job) for i in results) -def test_list_jobs_pages(): - client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_jobs_pages(transport_name: str = "grpc"): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: @@ -1284,7 +1314,8 @@ async def test_list_jobs_async_pages(): assert page_.raw_page.next_page_token == token -def test_update_job(transport: str = "grpc", request_type=jobs.UpdateJobRequest): +@pytest.mark.parametrize("request_type", [jobs.UpdateJobRequest, dict,]) +def test_update_job(request_type, transport: str = "grpc"): client = JobControllerClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1318,10 +1349,6 @@ def test_update_job(transport: str = "grpc", request_type=jobs.UpdateJobRequest) assert response.done is True -def test_update_job_from_dict(): - test_update_job(request_type=dict) - - def test_update_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1380,7 +1407,8 @@ async def test_update_job_async_from_dict(): await test_update_job_async(request_type=dict) -def test_cancel_job(transport: str = "grpc", request_type=jobs.CancelJobRequest): +@pytest.mark.parametrize("request_type", [jobs.CancelJobRequest, dict,]) +def test_cancel_job(request_type, transport: str = "grpc"): client = JobControllerClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1414,10 +1442,6 @@ def test_cancel_job(transport: str = "grpc", request_type=jobs.CancelJobRequest) assert response.done is True -def test_cancel_job_from_dict(): - test_cancel_job(request_type=dict) - - def test_cancel_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1493,9 +1517,15 @@ def test_cancel_job_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job_id == "job_id_value" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].job_id + mock_val = "job_id_value" + assert arg == mock_val def test_cancel_job_flattened_error(): @@ -1534,9 +1564,15 @@ async def test_cancel_job_flattened_async(): # request object values. 
@@ -1534,9 +1564,15 @@ async def test_cancel_job_flattened_async():
         # request object values.
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-        assert args[0].project_id == "project_id_value"
-        assert args[0].region == "region_value"
-        assert args[0].job_id == "job_id_value"
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].region
+        mock_val = "region_value"
+        assert arg == mock_val
+        arg = args[0].job_id
+        mock_val = "job_id_value"
+        assert arg == mock_val


 @pytest.mark.asyncio
@@ -1556,7 +1592,8 @@ async def test_cancel_job_flattened_error_async():
         )


-def test_delete_job(transport: str = "grpc", request_type=jobs.DeleteJobRequest):
+@pytest.mark.parametrize("request_type", [jobs.DeleteJobRequest, dict,])
+def test_delete_job(request_type, transport: str = "grpc"):
     client = JobControllerClient(
         credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )
@@ -1580,10 +1617,6 @@ def test_delete_job(transport: str = "grpc", request_type=jobs.DeleteJobRequest)
     assert response is None


-def test_delete_job_from_dict():
-    test_delete_job(request_type=dict)
-
-
 def test_delete_job_empty_call():
     # This test is a coverage failsafe to make sure that totally empty calls,
     # i.e. request == None and no flattened fields passed, work.
@@ -1648,9 +1681,15 @@ def test_delete_job_flattened():
         # request object values.
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
-        assert args[0].project_id == "project_id_value"
-        assert args[0].region == "region_value"
-        assert args[0].job_id == "job_id_value"
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].region
+        mock_val = "region_value"
+        assert arg == mock_val
+        arg = args[0].job_id
+        mock_val = "job_id_value"
+        assert arg == mock_val


 def test_delete_job_flattened_error():
@@ -1689,9 +1728,15 @@ async def test_delete_job_flattened_async():
         # request object values.
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-        assert args[0].project_id == "project_id_value"
-        assert args[0].region == "region_value"
-        assert args[0].job_id == "job_id_value"
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].region
+        mock_val = "region_value"
+        assert arg == mock_val
+        arg = args[0].job_id
+        mock_val = "job_id_value"
+        assert arg == mock_val


 @pytest.mark.asyncio
@@ -2236,7 +2281,7 @@ def test_parse_common_location_path():
     assert expected == actual


-def test_client_withDEFAULT_CLIENT_INFO():
+def test_client_with_default_client_info():
     client_info = gapic_v1.client_info.ClientInfo()

     with mock.patch.object(
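
The same refactors continue in the workflow-template test file below, along with one behavioral fix in its client_options tests: the error-path constructions now pass `transport=transport_name` instead of silently dropping the parametrized transport. A rough sketch of the env-var guard those tests exercise — `make_client` is hypothetical, not the generated constructor:

```python
# Sketch of the GOOGLE_API_USE_MTLS_ENDPOINT guard pattern.
import os
from unittest import mock

import pytest


class MutualTLSChannelError(Exception):
    pass


def make_client(transport: str = "grpc"):
    # Hypothetical stand-in: the generated clients validate this env var
    # during construction and raise on unsupported values.
    use_mtls = os.environ.get("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
    if use_mtls not in ("auto", "never", "always"):
        raise MutualTLSChannelError(use_mtls)
    return object()


def test_unsupported_mtls_value_raises():
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            # Passing the transport explicitly mirrors the fixed tests,
            # which previously dropped the parametrized transport name.
            make_client(transport="grpc")
```
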
diff --git a/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py b/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py
index c5355e43..3354cb69 100644
--- a/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py
+++ b/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py
@@ -264,20 +264,20 @@ def test_workflow_template_service_client_client_options(
     # unsupported value.
     with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
         with pytest.raises(MutualTLSChannelError):
-            client = client_class()
+            client = client_class(transport=transport_name)

     # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
     with mock.patch.dict(
         os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
     ):
         with pytest.raises(ValueError):
-            client = client_class()
+            client = client_class(transport=transport_name)

     # Check the case quota_project_id is provided
     options = client_options.ClientOptions(quota_project_id="octopus")
     with mock.patch.object(transport_class, "__init__") as patched:
         patched.return_value = None
-        client = client_class(transport=transport_name, client_options=options)
+        client = client_class(client_options=options, transport=transport_name)
     patched.assert_called_once_with(
         credentials=None,
         credentials_file=None,
@@ -346,7 +346,7 @@ def test_workflow_template_service_client_mtls_env_auto(
             )
             with mock.patch.object(transport_class, "__init__") as patched:
                 patched.return_value = None
-                client = client_class(transport=transport_name, client_options=options)
+                client = client_class(client_options=options, transport=transport_name)

                 if use_client_cert_env == "false":
                     expected_client_cert_source = None
@@ -445,7 +445,7 @@ def test_workflow_template_service_client_client_options_scopes(
     options = client_options.ClientOptions(scopes=["1", "2"],)
     with mock.patch.object(transport_class, "__init__") as patched:
         patched.return_value = None
-        client = client_class(transport=transport_name, client_options=options)
+        client = client_class(client_options=options, transport=transport_name)
     patched.assert_called_once_with(
         credentials=None,
         credentials_file=None,
@@ -480,7 +480,7 @@ def test_workflow_template_service_client_client_options_credentials_file(
     options = client_options.ClientOptions(credentials_file="credentials.json")
     with mock.patch.object(transport_class, "__init__") as patched:
         patched.return_value = None
-        client = client_class(transport=transport_name, client_options=options)
+        client = client_class(client_options=options, transport=transport_name)
     patched.assert_called_once_with(
         credentials=None,
         credentials_file="credentials.json",
@@ -513,10 +513,10 @@ def test_workflow_template_service_client_client_options_from_dict():
     )


-def test_create_workflow_template(
-    transport: str = "grpc",
-    request_type=workflow_templates.CreateWorkflowTemplateRequest,
-):
+@pytest.mark.parametrize(
+    "request_type", [workflow_templates.CreateWorkflowTemplateRequest, dict,]
+)
+def test_create_workflow_template(request_type, transport: str = "grpc"):
     client = WorkflowTemplateServiceClient(
         credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )
@@ -547,10 +547,6 @@ def test_create_workflow_template(
     assert response.version == 774


-def test_create_workflow_template_from_dict():
-    test_create_workflow_template(request_type=dict)
-
-
 def test_create_workflow_template_empty_call():
     # This test is a coverage failsafe to make sure that totally empty calls,
     # i.e. request == None and no flattened fields passed, work.
@@ -691,8 +687,12 @@ def test_create_workflow_template_flattened():
         # request object values.
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
-        assert args[0].parent == "parent_value"
-        assert args[0].template == workflow_templates.WorkflowTemplate(id="id_value")
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].template
+        mock_val = workflow_templates.WorkflowTemplate(id="id_value")
+        assert arg == mock_val


 def test_create_workflow_template_flattened_error():
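
In the create hunks above, one of the flattened fields is itself a message (`WorkflowTemplate`), so `mock_val` is constructed inline and compared by value. A small sketch with hypothetical `Template`/`FakeRequest` dataclasses standing in for the proto-plus messages, which define `__eq__` the same way:

```python
# Sketch of comparing a message-typed flattened argument.
from dataclasses import dataclass
from unittest import mock


@dataclass
class Template:
    id: str


@dataclass
class FakeRequest:
    parent: str
    template: Template


def test_message_valued_field():
    call = mock.Mock()
    call(FakeRequest(parent="parent_value", template=Template(id="id_value")))

    _, args, _ = call.mock_calls[0]
    arg = args[0].template
    # Equality relies on the message type defining __eq__, which both
    # dataclasses and proto-plus messages do.
    mock_val = Template(id="id_value")
    assert arg == mock_val
```
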
@@ -737,8 +737,12 @@ async def test_create_workflow_template_flattened_async():
         # request object values.
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-        assert args[0].parent == "parent_value"
-        assert args[0].template == workflow_templates.WorkflowTemplate(id="id_value")
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].template
+        mock_val = workflow_templates.WorkflowTemplate(id="id_value")
+        assert arg == mock_val


 @pytest.mark.asyncio
@@ -757,9 +761,10 @@ async def test_create_workflow_template_flattened_error_async():
         )


-def test_get_workflow_template(
-    transport: str = "grpc", request_type=workflow_templates.GetWorkflowTemplateRequest
-):
+@pytest.mark.parametrize(
+    "request_type", [workflow_templates.GetWorkflowTemplateRequest, dict,]
+)
+def test_get_workflow_template(request_type, transport: str = "grpc"):
     client = WorkflowTemplateServiceClient(
         credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )
@@ -790,10 +795,6 @@ def test_get_workflow_template(
     assert response.version == 774


-def test_get_workflow_template_from_dict():
-    test_get_workflow_template(request_type=dict)
-
-
 def test_get_workflow_template_empty_call():
     # This test is a coverage failsafe to make sure that totally empty calls,
     # i.e. request == None and no flattened fields passed, work.
@@ -931,7 +932,9 @@ def test_get_workflow_template_flattened():
         # request object values.
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
-        assert args[0].name == "name_value"
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val


 def test_get_workflow_template_flattened_error():
@@ -971,7 +974,9 @@ async def test_get_workflow_template_flattened_async():
         # request object values.
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-        assert args[0].name == "name_value"
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val


 @pytest.mark.asyncio
@@ -988,10 +993,10 @@ async def test_get_workflow_template_flattened_error_async():
         )


-def test_instantiate_workflow_template(
-    transport: str = "grpc",
-    request_type=workflow_templates.InstantiateWorkflowTemplateRequest,
-):
+@pytest.mark.parametrize(
+    "request_type", [workflow_templates.InstantiateWorkflowTemplateRequest, dict,]
+)
+def test_instantiate_workflow_template(request_type, transport: str = "grpc"):
     client = WorkflowTemplateServiceClient(
         credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )
@@ -1017,10 +1022,6 @@ def test_instantiate_workflow_template(
     assert isinstance(response, future.Future)


-def test_instantiate_workflow_template_from_dict():
-    test_instantiate_workflow_template(request_type=dict)
-
-
 def test_instantiate_workflow_template_empty_call():
     # This test is a coverage failsafe to make sure that totally empty calls,
     # i.e. request == None and no flattened fields passed, work.
@@ -1155,8 +1156,12 @@ def test_instantiate_workflow_template_flattened():
         # request object values.
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
-        assert args[0].name == "name_value"
-        assert args[0].parameters == {"key_value": "value_value"}
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        arg = args[0].parameters
+        mock_val = {"key_value": "value_value"}
+        assert arg == mock_val


 def test_instantiate_workflow_template_flattened_error():
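
The instantiate tests assert only the shape of the result: a long-running operation future. A stand-in sketch using `concurrent.futures` rather than `google.api_core.future`, purely to illustrate the check:

```python
# Minimal illustration of the future-typed response assertion.
from concurrent import futures


def fake_instantiate() -> futures.Future:
    # Hypothetical stand-in for the client method, which returns an
    # operation future rather than a finished result.
    result: futures.Future = futures.Future()
    result.set_result(None)  # pretend the workflow already finished
    return result


def test_instantiate_returns_a_future():
    response = fake_instantiate()
    assert isinstance(response, futures.Future)
```
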
@@ -1200,8 +1205,12 @@ async def test_instantiate_workflow_template_flattened_async():
         # request object values.
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-        assert args[0].name == "name_value"
-        assert args[0].parameters == {"key_value": "value_value"}
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        arg = args[0].parameters
+        mock_val = {"key_value": "value_value"}
+        assert arg == mock_val


 @pytest.mark.asyncio
@@ -1220,10 +1229,10 @@ async def test_instantiate_workflow_template_flattened_error_async():
         )


-def test_instantiate_inline_workflow_template(
-    transport: str = "grpc",
-    request_type=workflow_templates.InstantiateInlineWorkflowTemplateRequest,
-):
+@pytest.mark.parametrize(
+    "request_type", [workflow_templates.InstantiateInlineWorkflowTemplateRequest, dict,]
+)
+def test_instantiate_inline_workflow_template(request_type, transport: str = "grpc"):
     client = WorkflowTemplateServiceClient(
         credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )
@@ -1249,10 +1258,6 @@ def test_instantiate_inline_workflow_template(
     assert isinstance(response, future.Future)


-def test_instantiate_inline_workflow_template_from_dict():
-    test_instantiate_inline_workflow_template(request_type=dict)
-
-
 def test_instantiate_inline_workflow_template_empty_call():
     # This test is a coverage failsafe to make sure that totally empty calls,
     # i.e. request == None and no flattened fields passed, work.
@@ -1388,8 +1393,12 @@ def test_instantiate_inline_workflow_template_flattened():
         # request object values.
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
-        assert args[0].parent == "parent_value"
-        assert args[0].template == workflow_templates.WorkflowTemplate(id="id_value")
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].template
+        mock_val = workflow_templates.WorkflowTemplate(id="id_value")
+        assert arg == mock_val


 def test_instantiate_inline_workflow_template_flattened_error():
@@ -1434,8 +1443,12 @@ async def test_instantiate_inline_workflow_template_flattened_async():
         # request object values.
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-        assert args[0].parent == "parent_value"
-        assert args[0].template == workflow_templates.WorkflowTemplate(id="id_value")
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].template
+        mock_val = workflow_templates.WorkflowTemplate(id="id_value")
+        assert arg == mock_val


 @pytest.mark.asyncio
@@ -1454,10 +1467,10 @@ async def test_instantiate_inline_workflow_template_flattened_error_async():
         )


-def test_update_workflow_template(
-    transport: str = "grpc",
-    request_type=workflow_templates.UpdateWorkflowTemplateRequest,
-):
+@pytest.mark.parametrize(
+    "request_type", [workflow_templates.UpdateWorkflowTemplateRequest, dict,]
+)
+def test_update_workflow_template(request_type, transport: str = "grpc"):
     client = WorkflowTemplateServiceClient(
         credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )
@@ -1488,10 +1501,6 @@ def test_update_workflow_template(
     assert response.version == 774


-def test_update_workflow_template_from_dict():
-    test_update_workflow_template(request_type=dict)
-
-
 def test_update_workflow_template_empty_call():
     # This test is a coverage failsafe to make sure that totally empty calls,
     # i.e. request == None and no flattened fields passed, work.
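
The update flattened tests that follow verify that a single `template` keyword is packed onto the outgoing request before it reaches the stub. A sketch of that packing step with hypothetical types — the generated client performs this inside `update_workflow_template()`:

```python
# Sketch of flattened-argument packing. Template, UpdateRequest, and
# pack_update_request are illustrative stand-ins, not the generated API.
from dataclasses import dataclass, field


@dataclass
class Template:
    id: str = ""


@dataclass
class UpdateRequest:
    template: Template = field(default_factory=Template)


def pack_update_request(template: Template) -> UpdateRequest:
    # The client maps the flattened keyword onto the request field.
    return UpdateRequest(template=template)


def test_template_is_packed():
    request = pack_update_request(Template(id="id_value"))
    arg = request.template
    mock_val = Template(id="id_value")
    assert arg == mock_val
```
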
@@ -1635,7 +1644,9 @@ def test_update_workflow_template_flattened():
         # request object values.
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
-        assert args[0].template == workflow_templates.WorkflowTemplate(id="id_value")
+        arg = args[0].template
+        mock_val = workflow_templates.WorkflowTemplate(id="id_value")
+        assert arg == mock_val


 def test_update_workflow_template_flattened_error():
@@ -1678,7 +1689,9 @@ async def test_update_workflow_template_flattened_async():
         # request object values.
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-        assert args[0].template == workflow_templates.WorkflowTemplate(id="id_value")
+        arg = args[0].template
+        mock_val = workflow_templates.WorkflowTemplate(id="id_value")
+        assert arg == mock_val


 @pytest.mark.asyncio
@@ -1696,10 +1709,10 @@ async def test_update_workflow_template_flattened_error_async():
         )


-def test_list_workflow_templates(
-    transport: str = "grpc",
-    request_type=workflow_templates.ListWorkflowTemplatesRequest,
-):
+@pytest.mark.parametrize(
+    "request_type", [workflow_templates.ListWorkflowTemplatesRequest, dict,]
+)
+def test_list_workflow_templates(request_type, transport: str = "grpc"):
     client = WorkflowTemplateServiceClient(
         credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )
@@ -1728,10 +1741,6 @@ def test_list_workflow_templates(
     assert response.next_page_token == "next_page_token_value"


-def test_list_workflow_templates_from_dict():
-    test_list_workflow_templates(request_type=dict)
-
-
 def test_list_workflow_templates_empty_call():
     # This test is a coverage failsafe to make sure that totally empty calls,
     # i.e. request == None and no flattened fields passed, work.
@@ -1867,7 +1876,9 @@ def test_list_workflow_templates_flattened():
         # request object values.
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
-        assert args[0].parent == "parent_value"
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val


 def test_list_workflow_templates_flattened_error():
@@ -1907,7 +1918,9 @@ async def test_list_workflow_templates_flattened_async():
         # request object values.
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-        assert args[0].parent == "parent_value"
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val


 @pytest.mark.asyncio
@@ -1924,9 +1937,9 @@ async def test_list_workflow_templates_flattened_error_async():
         )


-def test_list_workflow_templates_pager():
+def test_list_workflow_templates_pager(transport_name: str = "grpc"):
     client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials,
+        credentials=ga_credentials.AnonymousCredentials, transport=transport_name,
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1972,9 +1985,9 @@ def test_list_workflow_templates_pager():
         assert all(isinstance(i, workflow_templates.WorkflowTemplate) for i in results)


-def test_list_workflow_templates_pages():
+def test_list_workflow_templates_pages(transport_name: str = "grpc"):
     client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials,
+        credentials=ga_credentials.AnonymousCredentials, transport=transport_name,
     )

     # Mock the actual call within the gRPC stub, and fake the request.
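
The pager tests now take a `transport_name` parameter and forward it when building the client. What they ultimately drive is the pager's page-walking behavior, sketched here with a hypothetical `FakePager` in place of the generated `ListWorkflowTemplatesPager`:

```python
# Sketch of the page-flattening behavior the pager tests assert on.
from dataclasses import dataclass
from typing import Iterator, List, Sequence


@dataclass
class Page:
    items: Sequence[str]
    next_page_token: str = ""


class FakePager:
    # Hypothetical simplification: the real pagers fetch each page over
    # the transport until next_page_token comes back empty.
    def __init__(self, pages: List[Page]):
        self._pages = pages

    def __iter__(self) -> Iterator[str]:
        for page in self._pages:
            yield from page.items


def test_pager_flattens_pages():
    pager = FakePager([
        Page(items=["a", "b"], next_page_token="t1"),
        Page(items=[], next_page_token="t2"),
        Page(items=["c"]),
    ])
    results = list(pager)
    assert len(results) == 3
    assert all(isinstance(i, str) for i in results)
```
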
@@ -2104,10 +2117,10 @@ async def test_list_workflow_templates_async_pages():
         assert page_.raw_page.next_page_token == token


-def test_delete_workflow_template(
-    transport: str = "grpc",
-    request_type=workflow_templates.DeleteWorkflowTemplateRequest,
-):
+@pytest.mark.parametrize(
+    "request_type", [workflow_templates.DeleteWorkflowTemplateRequest, dict,]
+)
+def test_delete_workflow_template(request_type, transport: str = "grpc"):
     client = WorkflowTemplateServiceClient(
         credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )
@@ -2133,10 +2146,6 @@ def test_delete_workflow_template(
     assert response is None


-def test_delete_workflow_template_from_dict():
-    test_delete_workflow_template(request_type=dict)
-
-
 def test_delete_workflow_template_empty_call():
     # This test is a coverage failsafe to make sure that totally empty calls,
     # i.e. request == None and no flattened fields passed, work.
@@ -2265,7 +2274,9 @@ def test_delete_workflow_template_flattened():
         # request object values.
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
-        assert args[0].name == "name_value"
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val


 def test_delete_workflow_template_flattened_error():
@@ -2303,7 +2314,9 @@ async def test_delete_workflow_template_flattened_async():
         # request object values.
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-        assert args[0].name == "name_value"
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val


 @pytest.mark.asyncio
@@ -2929,7 +2942,7 @@ def test_parse_common_location_path():
     assert expected == actual


-def test_client_withDEFAULT_CLIENT_INFO():
+def test_client_with_default_client_info():
     client_info = gapic_v1.client_info.ClientInfo()

     with mock.patch.object(